Dataset columns: prompt (large_string, lengths 70 to 991k); completion (large_string, lengths 0 to 1.02k)
<|file_name|>gnome_keyring.py<|end_file_name|><|fim▁begin|>""" Gnome keyring parser. Sources: - Gnome Keyring source code, function generate_file() in keyrings/gkr-keyring.c, Author: Victor Stinner Creation date: 2008-04-09 """ from hachoir_core.tools import paddingSize from hachoir_parser import Parser from hachoir_core.field import (FieldSet, Bit, NullBits, NullBytes, UInt8, UInt32, String, RawBytes, Enum, TimestampUnix64, CompressedField, SubFile) from hachoir_core.endian import BIG_ENDIAN try: import hashlib def sha256(data): hash = hashlib.new('sha256') hash.update(data) return hash.digest() except ImportError: def sha256(data): raise ImportError("hashlib module is missing") try: from Crypto.Cipher import AES class DeflateStream: def __init__(self, stream): hash_iterations = 1234 password = "x" * 8 salt = "\0" * 8 key, iv = generate_key(password, salt, hash_iterations) self.cipher = AES.new(key, AES.MODE_CBC, iv) def __call__(self, size, data=None): if data is None: return '' return self.cipher.decrypt(data) def Deflate(field): CompressedField(field, DeflateStream) return field except ImportError: def Deflate(field): return field class KeyringString(FieldSet): def createFields(self): yield UInt32(self, "length") length = self["length"].value if length == 0xffffffff: return yield String(self, "text", length, charset="UTF-8") def createValue(self): if "text" in self: return self["text"].value else: return u'' def createDescription(self): if "text" in self: return self["text"].value else: return u"(empty string)" class Attribute(FieldSet): def createFields(self): yield KeyringString(self, "name") yield UInt32(self, "type") type = self["type"].value if type == 0: yield KeyringString(self, "value") elif type == 1: yield UInt32(self, "value") else: raise TypeError("Unknown attribute type (%s)" % type) def createDescription(self): return 'Attribute "%s"' % self["name"].value class ACL(FieldSet): def createFields(self): yield UInt32(self, "types_allowed") yield KeyringString(self, "display_name") yield KeyringString(self, "pathname") yield KeyringString(self, "reserved[]") yield UInt32(self, "reserved[]") class Item(FieldSet): def createFields(self): yield UInt32(self, "id") yield UInt32(self, "type") yield UInt32(self, "attr_count") for index in xrange(self["attr_count"].value): yield Attribute(self, "attr[]") def createDescription(self): return "Item #%s: %s attributes" % (self["id"].value, self["attr_count"].value) class Items(FieldSet): def createFields(self): yield UInt32(self, "count") for index in xrange(self["count"].value): yield Item(self, "item[]") class EncryptedItem(FieldSet): def createFields(self): yield KeyringString(self, "display_name") yield KeyringString(self, "secret") yield TimestampUnix64(self, "mtime") yield TimestampUnix64(self, "ctime") yield KeyringString(self, "reserved[]") for index in xrange(4): yield UInt32(self, "reserved[]") yield UInt32(self, "attr_count") for index in xrange(self["attr_count"].value): yield Attribute(self, "attr[]") yield UInt32(self, "acl_count") for index in xrange(self["acl_count"].value): yield ACL(self, "acl[]") # size = 8 # paddingSize((self.stream.size - self.current_size) // 8, 16) # if size: # yield NullBytes(self, "hash_padding", size, "16 bytes alignment") class EncryptedData(Parser): PARSER_TAGS = { "id": "gnomeencryptedkeyring", "min_size": 16*8, "description": u"Gnome encrypted keyring", } endian = BIG_ENDIAN def validate(self): return True def createFields(self): yield RawBytes(self, "md5", 16) while True: size = (self.size - 
self.current_size) // 8 if size < 77: break yield EncryptedItem(self, "item[]") size = paddingSize(self.current_size // 8, 16) if size: yield NullBytes(self, "padding_align", size) class GnomeKeyring(Parser): MAGIC = "GnomeKeyring\n\r\0\n" PARSER_TAGS = { "id": "gnomekeyring", "category": "misc", "magic": ((MAGIC, 0),), "min_size": 47*8, "description": u"Gnome keyring", } CRYPTO_NAMES = { 0: u"AEL", } HASH_NAMES = { 0: u"MD5", } endian = BIG_ENDIAN def validate(self): if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC: return u"Invalid magic string" return True def createFields(self): yield String(self, "magic", len(self.MAGIC), 'Magic string (%r)' % self.MAGIC, charset="ASCII") yield UInt8(self, "major_version") yield UInt8(self, "minor_version") yield Enum(UInt8(self, "crypto"), self.CRYPTO_NAMES)<|fim▁hole|> yield KeyringString(self, "keyring_name") yield TimestampUnix64(self, "mtime") yield TimestampUnix64(self, "ctime") yield Bit(self, "lock_on_idle") yield NullBits(self, "reserved[]", 31, "Reserved for future flags") yield UInt32(self, "lock_timeout") yield UInt32(self, "hash_iterations") yield RawBytes(self, "salt", 8) yield NullBytes(self, "reserved[]", 16) yield Items(self, "items") yield UInt32(self, "encrypted_size") yield Deflate(SubFile(self, "encrypted", self["encrypted_size"].value, "AES128 CBC", parser_class=EncryptedData)) def generate_key(password, salt, hash_iterations): sha = sha256(password+salt) for index in xrange(hash_iterations-1): sha = sha256(sha) return sha[:16], sha[16:]<|fim▁end|>
yield Enum(UInt8(self, "hash"), self.HASH_NAMES)
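A minimal sketch of the key schedule implemented by generate_key() at the end of the file above: SHA-256 is iterated hash_iterations times over password+salt, and the 32-byte digest is split into a 16-byte AES key plus a 16-byte CBC IV. The bytes literals are an adaptation (the original file targets Python 2), and the inputs are the same placeholder values that DeflateStream hard-codes.

import hashlib

def derive_key_iv(password, salt, hash_iterations):
    # First round hashes password+salt; each further round re-hashes the digest.
    digest = hashlib.sha256(password + salt).digest()
    for _ in range(hash_iterations - 1):
        digest = hashlib.sha256(digest).digest()
    # AES-128 key is the first half of the digest, the CBC IV the second half.
    return digest[:16], digest[16:]

key, iv = derive_key_iv(b"x" * 8, b"\0" * 8, 1234)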
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) { "use strict"; var fs = require('fs'), pkginfo = grunt.file.readJSON("package.json"); grunt.initConfig({ pkg: pkginfo, meta: { banner: "/*! <%= pkg.title %> <%= pkg.version %> | <%= pkg.homepage %> | (c) 2014 YOOtheme | MIT License */" }, jshint: { src: { options: { jshintrc: "src/.jshintrc" }, src: ["src/js/*.js"] } }, less: (function(){ var lessconf = { "docsmin": { options: { paths: ["docs/less"], cleancss: true }, files: { "docs/css/uikit.docs.min.css": ["docs/less/uikit.less"] } } }, themes = []; //themes ["default", "custom"].forEach(function(f){ if(grunt.option('quick') && f=="custom") return; if(fs.existsSync('themes/'+f)) { fs.readdirSync('themes/'+f).forEach(function(t){ var themepath = 'themes/'+f+'/'+t, distpath = f=="default" ? "dist/css" : themepath+"/dist"; // Is it a directory? if (fs.lstatSync(themepath).isDirectory() && t!=="blank" && t!=='.git') { var files = {}; if(t=="default") { files[distpath+"/uikit.css"] = [themepath+"/uikit.less"]; } else { files[distpath+"/uikit."+t+".css"] = [themepath+"/uikit.less"]; } lessconf[t] = { "options": { paths: [themepath] }, "files": files }; var filesmin = {}; if(t=="default") { filesmin[distpath+"/uikit.min.css"] = [themepath+"/uikit.less"]; } else { filesmin[distpath+"/uikit."+t+".min.css"] = [themepath+"/uikit.less"]; } lessconf[t+"min"] = { "options": { paths: [themepath], cleancss: true}, "files": filesmin }; themes.push({ "path":themepath, "name":t, "dir":f }); } }); } }); //addons themes.forEach(function(theme){ if(fs.existsSync(theme.path+'/uikit-addons.less')) { var name = (theme.dir == 'default' && theme.name == 'default') ? 'uikit.addons' : 'uikit.'+theme.name+'.addons', dest = (theme.dir == 'default') ? 
'dist/css/addons' : theme.path+'/dist/addons'; lessconf["addons-"+theme.name] = {options: { paths: ['src/less/addons'] }, files: {} }; lessconf["addons-"+theme.name].files[dest+"/"+name+".css"] = [theme.path+'/uikit-addons.less']; lessconf["addons-min-"+theme.name] = {options: { paths: ['src/less/addons'], cleancss: true }, files: {} }; lessconf["addons-min-"+theme.name].files[dest+"/"+name+".min.css"] = [theme.path+'/uikit-addons.less']; } }); return lessconf; })(), copy: { fonts: { files: [{ expand: true, cwd: "src/fonts", src: ["*"], dest: "dist/fonts/" }] } }, concat: { dist: { options: { separator: "\n\n" }, src: [ "src/js/core.js", "src/js/utility.js", "src/js/touch.js", "src/js/alert.js", "src/js/button.js", "src/js/dropdown.js", "src/js/grid.js", "src/js/modal.js", "src/js/offcanvas.js", "src/js/nav.js", "src/js/tooltip.js", "src/js/switcher.js", "src/js/tab.js", "src/js/scrollspy.js", "src/js/smooth-scroll.js", "src/js/toggle.js", ], dest: "dist/js/uikit.js" } }, usebanner: { dist: { options: { position: 'top', banner: "<%= meta.banner %>\n" }, files: { src: [ 'dist/css/**/*.css', 'dist/js/**/*.js' ] } }<|fim▁hole|> options: { //banner: "<%= meta.banner %>\n" }, files: { "dist/js/uikit.min.js": ["dist/js/uikit.js"] } }, addonsmin: { files: (function(){ var files = {}; fs.readdirSync('src/js/addons').forEach(function(f){ if(f.match(/\.js/)) { var addon = f.replace(".js", ""); grunt.file.copy('src/js/addons/'+f, 'dist/js/addons/'+addon+'.js'); files['dist/js/addons/'+addon+'.min.js'] = ['src/js/addons/'+f]; } }); return files; })() } }, compress: { dist: { options: { archive: ("dist/uikit-"+pkginfo.version+".zip") }, files: [ { expand: true, cwd: "dist/", src: ["css/*", "js/*", "fonts/*", "addons/**/*"], dest: "" } ] } }, watch: { src: { files: ["src/**/*.less", "themes/**/*.less","src/js/*.js"], tasks: ["build"] } } }); grunt.registerTask('indexthemes', 'Rebuilding theme index.', function() { var themes = []; ["default", "custom"].forEach(function(f){ if(fs.existsSync('themes/'+f)) { fs.readdirSync('themes/'+f).forEach(function(t){ var themepath = 'themes/'+f+'/'+t; // Is it a directory? if (fs.lstatSync(themepath).isDirectory() && t!=="blank" && t!=='.git') { var theme = { "name" : t.split("-").join(" ").replace(/^([a-z\u00E0-\u00FC])|\s+([a-z\u00E0-\u00FC])/g, function ($1) { return $1.toUpperCase(); }), "url" : "../"+themepath+"/uikit.less", "config": (fs.existsSync(themepath+"/customizer.json") ? "../"+themepath+"/customizer.json" : "../themes/default/uikit/customizer.json"), "styles": {} }; if(fs.existsSync(themepath+'/styles')) { var styles = {}; fs.readdirSync(themepath+'/styles').forEach(function(sf){ var stylepath = [themepath, 'styles', sf, 'style.less'].join('/'); if(fs.existsSync(stylepath)) { styles[sf] = "../"+themepath+"/styles/"+sf+"/style.less"; } }); theme.styles = styles; } themes.push(theme); } }); } }); grunt.log.writeln(themes.length+' themes found: ' + themes.map(function(theme){ return theme.name;}).join(", ")); fs.writeFileSync("themes/themes.json", JSON.stringify(themes, " ", 4)); }); grunt.registerTask('sublime', 'Building Sublime Text Package', function() { // generates a python list (returns string representation) var pythonList = function(classes) { var result = []; classes.forEach(function(cls) { // wrap class name in double quotes, add comma (except for last element) result.push(['"', cls, '"', (i !== classes.length-1 ? 
", " : "")].join('')); // break lines every n elements if ((i !== 0) && (i%20 === 0)) { result.push("\n "); } }); return "[" + result.join("") + "]"; }; // css core var filepath = 'dist/css/uikit.css', cssFiles = [filepath]; if (!fs.existsSync(filepath)) { grunt.log.error("Not found: " + filepath); return; } // css addons fs.readdirSync('dist/css/addons').forEach(function(f){ if (f.match(/\.css$/)) { cssFiles.push('dist/css/addons/'+f); } }); var cssContent = ""; for (var i in cssFiles) { cssContent += grunt.file.read(cssFiles[i])+' '; } var classesList = cssContent.match(/\.(uk-[a-z\d\-]+)/g), classesSet = {}, pystring = '# copy & paste into sublime plugin code:\n'; // use object as set (no duplicates) classesList.forEach(function(c) { c = c.substr(1); // remove leading dot classesSet[c] = true; }); // convert set back to list classesList = Object.keys(classesSet); pystring += 'uikit_classes = ' + pythonList(classesList) + '\n'; // JS core filepath = 'dist/js/uikit.js'; if (!fs.existsSync(filepath)) { grunt.log.error("Not found: " + filepath); return; } var jsFiles = [filepath]; // JS addons fs.readdirSync('dist/js/addons').forEach(function(f){ if (f.match(/\.js$/)) { jsFiles.push('dist/js/addons/'+f); } }); var jsContent = ""; for (var i in jsFiles) { jsContent += grunt.file.read(jsFiles[i]) + ' '; } var dataList = jsContent.match(/data-uk-[a-z\d\-]+/g), dataSet = {}; dataList.forEach(function(s) { dataSet[s] = true; }); dataList = Object.keys(dataSet); pystring += 'uikit_data = ' + pythonList(dataList) + '\n'; grunt.file.write('dist/uikit_completions.py', pystring); grunt.log.writeln('Written: dist/uikit_completions.py'); }); // Load grunt tasks from NPM packages grunt.loadNpmTasks("grunt-contrib-less"); grunt.loadNpmTasks("grunt-contrib-copy"); grunt.loadNpmTasks("grunt-contrib-concat"); grunt.loadNpmTasks("grunt-contrib-jshint"); grunt.loadNpmTasks("grunt-contrib-uglify"); grunt.loadNpmTasks("grunt-contrib-compress"); grunt.loadNpmTasks("grunt-contrib-watch"); grunt.loadNpmTasks("grunt-banner"); // Register grunt tasks grunt.registerTask("build", ["jshint", "indexthemes", "less", "concat", "copy", "uglify", "usebanner"]); grunt.registerTask("default", ["build", "compress"]); };<|fim▁end|>
}, uglify: { distmin: {
<|file_name|>dri_cursor.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/ozone/platform/dri/dri_cursor.h" #include "ui/base/cursor/ozone/bitmap_cursor_factory_ozone.h" #include "ui/gfx/geometry/point.h" #include "ui/gfx/geometry/point_conversions.h" #include "ui/gfx/geometry/point_f.h" #include "ui/ozone/platform/dri/dri_surface_factory.h" #include "ui/ozone/platform/dri/dri_window.h" #include "ui/ozone/platform/dri/dri_window_manager.h" #include "ui/ozone/platform/dri/hardware_cursor_delegate.h" namespace ui { DriCursor::DriCursor(HardwareCursorDelegate* hardware, DriWindowManager* window_manager) : hardware_(hardware), window_manager_(window_manager), cursor_window_(gfx::kNullAcceleratedWidget) { } DriCursor::~DriCursor() { } void DriCursor::SetCursor(gfx::AcceleratedWidget widget, PlatformCursor platform_cursor) { DCHECK_NE(widget, gfx::kNullAcceleratedWidget); scoped_refptr<BitmapCursorOzone> cursor = BitmapCursorFactoryOzone::GetBitmapCursor(platform_cursor); if (cursor_ == cursor || cursor_window_ != widget) return; cursor_ = cursor; ShowCursor(); } void DriCursor::ShowCursor() { DCHECK_NE(cursor_window_, gfx::kNullAcceleratedWidget); if (cursor_.get()) hardware_->SetHardwareCursor(cursor_window_, cursor_->bitmaps(), bitmap_location(), cursor_->frame_delay_ms()); else HideCursor(); } void DriCursor::HideCursor() { DCHECK_NE(cursor_window_, gfx::kNullAcceleratedWidget); hardware_->SetHardwareCursor( cursor_window_, std::vector<SkBitmap>(), gfx::Point(), 0); } void DriCursor::MoveCursorTo(gfx::AcceleratedWidget widget, const gfx::PointF& location) { if (widget != cursor_window_ && cursor_window_ != gfx::kNullAcceleratedWidget) HideCursor(); cursor_window_ = widget; cursor_location_ = location; if (cursor_window_ == gfx::kNullAcceleratedWidget)<|fim▁hole|> cursor_location_.SetToMax(gfx::PointF(0, 0)); // Right and bottom edges are exclusive. cursor_location_.SetToMin(gfx::PointF(size.width() - 1, size.height() - 1)); if (cursor_.get()) hardware_->MoveHardwareCursor(cursor_window_, bitmap_location()); } void DriCursor::MoveCursor(const gfx::Vector2dF& delta) { MoveCursorTo(cursor_window_, cursor_location_ + delta); } gfx::AcceleratedWidget DriCursor::GetCursorWindow() { return cursor_window_; } bool DriCursor::IsCursorVisible() { return cursor_.get(); } gfx::PointF DriCursor::location() { return cursor_location_; } gfx::Point DriCursor::bitmap_location() { return gfx::ToFlooredPoint(cursor_location_) - cursor_->hotspot().OffsetFromOrigin(); } } // namespace ui<|fim▁end|>
return; DriWindow* window = window_manager_->GetWindow(cursor_window_); const gfx::Size& size = window->GetBounds().size();
<|file_name|>CVQualifiers.cpp<|end_file_name|><|fim▁begin|>// -*- mode: C++ -*- // // Copyright (c) 2007, 2008, 2009, 2010, 2011 The University of Utah // All rights reserved. // // This file is part of `csmith', a random generator of C programs. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. #include <assert.h> #include <iostream> #include "CVQualifiers.h" #include "Type.h" #include "Effect.h" #include "CGContext.h" #include "CGOptions.h" #include "random.h" #include "Error.h" #include "Probabilities.h" #include "DepthSpec.h" #include "Enumerator.h" ////////////////////////////////////////////////////////////////////// // Construction/Destruction ////////////////////////////////////////////////////////////////////// CVQualifiers::CVQualifiers(void) : wildcard(false), accept_stricter(false) { // nothing else to do } CVQualifiers::CVQualifiers(bool wild, bool accept_stricter) : wildcard(wild), accept_stricter(accept_stricter) { // nothing else to do } CVQualifiers::CVQualifiers(const vector<bool>& isConsts, const vector<bool>& isVolatiles) : wildcard(false), accept_stricter(false), is_consts(isConsts), is_volatiles(isVolatiles) { // nothing else to do } CVQualifiers::CVQualifiers(const CVQualifiers &qfer) : wildcard(qfer.wildcard), accept_stricter(qfer.accept_stricter), is_consts(qfer.get_consts()), is_volatiles(qfer.get_volatiles()) { // nothing else to do } CVQualifiers::~CVQualifiers() { } CVQualifiers & CVQualifiers::operator=(const CVQualifiers &qfer) { if (this == &qfer) { return *this; } wildcard = qfer.wildcard; accept_stricter = qfer.accept_stricter; is_consts = qfer.get_consts(); is_volatiles = qfer.get_volatiles(); return *this; } // -------------------------------------------------------------- /* return true if this variable is more const-volatile qualified than v * some examples are: * const is more qualified than none * volatile is more qualified than none * const volatile is more qualified than const * const is NOT more qualified than volatile * ... 
* notice "const int**" is not convertable from "int**" * as explained in * http://www.embedded.com/columns/programmingpointers/180205632?_requestid=488055 **************************************************************/ bool CVQualifiers::stricter_than(const CVQualifiers& qfer) const { size_t i; assert(is_consts.size() == is_volatiles.size()); const vector<bool>& v_consts = qfer.get_consts(); const vector<bool>& v_volatiles = qfer.get_volatiles(); if (is_consts.size() != v_consts.size() || is_volatiles.size() != v_volatiles.size()) { return false; } size_t depth = is_consts.size(); // check "const" qualifier first for (i=0; i<depth; i++) { // for special rule: "const int**" is not convertable from "int**" // actually for a level that is followed by two "*"s, we have to match // "const" qualifier if (depth - i > 2 && is_consts[i] != v_consts[i]) { return false; } if (v_consts[i] && !is_consts[i]) { return false; } } // check "volatile" qualifier second // special rule: the volatile property on storage (1st in vector) must match // can be relaxed??? if (depth > 1 && is_volatiles[0] != v_volatiles[0]) { return false; } for (i=0; i<depth; i++) { // similiar to const: "volatile int**" is not convertable from "int**" // actually for a level that is followed by two "*"s, we have to match if (depth - i > 2 && is_volatiles[i] != v_volatiles[i]) { return false; } if (v_volatiles[i] && !is_volatiles[i]) { return false; } } return true; } bool CVQualifiers::match(const CVQualifiers& qfer) const { if (wildcard) { return true; } if (CGOptions::match_exact_qualifiers()) { return is_consts == qfer.get_consts() && is_volatiles == qfer.get_volatiles(); } // return true if both variables are non-pointer (has only one level qualifier) if (is_consts.size() == qfer.get_consts().size() && is_consts.size()==1) { assert(is_consts.size() == is_volatiles.size()); return true; } return (!accept_stricter && stricter_than(qfer)) || (accept_stricter && qfer.stricter_than(*this)); } bool CVQualifiers::match_indirect(const CVQualifiers& qfer) const { if (wildcard) { return true; } if (is_consts.size() == qfer.get_consts().size()) { return match(qfer); } int deref = qfer.get_consts().size() - is_consts.size(); if (deref < -1) { return false; } return match(qfer.indirect_qualifiers(deref)); } /* * make sure no volatile-pointers if volatile-pointers is false */ void CVQualifiers::make_scalar_volatiles(std::vector<bool> &volatiles) { if (!CGOptions::volatile_pointers()) { for (size_t i=1; i<volatiles.size(); i++) volatiles[i] = false; } } /* * make sure no const-pointers if const_pointers is false */ void CVQualifiers::make_scalar_consts(std::vector<bool> &consts) { if (!CGOptions::const_pointers()) { for (size_t i=1; i<consts.size(); i++) consts[i] = false; } } /* * generate a random CV qualifier vector that is looser or stricter than this one */ CVQualifiers CVQualifiers::random_qualifiers(bool no_volatile, Effect::Access access, const CGContext &cg_context) const { std::vector<bool> volatiles; std::vector<bool> consts; if (wildcard) { return CVQualifiers(true, accept_stricter); } // use non-volatile for all levels if requested if (no_volatile) { for (size_t i=0; i<is_volatiles.size(); i++) { volatiles.push_back(false); } } else { volatiles = !accept_stricter ? 
random_looser_volatiles() : random_stricter_volatiles(); ERROR_GUARD(CVQualifiers(consts, volatiles)); if (!cg_context.get_effect_context().is_side_effect_free()) { volatiles[volatiles.size() - 1] = false; } } ERROR_GUARD(CVQualifiers(consts, volatiles)); make_scalar_volatiles(volatiles); consts = !accept_stricter ? random_looser_consts() : random_stricter_consts(); make_scalar_consts(consts); ERROR_GUARD(CVQualifiers(consts, volatiles)); if (access == Effect::WRITE) { consts[consts.size() - 1] = false; } return CVQualifiers(consts, volatiles); } /* * generate a random CV qualifier vector that is looser than this one */ CVQualifiers CVQualifiers::random_loose_qualifiers(bool no_volatile, Effect::Access access, const CGContext &cg_context) const { std::vector<bool> volatiles; std::vector<bool> consts; if (wildcard) { return CVQualifiers(true, accept_stricter); } // use non-volatile for all levels if requested if (no_volatile) { for (size_t i=0; i<is_volatiles.size(); i++) { volatiles.push_back(false); } } else { volatiles = random_looser_volatiles(); ERROR_GUARD(CVQualifiers(consts, volatiles)); if (!cg_context.get_effect_context().is_side_effect_free()) { volatiles[volatiles.size() - 1] = false; } } ERROR_GUARD(CVQualifiers(consts, volatiles)); make_scalar_volatiles(volatiles); consts = random_looser_consts(); make_scalar_consts(consts); ERROR_GUARD(CVQualifiers(consts, volatiles)); if (access == Effect::WRITE) { consts[consts.size() - 1] = false; } return CVQualifiers(consts, volatiles); } CVQualifiers CVQualifiers::random_qualifiers(const Type* t, Effect::Access access, const CGContext &cg_context, bool no_volatile) { return random_qualifiers(t, access, cg_context, no_volatile, RegularConstProb, RegularVolatileProb); } CVQualifiers CVQualifiers::random_qualifiers(const Type* t, Effect::Access access, const CGContext &cg_context, bool no_volatile, unsigned int const_prob, unsigned int volatile_prob) { CVQualifiers ret_qfer; if (t==0) { return ret_qfer; } bool isVolatile = false; bool isConst = false; std::vector<bool> is_consts, is_volatiles; const Effect &effect_context = cg_context.get_effect_context(); // set random volatile/const properties for each level of indirection for pointers const Type* tmp = t->ptr_type; while (tmp) { DEPTH_GUARD_BY_DEPTH_RETURN(2, ret_qfer); isVolatile = rnd_flipcoin(volatile_prob); ERROR_GUARD(ret_qfer); isConst = rnd_flipcoin(const_prob); ERROR_GUARD(ret_qfer); if (isVolatile && isConst && !CGOptions::allow_const_volatile()) { isConst = false; } is_consts.push_back(isConst); is_volatiles.push_back(isVolatile); tmp = tmp->ptr_type; } // set random volatile/const properties for variable itself bool volatile_ok = effect_context.is_side_effect_free(); bool const_ok = (access != Effect::WRITE); isVolatile = false; isConst = false; if (volatile_ok && const_ok) { DEPTH_GUARD_BY_DEPTH_RETURN(2, ret_qfer); isVolatile = rnd_flipcoin(volatile_prob); ERROR_GUARD(ret_qfer); isConst = rnd_flipcoin(const_prob); ERROR_GUARD(ret_qfer); } else if (volatile_ok) { DEPTH_GUARD_BY_DEPTH_RETURN(1, ret_qfer); isVolatile = rnd_flipcoin(volatile_prob); ERROR_GUARD(ret_qfer); } else if (const_ok) { DEPTH_GUARD_BY_DEPTH_RETURN(1, ret_qfer); isConst = rnd_flipcoin(const_prob); ERROR_GUARD(ret_qfer); } if (isVolatile && isConst && !CGOptions::allow_const_volatile()) { isConst = false; } is_consts.push_back(isConst); is_volatiles.push_back(isVolatile); // use non-volatile for all levels if requested if (no_volatile) { for (size_t i=0; i<is_volatiles.size(); i++) { is_volatiles[i] = 
false; } } make_scalar_volatiles(is_volatiles); make_scalar_consts(is_consts); return CVQualifiers(is_consts, is_volatiles); } /* * make a random qualifier for type t, assuming non context, * and no volatile allowed */ CVQualifiers CVQualifiers::random_qualifiers(const Type* t) { return random_qualifiers(t, Effect::READ, CGContext::get_empty_context(), true); } /* * be careful to use it because it will generate volatile without knowing the context. * Only used to generate qulifiers for struct/unions */ CVQualifiers CVQualifiers::random_qualifiers(const Type* t, unsigned int const_prob, unsigned int volatile_prob) { return random_qualifiers(t, Effect::READ, CGContext::get_empty_context(), false, const_prob, volatile_prob); } vector<bool> CVQualifiers::random_stricter_consts(void) const { vector<bool> consts; size_t i; size_t depth = is_consts.size(); if (CGOptions::match_exact_qualifiers()) return is_consts; for (i=0; i<depth; i++) { // special case // const int** is not stricter than int** // int * const ** is not stricter than int*** // and so on... if (is_consts[i] || (depth - i > 2)) { consts.push_back(is_consts[i]); } else if (is_volatiles[i] && !CGOptions::allow_const_volatile()) { consts.push_back(false); } else { DEPTH_GUARD_BY_DEPTH_RETURN(1, consts); bool index = rnd_flipcoin(StricterConstProb); ERROR_GUARD(consts); consts.push_back(index); } } return consts; } vector<bool> CVQualifiers::random_stricter_volatiles(void) const { vector<bool> volatiles; size_t i; size_t depth = is_volatiles.size(); if (CGOptions::match_exact_qualifiers()) return is_volatiles; for (i=0; i<depth; i++) { // first one (storage must match, any level followed by at least two more // indirections must match if (is_volatiles[i] || (i==0 && depth>1) || (depth - i > 2)) { volatiles.push_back(is_volatiles[i]); } else if (is_consts[i] && !CGOptions::allow_const_volatile()) { volatiles.push_back(false); } else { DEPTH_GUARD_BY_DEPTH_RETURN(1, volatiles); bool index = rnd_flipcoin(RegularVolatileProb); ERROR_GUARD(volatiles); volatiles.push_back(index); } } make_scalar_volatiles(volatiles); return volatiles; } vector<bool> CVQualifiers::random_looser_consts(void) const { vector<bool> consts; size_t i; size_t depth = is_consts.size(); if (CGOptions::match_exact_qualifiers()) return is_consts; for (i=0; i<depth; i++) { // special case if (!is_consts[i] || (depth - i > 2)) { consts.push_back(is_consts[i]); } else { DEPTH_GUARD_BY_DEPTH_RETURN(1, consts); bool index = rnd_flipcoin(LooserConstProb); ERROR_GUARD(consts); consts.push_back(index); } } return consts; } vector<bool> CVQualifiers::random_looser_volatiles(void) const { vector<bool> volatiles; size_t i; size_t depth = is_volatiles.size(); if (CGOptions::match_exact_qualifiers()) return is_volatiles; for (i=0; i<depth; i++) { if (!is_volatiles[i] || (i==0 && depth>1) || (depth - i > 2)) { volatiles.push_back(is_volatiles[i]); } else { DEPTH_GUARD_BY_DEPTH_RETURN(1, volatiles); bool index = rnd_flipcoin(RegularVolatileProb); ERROR_GUARD(volatiles); volatiles.push_back(index); } } make_scalar_volatiles(volatiles); return volatiles; } void CVQualifiers::add_qualifiers(bool is_const, bool is_volatile) { is_consts.push_back(is_const); is_volatiles.push_back(is_volatile); } // actually add qualifiers to pointers CVQualifiers CVQualifiers::random_add_qualifiers(bool no_volatile) const { CVQualifiers qfer = *this; if (CGOptions::match_exact_qualifiers()) { qfer.add_qualifiers(false, false); return qfer; } //bool is_const = rnd_upto(50); if (no_volatile) { 
DEPTH_GUARD_BY_DEPTH_RETURN(1, qfer); } else { DEPTH_GUARD_BY_DEPTH_RETURN(2, qfer); } bool is_const; if (!CGOptions::const_pointers()) is_const = false; else is_const = rnd_flipcoin(RegularConstProb); ERROR_GUARD(qfer); //bool is_volatile = no_volatile ? false : rnd_upto(RegularVolatileProb); bool is_volatile; if (no_volatile || !CGOptions::volatile_pointers()) is_volatile = false; else is_volatile = rnd_flipcoin(RegularVolatileProb); ERROR_GUARD(qfer); qfer.add_qualifiers(is_const, is_volatile); return qfer; } void CVQualifiers::remove_qualifiers(int len) { int i; for (i=0; i<len; i++) { is_consts.pop_back(); is_volatiles.pop_back(); } } CVQualifiers CVQualifiers::indirect_qualifiers(int level) const { if (level == 0 || wildcard) { return *this; } // taking address else if (level < 0) { assert(level == -1); CVQualifiers qfer = *this; qfer.add_qualifiers(false, false); return qfer; } // dereference else { CVQualifiers qfer = *this; qfer.remove_qualifiers(level); return qfer; } } /* * check if the indirect depth of type matches qualifier size */ bool CVQualifiers::sanity_check(const Type* t) const { assert(t); int level = t->get_indirect_level(); assert(level >= 0); return wildcard || (is_consts.size() == is_volatiles.size() && (static_cast<size_t>(level)+1) == is_consts.size()); } void CVQualifiers::output_qualified_type(const Type* t, std::ostream &out) const { assert(t); assert(sanity_check(t)); size_t i; const Type* base = t->get_base_type(); for (i=0; i<is_consts.size(); i++) { if (i>0) { out << "*"; } if (is_consts[i]) { if (!CGOptions::consts()) assert(0); if (i > 0) out << " "; out << "const "; } if (is_volatiles[i]) { if (!CGOptions::volatiles()) assert(0); if (i > 0) out << " "; out << "volatile "; } if (i==0) { base->Output(out); out << " "; } } } void CVQualifiers::output_qualified_type_with_deputy_annotation(const Type* t, std::ostream &out, const vector<string>& annotations) const { assert(t); assert(sanity_check(t)); assert(is_consts.size() == annotations.size()+1); size_t i; const Type* base = t->get_base_type(); for (i=0; i<is_consts.size(); i++) { if (i>0) { out << "* "; out << annotations[i-1] << " "; } if (is_consts[i]) { if (!CGOptions::consts()) assert(0); out << "const "; } if (is_volatiles[i]) { if (!CGOptions::volatiles()) assert(0); out << "volatile "; } if (i==0) { base->Output(out); out << " "; } } } bool CVQualifiers::is_const_after_deref(int deref_level) const { if (deref_level < 0) { return false; } size_t len = is_consts.size(); assert(len > static_cast<size_t>(deref_level)); return is_consts[len - deref_level - 1]; } bool CVQualifiers::is_volatile_after_deref(int deref_level) const { if (deref_level < 0) { return false; } size_t len = is_volatiles.size(); assert(len > static_cast<size_t>(deref_level)); /* if (len <= static_cast<size_t>(deref_level)) { cout << "len = " << len << ", deref_level = " << deref_level << std::endl; assert(0); } */ return is_volatiles[len - deref_level - 1]; } void CVQualifiers::set_const(bool is_const, int pos) { int len = is_consts.size(); if (len > 0) { is_consts[len - pos - 1] = is_const; } } void CVQualifiers::set_volatile(bool is_volatile, int pos) { int len = is_volatiles.size(); if (len > 0) { is_volatiles[len - pos - 1] = is_volatile; } } void CVQualifiers::restrict(Effect::Access access, const CGContext& cg_context) { if (access == Effect::WRITE) { set_const(false); } if (!cg_context.get_effect_context().is_side_effect_free()) { set_volatile(false); } } /* * For now, only used to generate all qualifiers for struct fields. 
* Also, since we don't support fields with pointer types, we only * enumerate the first level of qualifiers. */ void CVQualifiers::get_all_qualifiers(vector<CVQualifiers> &quals, unsigned int const_prob, unsigned int volatile_prob) { Enumerator<string> qual_enumerator; qual_enumerator.add_bool_elem("const_prob", const_prob); qual_enumerator.add_bool_elem("volatile_prob", volatile_prob); Enumerator<string> *i; for (i = qual_enumerator.begin(); i != qual_enumerator.end(); i = i->next()) { bool isConst = i->get_elem("const_prob"); bool isVolatile = i->get_elem("volatile_prob"); vector<bool> consts; vector<bool> volatiles; consts.push_back(isConst); volatiles.push_back(isVolatile);<|fim▁hole|> quals.push_back(qual); } } void CVQualifiers::OutputFirstQuals(std::ostream &out) const { if (is_consts.size() > 0 && is_consts[0]) { if (!CGOptions::consts()) assert(0); out << "const "; } if (is_volatiles.size() > 0 && is_volatiles[0]) { if (!CGOptions::volatiles()) assert(0); out << "volatile "; } } void CVQualifiers::output() const { size_t i; for (i=0; i<is_consts.size(); i++) { cout << is_consts[i] << " "; } cout << ", "; for (i=0; i<is_volatiles.size(); i++) { cout << is_volatiles[i] << " "; } cout << endl; }<|fim▁end|>
CVQualifiers qual(consts, volatiles);
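To make the pointer-conversion rule in stricter_than() above concrete, here is an illustrative Python transcription of just its const half (one flag per indirection level, base type first, as output_qualified_type() prints them); the volatile half in the C++ adds one extra matching rule for the storage level.

def const_stricter_than(consts, other_consts):
    depth = len(consts)
    if depth != len(other_consts):
        return False
    for i in range(depth):
        # A level still followed by two or more '*'s must match exactly.
        if depth - i > 2 and consts[i] != other_consts[i]:
            return False
        # Otherwise const may only be added, never dropped.
        if other_consts[i] and not consts[i]:
            return False
    return True

assert const_stricter_than([True], [False])           # const int is stricter than int
assert not const_stricter_than([True, False, False],  # but "const int**" is not
                               [False, False, False]) # convertible from "int**"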
<|file_name|>pacemaker.py<|end_file_name|><|fim▁begin|># Copyright (C) 2009 Yan Gao <[email protected]> # See COPYING for license information. import os import tempfile import copy from lxml import etree class PacemakerError(Exception): '''PacemakerError exceptions''' def get_validate_name(cib_elem): if cib_elem is not None: return cib_elem.get("validate-with") else: return None def get_validate_type(cib_elem): return "rng" def get_schema_filename(validate_name): if not validate_name.endswith('.rng'): return "%s.rng" % (validate_name) return validate_name def read_schema_local(validate_name, file_path): try: with open(file_path) as f: return f.read() except IOError, msg: raise PacemakerError("Cannot read schema file '%s': %s" % (file_path, msg)) def delete_dir(dir_path): real_path = os.path.realpath(dir_path) if real_path.count(os.sep) == len(real_path): raise PacemakerError("Do not delete the root directory") for root, dirs, files in os.walk(dir_path, False): for name in files: try: os.unlink(os.path.join(root, name)) except OSError: continue for name in dirs: try: os.rmdir(os.path.join(root, name)) except OSError: continue os.rmdir(dir_path) def subset_select(sub_set, optional): "Helper used to select attributes/elements based on subset and optional flag" if sub_set == 'r': # required return not optional if sub_set == 'o': # optional return optional return True def CrmSchema(cib_elem, local_dir): return RngSchema(cib_elem, local_dir) class Schema(object): validate_name = None def __init__(self, cib_elem, local_dir, is_local=True, get_schema_fn=None): self.is_local = is_local if get_schema_fn is not None: self.get_schema_fn = get_schema_fn else: self.get_schema_fn = read_schema_local self.local_dir = local_dir self.refresh(cib_elem) self.schema_str_docs = {} self.schema_filename = None def update_schema(self): 'defined in subclasses' raise NotImplementedError def find_elem(self, elem_name): 'defined in subclasses' raise NotImplementedError def refresh(self, cib_elem): saved_validate_name = self.validate_name self.validate_name = get_validate_name(cib_elem) self.schema_filename = get_schema_filename(self.validate_name) if self.validate_name != saved_validate_name: return self.update_schema() def validate_cib(self, new_cib_elem): detail_msg = "" if self.is_local: schema_f = os.path.join(self.local_dir, self.schema_filename) else: try: tmp_f = self.tmp_schema_f() except EnvironmentError, msg: raise PacemakerError("Cannot expand the Relax-NG schema: " + str(msg)) if tmp_f is None: raise PacemakerError("Cannot expand the Relax-NG schema") else: schema_f = tmp_f try: cib_elem = etree.fromstring(etree.tostring(new_cib_elem)) except etree.Error, msg: raise PacemakerError("Failed to parse the CIB XML: " + str(msg)) try: schema = etree.RelaxNG(file=schema_f) except etree.Error, msg: raise PacemakerError("Failed to parse the Relax-NG schema: " + str(msg)) #try: # schema.assertValid(cib_elem) #except etree.DocumentInvalid, err_msg: # print err_msg # print schema.error_log try: etree.clear_error_log() except: pass is_valid = schema.validate(cib_elem) if not is_valid: for error_entry in schema.error_log: detail_msg += error_entry.level_name + ": " + error_entry.message + "\n" if not self.is_local: try: delete_dir(os.path.dirname(tmp_f)) except: pass return (is_valid, detail_msg) def tmp_schema_f(self): tmp_dir = tempfile.mkdtemp() for schema_doc_name in self.schema_str_docs: schema_doc_filename = os.path.join(tmp_dir, schema_doc_name) fd = os.open(schema_doc_filename, os.O_RDWR | os.O_CREAT | 
os.O_TRUNC, 0644) schema_doc_str = self.schema_str_docs[schema_doc_name] os.write(fd, schema_doc_str) os.close(fd) <|fim▁hole|> return os.path.join(tmp_dir, self.schema_filename) else: return None def get_sub_elems_by_obj(self, obj, sub_set='a'): '''defined in subclasses''' raise NotImplementedError def get_elem_attrs_by_obj(self, obj, sub_set='a'): '''defined in subclasses''' raise NotImplementedError # sub_set: 'a'(all), 'r'(required), 'o'(optional) def get_elem_attrs(self, elem_name, sub_set='a'): elem_obj = self.find_elem(elem_name) if elem_obj is None: return None return self.get_elem_attrs_by_obj(elem_obj, sub_set) # sub_set: 'a'(all), 'r'(required), 'o'(optional) def get_sub_elems(self, elem_name, sub_set='a'): elem_obj = self.find_elem(elem_name) if elem_obj is None: return None return self.get_sub_elems_by_obj(elem_obj, sub_set) def supported_rsc_types(self): return self.get_sub_elems("resources") def get_local_tag(el): return el.tag.replace("{%s}" % el.nsmap[None], "") class RngSchema(Schema): expr = '//*[local-name() = $name]' def __init__(self, cib_elem, local_dir, is_local=True, get_schema_fn=None): self.rng_docs = {} Schema.__init__(self, cib_elem, local_dir, is_local=is_local, get_schema_fn=get_schema_fn) def update_schema(self): self.rng_docs = {} self.schema_str_docs = {} self.update_rng_docs(self.validate_name, self.schema_filename) return True def update_rng_docs(self, validate_name="", file=""): self.rng_docs[file] = self.find_start_rng_node(validate_name, file) if self.rng_docs[file] is None: return for extern_ref in self.rng_docs[file][0].xpath(self.expr, name="externalRef"): href_value = extern_ref.get("href") if self.rng_docs.get(href_value) is None: self.update_rng_docs(validate_name, href_value) def find_start_rng_node(self, validate_name="", file=""): schema_info = validate_name + " " + file crm_schema = self.get_schema_fn(validate_name, os.path.join(self.local_dir, file)) if not crm_schema: raise PacemakerError("Cannot get the Relax-NG schema: " + schema_info) self.schema_str_docs[file] = crm_schema try: grammar = etree.fromstring(crm_schema) except Exception, msg: raise PacemakerError("Failed to parse the Relax-NG schema: " + str(msg) + schema_info) start_nodes = grammar.xpath(self.expr, name="start") if len(start_nodes) > 0: start_node = start_nodes[0] return (grammar, start_node) else: raise PacemakerError("Cannot find the start in the Relax-NG schema: " + schema_info) def find_in_grammar(self, grammar, node, name): for elem_node in grammar.xpath(self.expr, name=node): if elem_node.get("name") == name: return elem_node return None def find_elem(self, elem_name): elem_node = None for (grammar, start_node) in self.rng_docs.values(): elem_node = self.find_in_grammar(grammar, 'element', elem_name) if elem_node is not None: return (grammar, elem_node) return None def rng_xpath(self, xpath, namespaces=None): return [grammar.xpath(xpath, namespaces=namespaces) for grammar, _ in self.rng_docs.values()] def get_sub_rng_nodes(self, grammar, rng_node): sub_rng_nodes = [] for child_node in rng_node.iterchildren(): if not isinstance(child_node.tag, basestring): continue local_tag = get_local_tag(child_node) if local_tag == "ref": def_node = self.find_in_grammar(grammar, 'define', child_node.get('name')) if def_node is not None: sub_rng_nodes.extend(self.get_sub_rng_nodes(grammar, def_node)) elif local_tag == "externalRef": nodes = self.get_sub_rng_nodes(*self.rng_docs[child_node.get("href")]) sub_rng_nodes.extend(nodes) elif local_tag in ["element", "attribute", "value", 
"data", "text"]: sub_rng_nodes.append([(grammar, child_node)]) elif local_tag in ["interleave", "optional", "zeroOrMore", "choice", "group", "oneOrMore"]: nodes = self.get_sub_rng_nodes(grammar, child_node) for node in nodes: node.append(copy.deepcopy(child_node)) sub_rng_nodes.extend(nodes) return sub_rng_nodes def sorted_sub_rng_nodes_by_name(self, obj_type): rng_node = self.find_elem(obj_type) if rng_node is None or rng_node[1] is None: return None return self.sorted_sub_rng_nodes_by_node(*rng_node) def sorted_sub_rng_nodes_by_node(self, grammar, rng_node): sub_rng_nodes = self.get_sub_rng_nodes(grammar, rng_node) sorted_nodes = {} for sub_rng_node in sub_rng_nodes: name = get_local_tag(sub_rng_node[0][1]) if sorted_nodes.get(name) is None: sorted_nodes[name] = [] sorted_nodes[name].append(sub_rng_node) return sorted_nodes def get_elem_attr_objs(self, obj_type): return self.sorted_sub_rng_nodes_by_name(obj_type).get("attribute", []) def get_sub_elem_objs(self, obj_type): return self.sorted_sub_rng_nodes_by_name(obj_type).get("element", []) def find_decl(self, rng_node, name, first=True): decl_node_index = 0 for decl_node in rng_node[1:]: if get_local_tag(decl_node) == name: decl_node_index = rng_node.index(decl_node) - len(rng_node) if first: break return decl_node_index def get_sorted_decl_nodes(self, decl_nodes_list, decl_type): sorted_nodes = [] for rng_nodes in decl_nodes_list: rng_node = rng_nodes.get(decl_type) if rng_node is not None and rng_node not in sorted_nodes: sorted_nodes.append(rng_node) return sorted_nodes def get_obj_name(self, rng_node): return rng_node[0][1].get("name") def get_attr_type(self, attr_rng_node): sub_rng_nodes = self.sorted_sub_rng_nodes_by_node(*attr_rng_node[0]) for sub_rng_node in sub_rng_nodes.get("data", []): return sub_rng_nodes["data"][0][0][1].get("type") return None def get_attr_values(self, attr_rng_node): attr_values = [] sub_rng_nodes = self.sorted_sub_rng_nodes_by_node(*attr_rng_node[0]) for sub_rng_node in sub_rng_nodes.get("value", []): #print etree.tostring(sub_rng_node[0][1]) #print sub_rng_node[0][1].text #attr_values.append(sub_rng_node[0][1].getchildren()[0].data) attr_values.append(sub_rng_node[0][1].text) return attr_values def get_attr_default(self, attr_rng_node): return attr_rng_node[0][1].get("ann:defaultValue") def _get_by_obj(self, rng_obj, typ, sub_set): """ Used to select attributes or elements based on sub_set selector and optionality. typ: 'attribute' or 'element' sub_set: 'a'(all), 'r'(required), 'o'(optional) """ grammar, rng_node = rng_obj if rng_node is None: return None selected = [] sub_rng_nodes = self.get_sub_rng_nodes(grammar, rng_node) for node in sub_rng_nodes: head = node[0][1] if get_local_tag(head) != typ: continue name = head.get("name") if selected.count(name): continue # the complicated case: 'choice' #if self.find_decl(sub_rng_node, "choice") != 0: optional = any(self.find_decl(node, opt) != 0 for opt in ("optional", "zeroOrMore")) if subset_select(sub_set, optional): selected.append(name) return selected def get_elem_attrs_by_obj(self, rng_obj, sub_set='a'): "sub_set: 'a'(all), 'r'(required), 'o'(optional)" return self._get_by_obj(rng_obj, 'attribute', sub_set=sub_set) def get_sub_elems_by_obj(self, rng_obj, sub_set='a'): "sub_set: 'a'(all), 'r'(required), 'o'(optional)" return self._get_by_obj(rng_obj, 'element', sub_set=sub_set)<|fim▁end|>
if self.schema_filename in self.schema_str_docs:
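The validate_cib() method above leans on lxml's Relax-NG support; a minimal standalone sketch of the same validate-and-report pattern (the schema filename and CIB snippet here are placeholders):

from lxml import etree

schema = etree.RelaxNG(file="pacemaker-1.2.rng")  # placeholder schema file
cib = etree.fromstring('<cib validate-with="pacemaker-1.2"/>')
if not schema.validate(cib):
    # Same error reporting as validate_cib(): level name plus message per entry.
    for entry in schema.error_log:
        print("%s: %s" % (entry.level_name, entry.message))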
<|file_name|>MigrationsPage.tsx<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017 Cloudbase Solutions SRL This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ import React from 'react' import styled from 'styled-components' import { observer } from 'mobx-react' import MainTemplate from '@src/components/modules/TemplateModule/MainTemplate' import Navigation from '@src/components/modules/NavigationModule/Navigation' import FilterList from '@src/components/ui/Lists/FilterList' import PageHeader from '@src/components/smart/PageHeader' import AlertModal from '@src/components/ui/AlertModal' import projectStore from '@src/stores/ProjectStore' import migrationStore from '@src/stores/MigrationStore' import endpointStore from '@src/stores/EndpointStore' import notificationStore from '@src/stores/NotificationStore' import configLoader from '@src/utils/Config' import { ThemePalette } from '@src/components/Theme' import replicaMigrationFields from '@src/components/modules/TransferModule/ReplicaMigrationOptions/replicaMigrationFields' import { MigrationItem } from '@src/@types/MainItem' import userStore from '@src/stores/UserStore' import TransferListItem from '@src/components/modules/TransferModule/TransferListItem' import migrationLargeImage from './images/migration-large.svg' import migrationItemImage from './images/migration.svg' const Wrapper = styled.div<any>`` type State = { selectedMigrations: MigrationItem[], modalIsOpen: boolean, showDeleteMigrationModal: boolean, showCancelMigrationModal: boolean, showRecreateMigrationsModal: boolean, } @observer class MigrationsPage extends React.Component<{ history: any }, State> { state: State = { showDeleteMigrationModal: false, showCancelMigrationModal: false, showRecreateMigrationsModal: false, selectedMigrations: [], modalIsOpen: false, } pollTimeout: number = 0 stopPolling: boolean = false componentDidMount() { document.title = 'Coriolis Migrations' projectStore.getProjects() endpointStore.getEndpoints({ showLoading: true }) userStore.getAllUsers({ showLoading: userStore.users.length === 0, quietError: true, }) this.stopPolling = false this.pollData() } componentWillUnmount() { clearTimeout(this.pollTimeout) this.stopPolling = true } getEndpoint(endpointId: string) { return endpointStore.endpoints.find(endpoint => endpoint.id === endpointId) } getFilterItems() { return [ { label: 'All', value: 'all' }, { label: 'Running', value: 'RUNNING' }, { label: 'Error', value: 'ERROR' }, { label: 'Completed', value: 'COMPLETED' }, { label: 'Canceled', value: 'CANCELED' }, ] } getStatus(migrationId: string): string { const migration = migrationStore.migrations.find(m => m.id === migrationId) return migration ? 
migration.last_execution_status : '' } handleProjectChange() { endpointStore.getEndpoints({ showLoading: true }) migrationStore.getMigrations({ showLoading: true }) } handleReloadButtonClick() { projectStore.getProjects() endpointStore.getEndpoints({ showLoading: true }) migrationStore.getMigrations({ showLoading: true }) userStore.getAllUsers({ showLoading: true, quietError: true }) } handleItemClick(item: MigrationItem) { if (item.last_execution_status === 'RUNNING') { this.props.history.push(`/migrations/${item.id}/tasks`) } else { this.props.history.push(`/migrations/${item.id}`) } } deleteSelectedMigrations() { this.state.selectedMigrations.forEach(migration => { migrationStore.delete(migration.id) }) this.setState({ showDeleteMigrationModal: false }) } cancelSelectedMigrations() { this.state.selectedMigrations.forEach(migration => { const status = this.getStatus(migration.id) if (status === 'RUNNING' || status === 'AWAITING_MINION_ALLOCATIONS') { migrationStore.cancel(migration.id) } }) notificationStore.alert('Canceling migrations') this.setState({ showCancelMigrationModal: false }) } async recreateMigrations() { notificationStore.alert('Recreating migrations') this.setState({ showRecreateMigrationsModal: false }) await Promise.all(this.state.selectedMigrations.map(async migration => { if (migration.replica_id) { await migrationStore.migrateReplica({ replicaId: migration.replica_id, fields: replicaMigrationFields, uploadedUserScripts: [], removedUserScripts: [], userScriptData: migration.user_scripts, minionPoolMappings: migration.instance_osmorphing_minion_pool_mappings || {}, }) } else { await migrationStore.recreateFullCopy(migration as any) } })) migrationStore.getMigrations() } handleEmptyListButtonClick() { this.props.history.push('/wizard/migration') } handleModalOpen() { this.setState({ modalIsOpen: true }) } handleModalClose() { this.setState({ modalIsOpen: false }, () => { this.pollData() }) } searchText(item: MigrationItem, text?: string) { let result = false if (item.instances[0].toLowerCase().indexOf(text || '') > -1) { return true } if (item.destination_environment) { Object.keys(item.destination_environment).forEach(prop => { if (item.destination_environment[prop] && item.destination_environment[prop].toLowerCase && item.destination_environment[prop].toLowerCase().indexOf(text) > -1) { result = true } }) } return result } itemFilterFunction(item: MigrationItem, filterStatus?: string | null, filterText?: string) { if ((filterStatus !== 'all' && (item.last_execution_status !== filterStatus)) || !this.searchText(item, filterText) ) { return false } return true } async pollData() { if (this.state.modalIsOpen || this.stopPolling) { return } await Promise.all([ migrationStore.getMigrations({ skipLog: true }), endpointStore.getEndpoints({ skipLog: true }), userStore.getAllUsers({ skipLog: true, quietError: true }), ]) this.pollTimeout = window.setTimeout(() => { this.pollData() }, configLoader.config.requestPollTimeout) } render() { let atLeaseOneIsRunning = false this.state.selectedMigrations.forEach(migration => { const status = this.getStatus(migration.id) atLeaseOneIsRunning = atLeaseOneIsRunning || status === 'RUNNING' || status === 'AWAITING_MINION_ALLOCATIONS' }) const BulkActions = [ { label: 'Cancel', disabled: !atLeaseOneIsRunning, action: () => { this.setState({ showCancelMigrationModal: true }) }, }, { label: 'Recreate Migrations', disabled: atLeaseOneIsRunning, color: ThemePalette.primary, action: () => { this.setState({ showRecreateMigrationsModal: true }) }, 
}, { label: 'Delete Migrations', color: ThemePalette.alert, action: () => { this.setState({ showDeleteMigrationModal: true }) }, }, ] return ( <Wrapper> <MainTemplate navigationComponent={<Navigation currentPage="migrations" />} listComponent={( <FilterList filterItems={this.getFilterItems()} selectionLabel="migration" loading={migrationStore.loading} items={migrationStore.migrations} onItemClick={item => { this.handleItemClick(item) }} onReloadButtonClick={() => { this.handleReloadButtonClick() }} itemFilterFunction={(...args) => this.itemFilterFunction(...args)} onSelectedItemsChange={selectedMigrations => { this.setState({ selectedMigrations }) }} dropdownActions={BulkActions} renderItemComponent={options => ( <TransferListItem {...options} image={migrationItemImage} endpointType={id => { const endpoint = this.getEndpoint(id) if (endpoint) { return endpoint.type } if (endpointStore.loading) { return 'Loading...' } return 'Not Found' }} getUserName={id => userStore.users.find(u => u.id === id)?.name} userNameLoading={userStore.allUsersLoading} /> )} emptyListImage={migrationLargeImage} emptyListMessage="It seems like you don’t have any Migrations in this project." emptyListExtraMessage="A Coriolis Migration is a full virtual machine migration between two cloud endpoints." emptyListButtonLabel="Create a Migration" onEmptyListButtonClick={() => { this.handleEmptyListButtonClick() }} /> )} headerComponent={( <PageHeader title="Coriolis Migrations" onProjectChange={() => { this.handleProjectChange() }} onModalOpen={() => { this.handleModalOpen() }} onModalClose={() => { this.handleModalClose() }} /> )} /> {this.state.showDeleteMigrationModal ? ( <AlertModal isOpen title="Delete Selected Migrations?" message="Are you sure you want to delete the selected migrations?" extraMessage="Deleting a Coriolis Migration is permanent!" onConfirmation={() => { this.deleteSelectedMigrations() }} onRequestClose={() => { this.setState({ showDeleteMigrationModal: false }) }} /> ) : null} {this.state.showCancelMigrationModal ? ( <AlertModal isOpen title="Cancel Selected Migrations?" message="Are you sure you want to cancel the selected migrations?" extraMessage="Canceling a Coriolis Migration is permanent!" onConfirmation={() => { this.cancelSelectedMigrations() }} onRequestClose={() => { this.setState({ showCancelMigrationModal: false }) }} /> ) : null} {this.state.showRecreateMigrationsModal ? ( <AlertModal isOpen title="Recreate Selected Migrations?" message="Are you sure you want to recreate the selected migrations?" extraMessage="Migrations created from replicas will be recreated using default options and regular migrations will be recreated using their original source and destination environment options." onConfirmation={() => { this.recreateMigrations() }} onRequestClose={() => { this.setState({ showRecreateMigrationsModal: false }) }}<|fim▁hole|> ) : null} </Wrapper> ) } } export default MigrationsPage<|fim▁end|>
/>
<|file_name|>MSChooser.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1) # # (1) Kamaelia Contributors are listed in the AUTHORS file and at # http://www.kamaelia.org/AUTHORS - please extend this file, # not this notice. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------- # import Axon from Axon.Ipc import producerFinished, shutdownMicroprocess class Chooser(Axon.Component.component): """Chooses items out of a set, as directed by commands sent to its inbox Emits the first item at initialisation, then whenever a command is received it emits another item (unless you're asking it to step beyond the start or end of the set) """ Inboxes = { "inbox" : "receive commands", "control" : "" } Outboxes = { "outbox" : "emits chosen items", "signal" : "" } def __init__(self, items = [], loop = False): """Initialisation. items = set of items that can be iterated over. Must be finite. If an iterator is supplied, it is enumerated into a list during initialisation. """ super(Chooser,self).__init__() self.items = list(items) self.index = 0 self.loop = loop def shutdown(self): if self.dataReady("control"): message = self.recv("control") if isinstance(message, shutdownMicroprocess): self.send(message, "signal") return True return False def main(self): try: self.send( self.items[self.index], "outbox") except IndexError: pass done = False while not done: yield 1 while self.dataReady("inbox"): send = True msg = self.recv("inbox") if msg == "SAME": pass elif msg == "NEXT": self.index = self.index + 1<|fim▁hole|> self.index = 0 else: self.index = len(self.items)-1 elif msg == "PREV": self.index = self.index - 1 if self.index < 0: if self.loop: self.index = len(self.items)-1 else: self.index = 0 elif msg == "FIRST": self.index = 0 elif msg == "LAST": self.index = len(self.items)-1 try: self.send( self.items[self.index], "outbox") except IndexError: pass done = self.shutdown() __kamaelia_components__ = ( Chooser, )<|fim▁end|>
if self.index >= len(self.items): if self.loop:
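A plain-Python sketch of the command semantics the Chooser component above implements, with the Axon plumbing stripped out: NEXT and PREV clamp at the ends unless loop is set, in which case they wrap around.

items = ["a", "b", "c"]
index, loop = 0, True
for cmd in ("NEXT", "NEXT", "NEXT", "PREV"):
    if cmd == "NEXT":
        index += 1
        if index >= len(items):
            index = 0 if loop else len(items) - 1
    elif cmd == "PREV":
        index -= 1
        if index < 0:
            index = len(items) - 1 if loop else 0
    print(cmd, "->", items[index])  # b, c, a (wrapped), c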
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (C) 2013 Michael Hogg # This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution import bonemapy from distutils.core import setup setup( name = 'bonemapy', version = bonemapy.__version__, description = 'An ABAQUS plug-in to map bone properties from CT scans to 3D finite element bone/implant models', license = 'MIT license', keywords = ["ABAQUS", "plug-in","CT","finite","element","bone","properties","python"], author = 'Michael Hogg',<|fim▁hole|> "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Development Status :: 4 - Beta", "Environment :: Other Environment", "Environment :: Plugins", "Intended Audience :: Healthcare Industry", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Scientific/Engineering :: Medical Science Apps.", "Topic :: Scientific/Engineering :: Visualization", ], long_description = """ bonemapy is an ABAQUS plug-in that is used to extract bone density, or Hounsfield Unit (HU) values, from CT scans. The bone density can then be used to setup heterogeneous material properties for a 3D finite element bone/implant model. The HU values are extracted at the element integration points. Tri-linear interpolation is used to calculate the HU values at the location of the integration points. bonemapy produces a text file containing the HU values that is formatted so that it can easily be read using ABAQUS user subroutines that are required to apply the bone properties. An ABAQUS odb file is also created containing a fieldoutput representing HU so that the user can quickly visualise the mapped HU values. """, )<|fim▁end|>
author_email = '[email protected]', url = "https://github.com/mhogg/bonemapy", download_url = "https://github.com/mhogg/bonemapy/releases", classifiers = [
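The long_description above notes that HU values are sampled at element integration points using tri-linear interpolation. A minimal sketch of that interpolation on a regular voxel grid (independent of ABAQUS and of bonemapy's actual implementation; all names are illustrative):

import math

def trilinear(grid, x, y, z):
    # grid is a nested 3D array of HU values on a unit-spaced voxel
    # lattice; (x, y, z) is the sample point in voxel coordinates.
    x0, y0, z0 = int(math.floor(x)), int(math.floor(y)), int(math.floor(z))
    dx, dy, dz = x - x0, y - y0, z - z0
    hu = 0.0
    for i in (0, 1):
        for j in (0, 1):
            for k in (0, 1):
                # Each of the 8 surrounding voxels is weighted by the
                # volume of the opposite sub-box.
                w = ((dx if i else 1 - dx) *
                     (dy if j else 1 - dy) *
                     (dz if k else 1 - dz))
                hu += w * grid[x0 + i][y0 + j][z0 + k]
    return hu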
<|file_name|>0f383d.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# @author : [email protected]

from headers.BeaEnginePython import *
from nose.tools import *


class TestSuite:

    def test(self):

        # 66 0F 38 3d /r
        # pmaxsd xmm1, xmm2/m128

        Buffer = bytes.fromhex('660f383d9011223344')
        myDisasm = Disasm(Buffer)
        myDisasm.read()
        assert_equal(hex(myDisasm.infos.Instruction.Opcode), '0xf383d')
        assert_equal(myDisasm.infos.Instruction.Mnemonic, b'pmaxsd')
        assert_equal(myDisasm.repr(), 'pmaxsd xmm2, xmmword ptr [rax+44332211h]')

        # VEX.NDS.128.66.0F38.WIG 3d /r
        # vpmaxsd xmm1, xmm2, xmm3/m128

        Buffer = bytes.fromhex('c402013d0e')
        myDisasm = Disasm(Buffer)
        myDisasm.read()
        assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(myDisasm.repr(), 'vpmaxsd xmm9, xmm15, xmmword ptr [r14]')

        # VEX.NDS.256.66.0F38.WIG 3d /r
        # vpmaxsd ymm1, ymm2, ymm3/m256

        Buffer = bytes.fromhex('c402053d0e')
        myDisasm = Disasm(Buffer)
        myDisasm.read()
        assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(myDisasm.repr(), 'vpmaxsd ymm9, ymm15, ymmword ptr [r14]')

        # EVEX.NDS.128.66.0F38.WIG 3d /r
        # vpmaxsd xmm1 {k1}{z}, xmm2, xmm3/m128

        Buffer = bytes.fromhex('620205063d0e')
        myDisasm = Disasm(Buffer)
        myDisasm.read()
        assert_equal(myDisasm.infos.Reserved_.EVEX.P0, 0x2)
        assert_equal(myDisasm.infos.Reserved_.EVEX.P1, 0x5)
        assert_equal(myDisasm.infos.Reserved_.EVEX.P2, 0x6)<|fim▁hole|>
        assert_equal(myDisasm.repr(), 'vpmaxsd xmm25, xmm31, xmmword ptr [r14]')

        # EVEX.NDS.256.66.0F38.WIG 3d /r
        # vpmaxsd ymm1 {k1}{z}, ymm2, ymm3/m256

        Buffer = bytes.fromhex('620205203d0e')
        myDisasm = Disasm(Buffer)
        myDisasm.read()
        assert_equal(myDisasm.infos.Reserved_.EVEX.P0, 0x2)
        assert_equal(myDisasm.infos.Reserved_.EVEX.P1, 0x5)
        assert_equal(myDisasm.infos.Reserved_.EVEX.P2, 0x20)
        assert_equal(myDisasm.infos.Reserved_.EVEX.pp, 0x1)
        assert_equal(myDisasm.infos.Reserved_.EVEX.mm, 0x2)
        assert_equal(hex(myDisasm.infos.Instruction.Opcode), '0x3d')
        assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(myDisasm.repr(), 'vpmaxsd ymm25, ymm31, ymmword ptr [r14]')

        # EVEX.NDS.512.66.0F38.WIG 3d /r
        # vpmaxsd zmm1 {k1}{z}, zmm2, zmm3/m512

        Buffer = bytes.fromhex('620205403d0e')
        myDisasm = Disasm(Buffer)
        myDisasm.read()
        assert_equal(myDisasm.infos.Reserved_.EVEX.P0, 0x2)
        assert_equal(myDisasm.infos.Reserved_.EVEX.P1, 0x5)
        assert_equal(myDisasm.infos.Reserved_.EVEX.P2, 0x40)
        assert_equal(myDisasm.infos.Reserved_.EVEX.pp, 0x1)
        assert_equal(myDisasm.infos.Reserved_.EVEX.mm, 0x2)
        assert_equal(hex(myDisasm.infos.Instruction.Opcode), '0x3d')
        assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpmaxsd')
        assert_equal(myDisasm.repr(), 'vpmaxsd zmm25, zmm31, zmmword ptr [r14]')<|fim▁end|>
assert_equal(myDisasm.infos.Reserved_.EVEX.pp, 0x1) assert_equal(myDisasm.infos.Reserved_.EVEX.mm, 0x2) assert_equal(hex(myDisasm.infos.Instruction.Opcode), '0x3d') assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpmaxsd')
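The EVEX assertions being completed here follow directly from the prefix layout: after the 0x62 escape byte, the low two bits of the first payload byte (P0) select the opcode map (mm) and the low two bits of the second (P1) select the mandatory SIMD prefix (pp). A hedged sketch of that field extraction, standalone and not using the BeaEngine API:

def evex_fields(encoding):
    # encoding holds the raw instruction bytes; EVEX prefixes start with 0x62.
    assert encoding[0] == 0x62
    p0, p1, p2 = encoding[1], encoding[2], encoding[3]
    return {
        "mm": p0 & 0b11,   # opcode map: 0b10 selects the 0F38 map
        "pp": p1 & 0b11,   # SIMD prefix: 0b01 stands for the 66 prefix
        "P0": p0, "P1": p1, "P2": p2,
    }

fields = evex_fields(bytes.fromhex('620205063d0e'))
assert fields["mm"] == 0x2 and fields["pp"] == 0x1  # matches the asserts above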
<|file_name|>SplitLDIFFilterTranslator.java<|end_file_name|><|fim▁begin|>/* * Copyright 2016-2020 Ping Identity Corporation * All Rights Reserved. */ /* * Copyright 2016-2020 Ping Identity Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Copyright (C) 2016-2020 Ping Identity Corporation * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License (GPLv2 only) * or the terms of the GNU Lesser General Public License (LGPLv2.1 only) * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, see <http://www.gnu.org/licenses>. */ package com.unboundid.ldap.sdk.unboundidds.tools; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import com.unboundid.ldap.sdk.DN; import com.unboundid.ldap.sdk.Entry; import com.unboundid.ldap.sdk.Filter; import com.unboundid.ldap.sdk.LDAPException; import com.unboundid.ldap.sdk.RDN; import com.unboundid.ldap.sdk.schema.Schema; import com.unboundid.ldif.LDIFException; import com.unboundid.util.Debug; import com.unboundid.util.NotNull; import com.unboundid.util.Nullable; import com.unboundid.util.StaticUtils; import com.unboundid.util.ThreadSafety; import com.unboundid.util.ThreadSafetyLevel; import static com.unboundid.ldap.sdk.unboundidds.tools.ToolMessages.*; /** * This class provides an implementation of an LDIF reader entry translator that * can be used to determine the set into which an entry should be placed by * selecting the first set for which the associated filter matches the entry. * <BR> * <BLOCKQUOTE> * <B>NOTE:</B> This class, and other classes within the * {@code com.unboundid.ldap.sdk.unboundidds} package structure, are only * supported for use against Ping Identity, UnboundID, and * Nokia/Alcatel-Lucent 8661 server products. These classes provide support * for proprietary functionality or for external specifications that are not * considered stable or mature enough to be guaranteed to work in an * interoperable way with other types of LDAP servers. * </BLOCKQUOTE> */ @ThreadSafety(level=ThreadSafetyLevel.NOT_THREADSAFE) final class SplitLDIFFilterTranslator extends SplitLDIFTranslator { // The map used to cache decisions made by this translator. @Nullable private final ConcurrentHashMap<String,Set<String>> rdnCache; // A map used to associate the search filter for each set with the name of // that set. @NotNull private final Map<Filter,Set<String>> setFilters; // A map of the names that will be used for each of the sets. 
@NotNull private final Map<Integer,Set<String>> setNames;

  // The schema to use for filter evaluation.
  @Nullable private final Schema schema;

  // The sets in which entries outside the split base should be placed.
  @NotNull private final Set<String> outsideSplitBaseSetNames;

  // The sets in which the split base entry should be placed.
  @NotNull private final Set<String> splitBaseEntrySetNames;



  /**
   * Creates a new instance of this translator with the provided information.
   *
   * @param  splitBaseDN                           The base DN below which to
   *                                               split entries.
   * @param  schema                                The schema to use for filter
   *                                               evaluation.
   * @param  filters                               The filters to use to select
   *                                               the appropriate backend set.
   *                                               This must not be
   *                                               {@code null} or empty.
   * @param  assumeFlatDIT                         Indicates whether to assume
   *                                               that the DIT is flat, and
   *                                               there aren't any entries more
   *                                               than one level below the
   *                                               split base DN.  If this is
   *                                               {@code true}, then any
   *                                               entries more than one level
   *                                               below the split base DN will
   *                                               be considered an error.
   * @param  addEntriesOutsideSplitToAllSets       Indicates whether entries
   *                                               outside the split should be
   *                                               added to all sets.
   * @param  addEntriesOutsideSplitToDedicatedSet  Indicates whether entries
   *                                               outside the split should be
   *                                               added to a dedicated set.
   */
  SplitLDIFFilterTranslator(@NotNull final DN splitBaseDN,
                            @Nullable final Schema schema,
                            @NotNull final LinkedHashSet<Filter> filters,
                            final boolean assumeFlatDIT,
                            final boolean addEntriesOutsideSplitToAllSets,
                            final boolean addEntriesOutsideSplitToDedicatedSet)
  {
    super(splitBaseDN);

    this.schema = schema;

    if (assumeFlatDIT)
    {
      rdnCache = null;
    }
    else
    {
      rdnCache = new ConcurrentHashMap<>(StaticUtils.computeMapCapacity(100));
    }

    final int numSets = filters.size();
    outsideSplitBaseSetNames =
         new LinkedHashSet<>(StaticUtils.computeMapCapacity(numSets+1));
    splitBaseEntrySetNames =
         new LinkedHashSet<>(StaticUtils.computeMapCapacity(numSets));

    if (addEntriesOutsideSplitToDedicatedSet)
    {
      outsideSplitBaseSetNames.add(SplitLDIFEntry.SET_NAME_OUTSIDE_SPLIT);
    }

    setFilters = new LinkedHashMap<>(StaticUtils.computeMapCapacity(numSets));
    setNames = new LinkedHashMap<>(StaticUtils.computeMapCapacity(numSets));

    int i=0;
    for (final Filter f : filters)
    {
      final String setName = ".set" + (i+1);
      final Set<String> sets = Collections.singleton(setName);

      splitBaseEntrySetNames.add(setName);

      if (addEntriesOutsideSplitToAllSets)
      {
        outsideSplitBaseSetNames.add(setName);
      }

      setFilters.put(f, sets);
      setNames.put(i, sets);

      i++;
    }
  }



  /**
   * {@inheritDoc}
   */
  @Override()
  @NotNull()
  public SplitLDIFEntry translate(@NotNull final Entry original,
                                  final long firstLineNumber)
         throws LDIFException
  {
    // Get the parsed DN for the entry.  If we can't, that's an error and we
    // should only include it in the error set.
    final DN dn;
    try
    {
      dn = original.getParsedDN();
    }
    catch (final LDAPException le)
    {
      Debug.debugException(le);
      return createEntry(original,
           ERR_SPLIT_LDIF_FILTER_TRANSLATOR_CANNOT_PARSE_DN.get(
                le.getMessage()),
           getErrorSetNames());
    }

    // If the parsed DN is outside the split base DN, then return the
    // appropriate sets for that.
    if (! dn.isDescendantOf(getSplitBaseDN(), true))
    {
      return createEntry(original, outsideSplitBaseSetNames);
    }

    // If the parsed DN matches the split base DN, then it will always go into
    // all of the split sets.
    if (dn.equals(getSplitBaseDN()))
    {
      return createEntry(original, splitBaseEntrySetNames);
    }

    // Determine which RDN component is immediately below the split base DN. 
final RDN[] rdns = dn.getRDNs();
    final int targetRDNIndex = rdns.length - getSplitBaseRDNs().length - 1;
    final String normalizedRDNString =
         rdns[targetRDNIndex].toNormalizedString();

    // If the target RDN component is not the first component of the DN, then
    // we'll use the cache to send this entry to the same set as its parent.
    if (targetRDNIndex > 0)
    {
      // If we aren't maintaining an RDN cache (which should only happen if
      // the --assumeFlatDIT argument was provided), then this is an error.
      if (rdnCache == null)
      {
        return createEntry(original,
             ERR_SPLIT_LDIF_FILTER_TRANSLATOR_NON_FLAT_DIT.get(
                  getSplitBaseDN().toString()),
             getErrorSetNames());
      }

      // Note that even if we are maintaining an RDN cache, it may not contain
      // the information that we need to determine which set should hold this
      // entry.  There are two reasons for this:
      //
      // - The LDIF file contains an entry below the split base DN without
      //   including the parent for that entry (or includes a child entry
      //   before its parent).
      //
      // - We are processing multiple entries in parallel, and the parent entry
      //   is currently being processed in another thread and that thread
      //   hasn't yet made the determination as to which set should be used for
      //   that parent entry.
      //
      // In either case, use null for the target set names.  If we are in the
      // parallel processing phase, then we will re-invoke this method later
      // at a point in which we can be confident that the caching should have
      // been performed.  If we still get null the second time through, then
      // the caller will consider that an error and handle it appropriately.
      return createEntry(original, rdnCache.get(normalizedRDNString));
    }

    // At this point, we know that the entry is exactly one level below the
    // split base DN.  Iterate through the filters and see if any of them
    // matches the entry.
    for (final Map.Entry<Filter,Set<String>> e : setFilters.entrySet())
    {
      final Filter f = e.getKey();
      try
      {
        if (f.matchesEntry(original, schema))
        {
          final Set<String> sets = e.getValue();
          if (rdnCache != null)
          {
            rdnCache.put(normalizedRDNString, sets);<|fim▁hole|>
          return createEntry(original, sets);
        }
      }
      catch (final Exception ex)
      {
        Debug.debugException(ex);
      }
    }

    // If we've gotten here, then none of the filters matched, so pick a set
    // based on a hash of the RDN.
    final SplitLDIFEntry e = createFromRDNHash(original, dn, setNames);
    if (rdnCache != null)
    {
      rdnCache.put(normalizedRDNString, e.getSets());
    }

    return e;
  }
}<|fim▁end|>
}
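The routing rule implemented by this translator -- first matching filter wins, with a hash of the RDN as the fallback -- is easy to prototype outside the LDAP SDK. A minimal Python sketch under that assumption (predicates and entry shape are illustrative; the set names follow the ".setN" convention from the constructor):

import zlib

def pick_set(entry, filters, num_sets):
    # filters is an ordered list of (predicate, set_name) pairs; the first
    # predicate that matches the entry decides the set, mirroring the
    # iteration over setFilters in translate().
    for predicate, set_name in filters:
        if predicate(entry):
            return set_name
    # Nothing matched: fall back to a deterministic hash of the RDN, in the
    # spirit of createFromRDNHash.
    return ".set%d" % (zlib.crc32(entry["rdn"].encode()) % num_sets + 1)

filters = [(lambda e: e.get("ou") == "people", ".set1"),
           (lambda e: e.get("ou") == "groups", ".set2")]
print(pick_set({"rdn": "uid=jdoe", "ou": "people"}, filters, 2))  # .set1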
<|file_name|>test_video_intelligence_service_client_v1beta2.py<|end_file_name|><|fim▁begin|># Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Unit tests.""" import pytest <|fim▁hole|> from google.cloud import videointelligence_v1beta2 from google.cloud.videointelligence_v1beta2.proto import video_intelligence_pb2 from google.longrunning import operations_pb2 class MultiCallableStub(object): """Stub for the grpc.UnaryUnaryMultiCallable interface.""" def __init__(self, method, channel_stub): self.method = method self.channel_stub = channel_stub def __call__(self, request, timeout=None, metadata=None, credentials=None): self.channel_stub.requests.append((self.method, request)) response = None if self.channel_stub.responses: response = self.channel_stub.responses.pop() if isinstance(response, Exception): raise response if response: return response class ChannelStub(object): """Stub for the grpc.Channel interface.""" def __init__(self, responses=[]): self.responses = responses self.requests = [] def unary_unary(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) class CustomException(Exception): pass class TestVideoIntelligenceServiceClient(object): def test_annotate_video(self): # Setup Expected Response expected_response = {} expected_response = video_intelligence_pb2.AnnotateVideoResponse( **expected_response) operation = operations_pb2.Operation( name='operations/test_annotate_video', done=True) operation.response.Pack(expected_response) # Mock the API response channel = ChannelStub(responses=[operation]) client = videointelligence_v1beta2.VideoIntelligenceServiceClient( channel=channel) response = client.annotate_video() result = response.result() assert expected_response == result assert len(channel.requests) == 1 expected_request = video_intelligence_pb2.AnnotateVideoRequest() actual_request = channel.requests[0][1] assert expected_request == actual_request def test_annotate_video_exception(self): # Setup Response error = status_pb2.Status() operation = operations_pb2.Operation( name='operations/test_annotate_video_exception', done=True) operation.error.CopyFrom(error) # Mock the API response channel = ChannelStub(responses=[operation]) client = videointelligence_v1beta2.VideoIntelligenceServiceClient( channel=channel) response = client.annotate_video() exception = response.exception() assert exception.errors[0] == error<|fim▁end|>
from google.rpc import status_pb2
<|file_name|>opts.py<|end_file_name|><|fim▁begin|>class Options:

    instance = None

    def __init__(self, options):
        self.options = options

    @classmethod
    def set(cls, options):
        """Create an Options instance with the provided dictionary of<|fim▁hole|>
    def inst(cls):
        """Get the Options instance.
        """
        if cls.instance is None:
            raise OptionsError("No options have been set")
        return cls.instance

    @classmethod
    def get(cls, name, as_type=str):
        """Get an option by name.

        Raises an OptionsError if the option doesn't exist.
        """
        inst = cls.inst()
        if name in inst.options:
            return as_type(inst.options[name])
        else:
            raise OptionsError("No option with key '%s'" % name)

    @classmethod
    def overwrite(cls, name, value):
        inst = cls.inst()
        inst.options[name] = value

    @classmethod
    def isset(cls, name):
        """Checks whether the option exists and is set.

        By set, we mean that the option value has nonzero length;
        all option values are strings.
        """
        inst = cls.inst()
        if name in inst.options and \
           len(inst.options[name]) > 0:
            return True
        else:
            return False


class OptionsError(Exception):
    pass<|fim▁end|>
options""" cls.instance = Options(options) @classmethod
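A short usage sketch of this singleton-style registry, grounded in the methods defined above (option names and values are illustrative):

Options.set({"port": "8080", "verbose": ""})

Options.get("port", int)       # -> 8080, converted via as_type
Options.isset("verbose")       # -> False: present but empty counts as unset
Options.overwrite("port", "9090")
try:
    Options.get("missing")
except OptionsError as e:
    print(e)                   # "No option with key 'missing'"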
<|file_name|>group.ts<|end_file_name|><|fim▁begin|>module csComp.Services {
    export enum LayerType {
        GeoJson,
        Kml
    }

    /** A project group contains a list of layers that can be grouped together.
     * Filters, styles and clustering are always defined on the group level.
     * If a filter is selected (e.g. show only the features within a certain property range)
     * this filter is applied to all layers within this group.
     * If clustering is enabled, all features in all layers are grouped together.
     */
    export class ProjectGroup {
        id: string;
        title: string;
        description: string;
        layers: Array<ProjectLayer>;
        filters: Array<GroupFilter>;
        /* Including styles in (projectgroups in) project.json files is probably not a good idea,
           in case the layers in the group have properties (attributes), as for example in geojson
           files. This is because when selecting a property for styling, the call to setStyle leads
           to the creation of a new group and deletion of existing styles. */
        styles: Array<GroupStyle>;
        showTitle: boolean;
        _cluster: L.MarkerClusterGroup;
        _vectors: L.LayerGroup<L.ILayer>;
        /** Turn on the leaflet markercluster */
        clustering: boolean;
        /** If set, at this zoom level and below markers will not be clustered. This defaults to disabled */
        clusterLevel: number;
        /**
         * The maximum radius that a cluster will cover from the central marker (in pixels). Default 80.
         * Decreasing it will make more, smaller clusters. You can also use a function that accepts the
         * current map zoom and returns the maximum cluster radius in pixels.
         */
        maxClusterRadius: number;
        clusterFunction: Function;
        /** Creates radio buttons instead of checkboxes in the level */
        oneLayerActive: boolean;
        ndx: any;
        filterResult: IFeature[];
        public markers: any;
        styleProperty: string;
        languages: ILanguageData;
        owsurl: string;
        owsgeojson: boolean;
        error: string;
        isLoading: boolean;
        /**
         * gui is used for setting temp. properties for rendering
         */
        _gui: any = {};

        /**
         * Returns an object which contains all the data that must be serialized.
         */
        public static serializeableData(projectGroup: ProjectGroup): Object {
            return {
                id:               projectGroup.id,
                title:            projectGroup.title,
                description:      projectGroup.description,
                showTitle:        projectGroup.showTitle,
                clustering:       projectGroup.clustering,
                clusterLevel:     projectGroup.clusterLevel,
                maxClusterRadius: projectGroup.maxClusterRadius,
                oneLayerActive:   projectGroup.oneLayerActive,
                styleProperty:    projectGroup.styleProperty,
                languages:        projectGroup.languages,
                layers:           csComp.Helpers.serialize<ProjectLayer>(projectGroup.layers, ProjectLayer.serializeableData)
            };
        }

        public static deserialize(input: Object): ProjectGroup {
            var res = <ProjectGroup>$.extend(new ProjectGroup(), input);
            if (res.owsurl) {
                res.loadLayersFromOWS();
            }
            if (res.layers) {
                res.layers.forEach(layer => {
                    if (!layer.opacity) layer.opacity = 100;
                });
            }
            return res;
        }

        public loadLayersFromOWS($injector: ng.auto.IInjectorService = null): void {
            this.layers = [];
            // add some layers here... 
this.isLoading = true; if ($injector == null) { // create an injector if not given $injector = angular.injector(['ng']); } $injector.invoke(($http, $timeout) => { $http.get(this.owsurl) .then((res: {data: any}) => { this.parseXML(res.data, $timeout); this.isLoading = false; }, (xml, status) => { console.log('Unable to load OWSurl: ' + this.owsurl); console.log(' HTTP status: ' + status); this.isLoading = false; }); }); } private parseXML(xml: any, $timeout: ng.ITimeoutService): void { var theGroup = this; var baseurl = this.owsurl.split('?')[0]; $(xml).find('Layer').each((index: number, elem: any) => { // run each evaluation asynchronously, otherwise parsing may lock the browser. $timeout(() => { var layerName = $(elem).children('Name').text(); if (layerName != null && layerName !== '') { var title = $(elem).children('Title').text(); // If <KeywordList> element has an element <keyword vocabulary="defaultFeatureType">featureType</keyword> // use featureType as defaultFeatureType var featureType = $(elem).children('KeywordList').children('[vocabulary="defaultFeatureType"]').text(); var resourceURL = $(elem).children('KeywordList').children('[vocabulary="typeResourceURL"]').text(); // TODO: should be using layerService.initLayer(theGroup, layer); // But I don't know how to 'inject' layerService :( var layer = theGroup.buildLayer(baseurl, title, layerName); if (featureType != '') { layer.defaultFeatureType = featureType; layer.typeUrl = 'data/resourceTypes/resources.json'; } if (resourceURL != '') { layer.typeUrl = resourceURL; } theGroup.layers.push(layer); } }); }); } private buildLayer(baseurl: string, title: string, layerName: string): ProjectLayer { var extraInfo = { 'id': Helpers.getGuid(), 'reference': layerName, 'title': title, 'enabled': false, 'group': this }; // Image layers if (this.owsgeojson) { extraInfo['type'] = 'geojson'; extraInfo['url'] = baseurl + '?service=wfs&request=getFeature' + '&outputFormat=application/json&typeName=' + layerName; } else { extraInfo['type'] = 'wms'; extraInfo['wmsLayers'] = layerName; extraInfo['url'] = baseurl; } var layer = <ProjectLayer>jQuery.extend(new ProjectLayer(), extraInfo); return layer; } } <|fim▁hole|> * Filters are used to select a subset of features within a group. */ export class GroupFilter { id: string; title: string; enabled: boolean; filterType: string; property: string; property2: string; criteria: string; group: ProjectGroup; dimension: any; value: any; stringValue: string; rangex: number[]; meta: IPropertyType; to: number; from: number; filterLabel: string; showInWidget:boolean; } /** * Styles determine how features are shown on the map. 
*/
    export class GroupStyle {
        id: string;
        title: string;
        enabled: boolean;
        layers: string[];
        visualAspect: string;
        property: string;
        colors: string[];
        group: ProjectGroup;
        availableAspects: string[];
        canSelectColor: boolean;
        colorScales: any;
        info: PropertyInfo;
        meta: IPropertyType;
        legends: { [key: string]: Legend; };
        activeLegend: Legend;
        fixedColorRange: boolean;

        constructor($translate: ng.translate.ITranslateService) {
            this.availableAspects = ['strokeColor', 'fillColor', 'strokeWidth', 'height'];
            this.colorScales = {};
            this.legends = {};
            this.fixedColorRange = false;

            $translate('WHITE_RED').then((translation) => {
                this.colorScales[translation] = ['white', 'red'];
            });
            $translate('GREEN_RED').then((translation) => {
                this.colorScales[translation] = ['green', 'red'];
            });
            $translate('RED_GREEN').then((translation) => {
                this.colorScales[translation] = ['red', 'green'];
            });
            $translate('BLUE_RED').then((translation) => {
                this.colorScales[translation] = ['#F04030', '#3040F0'];
            });
            $translate('RED_BLUE').then((translation) => {
                this.colorScales[translation] = ['#3040F0', '#F04030'];
            });
            $translate('WHITE_BLUE').then((translation) => {
                this.colorScales[translation] = ['white', 'blue'];
            });
            $translate('BLUE_WHITE').then((translation) => {
                this.colorScales[translation] = ['blue', 'white'];
            });
            $translate('WHITE_GREEN').then((translation) => {
                this.colorScales[translation] = ['white', 'green'];
            });
            $translate('GREEN_WHITE').then((translation) => {
                this.colorScales[translation] = ['green', 'white'];
            });
            $translate('WHITE_ORANGE').then((translation) => {
                this.colorScales[translation] = ['white', '#FF5500'];
            });
            $translate('ORANGE_WHITE').then((translation) => {
                this.colorScales[translation] = ['#FF5500', 'white'];
            });
            $translate('RED_WHITE_BLUE').then((translation) => {
                this.colorScales[translation] = ['red', 'white', 'blue'];
            });
        }
    }

    /**
     * The Legend class provides a data structure that is used to map a value to a color
     * (see also the function getColor())
     */
    export class Legend {
        id: string;
        description: string;
        legendKind: 'image' | 'discrete' | 'discreteStrings' | 'interpolated';
        imageUrl?: string;
        visualAspect: string;
        /** Optionally provide a label of a LegendEntry for values not present in the Legend */
        defaultLabel?: string;
        legendEntries: LegendEntry[];
        // it is assumed that the legend entries have their values and/or intervals
        // sorted in ascending order
    }

    export class LegendEntry {
        label: string;
        interval?: {
            min: number;
            max: number;
        };  // either interval or value is used, depending on legendtype (discrete or interpolated)
        value?: number;
        stringValue?: string;
        color: string;  // hex string; rgb
        sortKey?: string;  // Sort entries based on this key
    }
}<|fim▁end|>
/**
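A Legend maps feature values to colors through its sorted entries. A small Python sketch of the discrete lookup this structure implies (interpolated legends and the defaultLabel fallback are elided; the field names follow the Legend and LegendEntry classes above, and the values are illustrative):

legend = {
    "legendKind": "discrete",
    "defaultLabel": "other",
    "legendEntries": [
        {"label": "low",  "interval": {"min": 0,  "max": 10}, "color": "#00ff00"},
        {"label": "high", "interval": {"min": 10, "max": 50}, "color": "#ff0000"},
    ],
}

def get_color(value, legend):
    # Entries are assumed sorted ascending, as the comment above requires.
    for entry in legend["legendEntries"]:
        iv = entry["interval"]
        if iv["min"] <= value < iv["max"]:
            return entry["color"]
    return None  # not covered; callers may fall back to defaultLabel

assert get_color(3, legend) == "#00ff00"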
<|file_name|>SaveFileController.java<|end_file_name|><|fim▁begin|>/* SaveFileController
 *
 * Version 1.0
 *
 * November 13, 2017
 *
 * Copyright (c) 2017 Cup Of Java. All rights reserved.
 */

package com.cmput301f17t11.cupofjava.Controllers;

import android.content.Context;

import com.cmput301f17t11.cupofjava.Models.Habit;
import com.cmput301f17t11.cupofjava.Models.HabitEvent;
import com.cmput301f17t11.cupofjava.Models.HabitList;
import com.cmput301f17t11.cupofjava.Models.User;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.lang.reflect.Type;
import java.util.ArrayList;

/**
 * Handles saving application data to, and loading it from, the save file.
 *
 * @version 1.0
 */
public class SaveFileController {
    private ArrayList<User> allUsers;
    //private String username;
    private String saveFile = "test_save_file4.sav";

    public SaveFileController(){
        this.allUsers = new ArrayList<User>();
    }

    /**
     * Loads data from file.
     *
     * @param context instance of Context
     */
    private void loadFromFile(Context context){
        try{
            FileInputStream ifStream = context.openFileInput(saveFile);
            BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(ifStream));
            Gson gson = new Gson();
            Type userArrayListType = new TypeToken<ArrayList<User>>(){}.getType();
            this.allUsers = gson.fromJson(bufferedReader, userArrayListType);
            ifStream.close();
        }
        //create a new array list if a file does not already exist
        catch (FileNotFoundException e){
            this.allUsers = new ArrayList<User>();
            saveToFile(context);
        }
        catch (IOException e){
            throw new RuntimeException();
        }
    }

    /**
     * Saves current ArrayList contents in file.
     *
     * @param context instance of Context
     */
    private void saveToFile(Context context){
        try{
            FileOutputStream ofStream = context.openFileOutput(saveFile, Context.MODE_PRIVATE);
            BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(ofStream));
            Gson gson = new Gson();
            gson.toJson(this.allUsers, bufferedWriter);
            bufferedWriter.flush();
            ofStream.close();
        }
        catch (FileNotFoundException e){
            //shouldn't really happen, since a file not found would create a new file.
            throw new RuntimeException("Laws of nature defied!");
        }
        catch (IOException e){
            throw new RuntimeException();
        }
    }

    /**
     * Adds new user and saves to file.
     *
     * @param context instance of Context
     * @param user instance of User
     * @see User
     */
    public void addNewUser(Context context, User user){
        loadFromFile(context);
        this.allUsers.add(user);
        saveToFile(context);
    }

    /**
     * Deletes all users from the file.
     *
     * @param context instance of Context
     */
    public void deleteAllUsers(Context context){
        this.allUsers = new ArrayList<>();
        saveToFile(context);
    }

    /**
     * Gets user index.
     *
     * @param context instance of Context
     * @param username string username
     * @return integer user index if username matches, -1 otherwise
     */
    public int getUserIndex(Context context, String username){
        loadFromFile(context);
        for (int i = 0; i < this.allUsers.size(); i++){
            if (this.allUsers.get(i).getUsername().equals(username)){
                return i;
            }
        }
        return -1;
    }

    /**
     * Gets HabitList instance. 
* * @param context instance of Context<|fim▁hole|> * @param userIndex integer user index * @return HabitList * @see HabitList */ public HabitList getHabitList(Context context, int userIndex){ loadFromFile(context); return this.allUsers.get(userIndex).getHabitList(); } /** * Gets ArrayList of type Habit. * * @param context instance of Context * @param userIndex integer user index * @return list */ public ArrayList<Habit> getHabitListAsArray(Context context, int userIndex){ loadFromFile(context); ArrayList<Habit> list = this.allUsers.get(userIndex).getHabitListAsArray(); return list; } /** * Adds a habit to a particular user's habit list. * * @param context instance of Context * @param userIndex integer user index * @param habit instance of Habit * @see Habit */ public void addHabit(Context context, int userIndex, Habit habit){ loadFromFile(context); this.allUsers.get(userIndex).getHabitList().addHabit(habit); saveToFile(context); } /** * Gets habit from a particular user's habit list. * * @param context instance of Context * @param userIndex integer user index * @param habitIndex integer index of habit * @return instance of Habit * @see Habit */ public Habit getHabit(Context context, int userIndex, int habitIndex){ loadFromFile(context); return this.allUsers.get(userIndex).getHabitListAsArray().get(habitIndex); } /** * Deletes habit from a certain user's habit list. * * @param context instance of Context * @param userIndex integer user index * @param habitIndex integer index of habit */ public void deleteHabit(Context context, int userIndex, int habitIndex){ loadFromFile(context); this.allUsers.get(userIndex).getHabitListAsArray().remove(habitIndex); saveToFile(context); } /** * Adds habit event to a particular user's habit event list. * * @param context instance of Context * @param userIndex integer user index * @param habitIndex integer index of habit * @param habitEvent instance of HabitEvent * @see HabitEvent */ public void addHabitEvent(Context context, int userIndex, int habitIndex, HabitEvent habitEvent){ loadFromFile(context); this.allUsers.get(userIndex).getHabitList().getHabit(habitIndex).addHabitEvent(habitEvent); saveToFile(context); } /** * Removes a habit event from a particular user's habit event list. * * @param context instance of Context * @param userIndex integer user index * @param habitIndex integer index of habit * @param habitEventIndex integer index of habit event */ public void removeHabitEvent(Context context, int userIndex, int habitIndex, int habitEventIndex){ loadFromFile(context); this.allUsers.get(userIndex).getHabitList().getHabit(habitIndex) .getHabitEventHistory().getHabitEvents().remove(habitEventIndex); saveToFile(context); } /** * For use in timeline view. * * @param context instance of Context * @param userIndex integer user index * @return ArrayList of HabitEvent type * @see HabitEvent */ public ArrayList<HabitEvent> getAllHabitEvents(Context context, int userIndex){ loadFromFile(context); ArrayList<HabitEvent> habitEvents = new ArrayList<>(); User user = this.allUsers.get(userIndex); ArrayList<Habit> habitList = user.getHabitListAsArray(); Habit currentHabit; ArrayList<HabitEvent> currentHabitEvents; for (int i = 0; i < habitList.size(); i++){ currentHabit = habitList.get(i); currentHabitEvents = currentHabit.getHabitEventHistory().getHabitEvents(); for (int j = 0; j < currentHabitEvents.size() ; j++){ habitEvents.add(user.getHabitListAsArray().get(i) .getHabitEventHistory().getHabitEvents().get(j)); } } return habitEvents; } }<|fim▁end|>
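The controller's load-mutate-save cycle (deserialize the whole user list, change it, write everything back) is a common small-app persistence pattern. A language-neutral sketch of the same flow in Python with json (the file name and record shape are illustrative, not part of the app):

import json, os

SAVE_FILE = "test_save_file4.json"

def load_users():
    # Mirrors loadFromFile: a missing file means "start with an empty list".
    if not os.path.exists(SAVE_FILE):
        return []
    with open(SAVE_FILE) as f:
        return json.load(f)

def save_users(users):
    with open(SAVE_FILE, "w") as f:
        json.dump(users, f)

users = load_users()
users.append({"username": "alice", "habits": []})
save_users(users)  # every mutation persists the full list, as in addNewUser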
<|file_name|>lvm.py<|end_file_name|><|fim▁begin|>#
# Copyright 2010-2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#


"""
Generic LVM interface wrapper

Encapsulates the actual LVM mechanics.
"""
import errno

import os
import re
import pwd
import grp
import logging
from collections import namedtuple
import pprint as pp
import threading
from itertools import chain
from subprocess import list2cmdline

from vdsm import constants
import misc
import multipath
import storage_exception as se
from vdsm.config import config
import devicemapper

log = logging.getLogger("Storage.LVM")

LVM_DEFAULT_TTL = 100

PV_FIELDS = ("uuid,name,size,vg_name,vg_uuid,pe_start,pe_count,"
             "pe_alloc_count,mda_count,dev_size")
VG_FIELDS = ("uuid,name,attr,size,free,extent_size,extent_count,free_count,"
             "tags,vg_mda_size,vg_mda_free,lv_count,pv_count,pv_name")
LV_FIELDS = "uuid,name,vg_name,attr,size,seg_start_pe,devices,tags"

VG_ATTR_BITS = ("permission", "resizeable", "exported",
                "partial", "allocation", "clustered")
LV_ATTR_BITS = ("voltype", "permission", "allocations", "fixedminor", "state",
                "devopen", "target", "zero")

# Returned by lvm commands for pv_name when a pv name is not available. 
UNKNOWN_DEVICE = "unknown device"

PV = namedtuple("PV", PV_FIELDS + ",guid")
VG = namedtuple("VG", VG_FIELDS + ",writeable,partial")
VG_ATTR = namedtuple("VG_ATTR", VG_ATTR_BITS)
LV = namedtuple("LV", LV_FIELDS + ",writeable,opened,active")
LV_ATTR = namedtuple("LV_ATTR", LV_ATTR_BITS)

Stub = namedtuple("Stub", "name, stale")


class Unreadable(Stub):
    __slots__ = ()

    def __getattr__(self, attrName):
        log.warning("%s can't be reloaded, please check your storage "
                    "connections.", self.name)
        raise AttributeError("Failed reload: %s" % self.name)

# VG states
VG_OK = "OK"
VG_PARTIAL = "PARTIAL"
VG_UNKNOWN = "UNKNOWN"

SEPARATOR = "|"
LVM_NOBACKUP = ("--autobackup", "n")
LVM_FLAGS = ("--noheadings", "--units", "b", "--nosuffix", "--separator",
             SEPARATOR, "--ignoreskippedcluster")

PV_PREFIX = "/dev/mapper"
# Assuming there are no spaces in the PV name
re_pvName = re.compile(PV_PREFIX + '[^\s\"]+', re.MULTILINE)

# operations lock
LVM_OP_INVALIDATE = "lvm invalidate operation"
LVM_OP_RELOAD = "lvm reload operation"

PVS_CMD = ("pvs",) + LVM_FLAGS + ("-o", PV_FIELDS)
VGS_CMD = ("vgs",) + LVM_FLAGS + ("-o", VG_FIELDS)
LVS_CMD = ("lvs",) + LVM_FLAGS + ("-o", LV_FIELDS)

# FIXME we must use different METADATA_USER ownership for qemu-unreadable
# metadata volumes
USER_GROUP = constants.DISKIMAGE_USER + ":" + constants.DISKIMAGE_GROUP

LVMCONF_TEMPLATE = """
devices {
preferred_names = ["^/dev/mapper/"]
ignore_suspended_devices=1
write_cache_state=0
disable_after_error_count=3
obtain_device_list_from_udev=0
%s
}

global {
 locking_type=1
 prioritise_write_locks=1
 wait_for_locks=1
 use_lvmetad=0
}

backup {
 retain_min = 50
 retain_days = 0
}
"""

VAR_RUN_VDSM = constants.P_VDSM_RUN
VDSM_LVM_SYSTEM_DIR = os.path.join(VAR_RUN_VDSM, "lvm")
VDSM_LVM_CONF = os.path.join(VDSM_LVM_SYSTEM_DIR, "lvm.conf")

USER_DEV_LIST = filter(None, config.get("irs", "lvm_dev_whitelist").split(","))


def _buildFilter(devices):
    strippeds = set(d.strip() for d in devices)
    strippeds.discard('')  # Who has put a blank here?
    strippeds = sorted(strippeds)
    dmPaths = [dev.replace(r'\x', r'\\x') for dev in strippeds]
    filt = '|'.join(dmPaths)
    if len(filt) > 0:
        filt = "'a|" + filt + "|', "
    filt = "filter = [ " + filt + "'r|.*|' ]"
    return filt


def _buildConfig(devList):
    flt = _buildFilter(chain(devList, USER_DEV_LIST))
    conf = LVMCONF_TEMPLATE % flt
    return conf.replace("\n", " ")


def _updateLvmConf(conf):
    # Make a convenience copy for the debugging purposes
    try:
        if not os.path.isdir(VDSM_LVM_SYSTEM_DIR):
            os.mkdir(VDSM_LVM_SYSTEM_DIR)

        with open(VDSM_LVM_CONF, "w") as lvmconf:
            lvmconf.write(conf)
    except IOError as e:
        # We are not interested in exceptions here; note it and move on.
        log.warning("Cannot create %s file %s", VDSM_LVM_CONF, str(e))


#
# Make sure that "args" is suitable for consumption in interfaces
# that expect an iterable argument. A string is treated as a single
# argument and converted into a list containing that string.
# Strings do not have an __iter__ attribute.
#
def _normalizeargs(args=None):
    if args is None:
        args = []
    elif not hasattr(args, "__iter__"):
        args = [args]

    return args


def _tags2Tuple(sTags):
    """
    Converts a comma-separated tags string into a tuple.

    Returns an empty tuple for sTags == ""
    """
    return tuple(sTags.split(",")) if sTags else tuple()


def makePV(*args):
    guid = os.path.basename(args[1])
    args += (guid,)
    return PV(*args)


def makeVG(*args):
    args = list(args)
    # Convert tag string into tuple.
    tags = _tags2Tuple(args[VG._fields.index("tags")])
    args[VG._fields.index("tags")] = tags
    # Convert attr string into named tuple fields.
# tuple("wz--n-") = ('w', 'z', '-', '-', 'n', '-') sAttr = args[VG._fields.index("attr")] attr_values = tuple(sAttr[:len(VG_ATTR._fields)]) attrs = VG_ATTR(*attr_values) args[VG._fields.index("attr")] = attrs # Convert pv_names list to tuple. args[VG._fields.index("pv_name")] = \ tuple(args[VG._fields.index("pv_name")]) # Add properties. Should be ordered as VG_PROPERTIES. args.append(attrs.permission == "w") # Writable args.append(VG_OK if attrs.partial == "-" else VG_PARTIAL) # Partial return VG(*args) def makeLV(*args): args = list(args) # Convert tag string into tuple. tags = _tags2Tuple(args[LV._fields.index("tags")]) args[LV._fields.index("tags")] = tags # Convert attr string into named tuple fields. sAttr = args[LV._fields.index("attr")] attr_values = tuple(sAttr[:len(LV_ATTR._fields)]) attrs = LV_ATTR(*attr_values) args[LV._fields.index("attr")] = attrs # Add properties. Should be ordered as VG_PROPERTIES. args.append(attrs.permission == "w") # writable args.append(attrs.devopen == "o") # opened args.append(attrs.state == "a") # active return LV(*args) class LVMCache(object): """ Keep all the LVM information. """ def _getCachedExtraCfg(self): if not self._filterStale: return self._extraCfg with self._filterLock: if not self._filterStale: return self._extraCfg self._extraCfg = _buildConfig(multipath.getMPDevNamesIter()) _updateLvmConf(self._extraCfg) self._filterStale = False return self._extraCfg def _addExtraCfg(self, cmd, devices=tuple()): newcmd = [constants.EXT_LVM, cmd[0]] if devices: conf = _buildConfig(devices) else: conf = self._getCachedExtraCfg() newcmd += ["--config", conf] if len(cmd) > 1: newcmd += cmd[1:] return newcmd def invalidateFilter(self): self._filterStale = True def invalidateCache(self): self.invalidateFilter() self.flush() def __init__(self): self._filterStale = True self._extraCfg = None self._filterLock = threading.Lock() self._oplock = misc.OperationMutex() self._stalepv = True self._stalevg = True self._stalelv = True self._pvs = {} self._vgs = {} self._lvs = {} def cmd(self, cmd, devices=tuple()): finalCmd = self._addExtraCfg(cmd, devices) rc, out, err = misc.execCmd(finalCmd, sudo=True) if rc != 0: # Filter might be stale self.invalidateFilter() newCmd = self._addExtraCfg(cmd) # Before blindly trying again make sure # that the commands are not identical, because # the devlist is sorted there is no fear # of two identical filters looking differently if newCmd != finalCmd: return misc.execCmd(newCmd, sudo=True) return rc, out, err def __str__(self): return ("PVS:\n%s\n\nVGS:\n%s\n\nLVS:\n%s" % (pp.pformat(self._pvs), pp.pformat(self._vgs), pp.pformat(self._lvs))) def bootstrap(self): self._reloadpvs() self._reloadvgs() self._reloadAllLvs() def _reloadpvs(self, pvName=None): cmd = list(PVS_CMD) pvNames = _normalizeargs(pvName) cmd.extend(pvNames) with self._oplock.acquireContext(LVM_OP_RELOAD): rc, out, err = self.cmd(cmd) if rc != 0: log.warning("lvm pvs failed: %s %s %s", str(rc), str(out), str(err)) pvNames = pvNames if pvNames else self._pvs.keys() for p in pvNames: if isinstance(self._pvs.get(p), Stub): self._pvs[p] = Unreadable(self._pvs[p].name, True) return dict(self._pvs) updatedPVs = {} for line in out: fields = [field.strip() for field in line.split(SEPARATOR)] pv = makePV(*fields) if pv.name == UNKNOWN_DEVICE: log.error("Missing pv: %s in vg: %s", pv.uuid, pv.vg_name) continue self._pvs[pv.name] = pv updatedPVs[pv.name] = pv # If we updated all the PVs drop stale flag if not pvName: self._stalepv = False # Remove stalePVs stalePVs = 
[staleName for staleName in self._pvs.keys()
                        if staleName not in updatedPVs.iterkeys()]
            for staleName in stalePVs:
                log.warning("Removing stale PV: %s", staleName)
                self._pvs.pop((staleName), None)

        return updatedPVs

    def _getVGDevs(self, vgNames):
        devices = []
        for name in vgNames:
            try:
                pvs = self._vgs[name].pv_name  # pv_names tuple
            except (KeyError, AttributeError):  # Yet unknown VG, stub
                devices = tuple()
                break  # unknownVG = True
            else:
                devices.extend(pvs)
        else:  # All known VGs
            devices = tuple(devices)
        return devices

    def _reloadvgs(self, vgName=None):
        cmd = list(VGS_CMD)
        vgNames = _normalizeargs(vgName)
        cmd.extend(vgNames)

        with self._oplock.acquireContext(LVM_OP_RELOAD):
            rc, out, err = self.cmd(cmd, self._getVGDevs(vgNames))

            if rc != 0:
                log.warning("lvm vgs failed: %s %s %s", str(rc),
                            str(out), str(err))
                vgNames = vgNames if vgNames else self._vgs.keys()
                for v in vgNames:
                    if isinstance(self._vgs.get(v), Stub):
                        self._vgs[v] = Unreadable(self._vgs[v].name, True)

            if not len(out):
                return dict(self._vgs)

            updatedVGs = {}
            vgsFields = {}
            for line in out:
                fields = [field.strip() for field in line.split(SEPARATOR)]
                uuid = fields[VG._fields.index("uuid")]
                pvNameIdx = VG._fields.index("pv_name")
                pv_name = fields[pvNameIdx]
                if pv_name == UNKNOWN_DEVICE:
                    # PV is missing, e.g. device lost or target not connected
                    continue
                if uuid not in vgsFields:
                    fields[pvNameIdx] = [pv_name]  # Make a pv_names list
                    vgsFields[uuid] = fields
                else:
                    vgsFields[uuid][pvNameIdx].append(pv_name)
            for fields in vgsFields.itervalues():
                vg = makeVG(*fields)
                if int(vg.pv_count) != len(vg.pv_name):
                    log.error("vg %s has pv_count %s but pv_names %s",
                              vg.name, vg.pv_count, vg.pv_name)
                self._vgs[vg.name] = vg
                updatedVGs[vg.name] = vg
            # If we updated all the VGs drop stale flag
            if not vgName:
                self._stalevg = False
                # Remove stale VGs
                staleVGs = [staleName for staleName in self._vgs.keys()
                            if staleName not in updatedVGs.iterkeys()]
                for staleName in staleVGs:
                    removeVgMapping(staleName)
                    log.warning("Removing stale VG: %s", staleName)
                    self._vgs.pop((staleName), None)

        return updatedVGs

    def _reloadlvs(self, vgName, lvNames=None):
        lvNames = _normalizeargs(lvNames)
        cmd = list(LVS_CMD)
        if lvNames:
            cmd.extend(["%s/%s" % (vgName, lvName) for lvName in lvNames])
        else:
            cmd.append(vgName)

        with self._oplock.acquireContext(LVM_OP_RELOAD):
            rc, out, err = self.cmd(cmd, self._getVGDevs((vgName, )))

            if rc != 0:
                log.warning("lvm lvs failed: %s %s %s", str(rc),
                            str(out), str(err))
                lvNames = lvNames if lvNames else self._lvs.keys()
                for l in lvNames:
                    if isinstance(self._lvs.get(l), Stub):
                        self._lvs[l] = Unreadable(self._lvs[l].name, True)
                return dict(self._lvs)

            updatedLVs = {}
            for line in out:
                fields = [field.strip() for field in line.split(SEPARATOR)]
                lv = makeLV(*fields)
                # For LV we are only interested in its first extent
                if lv.seg_start_pe == "0":
                    self._lvs[(lv.vg_name, lv.name)] = lv
                    updatedLVs[(lv.vg_name, lv.name)] = lv

            # Determine if there are stale LVs
            if lvNames:
                staleLVs = (lvName for lvName in lvNames
                            if (vgName, lvName) not in updatedLVs.iterkeys())
            else:
                # All the LVs in the VG
                staleLVs = (lvName for v, lvName in self._lvs.keys()
                            if (v == vgName) and
                            ((vgName, lvName) not in updatedLVs.iterkeys()))

            for lvName in staleLVs:
                log.warning("Removing stale lv: %s/%s", vgName, lvName)
                self._lvs.pop((vgName, lvName), None)
            log.debug("lvs reloaded")

        return updatedLVs

    def _reloadAllLvs(self):
        """
        Used only during bootstrap. 
""" cmd = list(LVS_CMD) rc, out, err = self.cmd(cmd) if rc == 0: updatedLVs = set() for line in out: fields = [field.strip() for field in line.split(SEPARATOR)] lv = makeLV(*fields) # For LV we are only interested in its first extent if lv.seg_start_pe == "0": self._lvs[(lv.vg_name, lv.name)] = lv updatedLVs.add((lv.vg_name, lv.name)) # Remove stales for vgName, lvName in self._lvs.keys(): if (vgName, lvName) not in updatedLVs: self._lvs.pop((vgName, lvName), None) log.error("Removing stale lv: %s/%s", vgName, lvName) self._stalelv = False return dict(self._lvs) def _invalidatepvs(self, pvNames): with self._oplock.acquireContext(LVM_OP_INVALIDATE): pvNames = _normalizeargs(pvNames) for pvName in pvNames: self._pvs[pvName] = Stub(pvName, True) def _invalidateAllPvs(self): with self._oplock.acquireContext(LVM_OP_INVALIDATE): self._stalepv = True self._pvs.clear() def _invalidatevgs(self, vgNames): vgNames = _normalizeargs(vgNames) with self._oplock.acquireContext(LVM_OP_INVALIDATE): for vgName in vgNames: self._vgs[vgName] = Stub(vgName, True) def _invalidateAllVgs(self): with self._oplock.acquireContext(LVM_OP_INVALIDATE): self._stalevg = True self._vgs.clear() def _invalidatelvs(self, vgName, lvNames=None): with self._oplock.acquireContext(LVM_OP_INVALIDATE): lvNames = _normalizeargs(lvNames) # Invalidate LVs in a specific VG if lvNames: # Invalidate a specific LVs for lvName in lvNames: self._lvs[(vgName, lvName)] = Stub(lvName, True) else: # Invalidate all the LVs in a given VG for lv in self._lvs.values(): if not isinstance(lv, Stub): if lv.vg_name == vgName: self._lvs[(vgName, lv.name)] = Stub(lv.name, True) def _invalidateAllLvs(self): with self._oplock.acquireContext(LVM_OP_INVALIDATE): self._stalelv = True self._lvs.clear() def flush(self): self._invalidateAllPvs() self._invalidateAllVgs() self._invalidateAllLvs() def getPv(self, pvName): # Get specific PV pv = self._pvs.get(pvName) if not pv or isinstance(pv, Stub): pvs = self._reloadpvs(pvName) pv = pvs.get(pvName) return pv def getAllPvs(self): # Get everything we have if self._stalepv: pvs = self._reloadpvs() else: pvs = dict(self._pvs) stalepvs = [pv.name for pv in pvs.itervalues() if isinstance(pv, Stub)] if stalepvs: reloaded = self._reloadpvs(stalepvs) pvs.update(reloaded) return pvs.values() def getVg(self, vgName): # Get specific VG vg = self._vgs.get(vgName) if not vg or isinstance(vg, Stub): vgs = self._reloadvgs(vgName) vg = vgs.get(vgName) return vg def getVgs(self, vgNames): """Reloads all the VGs of the set. Can block for suspended devices. Fills the cache but not uses it. Only returns found VGs. """ return [vg for vgName, vg in self._reloadvgs(vgNames).iteritems() if vgName in vgNames] def getAllVgs(self): # Get everything we have if self._stalevg: vgs = self._reloadvgs() else: vgs = dict(self._vgs) stalevgs = [vg.name for vg in vgs.itervalues() if isinstance(vg, Stub)] if stalevgs: reloaded = self._reloadvgs(stalevgs) vgs.update(reloaded) return vgs.values() def getLv(self, vgName, lvName=None): # Checking self._stalelv here is suboptimal, because # unnecessary reloads # are done. 
# Return vgName/lvName info
        # If both 'vgName' and 'lvName' are None then return everything
        # If only 'lvName' is None then return all the LVs in the given VG
        # If only 'vgName' is None it is weird, so return nothing
        # (we can consider returning all the LVs with a given name)
        if lvName:
            # vgName, lvName
            lv = self._lvs.get((vgName, lvName))
            if not lv or isinstance(lv, Stub):
                # while we are here, reload all the LVs in the VG
                lvs = self._reloadlvs(vgName)
                lv = lvs.get((vgName, lvName))
                if not lv:
                    log.warning("lv: %s not found in lvs vg: %s response",
                                lvName, vgName)
            res = lv
        else:
            # vgName, None
            # If there are any stale LVs reload the whole VG, since it would
            # cost us around the same effort anyhow and these stale LVs can
            # be in the vg.
            # Will be better when the pvs dict will be part of the vg.
            # Fix me: should not be more stubs
            if self._stalelv or any(isinstance(lv, Stub)
                                    for lv in self._lvs.values()):
                lvs = self._reloadlvs(vgName)
            else:
                lvs = dict(self._lvs)
            # lvs = self._reloadlvs()
            lvs = [lv for lv in lvs.values()
                   if not isinstance(lv, Stub) and (lv.vg_name == vgName)]
            res = lvs
        return res

    def getAllLvs(self):
        # None, None
        if self._stalelv or any(isinstance(lv, Stub)
                                for lv in self._lvs.values()):
            lvs = self._reloadAllLvs()
        else:
            lvs = dict(self._lvs)
        return lvs.values()


_lvminfo = LVMCache()


def bootstrap(refreshlvs=()):
    """
    Bootstrap lvm module

    This function builds the lvm cache and ensures that all unused lvs are
    deactivated, except lvs matching refreshlvs, which are refreshed instead.
    """
    _lvminfo.bootstrap()

    refreshlvs = set(refreshlvs)

    for vg in _lvminfo.getAllVgs():
        deactivate = []
        refresh = []

        for lv in _lvminfo.getLv(vg.name):
            if lv.active:
                if lv.name in refreshlvs:
                    refresh.append(lv.name)
                elif lv.opened:
                    log.debug("Skipping open lv: vg=%s lv=%s", vg.name,
                              lv.name)
                else:
                    deactivate.append(lv.name)

        if deactivate:
            log.info("Deactivating lvs: vg=%s lvs=%s", vg.name, deactivate)
            try:
                _setLVAvailability(vg.name, deactivate, "n")
            except se.CannotDeactivateLogicalVolume:
                log.error("Error deactivating lvs: vg=%s lvs=%s", vg.name,
                          deactivate)
            # Some lvs are inactive now
            _lvminfo._invalidatelvs(vg.name, deactivate)

        if refresh:
            log.info("Refreshing lvs: vg=%s lvs=%s", vg.name, refresh)
            try:
                refreshLVs(vg.name, refresh)
            except se.LogicalVolumeRefreshError:
                log.error("Error refreshing lvs: vg=%s lvs=%s", vg.name,
                          refresh)


def invalidateCache():
    _lvminfo.invalidateCache()


def _fqpvname(pv):
    if pv and not pv.startswith(PV_PREFIX):
        pv = os.path.join(PV_PREFIX, pv)
    return pv


def _createpv(devices, metadataSize, options=tuple()):
    """
    Size for pvcreate should be with units k|m|g.
    pvcreate on a dev that is already a PV but not in a VG returns rc = 0.
    The device is re-created with the new parameters. 
""" cmd = ["pvcreate"] if options: cmd.extend(options) if metadataSize != 0: cmd.extend(("--metadatasize", "%sm" % metadataSize, "--metadatacopies", "2", "--metadataignore", "y")) cmd.extend(devices) rc, out, err = _lvminfo.cmd(cmd, devices) return rc, out, err def _initpvs(devices, metadataSize, force=False): def _initpvs_removeHolders(): """Remove holders for all devices.""" for device in devices: try: devicemapper.removeMappingsHoldingDevice( os.path.basename(device)) except OSError as e: if e.errno == errno.ENODEV: raise se.PhysDevInitializationError("%s: %s" % (device, str(e))) else: raise if force is True: options = ("-y", "-ff") _initpvs_removeHolders() else: options = tuple() rc, out, err = _createpv(devices, metadataSize, options) _lvminfo._invalidatepvs(devices) if rc != 0: log.error("pvcreate failed with rc=%s", rc) log.error("%s, %s", out, err) raise se.PhysDevInitializationError(str(devices)) return (set(devices), set(), rc, out, err) def getLvDmName(vgName, lvName): return "%s-%s" % (vgName.replace("-", "--"), lvName) def removeVgMapping(vgName): """ Removes the mapping of the specified volume group. Utilizes the fact that the mapping created by the LVM looks like that e45c12b0--f520--498a--82bb--c6cb294b990f-master i.e vg name concatenated with volume name (dash is escaped with dash) """ mappingPrefix = getLvDmName(vgName, "") mappings = devicemapper.getAllMappedDevices() for mapping in mappings: if not mapping.startswith(mappingPrefix): continue try: devicemapper.removeMapping(mapping) except Exception: pass # Activation of the whole vg is assumed to be used nowhere. # This is a separate function just in case. def _setVgAvailability(vgs, available): vgs = _normalizeargs(vgs) cmd = ["vgchange", "--available", available] + vgs rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs(vgs)) for vg in vgs: _lvminfo._invalidatelvs(vg) if rc != 0: # During deactivation, in vg.py (sic): # we ignore error here because we don't care about this vg anymore if available == "n": log.info("deactivate vg %s failed: rc %s - %s %s (ignored)" % (vgs, rc, out, err)) else: raise se.VolumeGroupActionError( "vgchange on vg(s) %s failed. %d %s %s" % (vgs, rc, out, err)) def changelv(vg, lvs, attrs): """ Change multiple attributes on multiple LVs. vg: VG name lvs: a single LV name or iterable of LV names. attrs: an iterable of (attr, value) pairs), e.g. 
(('--available', 'y'), ('--permission', 'rw'))

    Note:
    You may activate an activated LV without error,
    but lvchange returns an error (RC=5) when activating rw if already rw.
    """

    lvs = _normalizeargs(lvs)
    # Whether it fails or not we (may) have changed the lv,
    # so we invalidate cache to reload these volumes on first occasion
    lvnames = tuple("%s/%s" % (vg, lv) for lv in lvs)
    cmd = ["lvchange"]
    cmd.extend(LVM_NOBACKUP)
    if isinstance(attrs[0], str):
        # ("--attribute", "value")
        cmd.extend(attrs)
    else:
        # (("--aa", "v1"), ("--ab", "v2"))
        for attr in attrs:
            cmd.extend(attr)
    cmd.extend(lvnames)
    rc, out, err = _lvminfo.cmd(tuple(cmd), _lvminfo._getVGDevs((vg, )))
    _lvminfo._invalidatelvs(vg, lvs)
    if rc != 0 and len(out) < 1:
        raise se.StorageException("%d %s %s\n%s/%s" % (rc, out, err, vg, lvs))


def _setLVAvailability(vg, lvs, available):
    try:
        changelv(vg, lvs, ("--available", available))
    except se.StorageException as e:
        error = ({"y": se.CannotActivateLogicalVolumes,
                  "n": se.CannotDeactivateLogicalVolume}
                 .get(available, se.VolumeGroupActionError))
        raise error(str(e))

#
# Public Object Accessors
#


def getPV(pvName):
    pv = _lvminfo.getPv(_fqpvname(pvName))
    if pv is None:
        raise se.InaccessiblePhysDev((pvName,))
    return pv


def getAllPVs():
    return _lvminfo.getAllPvs()


def testPVCreate(devices, metadataSize):
    """
    Only tests the pv creation.

    Should not affect the cache state.

    Receives an iterable of guids.
    Returns the sets of devices that could and could not be initialized
    as PVs.
    """
    devs = tuple("%s/%s" % (PV_PREFIX, dev) for dev in devices)

    options = ("--test",)
    rc, out, err = _createpv(devs, metadataSize, options)
    if rc == 0:
        unusedDevs = set(devices)
        usedDevs = set()
    else:
        unusedDevs = set(re_pvName.findall("\n".join(out)))
        usedDevs = set(devs) - set(unusedDevs)
        log.debug("rc: %s, out: %s, err: %s, unusedDevs: %s, usedDevs: %s",
                  rc, out, err, unusedDevs, usedDevs)

    return unusedDevs, usedDevs


def resizePV(vgName, guid):
    """
    If the LUN was increased on the storage server, the PV must be resized
    after the multipath devices have been resized in order for the changes
    to be seen.

    Raises se.CouldNotResizePhysicalVolume if pvresize fails
    """
    pvName = _fqpvname(guid)
    cmd = ["pvresize", pvName]
    rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
    if rc != 0:
        raise se.CouldNotResizePhysicalVolume(pvName, err)
    _lvminfo._invalidatepvs(pvName)
    _lvminfo._invalidatevgs(vgName)


def getVG(vgName):
    vg = _lvminfo.getVg(vgName)  # returns single VG namedtuple
    if not vg:
        raise se.VolumeGroupDoesNotExist(vgName)
    else:
        return vg


def getVGs(vgNames):
    return _lvminfo.getVgs(vgNames)  # returns list


def getAllVGs():
    return _lvminfo.getAllVgs()  # returns list


# TODO: lvm VG UUID should not be exposed.
# Remove this function when hsm.public_createVG is removed.
def getVGbyUUID(vgUUID):
    # cycle through all the VGs until the one with the given UUID found
    for vg in getAllVGs():
        try:
            if vg.uuid == vgUUID:
                return vg
        except AttributeError as e:
            # An unreloadable VG found, but maybe we are not looking for it. 
log.debug("%s" % e.message, exc_info=True) continue # If not cry loudly raise se.VolumeGroupDoesNotExist("vg_uuid: %s" % vgUUID) def getLV(vgName, lvName=None): lv = _lvminfo.getLv(vgName, lvName) # getLV() should not return None if not lv: raise se.LogicalVolumeDoesNotExistError("%s/%s" % (vgName, lvName)) else: return lv # # Public Volume Group interface # def createVG(vgName, devices, initialTag, metadataSize, extentsize="128m", force=False): pvs = [_fqpvname(pdev) for pdev in _normalizeargs(devices)] _checkpvsblksize(pvs) _initpvs(pvs, metadataSize, force) # Activate the 1st PV metadata areas cmd = ["pvchange", "--metadataignore", "n"] cmd.append(pvs[0]) rc, out, err = _lvminfo.cmd(cmd, tuple(pvs)) if rc != 0: raise se.PhysDevInitializationError(pvs[0]) options = ["--physicalextentsize", extentsize] if initialTag: options.extend(("--addtag", initialTag)) cmd = ["vgcreate"] + options + [vgName] + pvs rc, out, err = _lvminfo.cmd(cmd, tuple(pvs)) if rc == 0: _lvminfo._invalidatepvs(pvs) _lvminfo._invalidatevgs(vgName) log.debug("Cache after createvg %s", _lvminfo._vgs) else: raise se.VolumeGroupCreateError(vgName, pvs) def removeVG(vgName): cmd = ["vgremove", "-f", vgName] rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, ))) pvs = tuple(pvName for pvName, pv in _lvminfo._pvs.iteritems() if not isinstance(pv, Stub) and pv.vg_name == vgName) # PVS needs to be reloaded anyhow: if vg is removed they are staled, # if vg remove failed, something must be wrong with devices and we want # cache updated as well _lvminfo._invalidatepvs(pvs) # If vgremove failed reintroduce the VG into the cache if rc != 0: _lvminfo._invalidatevgs(vgName) raise se.VolumeGroupRemoveError("VG %s remove failed." % vgName) else: # Remove the vg from the cache _lvminfo._vgs.pop(vgName, None) def removeVGbyUUID(vgUUID): vg = getVGbyUUID(vgUUID) if vg: removeVG(vg.name) def extendVG(vgName, devices, force): pvs = [_fqpvname(pdev) for pdev in _normalizeargs(devices)] _checkpvsblksize(pvs, getVGBlockSizes(vgName)) vg = _lvminfo.getVg(vgName) # Format extension PVs as all the other already in the VG _initpvs(pvs, int(vg.vg_mda_size) / 2 ** 20, force) cmd = ["vgextend", vgName] + pvs devs = tuple(_lvminfo._getVGDevs((vgName, )) + tuple(pvs)) rc, out, err = _lvminfo.cmd(cmd, devs) if rc == 0: _lvminfo._invalidatepvs(pvs) _lvminfo._invalidatevgs(vgName) log.debug("Cache after extending vg %s", _lvminfo._vgs) else: raise se.VolumeGroupExtendError(vgName, pvs) def chkVG(vgName): cmd = ["vgck", vgName] rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, ))) if rc != 0: _lvminfo._invalidatevgs(vgName) _lvminfo._invalidatelvs(vgName) raise se.StorageDomainAccessError("%s: %s" % (vgName, err)) return True def deactivateVG(vgName): getVG(vgName) # Check existence _setVgAvailability(vgName, available="n") def invalidateVG(vgName): _lvminfo._invalidatevgs(vgName) _lvminfo._invalidatelvs(vgName) def _getpvblksize(pv): dev = devicemapper.getDmId(os.path.basename(pv)) return multipath.getDeviceBlockSizes(dev) def _checkpvsblksize(pvs, vgBlkSize=None): for pv in pvs: pvBlkSize = _getpvblksize(pv) logPvBlkSize, phyPvBlkSize = pvBlkSize if logPvBlkSize not in constants.SUPPORTED_BLOCKSIZE: raise se.DeviceBlockSizeError(pvBlkSize) if phyPvBlkSize < logPvBlkSize: raise se.DeviceBlockSizeError(pvBlkSize) # WARN: This is setting vgBlkSize to the first value found by # _getpvblksize (if not provided by the function call). # It makes sure that all the PVs have the same block size. 
if vgBlkSize is None: vgBlkSize = pvBlkSize if logPvBlkSize != vgBlkSize[0]: raise se.VolumeGroupBlockSizeError(vgBlkSize, pvBlkSize) def checkVGBlockSizes(vgUUID, vgBlkSize=None): pvs = listPVNames(vgUUID) if not pvs: raise se.VolumeGroupDoesNotExist("vg_uuid: %s" % vgUUID) _checkpvsblksize(pvs, vgBlkSize) def getVGBlockSizes(vgUUID): pvs = listPVNames(vgUUID) if not pvs: raise se.VolumeGroupDoesNotExist("vg_uuid: %s" % vgUUID) # Returning the block size of the first pv is correct since we don't allow # devices with different block sizes to be on the same VG. return _getpvblksize(pvs[0]) # # Public Logical volume interface # def createLV(vgName, lvName, size, activate=True, contiguous=False, initialTag=None): """ Size units: MiB (1 MiB = 1024 ** 2 = 2 ** 20 bytes). """ # WARNING! From man vgs: # All sizes are output in these units: (h)uman-readable, (b)ytes, # (s)ectors, (k)ilobytes, (m)egabytes, (g)igabytes, (t)erabytes, # (p)etabytes, (e)xabytes. # Capitalise to use multiples of 1000 (S.I.) instead of 1024. cont = {True: "y", False: "n"}[contiguous] cmd = ["lvcreate"] cmd.extend(LVM_NOBACKUP) cmd.extend(("--contiguous", cont, "--size", "%sm" % size)) if initialTag is not None: cmd.extend(("--addtag", initialTag)) cmd.extend(("--name", lvName, vgName)) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, ))) if rc == 0: _lvminfo._invalidatevgs(vgName) _lvminfo._invalidatelvs(vgName, lvName) else: raise se.CannotCreateLogicalVolume(vgName, lvName) # TBD: Need to explore the option of running lvcreate w/o devmapper # so that if activation is not needed it is skipped in the # first place if activate: lv_path = lvPath(vgName, lvName) st = os.stat(lv_path) uName = pwd.getpwuid(st.st_uid).pw_name gName = grp.getgrgid(st.st_gid).gr_name if ":".join((uName, gName)) != USER_GROUP: cmd = [constants.EXT_CHOWN, USER_GROUP, lv_path] if misc.execCmd(cmd, sudo=True)[0] != 0: log.warning("Could not change ownership of one or more " "volumes in vg (%s) - %s", vgName, lvName) else: _setLVAvailability(vgName, lvName, "n") def removeLVs(vgName, lvNames): lvNames = _normalizeargs(lvNames) # Assert that the LVs are inactive before removal. for lvName in lvNames: if _isLVActive(vgName, lvName): # Fix me # Should not remove active LVs # raise se.CannotRemoveLogicalVolume(vgName, lvName) log.warning("Removing active volume %s/%s" % (vgName, lvName)) # Whether or not the LV exists in the cache, attempt to remove it. # Stubs are removed as well; active Stubs should raise. # Destroy LV # Fix me: removes active LVs too. "-f" should be removed. cmd = ["lvremove", "-f"] cmd.extend(LVM_NOBACKUP) for lvName in lvNames: cmd.append("%s/%s" % (vgName, lvName)) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, ))) if rc == 0: for lvName in lvNames: # Remove the LV from the cache _lvminfo._lvs.pop((vgName, lvName), None) # If lvremove succeeded it affected the VG as well _lvminfo._invalidatevgs(vgName) else: # Otherwise LV info needs to be refreshed _lvminfo._invalidatelvs(vgName, lvNames) raise se.CannotRemoveLogicalVolume(vgName, str(lvNames)) def _resizeLV(op, vgName, lvName, size): """ Size units: MiB (1 MiB = 1024 ** 2 = 2 ** 20 bytes). """ # WARNING! From man vgs: # All sizes are output in these units: (h)uman-readable, (b)ytes, # (s)ectors, (k)ilobytes, (m)egabytes, (g)igabytes, (t)erabytes, # (p)etabytes, (e)xabytes. # Capitalise to use multiples of 1000 (S.I.) instead of 1024. 
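# Illustrative sketch (added comment; the VG/LV names are hypothetical):
# with the MiB units documented above, extendLV("vg0", "lv0", 2048)
# builds a command of the form
#     ("lvextend",) + LVM_NOBACKUP + ("--size", "2048m", "vg0/lv0")
# where the lowercase "m" suffix selects 1024-based megabytes, so the
# request is for 2048 MiB == 2 GiB.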
cmd = (op,) + LVM_NOBACKUP cmd += ("--size", "%sm" % (size,), "%s/%s" % (vgName, lvName)) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, ))) if rc == 0: _lvminfo._invalidatevgs(vgName) _lvminfo._invalidatelvs(vgName, lvName) elif rc == 3: # In LVM we trust; hope that rc=3 is returned only in this case. log.debug("New size (in extents) matches existing size (in extents).") elif rc != 0: # Get the free extents size # YaRC vg = getVG(vgName) free_size = int(vg.extent_size) * int(vg.free_count) # in B if free_size < int(size) * constants.MEGAB: raise se.VolumeGroupSizeError("%s/%s %d > %d (MiB)" % (vgName, lvName, int(size), free_size / constants.MEGAB)) raise se.LogicalVolumeExtendError(vgName, lvName, "%sM" % (size, )) def extendLV(vgName, lvName, size): _resizeLV("lvextend", vgName, lvName, size) def reduceLV(vgName, lvName, size): _resizeLV("lvreduce", vgName, lvName, size) def activateLVs(vgName, lvNames): lvNames = _normalizeargs(lvNames) toActivate = [lvName for lvName in lvNames if not _isLVActive(vgName, lvName)] if toActivate: _setLVAvailability(vgName, toActivate, "y") def deactivateLVs(vgName, lvNames): lvNames = _normalizeargs(lvNames) toDeactivate = [lvName for lvName in lvNames if _isLVActive(vgName, lvName)] if toDeactivate: _setLVAvailability(vgName, toDeactivate, "n") def renameLV(vg, oldlv, newlv): cmd = ("lvrename",) + LVM_NOBACKUP + (vg, oldlv, newlv) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vg, ))) if rc != 0: raise se.LogicalVolumeRenameError("%s %s %s" % (vg, oldlv, newlv)) _lvminfo._lvs.pop((vg, oldlv), None) _lvminfo._reloadlvs(vg, newlv) def refreshLVs(vgName, lvNames): # If the logical volumes are active, reload their metadata. cmd = ['lvchange', '--refresh'] cmd.extend("%s/%s" % (vgName, lv) for lv in lvNames) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, ))) _lvminfo._invalidatelvs(vgName, lvNames) if rc != 0: raise se.LogicalVolumeRefreshError("%s failed" % list2cmdline(cmd)) # Fix me: Function name should mention LV or unify with the VG version; # maybe apply to all the LVs in the whole VG? def addtag(vg, lv, tag): lvname = "%s/%s" % (vg, lv) cmd = ("lvchange",) + LVM_NOBACKUP + ("--addtag", tag) + (lvname,) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vg, ))) _lvminfo._invalidatelvs(vg, lv) if rc != 0: # Fix me: should be se.ChangeLogicalVolumeError, but that does not exist. raise se.MissingTagOnLogicalVolume("%s/%s" % (vg, lv), tag) def changeLVTags(vg, lv, delTags=(), addTags=()): lvname = '%s/%s' % (vg, lv) delTags = set(delTags) addTags = set(addTags) if delTags.intersection(addTags): raise se.LogicalVolumeReplaceTagError( "Cannot add and delete the same tag lv: `%s` tags: `%s`" % (lvname, ", ".join(delTags.intersection(addTags)))) cmd = ['lvchange'] cmd.extend(LVM_NOBACKUP) for tag in delTags: cmd.extend(("--deltag", tag)) for tag in addTags: cmd.extend(('--addtag', tag)) cmd.append(lvname) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vg, ))) _lvminfo._invalidatelvs(vg, lv) if rc != 0: raise se.LogicalVolumeReplaceTagError( 'lv: `%s` add: `%s` del: `%s` (%s)' % (lvname, ", ".join(addTags), ", ".join(delTags), err[-1])) def addLVTags(vg, lv, addTags): changeLVTags(vg, lv, addTags=addTags) # # Helper functions # def lvPath(vgName, lvName): return os.path.join("/dev", vgName, lvName) def lvDmDev(vgName, lvName):<|fim▁hole|> If the LV is inactive there is no dm device and OSError will be raised. 
""" lvp = lvPath(vgName, lvName) return os.path.basename(os.readlink(lvp)) def _isLVActive(vgName, lvName): """Active volumes have a mp link. This function should not be used out of this module. """ return os.path.exists(lvPath(vgName, lvName)) def changeVGTags(vgName, delTags=(), addTags=()): delTags = set(delTags) addTags = set(addTags) if delTags.intersection(addTags): raise se.VolumeGroupReplaceTagError( "Cannot add and delete the same tag vg: `%s` tags: `%s`" % (vgName, ", ".join(delTags.intersection(addTags)))) cmd = ["vgchange"] for tag in delTags: cmd.extend(("--deltag", tag)) for tag in addTags: cmd.extend(("--addtag", tag)) cmd.append(vgName) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, ))) _lvminfo._invalidatevgs(vgName) if rc != 0: raise se.VolumeGroupReplaceTagError( "vg:%s del:%s add:%s (%s)" % (vgName, ", ".join(delTags), ", ".join(addTags), err[-1])) def replaceVGTag(vg, oldTag, newTag): changeVGTags(vg, [oldTag], [newTag]) def getFirstExt(vg, lv): return getLV(vg, lv).devices.strip(" )").split("(") def listPVNames(vgName): try: pvNames = _lvminfo._vgs[vgName].pv_name except (KeyError, AttributeError): pvNames = getVG(vgName).pv_name return pvNames def setrwLV(vg, lv, rw=True): permission = {False: 'r', True: 'rw'}[rw] try: changelv(vg, lv, ("--permission", permission)) except se.StorageException: l = getLV(vg, lv) if l.writeable == rw: # Ignore the error since lv is now rw, hoping that the error was # because lv was already rw, see BZ#654691. We may hide here # another lvchange error. return raise se.CannotSetRWLogicalVolume(vg, lv, permission) def lvsByTag(vgName, tag): return [lv for lv in getLV(vgName) if tag in lv.tags] def invalidateFilter(): _lvminfo.invalidateFilter() # Fix me: unify with addTag def replaceLVTag(vg, lv, deltag, addtag): """ Removes and add tags atomically. """ lvname = "%s/%s" % (vg, lv) cmd = (("lvchange",) + LVM_NOBACKUP + ("--deltag", deltag) + ("--addtag", addtag) + (lvname,)) rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vg, ))) _lvminfo._invalidatelvs(vg, lv) if rc != 0: raise se.LogicalVolumeReplaceTagError("%s/%s" % (vg, lv), "%s,%s" % (deltag, addtag))<|fim▁end|>
"""Return the LV dm device. returns: dm-X
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models from django.core.urlresolvers import reverse class Software(models.Model): name = models.CharField(max_length=200) def __unicode__(self): return self.name <|fim▁hole|> return reverse('software_edit', kwargs={'pk': self.pk})<|fim▁end|>
def get_absolute_url(self):
<|file_name|>MaximumNumberOfApprovalsExceededException.java<|end_file_name|><|fim▁begin|>/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.codecommit.model; import javax.annotation.Generated; /** * <p> * The number of approvals required for the approval rule exceeds the maximum number allowed. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class MaximumNumberOfApprovalsExceededException extends com.amazonaws.services.codecommit.model.AWSCodeCommitException { private static final long serialVersionUID = 1L; /** * Constructs a new MaximumNumberOfApprovalsExceededException with the specified error message. * * @param message * Describes the error encountered. */<|fim▁hole|> } }<|fim▁end|>
public MaximumNumberOfApprovalsExceededException(String message) { super(message);
<|file_name|>change_detection.js<|end_file_name|><|fim▁begin|>'use strict';var __extends = (this && this.__extends) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; function __() { this.constructor = d; } __.prototype = b.prototype; d.prototype = new __(); }; var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { if (typeof Reflect === "object" && typeof Reflect.decorate === "function") return Reflect.decorate(decorators, target, key, desc); switch (arguments.length) { case 2: return decorators.reduceRight(function(o, d) { return (d && d(o)) || o; }, target); case 3: return decorators.reduceRight(function(o, d) { return (d && d(target, key)), void 0; }, void 0); case 4: return decorators.reduceRight(function(o, d) { return (d && d(target, key, o)) || o; }, desc); } }; var __metadata = (this && this.__metadata) || function (k, v) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v); }; var jit_proto_change_detector_1 = require('./jit_proto_change_detector'); var pregen_proto_change_detector_1 = require('./pregen_proto_change_detector'); var proto_change_detector_1 = require('./proto_change_detector'); var iterable_differs_1 = require('./differs/iterable_differs'); var default_iterable_differ_1 = require('./differs/default_iterable_differ'); var keyvalue_differs_1 = require('./differs/keyvalue_differs'); var default_keyvalue_differ_1 = require('./differs/default_keyvalue_differ'); var interfaces_1 = require('./interfaces'); var di_1 = require('angular2/di'); var collection_1 = require('angular2/src/core/facade/collection'); var lang_1 = require('angular2/src/core/facade/lang'); var ast_1 = require('./parser/ast'); exports.ASTWithSource = ast_1.ASTWithSource; exports.AST = ast_1.AST; exports.AstTransformer = ast_1.AstTransformer; exports.PropertyRead = ast_1.PropertyRead; exports.LiteralArray = ast_1.LiteralArray; exports.ImplicitReceiver = ast_1.ImplicitReceiver; <|fim▁hole|>exports.Lexer = lexer_1.Lexer; var parser_1 = require('./parser/parser'); exports.Parser = parser_1.Parser; var locals_1 = require('./parser/locals'); exports.Locals = locals_1.Locals; var exceptions_1 = require('./exceptions'); exports.DehydratedException = exceptions_1.DehydratedException; exports.ExpressionChangedAfterItHasBeenCheckedException = exceptions_1.ExpressionChangedAfterItHasBeenCheckedException; exports.ChangeDetectionError = exceptions_1.ChangeDetectionError; var interfaces_2 = require('./interfaces'); exports.ChangeDetection = interfaces_2.ChangeDetection; exports.ChangeDetectorDefinition = interfaces_2.ChangeDetectorDefinition; exports.DebugContext = interfaces_2.DebugContext; exports.ChangeDetectorGenConfig = interfaces_2.ChangeDetectorGenConfig; var constants_1 = require('./constants'); exports.ChangeDetectionStrategy = constants_1.ChangeDetectionStrategy; var proto_change_detector_2 = require('./proto_change_detector'); exports.DynamicProtoChangeDetector = proto_change_detector_2.DynamicProtoChangeDetector; var binding_record_1 = require('./binding_record'); exports.BindingRecord = binding_record_1.BindingRecord; exports.BindingTarget = binding_record_1.BindingTarget; var directive_record_1 = require('./directive_record'); exports.DirectiveIndex = directive_record_1.DirectiveIndex; exports.DirectiveRecord = directive_record_1.DirectiveRecord; var dynamic_change_detector_1 = require('./dynamic_change_detector'); exports.DynamicChangeDetector = 
dynamic_change_detector_1.DynamicChangeDetector; var change_detector_ref_1 = require('./change_detector_ref'); exports.ChangeDetectorRef = change_detector_ref_1.ChangeDetectorRef; var iterable_differs_2 = require('./differs/iterable_differs'); exports.IterableDiffers = iterable_differs_2.IterableDiffers; var keyvalue_differs_2 = require('./differs/keyvalue_differs'); exports.KeyValueDiffers = keyvalue_differs_2.KeyValueDiffers; var change_detection_util_1 = require('./change_detection_util'); exports.WrappedValue = change_detection_util_1.WrappedValue; /** * Structural diffing for `Object`s and `Map`s. */ exports.keyValDiff = lang_1.CONST_EXPR([lang_1.CONST_EXPR(new default_keyvalue_differ_1.DefaultKeyValueDifferFactory())]); /** * Structural diffing for `Iterable` types such as `Array`s. */ exports.iterableDiff = lang_1.CONST_EXPR([lang_1.CONST_EXPR(new default_iterable_differ_1.DefaultIterableDifferFactory())]); exports.defaultIterableDiffers = lang_1.CONST_EXPR(new iterable_differs_1.IterableDiffers(exports.iterableDiff)); exports.defaultKeyValueDiffers = lang_1.CONST_EXPR(new keyvalue_differs_1.KeyValueDiffers(exports.keyValDiff)); /** * Map from {@link ChangeDetectorDefinition#id} to a factory method which takes a * {@link Pipes} and a {@link ChangeDetectorDefinition} and generates a * {@link ProtoChangeDetector} associated with the definition. */ // TODO(kegluneq): Use PregenProtoChangeDetectorFactory rather than Function once possible in // dart2js. See https://github.com/dart-lang/sdk/issues/23630 for details. exports.preGeneratedProtoDetectors = {}; /** * Implements change detection using a map of pregenerated proto detectors. */ var PreGeneratedChangeDetection = (function (_super) { __extends(PreGeneratedChangeDetection, _super); function PreGeneratedChangeDetection(config, protoChangeDetectorsForTest) { _super.call(this); this._dynamicChangeDetection = new DynamicChangeDetection(); this._protoChangeDetectorFactories = lang_1.isPresent(protoChangeDetectorsForTest) ? protoChangeDetectorsForTest : exports.preGeneratedProtoDetectors; this._genConfig = lang_1.isPresent(config) ? config : new interfaces_1.ChangeDetectorGenConfig(lang_1.assertionsEnabled(), lang_1.assertionsEnabled(), false); } PreGeneratedChangeDetection.isSupported = function () { return pregen_proto_change_detector_1.PregenProtoChangeDetector.isSupported(); }; PreGeneratedChangeDetection.prototype.getProtoChangeDetector = function (id, definition) { if (collection_1.StringMapWrapper.contains(this._protoChangeDetectorFactories, id)) { return collection_1.StringMapWrapper.get(this._protoChangeDetectorFactories, id)(definition); } return this._dynamicChangeDetection.getProtoChangeDetector(id, definition); }; Object.defineProperty(PreGeneratedChangeDetection.prototype, "genConfig", { get: function () { return this._genConfig; }, enumerable: true, configurable: true }); Object.defineProperty(PreGeneratedChangeDetection.prototype, "generateDetectors", { get: function () { return true; }, enumerable: true, configurable: true }); PreGeneratedChangeDetection = __decorate([ di_1.Injectable(), __metadata('design:paramtypes', [interfaces_1.ChangeDetectorGenConfig, Object]) ], PreGeneratedChangeDetection); return PreGeneratedChangeDetection; })(interfaces_1.ChangeDetection); exports.PreGeneratedChangeDetection = PreGeneratedChangeDetection; /** * Implements change detection that does not require `eval()`. * * This is slower than {@link JitChangeDetection}. 
*/ var DynamicChangeDetection = (function (_super) { __extends(DynamicChangeDetection, _super); function DynamicChangeDetection(config) { _super.call(this); this._genConfig = lang_1.isPresent(config) ? config : new interfaces_1.ChangeDetectorGenConfig(lang_1.assertionsEnabled(), lang_1.assertionsEnabled(), false); } DynamicChangeDetection.prototype.getProtoChangeDetector = function (id, definition) { return new proto_change_detector_1.DynamicProtoChangeDetector(definition); }; Object.defineProperty(DynamicChangeDetection.prototype, "genConfig", { get: function () { return this._genConfig; }, enumerable: true, configurable: true }); Object.defineProperty(DynamicChangeDetection.prototype, "generateDetectors", { get: function () { return true; }, enumerable: true, configurable: true }); DynamicChangeDetection = __decorate([ di_1.Injectable(), __metadata('design:paramtypes', [interfaces_1.ChangeDetectorGenConfig]) ], DynamicChangeDetection); return DynamicChangeDetection; })(interfaces_1.ChangeDetection); exports.DynamicChangeDetection = DynamicChangeDetection; /** * Implements faster change detection by generating source code. * * This requires `eval()`. For change detection that does not require `eval()`, see * {@link DynamicChangeDetection} and {@link PreGeneratedChangeDetection}. */ var JitChangeDetection = (function (_super) { __extends(JitChangeDetection, _super); function JitChangeDetection(config) { _super.call(this); this._genConfig = lang_1.isPresent(config) ? config : new interfaces_1.ChangeDetectorGenConfig(lang_1.assertionsEnabled(), lang_1.assertionsEnabled(), false); } JitChangeDetection.isSupported = function () { return jit_proto_change_detector_1.JitProtoChangeDetector.isSupported(); }; JitChangeDetection.prototype.getProtoChangeDetector = function (id, definition) { return new jit_proto_change_detector_1.JitProtoChangeDetector(definition); }; Object.defineProperty(JitChangeDetection.prototype, "genConfig", { get: function () { return this._genConfig; }, enumerable: true, configurable: true }); Object.defineProperty(JitChangeDetection.prototype, "generateDetectors", { get: function () { return true; }, enumerable: true, configurable: true }); JitChangeDetection = __decorate([ di_1.Injectable(), __metadata('design:paramtypes', [interfaces_1.ChangeDetectorGenConfig]) ], JitChangeDetection); return JitChangeDetection; })(interfaces_1.ChangeDetection); exports.JitChangeDetection = JitChangeDetection; //# sourceMappingURL=change_detection.js.map<|fim▁end|>
var lexer_1 = require('./parser/lexer');
<|file_name|>facade_tests.py<|end_file_name|><|fim▁begin|>from django.test import TestCase from mock import Mock, patch from paymentexpress.facade import Facade from paymentexpress.gateway import AUTH, PURCHASE from paymentexpress.models import OrderTransaction from tests import (XmlTestingMixin, CARD_VISA, SAMPLE_SUCCESSFUL_RESPONSE, SAMPLE_DECLINED_RESPONSE, SAMPLE_ERROR_RESPONSE) from oscar.apps.payment.utils import Bankcard from oscar.apps.payment.exceptions import (UnableToTakePayment, InvalidGatewayRequestError) class MockedResponseTestCase(TestCase): def create_mock_response(self, body, status_code=200): response = Mock() response.content = body response.text = body response.status_code = status_code return response class FacadeTests(TestCase, XmlTestingMixin): def setUp(self): self.facade = Facade() def test_zero_amount_raises_exception(self): card = Bankcard(card_number=CARD_VISA, expiry_date='1015', name="Frankie", cvv="123", start_date="1010") with self.assertRaises(UnableToTakePayment): self.facade.authorise('1000', 0, card) def test_zero_amount_for_complete_raises_exception(self): with self.assertRaises(UnableToTakePayment): self.facade.complete('1000', 0, '1234') def test_zero_amount_for_purchase_raises_exception(self): with self.assertRaises(UnableToTakePayment): self.facade.purchase('1000', 0) def test_purchase_without_billing_id_or_card_raises_exception(self): with self.assertRaises(ValueError): self.facade.purchase('1000', 1.23) def test_zero_amount_for_refund_raises_exception(self): with self.assertRaises(UnableToTakePayment): self.facade.refund('1000', 0, '1234') def test_merchant_reference_format(self): merchant_ref = self.facade._get_merchant_reference('1000', AUTH) self.assertRegexpMatches(merchant_ref, r'^\d+_[A-Z]+_\d+_\d{4}$') class FacadeSuccessfulResponseTests(MockedResponseTestCase): dps_txn_ref = '000000030884cdc6' dps_billing_id = '0000080023225598' def setUp(self): self.facade = Facade() self.card = Bankcard(card_number=CARD_VISA, expiry_date='1015', name="Frankie", cvv="123",<|fim▁hole|> with patch('requests.post') as post: post.return_value = self.create_mock_response( SAMPLE_SUCCESSFUL_RESPONSE) auth_dict = self.facade.authorise('1000', 1, self.card) complete_dict = self.facade.complete('1000', 1.23, self.dps_txn_ref) refund_dict = self.facade.refund('1000', 1.23, '000000030884cdc6') validate_dict = self.facade.validate(self.card) response_dicts = (auth_dict, complete_dict, refund_dict, validate_dict) for response_dict in response_dicts: self.assertEquals(self.dps_txn_ref, response_dict['txn_reference']) self.assertEquals(self.dps_billing_id, response_dict['partner_reference']) def test_purchase_with_billing_id_returns_valid_dict(self): with patch('requests.post') as post: post.return_value = self.create_mock_response( SAMPLE_SUCCESSFUL_RESPONSE) txn_ref = self.facade.purchase('1000', 1.23, 'abc123') self.assertEquals(self.dps_txn_ref, txn_ref['txn_reference']) def test_purchase_with_bankcard_returns_valid_dict(self): with patch('requests.post') as post: post.return_value = self.create_mock_response( SAMPLE_SUCCESSFUL_RESPONSE) txn_ref = self.facade.purchase('1000', 1.23, None, self.card) self.assertEquals(self.dps_txn_ref, txn_ref['txn_reference']) def test_successful_call_is_recorded(self): with patch('requests.post') as post: post.return_value = self.create_mock_response( SAMPLE_SUCCESSFUL_RESPONSE) self.facade.authorise('10001', 10.25, self.card) txn = OrderTransaction.objects.filter(order_number='10001')[0] self.assertEquals(AUTH, txn.txn_type) def 
test_empty_issue_date_is_allowed(self): with patch('requests.post') as post: post.return_value = self.create_mock_response( SAMPLE_SUCCESSFUL_RESPONSE) card = Bankcard(card_number=CARD_VISA, expiry_date='1015', name="Frankie", cvv="123") txn_ref = self.facade.authorise('1000', 1.23, card) self.assertEquals(self.dps_txn_ref, txn_ref['txn_reference']) class FacadeDeclinedResponseTests(MockedResponseTestCase): def setUp(self): self.facade = Facade() self.card = Bankcard(card_number=CARD_VISA, expiry_date='1015', name="Frankie", cvv="123", start_date="1010") def test_declined_call_raises_an_exception(self): with patch('requests.post') as post: post.return_value = self.create_mock_response( SAMPLE_DECLINED_RESPONSE) with self.assertRaises(UnableToTakePayment): self.facade.authorise('1000', 1, self.card) with self.assertRaises(UnableToTakePayment): self.facade.complete('1000', 1.23, '000000030884cdc6') with self.assertRaises(UnableToTakePayment): self.facade.purchase('1000', 1.23, 'abc123') with self.assertRaises(UnableToTakePayment): self.facade.purchase('1000', 1.23, None, self.card) with self.assertRaises(UnableToTakePayment): self.facade.refund('1000', 1.23, '000000030884cdc6') with self.assertRaises(UnableToTakePayment): self.facade.validate(self.card) def test_declined_call_is_recorded(self): with patch('requests.post') as post: post.return_value = self.create_mock_response( SAMPLE_DECLINED_RESPONSE) try: self.facade.purchase('1001', 10.24, None, self.card) except Exception: pass txn = OrderTransaction.objects.filter(order_number='1001')[0] self.assertIsNotNone(txn) self.assertEquals(PURCHASE, txn.txn_type) class FacadeErrorResponseTests(MockedResponseTestCase): def setUp(self): self.facade = Facade() self.card = Bankcard(card_number=CARD_VISA, expiry_date='1015', name="Frankie", cvv="123", start_date="1010") def test_error_response_raises_invalid_gateway_request_exception(self): with patch('requests.post') as post: post.return_value = self.create_mock_response( SAMPLE_ERROR_RESPONSE) with self.assertRaises(InvalidGatewayRequestError): self.facade.purchase('1000', 10.24, None, self.card)<|fim▁end|>
start_date="1010") def test_successful_call_returns_valid_dict(self):
<|file_name|>event_span.cc<|end_file_name|><|fim▁begin|>/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/core/profiler/utils/event_span.h" #include <string> #include <utility> #include <vector> #include "absl/algorithm/container.h" #include "absl/container/flat_hash_map.h" #include "absl/strings/match.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "tensorflow/core/lib/gtl/map_util.h" #include "tensorflow/core/platform/types.h" #include "tensorflow/core/profiler/protobuf/op_metrics.pb.h" #include "tensorflow/core/profiler/utils/timespan.h" namespace tensorflow { namespace profiler { namespace { // Represents a boundary of an event. struct EventBoundary { // Time at this boundary. uint64 time_ps; // Type of the event. EventType type; // True if this is the start of the event; False if this is the end. bool is_start; EventBoundary(uint64 time_ps, EventType type, bool is_start) : time_ps(time_ps), type(type), is_start(is_start) {} }; // Returns true if EventBoundary a should appear before EventBoundary b. bool CmpEventBoundaries(const EventBoundary& a, const EventBoundary& b) { if (a.time_ps == b.time_ps) { if (a.is_start == b.is_start) { // Puts the higher-priority type before the lower-priority type if they // have the same time and same boundary type. return a.type > b.type; } else { // Puts the "end" boundary before the "start" boundary if they have the // same time. return !a.is_start; } } // In ascending order of time. return a.time_ps < b.time_ps; } // Generates a vector of event boundaries from the given overlapped_events. std::vector<EventBoundary> GenerateEventBoundaries( const std::vector<EventTypeSpan>& overlapped_events) { std::vector<EventBoundary> boundaries; boundaries.reserve(2 * overlapped_events.size()); for (const auto& event : overlapped_events) { boundaries.push_back( {event.span.begin_ps(), event.type, /*is_start=*/true}); boundaries.push_back({event.span.end_ps(), event.type, /*is_start=*/false}); } absl::c_sort(boundaries, CmpEventBoundaries); return boundaries; } // A class to track the highest priority that an event should be assigned. class PriorityTracker { private: // The current maximum priority. EventType current_max_priority_; // A count for each possible priority. std::vector<int64> priority_count_; public: PriorityTracker() { current_max_priority_ = UNKNOWN_TIME; priority_count_.resize(LAST_EVENT_TYPE + 1, 0); } // Updates current_max_priority_ and priority_count_[] given the boundary. // Returns the new current_max_priority_. 
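// Illustrative walk-through (added comment; the values are hypothetical):
// for two overlapped events A = [0ps, 10ps) with a low-priority type and
// B = [3ps, 7ps) with a higher-priority type, the sorted boundaries are
// start(A)@0, start(B)@3, end(B)@7, end(A)@10; calling Update() on them in
// order makes ToNonOverlappedEvents() emit the spans [0,3), [3,7), [7,10)
// typed A, B, A respectively, so no picosecond is counted twice.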
EventType Update(const EventBoundary& boundary) { EventType event_type = boundary.type; bool is_start = boundary.is_start; if (is_start) { priority_count_[event_type]++; if (event_type > current_max_priority_) { current_max_priority_ = event_type; } } else { priority_count_[event_type]--; if (event_type == current_max_priority_ && priority_count_[event_type] == 0) { // Reduces current_max_priority_ to the first event type (starting from // the highest priority) that has a non-zero count. bool found = false; for (int i = event_type - 1; i >= 0; i--) { if (priority_count_[i] > 0) { current_max_priority_ = static_cast<EventType>(i); found = true; break; } } if (!found) current_max_priority_ = UNKNOWN_TIME; } } return current_max_priority_; } }; std::vector<EventTypeSpan> ToNonOverlappedEvents( const std::vector<EventTypeSpan>& overlapped_events) { std::vector<EventBoundary> event_boundaries = GenerateEventBoundaries(overlapped_events); std::vector<EventTypeSpan> result; if (event_boundaries.empty()) return result; result.reserve(event_boundaries.size()); PriorityTracker priority_tracker; for (int64 i = 0, end = (event_boundaries.size() - 1); i < end; i++) { EventType highest_priority = priority_tracker.Update(event_boundaries[i]); result.push_back({highest_priority, Timespan::FromEndPoints( event_boundaries[i].time_ps, event_boundaries[i + 1].time_ps)}); } return result; } void CombineStepDetails(const StepDetails& src, StepDetails* dst) { dst->AppendMarkers(src.Markers()); dst->AppendEvents(src.Events()); dst->AppendCollectives(src.Collectives()); dst->AggregateDeviceMemoryTransfers(src.DeviceMemoryTransfers()); } EventType ClassifyDeviceCompute(absl::string_view event_name, absl::string_view tensor_shapes) { if (tensor_shapes.empty()) { // Deduces the precision from the name. if (absl::StrContains(event_name, "half") || absl::StrContains(event_name, "fp16")) return DEVICE_COMPUTE_16; else return DEVICE_COMPUTE_32; } else { // Deduces the precision from the shapes. 
if (absl::StrContains(tensor_shapes, "half")) return DEVICE_COMPUTE_16; else return DEVICE_COMPUTE_32; } } constexpr int kNumGenericEventTypes = GenericEventType::kLastGenericEventType - GenericEventType::kFirstGenericEventType + 1; using GenericEventTypeStrMap = absl::flat_hash_map<GenericEventType, absl::string_view>; const GenericEventTypeStrMap& GetGenericEventTypeStrMap() { static const auto* generic_event_type_str_map = new GenericEventTypeStrMap({ {kDeviceCompute, "Device compute"}, {kDeviceToDevice, "Device to device"}, {kDeviceCollectives, "Device collective communication"}, {kHostCompute, "Host compute"}, {kHostPrepare, "Kernel launch"}, {kInput, "Input"}, {kOutput, "Output"}, {kCompile, "Compilation"}, {kAllOthers, "All others"}, }); DCHECK_EQ(generic_event_type_str_map->size(), kNumGenericEventTypes); return *generic_event_type_str_map; } } // namespace absl::string_view GetGenericEventTypeStr(GenericEventType event_type) { return GetGenericEventTypeStrMap().at(event_type); } EventType ClassifyGpuEvent(absl::string_view event_name, absl::string_view tensor_shapes) { if (absl::StartsWithIgnoreCase(event_name, "MEMCPYHtoD")) return HOST_TO_DEVICE; if (absl::StartsWithIgnoreCase(event_name, "MEMCPYDtoH")) return DEVICE_TO_HOST; if (absl::StartsWithIgnoreCase(event_name, "MEMCPYDtoD")) return DEVICE_TO_DEVICE; if (absl::StartsWithIgnoreCase(event_name, "nccl")) { return DEVICE_COLLECTIVES; } return ClassifyDeviceCompute(event_name, tensor_shapes); } EventType ClassifyCpuEvent(absl::string_view event_name, int64 correlation_id, bool has_device) { if (absl::StartsWithIgnoreCase(event_name, "MEMCPYHtoD") || absl::StrContains(event_name, "Infeed")) return HOST_TO_DEVICE; if (absl::StartsWithIgnoreCase(event_name, "MEMCPYHtoH")) return HOST_TO_HOST; // TODO(b/150420972): Separate runtime overhead from actual compute for // CPU-only. 
if (has_device && (correlation_id >= 0 || absl::StartsWithIgnoreCase(event_name, "ExecutorState::Process"))) { return HOST_PREPARE; } if (absl::StartsWithIgnoreCase(event_name, "IteratorGetNext")) return HOST_WAIT_INPUT; return HOST_COMPUTE; } std::string PrintEventType(EventType event_type) { switch (event_type) { case UNKNOWN_TIME: return "unknown_time"; case HOST_COMPUTE: return "host_compute"; case HOST_COMPILE: return "host_compile"; case HOST_TO_HOST: return "host_to_host"; case HOST_TO_DEVICE: return "host_to_device"; case HOST_PREPARE: return "host_prepare"; case DEVICE_COLLECTIVES: return "device_collectives"; case HOST_WAIT_INPUT: return "host_wait_input"; case DEVICE_TO_DEVICE: return "device_to_device"; case DEVICE_TO_HOST: return "device_to_host"; case DEVICE_COMPUTE_32: return "device_compute_32"; case DEVICE_COMPUTE_16: return "device_compute_16"; case DEVICE_WAIT_DEVICE: return "device_wait_device"; case DEVICE_WAIT_HOST: return "device_wait_host"; default: return "unexpected"; } } std::string PrintEventTypeSpan(const EventTypeSpan& event_type_span) { return absl::StrCat("(", PrintEventType(event_type_span.type), ", ", event_type_span.span.DebugString(), ")"); } absl::string_view PrintStepMarkerType(StepMarkerType type) { switch (type) { case StepMarkerType::kExplicitHostStepMarker: return "ExplicitHostStepMarker"; case StepMarkerType::kImplicitHostStepMarker: return "ImplicitHostStepMarker"; case StepMarkerType::kDeviceStepMarker: return "DeviceStepMarker"; } } std::string PrintStepMarker(const StepMarker& step_marker) { return absl::StrCat("(", PrintStepMarkerType(step_marker.type), ", ", step_marker.event_name, ", ", step_marker.span.DebugString(), ")"); } std::string PrintStepEvents(const StepEvents& step_events) { std::vector<int64> step_ids; step_ids.reserve(step_events.size()); for (const auto& id_details : step_events) { step_ids.push_back(id_details.first); } absl::c_sort(step_ids); std::string result = "{"; for (auto id : step_ids) { absl::StrAppend(&result, "\n"); auto* details = gtl::FindOrNull(step_events, id); std::string details_str = details ? details->DebugString() : "()"; absl::StrAppend(&result, id, ":", details_str); } return absl::StrCat(result, "\n}"); } void CombineStepEvents(const StepEvents& src, StepEvents* dst) { for (const auto& step_details : src) { int64 step_id = step_details.first; const StepDetails& src_details = step_details.second; StepDetails* dst_details = &(*dst)[step_id]; CombineStepDetails(src_details, dst_details); } } // Converts from overlapped step-events to non-overlapped step-events. 
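// Hypothetical example (added comment): if a step's overlapped events
// contain a DEVICE_COMPUTE_32 span nested inside a larger
// DEVICE_COLLECTIVES span, the conversion below splits the time so that
// the nested portion is attributed to whichever of the two event types
// has the higher priority, and each picosecond of the step is counted
// exactly once.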
StepEvents ToNonOverlappedStepEvents(const StepEvents& overlapped_step_events) { StepEvents non_overlapped_step_events; for (const auto& step_events : overlapped_step_events) { const auto& step_id = step_events.first; const auto& step_details = step_events.second; *non_overlapped_step_events[step_id].MutableMarkers() = step_details.Markers(); *non_overlapped_step_events[step_id].MutableEvents() = ToNonOverlappedEvents(step_details.Events()); *non_overlapped_step_events[step_id].MutableCollectives() = step_details.Collectives(); *non_overlapped_step_events[step_id].MutableDeviceMemoryTransfers() = step_details.DeviceMemoryTransfers(); } return non_overlapped_step_events; } void StepDetails::AddMarker(const StepMarker& m) { markers_.push_back(m); } void StepDetails::AddEvent(const EventTypeSpan& e) { events_.push_back(e); } void StepDetails::AppendMarkers(const std::vector<StepMarker>& other_markers) { markers_.insert(markers_.end(), other_markers.begin(), other_markers.end()); } void StepDetails::AppendEvents(const std::vector<EventTypeSpan>& other_events) { events_.insert(events_.end(), other_events.begin(), other_events.end()); } void StepDetails::AppendCollectives( const absl::flat_hash_map<uint32, AllReduceDbResult>& collectives) { for (const auto& it : collectives) { collectives_[it.first] = it.second; } } void StepDetails::AggregateDeviceMemoryTransfers( const std::vector<DeviceMemoryTransfer> device_memory_transfers) { if (device_memory_transfers.size() != device_memory_transfers_.size()) { return; // Sanity check. } for (size_t i = 0; i < device_memory_transfers.size(); ++i) { device_memory_transfers_[i].set_occurrence( device_memory_transfers_[i].occurrence() + device_memory_transfers[i].occurrence()); device_memory_transfers_[i].set_bytes_transferred( device_memory_transfers_[i].bytes_transferred() + device_memory_transfers[i].bytes_transferred()); device_memory_transfers_[i].set_time_us( device_memory_transfers_[i].time_us() + device_memory_transfers[i].time_us()); } } void StepDetails::AddCollectiveOpEvent(uint64 core_id, const AllReduceInfo& e) { *collectives_[core_id].add_all_reduce_info() = e; } void StepDetails::AddDeviceMemoryTransferEvent(EventType event_type, const Timespan& time_span, uint64 bytes) { int index = 0; switch (event_type) { case HOST_TO_DEVICE: index = 0; break; case DEVICE_TO_HOST: index = 1; break; case DEVICE_TO_DEVICE: index = 2; break; default: return; } device_memory_transfers_[index].set_occurrence( device_memory_transfers_[index].occurrence() + 1); device_memory_transfers_[index].set_time_us( device_memory_transfers_[index].time_us() + time_span.duration_ps() / 1000000.0); device_memory_transfers_[index].set_bytes_transferred( device_memory_transfers_[index].bytes_transferred() + bytes); } Timespan StepDetails::StepTime() const { Timespan max_host_step_time; Timespan max_device_step_time; for (const auto& marker : markers_) { Timespan& cur_max_step_time = marker.type == StepMarkerType::kDeviceStepMarker ? max_device_step_time : max_host_step_time; const Timespan& new_step_time = marker.span; if (new_step_time.duration_ps() > cur_max_step_time.duration_ps()) cur_max_step_time = new_step_time; } // CPU-only profile. if (max_device_step_time.Empty()) { return max_host_step_time; } // If the host step time includes the device step time, use the host step // time. This covers the case where the device is synchronized at the end of // each step. 
if (max_host_step_time.Includes(max_device_step_time)) { return max_host_step_time; } return max_device_step_time; } std::string StepDetails::DebugString() const { std::string result = "(["; for (int i = 0, end = markers_.size(); i < end; i++) { if (i > 0) absl::StrAppend(&result, ", "); absl::StrAppend(&result, PrintStepMarker(markers_[i])); } absl::StrAppend(&result, "], ["); for (int i = 0, end = events_.size(); i < end; i++) { if (i > 0) absl::StrAppend(&result, ", "); absl::StrAppend(&result, PrintEventTypeSpan(events_[i])); } return absl::StrCat(result, "])"); } bool StepDetails::operator==(const StepDetails& other) const { const auto& other_markers = other.Markers(); if (markers_.size() != other_markers.size()) return false; for (uint64 i = 0; i < markers_.size(); i++) { if (markers_[i] != other_markers[i]) return false; } const auto& other_events = other.Events(); if (events_.size() != other_events.size()) return false; for (uint64 i = 0; i < events_.size(); i++) { if (events_[i] != other_events[i]) return false; } return true; } <|fim▁hole|> if (a.size() != b.size()) return false; for (const auto& id_details : a) { const auto a_id = id_details.first; const auto& a_details = id_details.second; const auto* b_details = gtl::FindOrNull(b, a_id); if (b_details == nullptr) return false; if (a_details != *b_details) return false; } return true; } PrecisionStats ComputePrecisionStats( const StepEvents& nonoverlapped_step_events) { int64 compute_32bit_ps = 0; int64 compute_16bit_ps = 0; for (const auto& id_details : nonoverlapped_step_events) { for (const auto& event : id_details.second.Events()) { switch (event.type) { case DEVICE_COMPUTE_32: compute_32bit_ps += event.span.duration_ps(); break; case DEVICE_COMPUTE_16: compute_16bit_ps += event.span.duration_ps(); break; default: break; } } } PrecisionStats precision_stats; precision_stats.set_compute_32bit_ps(compute_32bit_ps); precision_stats.set_compute_16bit_ps(compute_16bit_ps); return precision_stats; } } // namespace profiler } // namespace tensorflow<|fim▁end|>
bool operator==(const StepEvents& a, const StepEvents& b) {
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Django settings for breeze project. from unipath import Path DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', '[email protected]'), ) MANAGERS = ADMINS PROJECT_DIR = Path(__file__).ancestor(3) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'breeze', #PROJECT_DIR.child('sqlite').child('data.sqlite'), # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: 'USER': 'breeze_user', 'PASSWORD': 'time2shine', 'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP. 'PORT': '', # Set to empty string for default. } } # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = [] # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # In a Windows environment this must be set to your system time zone. TIME_ZONE = 'America/New_York' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/var/www/example.com/media/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://example.com/media/", "http://media.example.com/" MEDIA_URL = '' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/var/www/example.com/static/" STATIC_ROOT = '' # URL prefix for static files. # Example: "http://example.com/static/", "http://static.example.com/" STATIC_URL = '/static/' # Additional locations of static files STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', 'compressor.finders.CompressorFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) COMPRESS_ENABLED = True COMPRESS_OFFLINE = False COMPRESS_CSS_FILTERS = [ #creates absolute urls from relative ones 'compressor.filters.css_default.CssAbsoluteFilter', #css minimizer 'compressor.filters.cssmin.CSSMinFilter' ] COMPRESS_JS_FILTERS = [ 'compressor.filters.jsmin.JSMinFilter' ] COMPRESS_PRECOMPILERS = ( ('text/coffeescript', 'coffee --compile --stdio'), ('text/x-sass', 'sass {infile} {outfile}'), ('text/x-scss', 'sass --scss {infile} {outfile}'), ) # Make this unique, and don't share it with anybody. 
SECRET_KEY = '#+i&uj1rny=b8a0^9^(umm##^7v%myiz^@jebwbn6$-yj13tco' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Uncomment the next line for simple clickjacking protection: # 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'breeze.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'breeze.wsgi.application' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. PROJECT_DIR.child("templates") ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites',<|fim▁hole|> # Uncomment the next line to enable the admin: 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', 'compressor', 'lightside', 'api', 'tasks', 'breeze' ) # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } }<|fim▁end|>
'django.contrib.messages', 'django.contrib.staticfiles',
<|file_name|>mixins.py<|end_file_name|><|fim▁begin|>from django.test import TestCase from ...models import Office class PublishedMixinTest(TestCase): def test_only_published_manager_and_queryset_default_datetime(self): data = [ dict(published=False, address='office', office='not_published'), dict(office='published', address='some adress'), dict(office='published again', address='some address') ] published_offices = [] not_published_offices = [] for item in data: office = Office.objects.create(**item) if office.is_published(): published_offices.append(office) else: not_published_offices.append(office) self.assertQuerysetEqual( Office.objects.published(), reversed(published_offices), transform=lambda o: o) self.assertQuerysetEqual(<|fim▁hole|> Office.published_objects.all(), reversed(published_offices), transform=lambda o: o)<|fim▁end|>
<|file_name|>filterintra_predictors_test.cc<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2016, Alliance for Open Media. All rights reserved * * This source code is subject to the terms of the BSD 2 Clause License and * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License * was not distributed with this source code in the LICENSE file, you can * obtain it at www.aomedia.org/license/software. If the Alliance for Open * Media Patent License 1.0 was not distributed with this source code in the * PATENTS file, you can obtain it at www.aomedia.org/license/patent. */ #include "third_party/googletest/src/googletest/include/gtest/gtest.h" #include "./av1_rtcd.h" #include "test/acm_random.h" #include "test/clear_system_state.h" #include "test/register_state_check.h" #include "test/util.h" #include "av1/common/enums.h" namespace { using std::tr1::tuple; using libaom_test::ACMRandom; typedef void (*Predictor)(uint8_t *dst, ptrdiff_t stride, int bs, const uint8_t *above, const uint8_t *left); // Note: // Test parameter list: // Reference predictor, optimized predictor, prediction mode, block size // typedef tuple<Predictor, Predictor, int> PredFuncMode; typedef tuple<PredFuncMode, int> PredParams; #if CONFIG_AOM_HIGHBITDEPTH typedef void (*HbdPredictor)(uint16_t *dst, ptrdiff_t stride, int bs, const uint16_t *above, const uint16_t *left, int bd); // Note: // Test parameter list: // Reference predictor, optimized predictor, prediction mode, block size, // bit depth // typedef tuple<HbdPredictor, HbdPredictor, int> HbdPredFuncMode; typedef tuple<HbdPredFuncMode, int, int> HbdPredParams; #endif const int MaxBlkSize = 32; // By default, disable speed test #define PREDICTORS_SPEED_TEST (0) #if PREDICTORS_SPEED_TEST const int MaxTestNum = 100000; #else const int MaxTestNum = 100; #endif class AV1FilterIntraPredOptimzTest : public ::testing::TestWithParam<PredParams> { public:<|fim▁hole|> predFunc_ = std::tr1::get<1>(funcMode); mode_ = std::tr1::get<2>(funcMode); blockSize_ = GET_PARAM(1); alloc_ = new uint8_t[3 * MaxBlkSize + 2]; predRef_ = new uint8_t[MaxBlkSize * MaxBlkSize]; pred_ = new uint8_t[MaxBlkSize * MaxBlkSize]; } virtual void TearDown() { delete[] alloc_; delete[] predRef_; delete[] pred_; libaom_test::ClearSystemState(); } protected: void RunTest() const { int tstIndex = 0; int stride = blockSize_; uint8_t *left = alloc_; uint8_t *above = alloc_ + MaxBlkSize + 1; while (tstIndex < MaxTestNum) { PrepareBuffer(); predFuncRef_(predRef_, stride, blockSize_, &above[1], left); ASM_REGISTER_STATE_CHECK( predFunc_(pred_, stride, blockSize_, &above[1], left)); DiffPred(tstIndex); tstIndex += 1; } } void RunSpeedTestC() const { int tstIndex = 0; int stride = blockSize_; uint8_t *left = alloc_; uint8_t *above = alloc_ + MaxBlkSize + 1; PrepareBuffer(); while (tstIndex < MaxTestNum) { predFuncRef_(predRef_, stride, blockSize_, &above[1], left); tstIndex += 1; } } void RunSpeedTestSSE() const { int tstIndex = 0; int stride = blockSize_; uint8_t *left = alloc_; uint8_t *above = alloc_ + MaxBlkSize + 1; PrepareBuffer(); while (tstIndex < MaxTestNum) { predFunc_(predRef_, stride, blockSize_, &above[1], left); tstIndex += 1; } } private: void PrepareBuffer() const { ACMRandom rnd(ACMRandom::DeterministicSeed()); int i = 0; while (i < (3 * MaxBlkSize + 2)) { alloc_[i] = rnd.Rand8(); i += 1; } } void DiffPred(int testNum) const { int i = 0; while (i < blockSize_ * blockSize_) { EXPECT_EQ(predRef_[i], pred_[i]) << "Error at position: " << i << " " << "Block size: " << blockSize_ << " " << 
"Test number: " << testNum; i += 1; } } Predictor predFunc_; Predictor predFuncRef_; int mode_; int blockSize_; uint8_t *alloc_; uint8_t *pred_; uint8_t *predRef_; }; #if CONFIG_AOM_HIGHBITDEPTH class AV1HbdFilterIntraPredOptimzTest : public ::testing::TestWithParam<HbdPredParams> { public: virtual ~AV1HbdFilterIntraPredOptimzTest() {} virtual void SetUp() { HbdPredFuncMode funcMode = GET_PARAM(0); predFuncRef_ = std::tr1::get<0>(funcMode); predFunc_ = std::tr1::get<1>(funcMode); mode_ = std::tr1::get<2>(funcMode); blockSize_ = GET_PARAM(1); bd_ = GET_PARAM(2); alloc_ = new uint16_t[3 * MaxBlkSize + 2]; predRef_ = new uint16_t[MaxBlkSize * MaxBlkSize]; pred_ = new uint16_t[MaxBlkSize * MaxBlkSize]; } virtual void TearDown() { delete[] alloc_; delete[] predRef_; delete[] pred_; libaom_test::ClearSystemState(); } protected: void RunTest() const { int tstIndex = 0; int stride = blockSize_; uint16_t *left = alloc_; uint16_t *above = alloc_ + MaxBlkSize + 1; while (tstIndex < MaxTestNum) { PrepareBuffer(); predFuncRef_(predRef_, stride, blockSize_, &above[1], left, bd_); ASM_REGISTER_STATE_CHECK( predFunc_(pred_, stride, blockSize_, &above[1], left, bd_)); DiffPred(tstIndex); tstIndex += 1; } } void RunSpeedTestC() const { int tstIndex = 0; int stride = blockSize_; uint16_t *left = alloc_; uint16_t *above = alloc_ + MaxBlkSize + 1; PrepareBuffer(); while (tstIndex < MaxTestNum) { predFuncRef_(predRef_, stride, blockSize_, &above[1], left, bd_); tstIndex += 1; } } void RunSpeedTestSSE() const { int tstIndex = 0; int stride = blockSize_; uint16_t *left = alloc_; uint16_t *above = alloc_ + MaxBlkSize + 1; PrepareBuffer(); while (tstIndex < MaxTestNum) { predFunc_(predRef_, stride, blockSize_, &above[1], left, bd_); tstIndex += 1; } } private: void PrepareBuffer() const { ACMRandom rnd(ACMRandom::DeterministicSeed()); int i = 0; while (i < (3 * MaxBlkSize + 2)) { alloc_[i] = rnd.Rand16() & ((1 << bd_) - 1); i += 1; } } void DiffPred(int testNum) const { int i = 0; while (i < blockSize_ * blockSize_) { EXPECT_EQ(predRef_[i], pred_[i]) << "Error at position: " << i << " " << "Block size: " << blockSize_ << " " << "Bit depth: " << bd_ << " " << "Test number: " << testNum; i += 1; } } HbdPredictor predFunc_; HbdPredictor predFuncRef_; int mode_; int blockSize_; int bd_; uint16_t *alloc_; uint16_t *pred_; uint16_t *predRef_; }; #endif // CONFIG_AOM_HIGHBITDEPTH TEST_P(AV1FilterIntraPredOptimzTest, BitExactCheck) { RunTest(); } #if PREDICTORS_SPEED_TEST TEST_P(AV1FilterIntraPredOptimzTest, SpeedCheckC) { RunSpeedTestC(); } TEST_P(AV1FilterIntraPredOptimzTest, SpeedCheckSSE) { RunSpeedTestSSE(); } #endif #if CONFIG_AOM_HIGHBITDEPTH TEST_P(AV1HbdFilterIntraPredOptimzTest, BitExactCheck) { RunTest(); } #if PREDICTORS_SPEED_TEST TEST_P(AV1HbdFilterIntraPredOptimzTest, SpeedCheckC) { RunSpeedTestC(); } TEST_P(AV1HbdFilterIntraPredOptimzTest, SpeedCheckSSE) { RunSpeedTestSSE(); } #endif // PREDICTORS_SPEED_TEST #endif // CONFIG_AOM_HIGHBITDEPTH using std::tr1::make_tuple; const PredFuncMode kPredFuncMdArray[] = { make_tuple(av1_dc_filter_predictor_c, av1_dc_filter_predictor_sse4_1, DC_PRED), make_tuple(av1_v_filter_predictor_c, av1_v_filter_predictor_sse4_1, V_PRED), make_tuple(av1_h_filter_predictor_c, av1_h_filter_predictor_sse4_1, H_PRED), make_tuple(av1_d45_filter_predictor_c, av1_d45_filter_predictor_sse4_1, D45_PRED), make_tuple(av1_d135_filter_predictor_c, av1_d135_filter_predictor_sse4_1, D135_PRED), make_tuple(av1_d117_filter_predictor_c, av1_d117_filter_predictor_sse4_1, D117_PRED), 
make_tuple(av1_d153_filter_predictor_c, av1_d153_filter_predictor_sse4_1, D153_PRED), make_tuple(av1_d207_filter_predictor_c, av1_d207_filter_predictor_sse4_1, D207_PRED), make_tuple(av1_d63_filter_predictor_c, av1_d63_filter_predictor_sse4_1, D63_PRED), make_tuple(av1_tm_filter_predictor_c, av1_tm_filter_predictor_sse4_1, TM_PRED), }; const int kBlkSize[] = { 4, 8, 16, 32 }; INSTANTIATE_TEST_CASE_P( SSE4_1, AV1FilterIntraPredOptimzTest, ::testing::Combine(::testing::ValuesIn(kPredFuncMdArray), ::testing::ValuesIn(kBlkSize))); #if CONFIG_AOM_HIGHBITDEPTH const HbdPredFuncMode kHbdPredFuncMdArray[] = { make_tuple(av1_highbd_dc_filter_predictor_c, av1_highbd_dc_filter_predictor_sse4_1, DC_PRED), make_tuple(av1_highbd_v_filter_predictor_c, av1_highbd_v_filter_predictor_sse4_1, V_PRED), make_tuple(av1_highbd_h_filter_predictor_c, av1_highbd_h_filter_predictor_sse4_1, H_PRED), make_tuple(av1_highbd_d45_filter_predictor_c, av1_highbd_d45_filter_predictor_sse4_1, D45_PRED), make_tuple(av1_highbd_d135_filter_predictor_c, av1_highbd_d135_filter_predictor_sse4_1, D135_PRED), make_tuple(av1_highbd_d117_filter_predictor_c, av1_highbd_d117_filter_predictor_sse4_1, D117_PRED), make_tuple(av1_highbd_d153_filter_predictor_c, av1_highbd_d153_filter_predictor_sse4_1, D153_PRED), make_tuple(av1_highbd_d207_filter_predictor_c, av1_highbd_d207_filter_predictor_sse4_1, D207_PRED), make_tuple(av1_highbd_d63_filter_predictor_c, av1_highbd_d63_filter_predictor_sse4_1, D63_PRED), make_tuple(av1_highbd_tm_filter_predictor_c, av1_highbd_tm_filter_predictor_sse4_1, TM_PRED), }; const int kBd[] = { 10, 12 }; INSTANTIATE_TEST_CASE_P( SSE4_1, AV1HbdFilterIntraPredOptimzTest, ::testing::Combine(::testing::ValuesIn(kHbdPredFuncMdArray), ::testing::ValuesIn(kBlkSize), ::testing::ValuesIn(kBd))); #endif // CONFIG_AOM_HIGHBITDEPTH } // namespace<|fim▁end|>
virtual ~AV1FilterIntraPredOptimzTest() {} virtual void SetUp() { PredFuncMode funcMode = GET_PARAM(0); predFuncRef_ = std::tr1::get<0>(funcMode);
<|file_name|>find_distributions_urlbuilder.go<|end_file_name|><|fim▁begin|>// Code generated by go-swagger; DO NOT EDIT. package distribution // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the generate command import ( "errors" "net/url" golangswaggerpaths "path" "strings" "github.com/go-openapi/swag" ) // FindDistributionsURL generates an URL for the find distributions operation type FindDistributionsURL struct { FlagID int64 SegmentID int64 _basePath string // avoid unkeyed usage _ struct{} } // WithBasePath sets the base path for this url builder, only required when it's different from the // base path specified in the swagger spec. // When the value of the base path is an empty string func (o *FindDistributionsURL) WithBasePath(bp string) *FindDistributionsURL { o.SetBasePath(bp) return o } // SetBasePath sets the base path for this url builder, only required when it's different from the // base path specified in the swagger spec. // When the value of the base path is an empty string func (o *FindDistributionsURL) SetBasePath(bp string) { o._basePath = bp } // Build a url path and query string func (o *FindDistributionsURL) Build() (*url.URL, error) { var _result url.URL var _path = "/flags/{flagID}/segments/{segmentID}/distributions" flagID := swag.FormatInt64(o.FlagID) if flagID != "" { _path = strings.Replace(_path, "{flagID}", flagID, -1) } else { return nil, errors.New("flagId is required on FindDistributionsURL") } segmentID := swag.FormatInt64(o.SegmentID) if segmentID != "" { _path = strings.Replace(_path, "{segmentID}", segmentID, -1) } else { return nil, errors.New("segmentId is required on FindDistributionsURL") } _basePath := o._basePath if _basePath == "" { _basePath = "/api/v1" } _result.Path = golangswaggerpaths.Join(_basePath, _path) return &_result, nil } // Must is a helper function to panic when the url builder returns an error func (o *FindDistributionsURL) Must(u *url.URL, err error) *url.URL { if err != nil { panic(err) } if u == nil { panic("url can't be nil") } return u } // String returns the string representation of the path with query string func (o *FindDistributionsURL) String() string { return o.Must(o.Build()).String() } // BuildFull builds a full url with scheme, host, path and query string func (o *FindDistributionsURL) BuildFull(scheme, host string) (*url.URL, error) { if scheme == "" { return nil, errors.New("scheme is required for a full url on FindDistributionsURL") } if host == "" { return nil, errors.New("host is required for a full url on FindDistributionsURL") } base, err := o.Build() if err != nil {<|fim▁hole|> base.Host = host return base, nil } // StringFull returns the string representation of a complete url func (o *FindDistributionsURL) StringFull(scheme, host string) string { return o.Must(o.BuildFull(scheme, host)).String() }<|fim▁end|>
return nil, err } base.Scheme = scheme
<|file_name|>LetsEncryptConfig.py<|end_file_name|><|fim▁begin|>ROOT = '/.well-known/acme-challenge' ENDPOINT = '/k9s7WeOPg3HdSjwlAqEVRxnezsGGe-CFOwPfOcU3VgU'<|fim▁hole|><|fim▁end|>
RESPONSE = 'k9s7WeOPg3HdSjwlAqEVRxnezsGGe-CFOwPfOcU3VgU.QBkCfzPq0mKXIJSktgl4_b7psKazh3MSZ8juWnZbJbg'
<|file_name|>zepto.touch.js<|end_file_name|><|fim▁begin|>define(function(require, exports, module) { require('vendor/zepto/zepto'); var $ = window.Zepto; // Zepto.js // (c) 2010-2014 Thomas Fuchs // Zepto.js may be freely distributed under the MIT license. var touch = {}, touchTimeout, tapTimeout, swipeTimeout, longTapTimeout, longTapDelay = 750, gesture; function swipeDirection(x1, x2, y1, y2) { return Math.abs(x1 - x2) >= Math.abs(y1 - y2) ? (x1 - x2 > 0 ? 'Left' : 'Right') : (y1 - y2 > 0 ? 'Up' : 'Down') } function longTap() { longTapTimeout = null; if (touch.last) { touch.el.trigger('longTap'); touch = {} } } function cancelLongTap() { if (longTapTimeout) clearTimeout(longTapTimeout); longTapTimeout = null; } function cancelAll() { if (touchTimeout) clearTimeout(touchTimeout); if (tapTimeout) clearTimeout(tapTimeout); if (swipeTimeout) clearTimeout(swipeTimeout); if (longTapTimeout) clearTimeout(longTapTimeout); touchTimeout = tapTimeout = swipeTimeout = longTapTimeout = null; touch = {} } function isPrimaryTouch(event) { return (event.pointerType == 'touch' || event.pointerType == event.MSPOINTER_TYPE_TOUCH) && event.isPrimary; } function isPointerEventType(e, type) { return (e.type == 'pointer' + type || e.type.toLowerCase() == 'mspointer' + type); } $(document).ready(function() { var now, delta, deltaX = 0, deltaY = 0, firstTouch, _isPointerType; if ('MSGesture' in window) { gesture = new MSGesture(); gesture.target = document.body; } $(document) .on('MSGestureEnd', function(e) { var swipeDirectionFromVelocity = e.velocityX > 1 ? 'Right' : e.velocityX < -1 ? 'Left' : e.velocityY > 1 ? 'Down' : e.velocityY < -1 ? 'Up' : null; if (swipeDirectionFromVelocity) { touch.el.trigger('swipe'); touch.el.trigger('swipe' + swipeDirectionFromVelocity); } }) .on('touchstart MSPointerDown pointerdown', function(e) { if ((_isPointerType = isPointerEventType(e, 'down')) && !isPrimaryTouch(e)) return; firstTouch = _isPointerType ? e : e.touches[0]; if (e.touches && e.touches.length === 1 && touch.x2) { // Clear out touch movement data if we have it sticking around<|fim▁hole|> touch.x2 = undefined; touch.y2 = undefined; } now = Date.now(); delta = now - (touch.last || now); touch.el = $('tagName' in firstTouch.target ? firstTouch.target : firstTouch.target.parentNode); touchTimeout && clearTimeout(touchTimeout); touch.x1 = firstTouch.pageX; touch.y1 = firstTouch.pageY; if (delta > 0 && delta <= 250) touch.isDoubleTap = true; touch.last = now; longTapTimeout = setTimeout(longTap, longTapDelay); // adds the current touch contact for IE gesture recognition if (gesture && _isPointerType) gesture.addPointer(e.pointerId); }) .on('touchmove MSPointerMove pointermove', function(e) { if ((_isPointerType = isPointerEventType(e, 'move')) && !isPrimaryTouch(e)) return firstTouch = _isPointerType ? 
e : e.touches[0]; cancelLongTap(); touch.x2 = firstTouch.pageX; touch.y2 = firstTouch.pageY; deltaX += Math.abs(touch.x1 - touch.x2); deltaY += Math.abs(touch.y1 - touch.y2); }) .on('touchend MSPointerUp pointerup', function(e) { if ((_isPointerType = isPointerEventType(e, 'up')) && !isPrimaryTouch(e)) return; cancelLongTap(); // swipe if ((touch.x2 && Math.abs(touch.x1 - touch.x2) > 30) || (touch.y2 && Math.abs(touch.y1 - touch.y2) > 30)) swipeTimeout = setTimeout(function() { touch.el.trigger('swipe'); touch.el.trigger('swipe' + (swipeDirection(touch.x1, touch.x2, touch.y1, touch.y2))) touch = {} }, 0); // normal tap else if ('last' in touch) // don't fire tap when delta position changed by more than 30 pixels, // for instance when moving to a point and back to origin if (deltaX < 30 && deltaY < 30) { // delay by one tick so we can cancel the 'tap' event if 'scroll' fires // ('tap' fires before 'scroll') tapTimeout = setTimeout(function() { // trigger universal 'tap' with the option to cancelTouch() // (cancelTouch cancels processing of single vs double taps for faster 'tap' response) var event = $.Event('tap'); event.cancelTouch = cancelAll; touch.el.trigger(event); // trigger double tap immediately if (touch.isDoubleTap) { if (touch.el) touch.el.trigger('doubleTap'); touch = {} } // trigger single tap after 250ms of inactivity else { touchTimeout = setTimeout(function() { touchTimeout = null; if (touch.el) touch.el.trigger('singleTap'); touch = {} }, 250) } }, 0) } else { touch = {} } deltaX = deltaY = 0; }) // when the browser window loses focus, // for example when a modal dialog is shown, // cancel all ongoing events .on('touchcancel MSPointerCancel pointercancel', cancelAll); // scrolling the window indicates intention of the user // to scroll, not tap or swipe, so cancel all ongoing events $(window).on('scroll', cancelAll); }); ['swipe', 'swipeLeft', 'swipeRight', 'swipeUp', 'swipeDown', 'doubleTap', 'tap', 'singleTap', 'longTap'].forEach(function(eventName) { $.fn[eventName] = function(callback) { return this.on(eventName, callback) } }); });<|fim▁end|>
// This can occur if touchcancel doesn't fire due to preventDefault, etc.
<|file_name|>test_sensor.py<|end_file_name|><|fim▁begin|>"""Test sensor of Brother integration.""" from datetime import datetime, timedelta import json from homeassistant.components.brother.const import UNIT_PAGES from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, DEVICE_CLASS_TIMESTAMP, PERCENTAGE, STATE_UNAVAILABLE, ) from homeassistant.setup import async_setup_component from homeassistant.util.dt import UTC, utcnow from tests.async_mock import patch from tests.common import async_fire_time_changed, load_fixture from tests.components.brother import init_integration ATTR_REMAINING_PAGES = "remaining_pages" ATTR_COUNTER = "counter" async def test_sensors(hass): """Test states of the sensors.""" test_time = datetime(2019, 11, 11, 9, 10, 32, tzinfo=UTC) with patch( "homeassistant.components.brother.sensor.utcnow", return_value=test_time ): await init_integration(hass) registry = await hass.helpers.entity_registry.async_get_registry() state = hass.states.get("sensor.hl_l2340dw_status") assert state assert state.attributes.get(ATTR_ICON) == "mdi:printer" assert state.state == "waiting" entry = registry.async_get("sensor.hl_l2340dw_status") assert entry assert entry.unique_id == "0123456789_status" state = hass.states.get("sensor.hl_l2340dw_black_toner_remaining") assert state assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d-nozzle" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "75" entry = registry.async_get("sensor.hl_l2340dw_black_toner_remaining") assert entry assert entry.unique_id == "0123456789_black_toner_remaining" state = hass.states.get("sensor.hl_l2340dw_cyan_toner_remaining") assert state assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d-nozzle" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "10" entry = registry.async_get("sensor.hl_l2340dw_cyan_toner_remaining") assert entry assert entry.unique_id == "0123456789_cyan_toner_remaining" state = hass.states.get("sensor.hl_l2340dw_magenta_toner_remaining") assert state assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d-nozzle" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "8" entry = registry.async_get("sensor.hl_l2340dw_magenta_toner_remaining") assert entry assert entry.unique_id == "0123456789_magenta_toner_remaining" state = hass.states.get("sensor.hl_l2340dw_yellow_toner_remaining") assert state assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d-nozzle" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "2" entry = registry.async_get("sensor.hl_l2340dw_yellow_toner_remaining") assert entry assert entry.unique_id == "0123456789_yellow_toner_remaining" state = hass.states.get("sensor.hl_l2340dw_drum_remaining_life") assert state assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut" assert state.attributes.get(ATTR_REMAINING_PAGES) == 11014 assert state.attributes.get(ATTR_COUNTER) == 986 assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "92" entry = registry.async_get("sensor.hl_l2340dw_drum_remaining_life") assert entry assert entry.unique_id == "0123456789_drum_remaining_life" state = hass.states.get("sensor.hl_l2340dw_black_drum_remaining_life") assert state assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut" assert state.attributes.get(ATTR_REMAINING_PAGES) == 16389 assert state.attributes.get(ATTR_COUNTER) == 1611 assert 
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "92" entry = registry.async_get("sensor.hl_l2340dw_black_drum_remaining_life") assert entry assert entry.unique_id == "0123456789_black_drum_remaining_life" state = hass.states.get("sensor.hl_l2340dw_cyan_drum_remaining_life") assert state assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut" assert state.attributes.get(ATTR_REMAINING_PAGES) == 16389 assert state.attributes.get(ATTR_COUNTER) == 1611<|fim▁hole|> assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "92" entry = registry.async_get("sensor.hl_l2340dw_cyan_drum_remaining_life") assert entry assert entry.unique_id == "0123456789_cyan_drum_remaining_life" state = hass.states.get("sensor.hl_l2340dw_magenta_drum_remaining_life") assert state assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut" assert state.attributes.get(ATTR_REMAINING_PAGES) == 16389 assert state.attributes.get(ATTR_COUNTER) == 1611 assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "92" entry = registry.async_get("sensor.hl_l2340dw_magenta_drum_remaining_life") assert entry assert entry.unique_id == "0123456789_magenta_drum_remaining_life" state = hass.states.get("sensor.hl_l2340dw_yellow_drum_remaining_life") assert state assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut" assert state.attributes.get(ATTR_REMAINING_PAGES) == 16389 assert state.attributes.get(ATTR_COUNTER) == 1611 assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "92" entry = registry.async_get("sensor.hl_l2340dw_yellow_drum_remaining_life") assert entry assert entry.unique_id == "0123456789_yellow_drum_remaining_life" state = hass.states.get("sensor.hl_l2340dw_fuser_remaining_life") assert state assert state.attributes.get(ATTR_ICON) == "mdi:water-outline" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "97" entry = registry.async_get("sensor.hl_l2340dw_fuser_remaining_life") assert entry assert entry.unique_id == "0123456789_fuser_remaining_life" state = hass.states.get("sensor.hl_l2340dw_belt_unit_remaining_life") assert state assert state.attributes.get(ATTR_ICON) == "mdi:current-ac" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "97" entry = registry.async_get("sensor.hl_l2340dw_belt_unit_remaining_life") assert entry assert entry.unique_id == "0123456789_belt_unit_remaining_life" state = hass.states.get("sensor.hl_l2340dw_pf_kit_1_remaining_life") assert state assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.state == "98" entry = registry.async_get("sensor.hl_l2340dw_pf_kit_1_remaining_life") assert entry assert entry.unique_id == "0123456789_pf_kit_1_remaining_life" state = hass.states.get("sensor.hl_l2340dw_page_counter") assert state assert state.attributes.get(ATTR_ICON) == "mdi:file-document-outline" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES assert state.state == "986" entry = registry.async_get("sensor.hl_l2340dw_page_counter") assert entry assert entry.unique_id == "0123456789_page_counter" state = hass.states.get("sensor.hl_l2340dw_duplex_unit_pages_counter") assert state assert state.attributes.get(ATTR_ICON) == "mdi:file-document-outline" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES assert state.state == "538" entry = 
registry.async_get("sensor.hl_l2340dw_duplex_unit_pages_counter") assert entry assert entry.unique_id == "0123456789_duplex_unit_pages_counter" state = hass.states.get("sensor.hl_l2340dw_b_w_counter") assert state assert state.attributes.get(ATTR_ICON) == "mdi:file-document-outline" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES assert state.state == "709" entry = registry.async_get("sensor.hl_l2340dw_b_w_counter") assert entry assert entry.unique_id == "0123456789_b/w_counter" state = hass.states.get("sensor.hl_l2340dw_color_counter") assert state assert state.attributes.get(ATTR_ICON) == "mdi:file-document-outline" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES assert state.state == "902" entry = registry.async_get("sensor.hl_l2340dw_color_counter") assert entry assert entry.unique_id == "0123456789_color_counter" state = hass.states.get("sensor.hl_l2340dw_uptime") assert state assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TIMESTAMP assert state.state == "2019-09-24T12:14:56+00:00" entry = registry.async_get("sensor.hl_l2340dw_uptime") assert entry assert entry.unique_id == "0123456789_uptime" async def test_availability(hass): """Ensure that we mark the entities unavailable correctly when device is offline.""" await init_integration(hass) state = hass.states.get("sensor.hl_l2340dw_status") assert state assert state.state != STATE_UNAVAILABLE assert state.state == "waiting" future = utcnow() + timedelta(minutes=5) with patch("brother.Brother._get_data", side_effect=ConnectionError()): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get("sensor.hl_l2340dw_status") assert state assert state.state == STATE_UNAVAILABLE future = utcnow() + timedelta(minutes=10) with patch( "brother.Brother._get_data", return_value=json.loads(load_fixture("brother_printer_data.json")), ): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get("sensor.hl_l2340dw_status") assert state assert state.state != STATE_UNAVAILABLE assert state.state == "waiting" async def test_manual_update_entity(hass): """Test manual update entity via service homeasasistant/update_entity.""" await init_integration(hass) await async_setup_component(hass, "homeassistant", {}) with patch("homeassistant.components.brother.Brother.async_update") as mock_update: await hass.services.async_call( "homeassistant", "update_entity", {ATTR_ENTITY_ID: ["sensor.hl_l2340dw_status"]}, blocking=True, ) assert len(mock_update.mock_calls) == 1<|fim▁end|>
<|file_name|>blocks.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from django.utils.functional import cached_property from django.contrib.contenttypes.models import ContentType from wagtail.wagtailcore.blocks import ChooserBlock class SnippetChooserBlock(ChooserBlock): def __init__(self, target_model, **kwargs): super(SnippetChooserBlock, self).__init__(**kwargs) self.target_model = target_model<|fim▁hole|> def widget(self): from wagtail.wagtailsnippets.widgets import AdminSnippetChooser content_type = ContentType.objects.get_for_model(self.target_model) return AdminSnippetChooser(content_type)<|fim▁end|>
@cached_property
<|file_name|>freeinfo1.cc<|end_file_name|><|fim▁begin|>#include <iostream> #include "common.h" using namespace storage; using namespace std; StorageInterface* s = NULL; void test(const string& device) { ResizeInfo resize_info; ContentInfo content_info; if (s->getFreeInfo(device, true, resize_info, true, content_info, true)) { cout << device << " true" << endl; cout << " " << resize_info.resize_freeK << " " << resize_info.df_freeK << " "<|fim▁hole|> << resize_info.usedK << " " << resize_info.resize_ok << endl; cout << " " << content_info.windows << " " << content_info.efi << " " << content_info.homes << endl; } else { cout << device << " false" << endl; } } int main() { cout.setf(std::ios::boolalpha); setup_logger(); setup_system("thalassa"); s = createStorageInterface(TestEnvironment()); test("/dev/system/arvin"); test("/dev/system/root"); delete s; }<|fim▁end|>
<|file_name|>BucketsPage.ts<|end_file_name|><|fim▁begin|>import { SelectionModel } from '@angular/cdk/collections'; import { AfterViewInit, ChangeDetectionStrategy, Component, ViewChild } from '@angular/core'; import { FormControl, FormGroup } from '@angular/forms'; import { MatDialog } from '@angular/material/dialog'; import { MatSort } from '@angular/material/sort'; import { MatTableDataSource } from '@angular/material/table'; import { Title } from '@angular/platform-browser'; import { ActivatedRoute, Router } from '@angular/router'; import { BehaviorSubject } from 'rxjs'; import { Bucket, StorageClient } from '../../client'; import { YamcsService } from '../../core/services/YamcsService'; import { Option } from '../../shared/forms/Select'; import { CreateBucketDialog } from './CreateBucketDialog'; @Component({ templateUrl: './BucketsPage.html', changeDetection: ChangeDetectionStrategy.OnPush, }) export class BucketsPage implements AfterViewInit { @ViewChild(MatSort, { static: true }) sort: MatSort; instance = '_global'; displayedColumns = [ 'select', 'name', 'size', 'numObjects', 'actions', ]; filterForm = new FormGroup({ instance: new FormControl('_global'), }); instanceOptions$ = new BehaviorSubject<Option[]>([ { id: '_global', label: '_global' }, ]); dataSource = new MatTableDataSource<Bucket>(); selection = new SelectionModel<Bucket>(true, []); private storageClient: StorageClient; constructor( private yamcs: YamcsService, private dialog: MatDialog, private router: Router, private route: ActivatedRoute, title: Title, ) { title.setTitle('Buckets'); this.storageClient = this.yamcs.createStorageClient(); yamcs.yamcsClient.getInstances({ filter: 'state=RUNNING', }).then(instances => { for (const instance of instances) { this.instanceOptions$.next([ ...this.instanceOptions$.value, { id: instance.name, label: instance.name, } ]); } }); this.initializeOptions(); this.refreshDataSources(); this.filterForm.get('instance')!.valueChanges.forEach(instance => { this.instance = instance; this.refreshDataSources(); }); } private initializeOptions() { const queryParams = this.route.snapshot.queryParamMap; if (queryParams.has('instance')) { this.instance = queryParams.get('instance')!; this.filterForm.get('instance')!.setValue(this.instance); } } ngAfterViewInit() { this.dataSource.sort = this.sort; } isAllSelected() { const numSelected = this.selection.selected.length; const numRows = this.dataSource.data.length; return numSelected === numRows; } masterToggle() { this.isAllSelected() ? 
this.selection.clear() : this.dataSource.data.forEach(row => this.selection.select(row)); } toggleOne(row: Bucket) { if (!this.selection.isSelected(row) || this.selection.selected.length > 1) { this.selection.clear(); } this.selection.toggle(row); } createBucket() { const dialogRef = this.dialog.open(CreateBucketDialog, { width: '400px', data: { bucketInstance: this.instance, }, }); dialogRef.afterClosed().subscribe(result => { if (result) { this.refreshDataSources(); } }); }<|fim▁hole|> if (confirm('Are you sure you want to delete the selected buckets?')) { const deletePromises = []; for (const bucket of this.selection.selected) { const promise = this.storageClient.deleteBucket(this.instance, bucket.name); deletePromises.push(promise); } Promise.all(deletePromises).then(() => { this.selection.clear(); this.refreshDataSources(); }); } } private refreshDataSources() { this.updateURL(); this.storageClient.getBuckets(this.instance).then(buckets => { this.dataSource.data = buckets; }); } private updateURL() { this.router.navigate([], { replaceUrl: true, relativeTo: this.route, queryParams: { instance: this.instance || null, }, queryParamsHandling: 'merge', }); } }<|fim▁end|>
deleteSelectedBuckets() {
<|file_name|>domtokenlist.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::attr::Attr; use dom::bindings::codegen::Bindings::DOMTokenListBinding; use dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListMethods; use dom::bindings::error::Error::{InvalidCharacter, Syntax}; use dom::bindings::error::{ErrorResult, Fallible}; use dom::bindings::global::GlobalRef; use dom::bindings::js::{JS, Root}; use dom::bindings::utils::{Reflector, reflect_dom_object}; use dom::element::Element; use dom::node::window_from_node; use std::borrow::ToOwned; use string_cache::Atom; use util::str::{DOMString, HTML_SPACE_CHARACTERS, str_join}; #[dom_struct] pub struct DOMTokenList { reflector_: Reflector, element: JS<Element>, local_name: Atom, } impl DOMTokenList { pub fn new_inherited(element: &Element, local_name: Atom) -> DOMTokenList { DOMTokenList { reflector_: Reflector::new(), element: JS::from_ref(element), local_name: local_name, } } pub fn new(element: &Element, local_name: &Atom) -> Root<DOMTokenList> { let window = window_from_node(element); reflect_dom_object(box DOMTokenList::new_inherited(element, local_name.clone()), GlobalRef::Window(window.r()), DOMTokenListBinding::Wrap) }<|fim▁hole|> fn attribute(&self) -> Option<Root<Attr>> { let element = self.element.root(); element.r().get_attribute(&ns!(""), &self.local_name) } fn check_token_exceptions(&self, token: &str) -> Fallible<Atom> { match token { "" => Err(Syntax), slice if slice.find(HTML_SPACE_CHARACTERS).is_some() => Err(InvalidCharacter), slice => Ok(Atom::from_slice(slice)) } } } // https://dom.spec.whatwg.org/#domtokenlist impl DOMTokenListMethods for DOMTokenList { // https://dom.spec.whatwg.org/#dom-domtokenlist-length fn Length(&self) -> u32 { self.attribute().map(|attr| { let attr = attr.r(); attr.value().as_tokens().len() }).unwrap_or(0) as u32 } // https://dom.spec.whatwg.org/#dom-domtokenlist-item fn Item(&self, index: u32) -> Option<DOMString> { self.attribute().and_then(|attr| { let attr = attr.r(); Some(attr.value().as_tokens()).and_then(|tokens| { tokens.get(index as usize).map(|token| (**token).to_owned()) }) }) } // https://dom.spec.whatwg.org/#dom-domtokenlist-contains fn Contains(&self, token: DOMString) -> Fallible<bool> { self.check_token_exceptions(&token).map(|token| { self.attribute().map(|attr| { let attr = attr.r(); attr.value() .as_tokens() .iter() .any(|atom: &Atom| *atom == token) }).unwrap_or(false) }) } // https://dom.spec.whatwg.org/#dom-domtokenlist-add fn Add(&self, tokens: Vec<DOMString>) -> ErrorResult { let element = self.element.root(); let mut atoms = element.r().get_tokenlist_attribute(&self.local_name); for token in &tokens { let token = try!(self.check_token_exceptions(&token)); if !atoms.iter().any(|atom| *atom == token) { atoms.push(token); } } element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms); Ok(()) } // https://dom.spec.whatwg.org/#dom-domtokenlist-remove fn Remove(&self, tokens: Vec<DOMString>) -> ErrorResult { let element = self.element.root(); let mut atoms = element.r().get_tokenlist_attribute(&self.local_name); for token in &tokens { let token = try!(self.check_token_exceptions(&token)); atoms.iter().position(|atom| *atom == token).map(|index| { atoms.remove(index) }); } element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms); Ok(()) } // 
https://dom.spec.whatwg.org/#dom-domtokenlist-toggle fn Toggle(&self, token: DOMString, force: Option<bool>) -> Fallible<bool> { let element = self.element.root(); let mut atoms = element.r().get_tokenlist_attribute(&self.local_name); let token = try!(self.check_token_exceptions(&token)); match atoms.iter().position(|atom| *atom == token) { Some(index) => match force { Some(true) => Ok(true), _ => { atoms.remove(index); element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms); Ok(false) } }, None => match force { Some(false) => Ok(false), _ => { atoms.push(token); element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms); Ok(true) } } } } // https://dom.spec.whatwg.org/#stringification-behavior fn Stringifier(&self) -> DOMString { let tokenlist = self.element.root().r().get_tokenlist_attribute(&self.local_name); str_join(&tokenlist, "\x20") } // check-tidy: no specs after this line fn IndexedGetter(&self, index: u32, found: &mut bool) -> Option<DOMString> { let item = self.Item(index); *found = item.is_some(); item } }<|fim▁end|>
<|file_name|>Binary_Tree_Upside_Down.java<|end_file_name|><|fim▁begin|>/* Binary Tree Upside Down Given a binary tree where all the right nodes are either leaf nodes with a sibling (a left node that shares the same parent node) or empty, flip it upside down and turn it into a tree where the original right nodes turned into left leaf nodes. Return the new root. For example: Given a binary tree {1,2,3,4,5}, 1 / \ 2 3 / \ 4 5 return the root of the binary tree [4,5,2,#,#,3,1]. 4 / \ 5 2 / \ 3 1 confused what "{1,#,2,3}" means? > read more on how binary tree is serialized on OJ. Hide Tags Tree Hide Similar Problems (E) Reverse Linked List */<|fim▁hole|> * public class TreeNode { * int val; * TreeNode left; * TreeNode right; * TreeNode(int x) { val = x; } * } */ public class Solution { public TreeNode upsideDownBinaryTree(TreeNode root) { TreeNode p = root, parent = null, parentRight = null; while (p!=null) { TreeNode left = p.left; p.left = parentRight; parentRight = p.right; p.right = parent; parent = p; p = left; } return parent; } } /** * Definition for a binary tree node. * public class TreeNode { * int val; * TreeNode left; * TreeNode right; * TreeNode(int x) { val = x; } * } */ public class Solution { public TreeNode upsideDownBinaryTree(TreeNode root) { if(root==null)return root; if(root.left==null && root.right==null)return root; TreeNode newRoot = upsideDownBinaryTree(root.left); root.left.left=root.right; root.left.right=root; root.left=null; root.right=null; return newRoot; } } /* 1 /\ 2 3 /\ 4 5 1 / 2 -3 / 4 - 5 */<|fim▁end|>
/** * Definition for a binary tree node.
<|file_name|>aql-helper.js<|end_file_name|><|fim▁begin|>/* jshint strict: false */ /* global assertTrue, assertFalse, assertEqual, fail, AQL_EXECUTE, AQL_PARSE, AQL_EXPLAIN, AQL_EXECUTEJSON */ // ////////////////////////////////////////////////////////////////////////////// // / @brief aql test helper functions // / // / @file // / // / DISCLAIMER // / // / Copyright 2011-2012 triagens GmbH, Cologne, Germany // / // / Licensed under the Apache License, Version 2.0 (the "License") // / you may not use this file except in compliance with the License. // / You may obtain a copy of the License at // / // / http://www.apache.org/licenses/LICENSE-2.0 // / // / Unless required by applicable law or agreed to in writing, software // / distributed under the License is distributed on an "AS IS" BASIS, // / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // / See the License for the specific language governing permissions and // / limitations under the License. // / // / Copyright holder is triAGENS GmbH, Cologne, Germany // / // / @author Jan Steemann // / @author Copyright 2013, triAGENS GmbH, Cologne, Germany // ////////////////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////////////////// // / @brief normalize a single row result // ////////////////////////////////////////////////////////////////////////////// let isEqual = require("@arangodb/test-helper").isEqual; exports.isEqual = isEqual; function normalizeRow (row, recursive) { if (row !== null && typeof row === 'object' && !Array.isArray(row)) { var keys = Object.keys(row); keys.sort(); var i, n = keys.length, out = { }; for (i = 0; i < n; ++i) { var key = keys[i]; if (key[0] !== '_') { out[key] = row[key]; } } return out; } if (recursive && Array.isArray(row)) { row = row.map(normalizeRow); } return row; } // ////////////////////////////////////////////////////////////////////////////// // / @brief return the parse results for a query // ////////////////////////////////////////////////////////////////////////////// function getParseResults (query) { return AQL_PARSE(query); } // ////////////////////////////////////////////////////////////////////////////// // / @brief assert a specific error code when parsing a query // ////////////////////////////////////////////////////////////////////////////// function assertParseError (errorCode, query) { try { getParseResults(query); fail(); } catch (e) { assertTrue(e.errorNum !== undefined, 'unexpected error format'); assertEqual(errorCode, e.errorNum, 'unexpected error code (' + e.errorMessage + '): '); } } // ////////////////////////////////////////////////////////////////////////////// // / @brief return the results of a query explanation // ////////////////////////////////////////////////////////////////////////////// function getQueryExplanation (query, bindVars) { return AQL_EXPLAIN(query, bindVars); } // ////////////////////////////////////////////////////////////////////////////// // / @brief return the results of a modify-query // ////////////////////////////////////////////////////////////////////////////// function getModifyQueryResults (query, bindVars, options = {}) { var queryResult = AQL_EXECUTE(query, bindVars, options); return queryResult.stats; } // ////////////////////////////////////////////////////////////////////////////// // / @brief return the results of a modify-query // ////////////////////////////////////////////////////////////////////////////// function 
getModifyQueryResultsRaw (query, bindVars, options = {}) { var queryResult = AQL_EXECUTE(query, bindVars, options); return queryResult; } // ////////////////////////////////////////////////////////////////////////////// // / @brief return the results of a query, version // ////////////////////////////////////////////////////////////////////////////// function getRawQueryResults (query, bindVars, options = {}) { var finalOptions = Object.assign({ count: true, batchSize: 3000 }, options); var queryResult = AQL_EXECUTE(query, bindVars, finalOptions); return queryResult.json; } // ////////////////////////////////////////////////////////////////////////////// // / @brief return the results of a query in a normalized way // ////////////////////////////////////////////////////////////////////////////// function getQueryResults (query, bindVars, recursive, options = {}) { var result = getRawQueryResults(query, bindVars, options); if (Array.isArray(result)) { result = result.map(function (row) { return normalizeRow(row, recursive); }); } return result; } // ////////////////////////////////////////////////////////////////////////////// // / @brief assert a specific error code when running a query // ////////////////////////////////////////////////////////////////////////////// function assertQueryError (errorCode, query, bindVars, options = {}) { try { getQueryResults(query, bindVars, options); fail(); } catch (e) { assertFalse(e === "fail", "no exception thrown by query"); assertTrue(e.errorNum !== undefined, 'unexpected error format while calling [' + query + ']'); assertEqual(errorCode, e.errorNum, 'unexpected error code (' + e.errorMessage + " while executing: '" + query + "' expecting: " + errorCode + '): '); } } // ////////////////////////////////////////////////////////////////////////////// // / @brief assert a specific warning running a query // ////////////////////////////////////////////////////////////////////////////// function assertQueryWarningAndNull (errorCode, query, bindVars) { var result = AQL_EXECUTE(query, bindVars), i, found = { }; for (i = 0; i < result.warnings.length; ++i) { found[result.warnings[i].code] = true; } assertTrue(found[errorCode]); assertEqual([ null ], result.json); } // ////////////////////////////////////////////////////////////////////////////// // / @brief get a linearized version of an execution plan // ////////////////////////////////////////////////////////////////////////////// function getLinearizedPlan (explainResult) { var nodes = explainResult.plan.nodes, i; var lookup = { }, deps = { }; for (i = 0; i < nodes.length; ++i) { var node = nodes[i]; lookup[node.id] = node; var dependency = -1; if (node.dependencies.length > 0) { dependency = node.dependencies[0]; } deps[dependency] = node.id; } var current = -1; var out = []; while (true) { if (!deps.hasOwnProperty(current)) { break; } var n = lookup[deps[current]]; current = n.id; out.push(n); } return out; } function getCompactPlan (explainResult) { var out = []; function buildExpression (node) { var out = node.type; if (node.hasOwnProperty('name')) { out += '[' + node.name + ']'; } if (node.hasOwnProperty('value')) { out += '[' + node.value + ']'; } if (Array.isArray(node.subNodes)) { out += '('; node.subNodes.forEach(function (node, i) { if (i > 0) { out += ', '; } out += buildExpression(node); }); out += ')'; } return out; } getLinearizedPlan(explainResult).forEach(function (node) { var data = { type: node.type }; if (node.expression) { data.expression = buildExpression(node.expression); } if 
(node.outVariable) { data.outVariable = node.outVariable.name; } out.push(data); }); return out; } function findExecutionNodes (plan, nodeType) { let what = plan; if (plan.hasOwnProperty('plan')) { what = plan.plan; } return what.nodes.filter((node) => nodeType === undefined || node.type === nodeType); } function findReferencedNodes (plan, testNode) { var matches = []; if (testNode.elements) { testNode.elements.forEach(function (element) { plan.plan.nodes.forEach(function (node) { if (node.hasOwnProperty('outVariable') && node.outVariable.id === element.inVariable.id) { matches.push(node); } }); }); } else { plan.plan.nodes.forEach(function (node) { if (node.outVariable.id === testNode.inVariable.id) { matches.push(node); } }); } return matches; } function getQueryMultiplePlansAndExecutions (query, bindVars, testObject, debug) { var printYaml = function (plan) { require('internal').print(require('js-yaml').safeDump(plan)); }; var i; var plans = []; var allPlans = []; var results = [];<|fim▁hole|> var resetTest = false; var paramNone = { optimizer: { rules: [ '-all' ]}, verbosePlans: true}; var paramAllPlans = { allPlans: true, verbosePlans: true}; if (testObject !== undefined) { resetTest = true; } if (debug === undefined) { debug = false; } // first fetch the unmodified version if (debug) { require('internal').print('Analyzing Query unoptimized: ' + query); } plans[0] = AQL_EXPLAIN(query, bindVars, paramNone); // then all of the ones permuted by by the optimizer. if (debug) { require('internal').print('Unoptimized Plan (0):'); printYaml(plans[0]); } allPlans = AQL_EXPLAIN(query, bindVars, paramAllPlans); for (i = 0; i < allPlans.plans.length; i++) { if (debug) { require('internal').print('Optimized Plan [' + (i + 1) + ']:'); printYaml(allPlans.plans[i]); } plans[i + 1] = { plan: allPlans.plans[i]}; } // Now execute each of these variations. 
for (i = 0; i < plans.length; i++) { if (debug) { require('internal').print('Executing Plan No: ' + i + '\n'); } if (resetTest) { if (debug) { require('internal').print('\nFLUSHING\n'); } testObject.tearDown(); testObject.setUp(); if (debug) { require('internal').print('\n' + i + ' FLUSH DONE\n'); } } results[i] = AQL_EXECUTEJSON(plans[i].plan, paramNone); // ignore these statistics for comparisons delete results[i].stats.scannedFull; delete results[i].stats.scannedIndex; delete results[i].stats.cursorsCreated; delete results[i].stats.cursorsRearmed; delete results[i].stats.filtered; delete results[i].stats.executionTime; delete results[i].stats.httpRequests; delete results[i].stats.peakMemoryUsage; delete results[i].stats.fullCount; if (debug) { require('internal').print('\n' + i + ' DONE\n'); } } if (debug) { require('internal').print('done\n'); } return {'plans': plans, 'results': results}; } function removeAlwaysOnClusterRules (rules) { return rules.filter(function (rule) { return ([ 'distribute-filtercalc-to-cluster', 'scatter-in-cluster', 'distribute-in-cluster', 'remove-unnecessary-remote-scatter', 'parallelize-gather' ].indexOf(rule) === -1); }); } function removeClusterNodes (nodeTypes) { return nodeTypes.filter(function (nodeType) { return ([ 'ScatterNode', 'GatherNode', 'DistributeNode', 'RemoteNode' ].indexOf(nodeType) === -1); }); } function removeClusterNodesFromPlan (nodes) { return nodes.filter(function (node) { return ([ 'ScatterNode', 'GatherNode', 'DistributeNode', 'RemoteNode' ].indexOf(node.type) === -1); }); } /// @brief recursively removes keys named "estimatedCost" or "selectivityEstimate" of a given object /// used in tests where we do not want to test those values because of floating-point values used in "AsserEqual" /// This method should only be used where we explicitly don't want to test those values. function removeCost (obj) { if (Array.isArray(obj)) { return obj.map(removeCost); } else if (typeof obj === 'object') { let result = {}; for (var key in obj) { if (obj.hasOwnProperty(key) && key !== "estimatedCost" && key !== "selectivityEstimate") { result[key] = removeCost(obj[key]); } } return result; } else { return obj; } } exports.getParseResults = getParseResults; exports.assertParseError = assertParseError; exports.getQueryExplanation = getQueryExplanation; exports.getModifyQueryResults = getModifyQueryResults; exports.getModifyQueryResultsRaw = getModifyQueryResultsRaw; exports.getRawQueryResults = getRawQueryResults; exports.getQueryResults = getQueryResults; exports.assertQueryError = assertQueryError; exports.assertQueryWarningAndNull = assertQueryWarningAndNull; exports.getLinearizedPlan = getLinearizedPlan; exports.getCompactPlan = getCompactPlan; exports.findExecutionNodes = findExecutionNodes; exports.findReferencedNodes = findReferencedNodes; exports.getQueryMultiplePlansAndExecutions = getQueryMultiplePlansAndExecutions; exports.removeAlwaysOnClusterRules = removeAlwaysOnClusterRules; exports.removeClusterNodes = removeClusterNodes; exports.removeClusterNodesFromPlan = removeClusterNodesFromPlan; exports.removeCost = removeCost;<|fim▁end|>
<|file_name|>resume.ts<|end_file_name|><|fim▁begin|>import 'rxjs/add/operator/catch'; import 'rxjs/add/operator/map'; import 'rxjs/add/operator/switchMap'; import 'rxjs/add/operator/debounceTime'; import 'rxjs/add/operator/withLatestFrom'; import 'rxjs/add/operator/skip'; import 'rxjs/add/operator/takeUntil'; import { Injectable } from '@angular/core'; import { Effect, Actions, toPayload } from '@ngrx/effects'; import { Action, Store } from '@ngrx/store'; import { Observable } from 'rxjs/Observable'; import { from } from 'rxjs/observable/from'; import Constants from '../../constants'; import { IResume } from '../../models'; import { ResumeService } from '../../services/resume'; import { RESUME_GET, RESUME_PATCH_AND_UPDATE, RESUME_DELETE_AND_UPDATE, ResumeGetCompleteAction, ResumePatchAndUpdateCompleteAction, ResumeErrorAction, } from '../actions/resume'; import { ResumesGetAction } from '../actions/resumes'; import { AddAlertMessageAction } from '../actions/alertMessages'; import { ErrorsAction } from '../actions/errors'; import { IState as IStoreState } from '../reducers'; @Injectable() export class ResumeEffects { @Effect() private getResume$: Observable<Action> = this.actions$ .ofType(RESUME_GET) .debounceTime(300) .map(toPayload) .switchMap((payload) => { return this.resumeService.getResume(payload) .map((response) => new ResumeGetCompleteAction(response)) .catch((err) => from([ new ResumeErrorAction(), new ErrorsAction(err.json(), err.status), ])); }); @Effect() private patchResume$: Observable<Action> = this.actions$ .ofType(RESUME_PATCH_AND_UPDATE) .debounceTime(300) .map(toPayload) .withLatestFrom(this.store, (payload, state) => ({ payload, token: state.session.sessionObject.authorization_token, })) .switchMap(({ payload, token }) => { return this.resumeService.patchResume(payload, token) .mergeMap( (response: IResume) => from ([ new ResumePatchAndUpdateCompleteAction({ resume: response }), new ResumesGetAction({ pagination: { per: Constants.SHARED_LINKS_PER_PAGE, page: payload.location.page ? payload.location.page : null, }, filter: { full_name: payload.location.full_name ? payload.location.full_name: null, }, }), new AddAlertMessageAction('effects.message.update'), ]), ) .catch((err) => from([ new ResumeErrorAction(), new ErrorsAction(err.json(), err.status), ])); }); @Effect() private deleteResume$: Observable<Action> = this.actions$ .ofType(RESUME_DELETE_AND_UPDATE) .debounceTime(300) .map(toPayload) .withLatestFrom(this.store, (payload, state) => ({ payload, token: state.session.sessionObject.authorization_token, })) .switchMap(({ payload, token }) => { return this.resumeService.deleteResume(payload, token) .map((response: IResume) => ( new ResumesGetAction({ pagination: { per: Constants.SHARED_LINKS_PER_PAGE, page: payload.location.page ? payload.location.page : null, }, }) )) .catch((err) => from([ new ResumeErrorAction(), new ErrorsAction(err.json(), err.status), ])); }); constructor( private actions$: Actions, private resumeService: ResumeService,<|fim▁hole|>}<|fim▁end|>
private store: Store<IStoreState>, ) { }
<|file_name|>online_accounts.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3 from collections import OrderedDict import datetime import os import sys import textwrap from plaid import Client from plaid import errors as plaid_errors import plaid2text.config_manager as cm from plaid2text.interact import prompt, clear_screen, NullValidator from plaid2text.interact import NumberValidator, NumLengthValidator, YesNoValidator, PATH_COMPLETER class PlaidAccess(): def __init__(self, client_id=None, secret=None): if client_id and secret:<|fim▁hole|> self.client_id = client_id self.secret = secret else: self.client_id, self.secret = cm.get_plaid_config() self.client = Client(self.client_id, self.secret, "development", suppress_warnings=True) def get_transactions(self, access_token, start_date, end_date, account_ids): """Get transaction for a given account for the given dates""" ret = [] total_transactions = None page = 0 account_array = [] account_array.append(account_ids) while True: page += 1 if total_transactions: print("Fetching page %d, already fetched %d/%d transactions" % ( page, len(ret), total_transactions)) else: print("Fetching page 1") try: response = self.client.Transactions.get( access_token, start_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"), account_ids=account_array, offset=len(ret)) except plaid_errors.ItemError as ex: print("Unable to update plaid account [%s] due to: " % account_ids, file=sys.stderr) print(" %s" % ex, file=sys.stderr ) sys.exit(1) total_transactions = response['total_transactions'] ret.extend(response['transactions']) if len(ret) >= total_transactions: break print("Downloaded %d transactions for %s - %s" % ( len(ret), start_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"))) return ret<|fim▁end|>
<|file_name|>settingsData.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#!/bin/python # -*- coding: utf-8 -*- # Fenrir TTY screen reader # By Chrys, Storm Dragon, and contributers. from fenrirscreenreader.core import debug settingsData = { 'sound': { 'enabled': True, 'driver': 'genericDriver', 'theme': 'default', 'volume': 1.0, 'genericPlayFileCommand': 'play -q -v fenrirVolume fenrirSoundFile', 'genericFrequencyCommand': 'play -q -v fenrirVolume -n -c1 synth fenrirDuration sine fenrirFrequence' }, 'speech':{ 'enabled': True, 'driver': 'genericDriver', 'serverPath': '', 'rate': 0.75, 'pitch': 0.5, 'capitalPitch':0.8, 'volume': 1.0, 'module': '', 'voice': 'en-us', 'language': '', 'autoReadIncoming': True, 'genericSpeechCommand':'espeak -a fenrirVolume -s fenrirRate -p fenrirPitch -v fenrirVoice "fenrirText"', 'fenrirMinVolume':0, 'fenrirMaxVolume':200, 'fenrirMinPitch':0, 'fenrirMaxPitch':99, 'fenrirMinRate':80, 'fenrirMaxRate':450, }, 'braille':{ 'enabled': False, 'driver':'brlapiDriver', 'layout': 'en', 'flushMode': 'word', #NONE,FIX,CHAR,WORD 'flushTimeout': 3, 'cursorFocusMode':'page', # page,fixCell 'fixCursorOnCell': -1, 'cursorFollowMode': 'review', # none, review, last, text 'panSizeHorizontal': 0 # 0 = display size }, 'screen':{ 'driver': 'vcsaDriver', 'encoding': 'auto', 'screenUpdateDelay': 0.1, 'suspendingScreen': '', 'autodetectSuspendingScreen': False, }, 'general':{ 'debugLevel': debug.debugLevel.DEACTIVE, 'debugMode': 'FILE', 'debugFile': '', 'punctuationProfile':'default', 'punctuationLevel': 'some', 'respectPunctuationPause':True, 'newLinePause':True, 'numberOfClipboards': 10, 'emoticons': True, 'fenrirKeys': 'KEY_KP0,KEY_META', 'scriptKeys': 'KEY_COMPOSE', 'timeFormat': '%I:%M%P', 'dateFormat': '%A, %B %d, %Y', 'autoSpellCheck': False, 'spellCheckLanguage': 'en_US', 'scriptPath': '/usr/share/fenrirscreenreader/scripts', 'commandPath': '/usr/share/fenrirscreenreader/commands', 'attributeFormatString': 'Background fenrirBGColor,Foreground fenrirFGColor,fenrirUnderline,fenrirBold,fenrirBlink, Font fenrirFont,Fontsize fenrirFontSize', 'autoPresentIndent': False, 'autoPresentIndentMode': 1, 'hasAttributes': True, 'shell': '', }, 'focus':{ 'cursor': True, 'highlight': False, }, 'remote':{ 'enabled': True, 'driver': 'unixDriver', 'port': 22447, 'socketFile':'', 'enableSettingsRemote': True, 'enableCommandRemote': True, }, 'barrier':{ 'enabled': True, 'leftBarriers': '│└┌─', 'rightBarriers': '│┘┐─', }, 'review':{ 'lineBreak': True, 'endOfScreen': True, 'leaveReviewOnCursorChange': True, 'leaveReviewOnScreenChange': True, }, 'menu':{ 'vmenuPath': '', 'quickMenu': 'speech#rate;speech#pitch;speech#volume', }, 'promote':{ 'enabled': True, 'inactiveTimeoutSec': 120, 'list': '', }, 'time':{ 'enabled': False, 'presentTime': True, 'presentDate': True, 'delaySec': 0, 'onMinutes': '00,30', 'announce': True, 'interrupt': False, }, 'keyboard':{ 'driver': 'evdev', 'device': 'all', 'grabDevices': True, 'ignoreShortcuts': False, 'keyboardLayout': "desktop", 'charEchoMode': 2, # while capslock 'charDeleteEcho': True, 'wordEcho': True, 'interruptOnKeyPress': True, 'interruptOnKeyPressFilter': '', 'doubleTapTimeout': 0.2, } }<|fim▁end|>
<|file_name|>monkey_scraper.py<|end_file_name|><|fim▁begin|>from sys import argv import logging from MonkeyScraper import MonkeyScraper LOG_FILENAME = 'MonkeyScraper.log'<|fim▁hole|> def main(username, password, survey_url): """ Creates a MonkeyScraper, logs in, and scrapes the survey at the provided url :param username: str: surveymonkey username :param password: str: surveymonkey password :param survey_url: str: the "analyze" page url for your survey :return: """ # scraper = MonkeyScraper() # scraper.init() # scraper.log_in(username=username, password=password) # scraper.scrape(survey_url) # scraper.log_out() # scraper.close() with MonkeyScraper(username=username, password=password) as scraper: survey = scraper.scrape(survey_url) if __name__ == '__main__': main(*argv[1:])<|fim▁end|>
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Landing pages administration module =============================================== <|fim▁hole|> :platform: Django :synopsis: Landing pages administration module .. moduleauthor:: (C) 2015 Oliver Gutiérrez """ # Django imports from django.contrib import admin # Site tools imports from sitetools.admin import BaseModelAdmin # Application imports from landingpages.models import LandingPage class LandingPageAdmin(BaseModelAdmin): """ Landing page administration class """ list_display = ('name','url','language','template',) list_filter = ('language','template',) search_fields = ('name','title','keywords',) # Register models admin.site.register(LandingPage,LandingPageAdmin)<|fim▁end|>
.. module:: landingpages.admin
<|file_name|>test.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var throttle = require( "../throttle" ); // Fire callback at end of detection period var func = throttle(function() { // Do stuff here console.log( "throttled" ); }, 200 ); func();<|fim▁end|>
<|file_name|>recipe.py<|end_file_name|><|fim▁begin|>import logging, os, random from zc.buildout import UserError, easy_install from zc.recipe.egg import Egg SETTINGS_TEMPLATE = ''' from %(settings_module)s import * SECRET_KEY = "%(secret)s" %(settings_override)s ''' SCRIPT_TEMPLATES = { 'wsgi': easy_install.script_header + ''' %(relative_paths_setup)s import sys sys.path[0:0] = [ %(path)s, ] %(initialization)s import os try: from django.core.wsgi import get_wsgi_application IS_14_PLUS = True except ImportError: from django.core.handlers.wsgi import WSGIHandler IS_14_PLUS = False os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s" def app_factory(global_config, **local_config): """This function wraps our simple WSGI app so it can be used with paste.deploy""" if IS_14_PLUS: return get_wsgi_application() else: return WSGIHandler() application = app_factory(%(arguments)s) ''', 'manage': easy_install.script_header + ''' %(relative_paths_setup)s import sys sys.path[0:0] = [ %(path)s, ] %(initialization)s import os try: from django.core.management import execute_from_command_line IS_14_PLUS = True except ImportError: from django.core.management import ManagementUtility IS_14_PLUS = False os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s" if IS_14_PLUS: execute_from_command_line(%(arguments)s) else: utility = ManagementUtility(%(arguments)s) utility.execute() ''' } class Recipe(object): wsgi_file = 'wsgi.py' settings_file = 'settings.py' sites_default = 'sites' site_settings_template = '%(name)s_site_config' secret_cfg = '.secret.cfg' def __init__(self, buildout, name, options): self.buildout, self.name, self.options = buildout, name, options self.logger = logging.getLogger(name) self.options['location'] = os.path.join( self.buildout['buildout']['parts-directory'], self.name ) self.options.setdefault('extra-paths', '') self.options.setdefault('environment-vars', '') self.options.setdefault('sites-directory', self.sites_default) self.options.setdefault('settings-override', '') self.options.setdefault('settings-file', self.settings_file) self.options.setdefault('wsgi-file', self.wsgi_file) self.options.setdefault('manage-py-file', 'django') self.eggs = [ ] if 'eggs' in self.buildout['buildout']: self.eggs.extend(self.buildout['buildout']['eggs'].split()) if 'eggs' in self.options: self.eggs.extend(self.options['eggs'].split()) self.working_set = None self.extra_paths = [ self.options['location'] ] sites_path = os.path.join( self.buildout['buildout']['directory'], self.options['sites-directory'] ) if os.path.isdir(sites_path): self.extra_paths.append(sites_path) if os.path.isdir(sites_path) and 'settings-module' not in self.options: # Check if the user has created a module %(name)s_config settings_module = self.site_settings_template % { 'name': self.name } settings_module_path = os.path.join(sites_path, settings_module) initpy = os.path.join(settings_module_path, '__init__.py') settingspy = os.path.join(settings_module_path, 'settings.py') if os.path.isdir(settings_module_path) and \ os.path.isfile(initpy) and os.path.isfile(settingspy): self.options.setdefault('settings-module', '%s.settings' % settings_module) self.extra_paths.extend(self.options['extra-paths'].split()) self.secret_key = None def setup_working_set(self): egg = Egg( self.buildout, 'Django', self.options ) self.working_set = egg.working_set(self.eggs) def setup_secret(self): secret_file = os.path.join( self.buildout['buildout']['directory'], self.secret_cfg ) if os.path.isfile(secret_file): stream = 
open(secret_file, 'rb') data = stream.read().decode('utf-8').strip() stream.close() self.logger.debug("Read secret: %s" % data) else: stream = open(secret_file, 'wb') chars = u'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)' data = u''.join([random.choice(chars) for __ in range(50)]) stream.write(data.encode('utf-8')+u"\n") stream.close() self.logger.debug( "Generated secret: %s (and written to %s)" % (data, secret_file) ) self.secret_key = data return secret_file def setup_module_file(self, module, name, data): with open(os.path.join(module, name), 'wb') as stream: stream.write(data) def get_settings(self, static_directory=None, media_directory=None): if 'settings-module' not in self.options: raise UserError( ("You should specify 'settings-module' in %(name)s " "or create a module named '"+self.site_settings_template+"' " "in '%(sites)s' with a 'settings.py' file in it") % { 'name': self.name, 'sites': self.options['sites-directory'] } ) settings_override = self.options['settings-override'] if static_directory is not None: settings_override += '\nSTATIC_ROOT = "%s"\n' % ( static_directory, ) if media_directory is not None: settings_override += '\nMEDIA_ROOT = "%s"\n' % ( media_directory, ) return SETTINGS_TEMPLATE % { 'settings_module': self.options['settings-module'], 'secret': self.secret_key, 'settings_override': settings_override } def setup_directories(self): result = [] for directory in [ 'static-directory', 'media-directory' ]: result.append(None) if directory in self.options: path = os.path.join( self.buildout['buildout']['directory'], self.options[directory] ) if not os.path.isdir(path): os.makedirs(path) result[-1] = path return result def get_initialization(self): # The initialization code is expressed as a list of lines initialization = [] # Gets the initialization code: the tricky part here is to preserve # indentation. # Since buildout does totally waste whitespace, if one wants to # preserve indentation must prefix its lines with '>>> ' or '... ' raw_value = self.options.get('initialization', '') is_indented = False indentations = ('>>> ', '... ') for line in raw_value.splitlines(): if line != "": if len(initialization) == 0: if line.startswith(indentations[0]): is_indented = True else: if is_indented and not line.startswith(indentations[1]): raise UserError( ("Line '%s' should be indented " "properly but is not") % line ) if is_indented: line = line[4:] initialization.append(line) # Gets the environment-vars option and generates code to set the # enviroment variables via os.environ environment_vars = [] for line in self.options.get('environment-vars', '').splitlines(): line = line.strip() if len(line) > 0: try: var_name, raw_value = line.split(' ', 1) except ValueError: raise RuntimeError( "Bad djc.recipe2 environment-vars contents: %s" % line ) environment_vars.append( 'os.environ["%s"] = r"%s"' % ( var_name, raw_value.strip() ) ) if len(environment_vars) > 0: initialization.append("import os") initialization.extend(environment_vars) if len(initialization) > 0: return "\n"+"\n".join(initialization)+"\n" return "" def create_script(self, name, path, settings, template, arguments): """Create arbitrary script. 
This script will also include the eventual code found in ``initialization`` and will also set (via ``os.environ``) the environment variables found in ``environment-vars`` """ self.logger.info( "Creating script at %s" % (os.path.join(path, name),) ) settings = settings.rsplit(".", 1) module = settings[0] attrs = "" if len(settings) > 1:<|fim▁hole|> script = easy_install.scripts( reqs=[(name, module, attrs)], working_set=self.working_set[1], executable=self.options['executable'], dest=path, extra_paths=self.extra_paths, initialization=self.get_initialization(), arguments=str(arguments) ) easy_install.script_template = old_script_template return script def setup_manage_script(self, settings): arguments = "sys.argv" return self.create_script( self.options['manage-py-file'], self.buildout['buildout']['bin-directory'], settings, SCRIPT_TEMPLATES['manage'], arguments ) def setup_wsgi_script(self, module_path, settings): arguments = "global_config={}" return self.create_script( self.options['wsgi-file'], module_path, settings, SCRIPT_TEMPLATES['wsgi'], arguments ) def setup(self, static_directory=None, media_directory=None): part_module = '%s_part_site' % self.name part_module_path = os.path.join(self.options['location'], part_module) settings_module = "%s.%s" % ( part_module, os.path.splitext(self.options['settings-file'])[0] ) if not os.path.exists(part_module_path): os.makedirs(part_module_path) self.setup_module_file(part_module_path, '__init__.py', "#\n") self.setup_module_file( part_module_path, self.options['settings-file'], self.get_settings(static_directory, media_directory) ) self.setup_wsgi_script(part_module_path, settings_module) files = [ self.options['location'] ] files.extend(self.setup_manage_script(settings_module)) return files def install(self): files = [] self.setup_working_set() # The .secret.cfg file is not reported so it doesn't get deleted self.setup_secret() static_directory, media_directory = self.setup_directories() # static and media are not added to files so that updates # won't delete them, nor reinstallations of parts files.extend(self.setup(static_directory, media_directory)) return tuple(files) update = install<|fim▁end|>
attrs = "." + settings[1] old_script_template = easy_install.script_template easy_install.script_template = template
<|file_name|>workflow-template.model.ts<|end_file_name|><|fim▁begin|>import { WithKey } from '../shared/table/data-table.component'; import { AuditWorkflowTemplate, AuditWorkflowTemplateInstance } from './audit.model'; import { Group } from './group.model'; import { Project } from './project.model'; import { Workflow } from './workflow.model'; <|fim▁hole|> group_id: number; name: string; slug: string; description: string; parameters: Array<WorkflowTemplateParameter>; value: string; pipelines: Array<PipelineTemplate>; applications: Array<ApplicationTemplate>; environments: Array<EnvironmentTemplate>; version: number; group: Group; first_audit: AuditWorkflowTemplate; last_audit: AuditWorkflowTemplate; editable: boolean; change_message: string; import_url: string; } export class WorkflowTemplateParameter { key: string; type: string; required: boolean; } export class PipelineTemplate { value: string; } export class ApplicationTemplate { value: string; } export class EnvironmentTemplate { value: string; } export class ParamData { [key: string]: string; } export class WorkflowTemplateRequest { project_key: string; workflow_name: string; parameters: ParamData; detached: boolean; } export class WorkflowTemplateApplyResult { msgs: Array<string>; workflow_name: string; } export class WorkflowTemplateInstance implements WithKey { id: number; workflow_template_version: number; request: WorkflowTemplateRequest; project: Project; workflow: Workflow; workflow_id: number; first_audit: AuditWorkflowTemplateInstance; last_audit: AuditWorkflowTemplateInstance; workflow_name: string; constructor(wti?: any) { if (wti) { this.id = wti.id; this.workflow_template_version = wti.workflow_template_version; this.request = wti.request; this.project = wti.project; this.workflow = wti.workflow; this.first_audit = wti.first_audit; this.last_audit = wti.last_audit; this.workflow_name = wti.workflow_name; } } key(): string { return this.project.key + '/' + (this.workflow ? this.workflow.name : this.workflow_name); } status(wt: WorkflowTemplate): InstanceStatus { if (!this.workflow) { return InstanceStatus.NOT_IMPORTED; } return this.workflow_template_version === wt.version ?
InstanceStatus.UP_TO_DATE : InstanceStatus.NOT_UP_TO_DATE; } } export enum InstanceStatus { NOT_IMPORTED = 'workflow_template_not_imported_yet', UP_TO_DATE = 'common_up_to_date', NOT_UP_TO_DATE = 'common_not_up_to_date' } export class InstanceStatusUtil { public static color(status: InstanceStatus): string { switch (status) { case InstanceStatus.UP_TO_DATE: return 'green'; case InstanceStatus.NOT_UP_TO_DATE: return 'red'; case InstanceStatus.NOT_IMPORTED: return 'orange'; } return 'blue'; } } export class WorkflowTemplateBulk { id: number; operations: Array<WorkflowTemplateBulkOperation>; } export class WorkflowTemplateBulkOperation { status: OperationStatus; error: string; request: WorkflowTemplateRequest; } export enum OperationStatus { PENDING = 0, PROCESSING = 1, DONE = 2, ERROR = 3 } export class OperationStatusUtil { public static color(status: OperationStatus): string { switch (status) { case OperationStatus.PENDING: return 'blue'; case OperationStatus.PROCESSING: return 'orange'; case OperationStatus.DONE: return 'green'; case OperationStatus.ERROR: return 'red'; } return ''; } public static translate(status: OperationStatus): string { switch (status) { case OperationStatus.PENDING: return 'common_pending'; case OperationStatus.PROCESSING: return 'common_processing'; case OperationStatus.DONE: return 'common_done'; case OperationStatus.ERROR: return 'common_error'; } return ''; } } export class WorkflowTemplateError { type: string; number: number; line: number; message: string; }<|fim▁end|>
export class WorkflowTemplate { id: number;
<|file_name|>proxy.py<|end_file_name|><|fim▁begin|>"""LDAP protocol proxy server""" from twisted.internet import reactor, defer from ldaptor.protocols.ldap import ldapserver, ldapconnector, ldapclient from ldaptor.protocols import pureldap class Proxy(ldapserver.BaseLDAPServer): protocol = ldapclient.LDAPClient client = None waitingConnect = [] unbound = False def __init__(self, config): """ Initialize the object. @param config: The configuration. @type config: ldaptor.interfaces.ILDAPConfig """ ldapserver.BaseLDAPServer.__init__(self) self.config = config self.waitingConnect = [] # per-instance queue; mutating the class-level default would share state across connections def _whenConnected(self, fn, *a, **kw): if self.client is None: d = defer.Deferred() self.waitingConnect.append((d, fn, a, kw)) return d else: return defer.maybeDeferred(fn, *a, **kw) def _cbConnectionMade(self, proto): self.client = proto while self.waitingConnect: d, fn, a, kw = self.waitingConnect.pop(0) d2 = defer.maybeDeferred(fn, *a, **kw) d2.chainDeferred(d) def _clientQueue(self, request, controls, reply): # TODO controls if request.needs_answer: d = self.client.send_multiResponse(request, self._gotResponse, reply) # TODO handle d errbacks else: self.client.send_noResponse(request) def _gotResponse(self, response, reply): reply(response) # TODO this is ugly return isinstance(response, ( pureldap.LDAPSearchResultDone, pureldap.LDAPBindResponse, )) def _failConnection(self, reason):<|fim▁hole|> def connectionMade(self): clientCreator = ldapconnector.LDAPClientCreator( reactor, self.protocol) d = clientCreator.connect( dn='', overrides=self.config.getServiceLocationOverrides()) d.addCallback(self._cbConnectionMade) d.addErrback(self._failConnection) ldapserver.BaseLDAPServer.connectionMade(self) def connectionLost(self, reason): assert self.client is not None if self.client.connected: if not self.unbound: self.client.unbind() self.unbound = True else: self.client.transport.loseConnection() self.client = None ldapserver.BaseLDAPServer.connectionLost(self, reason) def _handleUnknown(self, request, controls, reply): self._whenConnected(self._clientQueue, request, controls, reply) return None def handleUnknown(self, request, controls, reply): d = defer.succeed(request) d.addCallback(self._handleUnknown, controls, reply) return d def handle_LDAPUnbindRequest(self, request, controls, reply): self.unbound = True self.handleUnknown(request, controls, reply) if __name__ == '__main__': """ Demonstration LDAP proxy; passes all requests to localhost:389. """ from twisted.internet import protocol from twisted.python import log from ldaptor import config as ldapconfig import sys log.startLogging(sys.stderr) factory = protocol.ServerFactory() cfg = ldapconfig.LDAPConfig(serviceLocationOverrides={ '': ('localhost', 389), }) # Proxy expects an ILDAPConfig, not a bare overrides dict factory.protocol = lambda : Proxy(cfg) reactor.listenTCP(10389, factory) reactor.run()<|fim▁end|>
#TODO self.loseConnection() return reason # TODO
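The Proxy record above centres on one reusable Twisted idiom: _whenConnected parks calls in a queue until the upstream client exists, and _cbConnectionMade replays them in order. A stripped-down sketch of that pattern, with illustrative names and no LDAP specifics:

from twisted.internet import defer

class QueueUntilReady(object):
    """Sketch of the queue-until-connected idiom used by Proxy above."""

    def __init__(self):
        self.client = None
        self.waiting = []  # (deferred, fn, args, kwargs) tuples

    def when_connected(self, fn, *a, **kw):
        # Before the client exists, hand back a Deferred that fires later.
        if self.client is None:
            d = defer.Deferred()
            self.waiting.append((d, fn, a, kw))
            return d
        return defer.maybeDeferred(fn, *a, **kw)

    def connection_made(self, proto):
        # Replay queued calls in FIFO order, chaining each result
        # into the Deferred the caller already holds.
        self.client = proto
        while self.waiting:
            d, fn, a, kw = self.waiting.pop(0)
            defer.maybeDeferred(fn, *a, **kw).chainDeferred(d)

def show(result):
    print(result)

q = QueueUntilReady()
q.when_connected(lambda: "sent").addCallback(show)
q.connection_made(object())  # flushes the queue, printing "sent"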
<|file_name|>test_mixing.py<|end_file_name|><|fim▁begin|>import pytest np = pytest.importorskip("numpy") npt = pytest.importorskip("numpy.testing") import networkx as nx from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing class TestDegreeMixingDict(BaseTestDegreeMixing): def test_degree_mixing_dict_undirected(self): d = nx.degree_mixing_dict(self.P4) d_result = { 1: {2: 2}, 2: {1: 2, 2: 2}, } assert d == d_result def test_degree_mixing_dict_undirected_normalized(self): d = nx.degree_mixing_dict(self.P4, normalized=True) d_result = { 1: {2: 1.0 / 3}, 2: {1: 1.0 / 3, 2: 1.0 / 3}, } assert d == d_result def test_degree_mixing_dict_directed(self): d = nx.degree_mixing_dict(self.D) print(d) d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}} assert d == d_result def test_degree_mixing_dict_multigraph(self): d = nx.degree_mixing_dict(self.M) d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}} assert d == d_result class TestDegreeMixingMatrix(BaseTestDegreeMixing): def test_degree_mixing_matrix_undirected(self): # fmt: off a_result = np.array([[0, 0, 0], [0, 0, 2], [0, 2, 2]] ) # fmt: on a = nx.degree_mixing_matrix(self.P4, normalized=False) npt.assert_equal(a, a_result) a = nx.degree_mixing_matrix(self.P4) npt.assert_equal(a, a_result / float(a_result.sum())) def test_degree_mixing_matrix_directed(self): # fmt: off a_result = np.array([[0, 0, 0, 0], [0, 0, 0, 2], [0, 1, 0, 1], [0, 0, 0, 0]] ) # fmt: on a = nx.degree_mixing_matrix(self.D, normalized=False) npt.assert_equal(a, a_result) a = nx.degree_mixing_matrix(self.D) npt.assert_equal(a, a_result / float(a_result.sum())) def test_degree_mixing_matrix_multigraph(self): # fmt: off a_result = np.array([[0, 0, 0, 0], [0, 0, 1, 0], [0, 1, 0, 3], [0, 0, 3, 0]] ) # fmt: on a = nx.degree_mixing_matrix(self.M, normalized=False) npt.assert_equal(a, a_result) a = nx.degree_mixing_matrix(self.M) npt.assert_equal(a, a_result / float(a_result.sum())) def test_degree_mixing_matrix_selfloop(self): # fmt: off a_result = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 2]] ) # fmt: on a = nx.degree_mixing_matrix(self.S, normalized=False) npt.assert_equal(a, a_result) a = nx.degree_mixing_matrix(self.S) npt.assert_equal(a, a_result / float(a_result.sum())) class TestAttributeMixingDict(BaseTestAttributeMixing): def test_attribute_mixing_dict_undirected(self): d = nx.attribute_mixing_dict(self.G, "fish") d_result = { "one": {"one": 2, "red": 1}, "two": {"two": 2, "blue": 1}, "red": {"one": 1}, "blue": {"two": 1}, } assert d == d_result def test_attribute_mixing_dict_directed(self): d = nx.attribute_mixing_dict(self.D, "fish") d_result = { "one": {"one": 1, "red": 1}, "two": {"two": 1, "blue": 1}, "red": {}, "blue": {}, } assert d == d_result def test_attribute_mixing_dict_multigraph(self): d = nx.attribute_mixing_dict(self.M, "fish") d_result = { "one": {"one": 4}, "two": {"two": 2}, } assert d == d_result class TestAttributeMixingMatrix(BaseTestAttributeMixing): def test_attribute_mixing_matrix_undirected(self): mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]]) a = nx.attribute_mixing_matrix(<|fim▁hole|> a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum())) def test_attribute_mixing_matrix_directed(self): mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]]) a = nx.attribute_mixing_matrix( self.D, "fish", mapping=mapping, normalized=False ) 
npt.assert_equal(a, a_result) a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum())) def test_attribute_mixing_matrix_multigraph(self): mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} a_result = np.array([[4, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]) a = nx.attribute_mixing_matrix( self.M, "fish", mapping=mapping, normalized=False ) npt.assert_equal(a, a_result) a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping) npt.assert_equal(a, a_result / float(a_result.sum()))<|fim▁end|>
self.G, "fish", mapping=mapping, normalized=False ) npt.assert_equal(a, a_result)
<|file_name|>test_artificial_32_None_MovingAverage_30_12_20.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_dataset as art <|fim▁hole|>art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 30, transform = "None", sigma = 0.0, exog_count = 20, ar_order = 12);<|fim▁end|>
<|file_name|>test_devstack.py<|end_file_name|><|fim▁begin|># Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import jsonschema import mock import uuid from rally.deploy.engines import devstack from rally.openstack.common import test SAMPLE_CONFIG = { 'name': 'DevstackEngine', 'provider': { 'name': 'ExistingServers', 'credentials': [{'user': 'root', 'host': 'example.com'}], }, 'localrc': { 'ADMIN_PASSWORD': 'secret', }, } DEVSTACK_REPO = 'https://github.com/openstack-dev/devstack.git' class DevstackEngineTestCase(test.BaseTestCase): def setUp(self): super(DevstackEngineTestCase, self).setUp() self.deployment = { 'uuid': str(uuid.uuid4()), 'config': SAMPLE_CONFIG, } self.engine = devstack.DevstackEngine(self.deployment) def test_invalid_config(self): self.deployment = SAMPLE_CONFIG.copy() self.deployment['config'] = {'name': 42} self.assertRaises(jsonschema.ValidationError, devstack.DevstackEngine, self.deployment) def test_construct(self): self.assertEqual(self.engine.localrc['ADMIN_PASSWORD'], 'secret') @mock.patch('rally.deploy.engines.devstack.open', create=True) def test_prepare_server(self, m_open): m_open.return_value = 'fake_file' server = mock.Mock() server.password = 'secret' self.engine.prepare_server(server) calls = [ mock.call('/bin/sh -e', stdin='fake_file'), mock.call('chpasswd', stdin='rally:secret'), ] self.assertEqual(calls, server.ssh.run.mock_calls) filename = m_open.mock_calls[0][1][0] self.assertTrue(filename.endswith('rally/deploy/engines/' 'devstack/install.sh')) self.assertEqual([mock.call(filename, 'rb')], m_open.mock_calls) @mock.patch('rally.deploy.engine.EngineFactory.get_provider') @mock.patch('rally.deploy.engines.devstack.get_updated_server') @mock.patch('rally.deploy.engines.devstack.get_script') @mock.patch('rally.deploy.serverprovider.provider.Server') @mock.patch('rally.deploy.engines.devstack.objects.Endpoint') def test_deploy(self, m_endpoint, m_server, m_gs, m_gus, m_gp): m_gp.return_value = fake_provider = mock.Mock()<|fim▁hole|> m_gs.return_value = 'fake_script' server.get_credentials.return_value = 'fake_credentials' fake_provider.create_servers.return_value = [server] with mock.patch.object(self.engine, 'deployment') as m_d: endpoints = self.engine.deploy() self.assertEqual(['fake_endpoint'], endpoints) m_endpoint.assert_called_once_with('http://host:5000/v2.0/', 'admin', 'secret', 'admin', 'admin') m_d.add_resource.assert_called_once_with( info='fake_credentials', provider_name='DevstackEngine', type='credentials') repo = 'https://github.com/openstack-dev/devstack.git' cmd = '/bin/sh -e -s %s master' % repo server.ssh.run.assert_called_once_with(cmd, stdin='fake_script') ds_calls = [ mock.call.ssh.run('cat > ~/devstack/localrc', stdin=mock.ANY), mock.call.ssh.run('~/devstack/stack.sh') ] self.assertEqual(ds_calls, ds_server.mock_calls) localrc = ds_server.mock_calls[0][2]['stdin'] self.assertIn('ADMIN_PASSWORD=secret', localrc)<|fim▁end|>
server = mock.Mock() server.host = 'host' m_endpoint.return_value = 'fake_endpoint' m_gus.return_value = ds_server = mock.Mock()
<|file_name|>verify.py<|end_file_name|><|fim▁begin|># Copyright 2010 http://www.collabq.com import logging from django.conf import settings from django.http import HttpResponseRedirect from common import api from common import exception class VerifyInstallMiddleware(object): def process_request(self, request):<|fim▁hole|> logging.info("VerifyInstallMiddleware") logging.info("Path %s" % request.path) if not request.path == '/install': try: root_user = api.actor_get(api.ROOT, settings.ROOT_NICK) logging.info("Root Exists") except: logging.info("Root Does Not Exists") return HttpResponseRedirect('/install')<|fim▁end|>
<|file_name|>countdown.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input } from '@angular/core'; @Component({ selector: 'my-countdown', templateUrl: './countdown.component.html' }) export class CountdownComponent { @Input() milliseconds: number;<|fim▁hole|> constructor() { } getHours() { return ('0' + Math.floor(this.milliseconds / 3600000)).slice(-2); } getMinutes() { return ('0' + Math.floor((this.milliseconds % 3600000) / 60000)).slice(-2); } getSeconds() { return ('0' + Math.floor((this.milliseconds % 60000) / 1000)).slice(-2); } }<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from couchpotato.core.event import addEvent from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin log = CPLog(__name__) class ClientScript(Plugin): urls = { 'style': {}, 'script': {}, } html = { 'style': '<link rel="stylesheet" href="%s" type="text/css">', 'script': '<script type="text/javascript" src="%s"></script>', } def __init__(self): addEvent('register_style', self.registerStyle) addEvent('register_script', self.registerScript) addEvent('clientscript.get_styles', self.getStyles)<|fim▁hole|> def getStyles(self, *args, **kwargs): return self.get('style', *args, **kwargs) def getScripts(self, *args, **kwargs): return self.get('script', *args, **kwargs) def get(self, type, as_html = False, location = 'head'): data = '' if as_html else [] try: return self.urls[type][location] except Exception, e: log.error(e) return data def registerStyle(self, path, position = 'head'): self.register(path, 'style', position) def registerScript(self, path, position = 'head'): self.register(path, 'script', position) def register(self, filepath, type, location): if not self.urls[type].get(location): self.urls[type][location] = [] filePath = filepath self.urls[type][location].append(filePath)<|fim▁end|>
addEvent('clientscript.get_scripts', self.getScripts)
<|file_name|>head-title.js<|end_file_name|><|fim▁begin|>Ember.Handlebars.helper('headTitle', function(title) {<|fim▁hole|> Ember.$('head').find('title').text(title); }, 'title');<|fim▁end|>
<|file_name|>font.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Specified values for font properties use Atom; use app_units::Au; use byteorder::{BigEndian, ByteOrder}; use cssparser::{Parser, Token}; #[cfg(feature = "gecko")] use gecko_bindings::bindings; #[cfg(feature = "gecko")] use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; use parser::{Parse, ParserContext}; use properties::longhands::system_font::SystemFont; use std::fmt::{self, Write}; use style_traits::{CssWriter, KeywordsCollectFn, ParseError}; use style_traits::{SpecifiedValueInfo, StyleParseErrorKind, ToCss}; use style_traits::values::SequenceWriter; use values::CustomIdent; use values::computed::{Angle as ComputedAngle, Percentage as ComputedPercentage}; use values::computed::{font as computed, Context, Length, NonNegativeLength, ToComputedValue}; use values::computed::font::{FamilyName, FontFamilyList, FontStyleAngle, SingleFontFamily}; use values::generics::NonNegative; use values::generics::font::{KeywordSize, VariationValue}; use values::generics::font::{self as generics, FeatureTagValue, FontSettings, FontTag}; use values::specified::{AllowQuirks, Angle, Integer, LengthOrPercentage, NoCalcLength, Number, Percentage}; use values::specified::length::{FontBaseSize, AU_PER_PT, AU_PER_PX}; // FIXME(emilio): The system font code is copy-pasta, and should be cleaned up. macro_rules! system_font_methods { ($ty:ident, $field:ident) => { system_font_methods!($ty); fn compute_system(&self, _context: &Context) -> <$ty as ToComputedValue>::ComputedValue { debug_assert!(matches!(*self, $ty::System(..))); #[cfg(feature = "gecko")] { _context.cached_system_font.as_ref().unwrap().$field.clone() } #[cfg(feature = "servo")] { unreachable!() } } }; ($ty:ident) => { /// Get a specified value that represents a system font. pub fn system_font(f: SystemFont) -> Self { $ty::System(f) } /// Retreive a SystemFont from the specified value. pub fn get_system(&self) -> Option<SystemFont> { if let $ty::System(s) = *self { Some(s) } else { None } } } } const DEFAULT_SCRIPT_MIN_SIZE_PT: u32 = 8; const DEFAULT_SCRIPT_SIZE_MULTIPLIER: f64 = 0.71; /// The minimum font-weight value per: /// /// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values pub const MIN_FONT_WEIGHT: f32 = 1.; /// The maximum font-weight value per: /// /// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values pub const MAX_FONT_WEIGHT: f32 = 1000.; /// A specified font-weight value. /// /// https://drafts.csswg.org/css-fonts-4/#propdef-font-weight #[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] pub enum FontWeight { /// `<font-weight-absolute>` Absolute(AbsoluteFontWeight), /// Bolder variant Bolder, /// Lighter variant Lighter, /// System font variant. 
#[css(skip)] System(SystemFont), } impl FontWeight { system_font_methods!(FontWeight, font_weight); /// `normal` #[inline] pub fn normal() -> Self { FontWeight::Absolute(AbsoluteFontWeight::Normal) } /// Get a specified FontWeight from a gecko keyword pub fn from_gecko_keyword(kw: u32) -> Self { debug_assert!(kw % 100 == 0); debug_assert!(kw as f32 <= MAX_FONT_WEIGHT); FontWeight::Absolute(AbsoluteFontWeight::Weight(Number::new(kw as f32))) } } impl Parse for FontWeight { fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontWeight, ParseError<'i>> { if let Ok(absolute) = input.try(|input| AbsoluteFontWeight::parse(context, input)) { return Ok(FontWeight::Absolute(absolute)); } Ok(try_match_ident_ignore_ascii_case! { input, "bolder" => FontWeight::Bolder, "lighter" => FontWeight::Lighter, }) } } impl ToComputedValue for FontWeight { type ComputedValue = computed::FontWeight; #[inline] fn to_computed_value(&self, context: &Context) -> Self::ComputedValue { match *self { FontWeight::Absolute(ref abs) => abs.compute(), FontWeight::Bolder => context .builder .get_parent_font() .clone_font_weight() .bolder(), FontWeight::Lighter => context .builder .get_parent_font() .clone_font_weight() .lighter(), FontWeight::System(_) => self.compute_system(context), } } #[inline] fn from_computed_value(computed: &computed::FontWeight) -> Self { FontWeight::Absolute(AbsoluteFontWeight::Weight( Number::from_computed_value(&computed.0) )) } } /// An absolute font-weight value for a @font-face rule. /// /// https://drafts.csswg.org/css-fonts-4/#font-weight-absolute-values #[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] pub enum AbsoluteFontWeight { /// A `<number>`, with the additional constraints specified in: /// /// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values Weight(Number), /// Normal font weight. Same as 400. Normal, /// Bold font weight. Same as 700. Bold, } impl AbsoluteFontWeight { /// Returns the computed value for this absolute font weight. pub fn compute(&self) -> computed::FontWeight { match *self { AbsoluteFontWeight::Weight(weight) => { computed::FontWeight( weight.get().max(MIN_FONT_WEIGHT).min(MAX_FONT_WEIGHT) ) }, AbsoluteFontWeight::Normal => computed::FontWeight::normal(), AbsoluteFontWeight::Bold => computed::FontWeight::bold(), } } } impl Parse for AbsoluteFontWeight { fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { if let Ok(number) = input.try(|input| Number::parse(context, input)) { // We could add another AllowedNumericType value, but it doesn't // seem worth it just for a single property with such a weird range, // so we do the clamping here manually. if !number.was_calc() && (number.get() < MIN_FONT_WEIGHT || number.get() > MAX_FONT_WEIGHT) { return Err(input.new_custom_error( StyleParseErrorKind::UnspecifiedError )) } return Ok(AbsoluteFontWeight::Weight(number)) } Ok(try_match_ident_ignore_ascii_case! { input, "normal" => AbsoluteFontWeight::Normal, "bold" => AbsoluteFontWeight::Bold, }) } } /// The specified value of the `font-style` property, without the system font /// crap. 
pub type SpecifiedFontStyle = generics::FontStyle<Angle>; impl ToCss for SpecifiedFontStyle { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { match *self { generics::FontStyle::Normal => dest.write_str("normal"), generics::FontStyle::Italic => dest.write_str("italic"), generics::FontStyle::Oblique(ref angle) => { dest.write_str("oblique")?; if *angle != Self::default_angle() { dest.write_char(' ')?; angle.to_css(dest)?; } Ok(()) } } } } impl Parse for SpecifiedFontStyle { fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { Ok(try_match_ident_ignore_ascii_case! { input, "normal" => generics::FontStyle::Normal, "italic" => generics::FontStyle::Italic, "oblique" => { let angle = input.try(|input| Self::parse_angle(context, input)) .unwrap_or_else(|_| Self::default_angle()); generics::FontStyle::Oblique(angle) } }) } } impl ToComputedValue for SpecifiedFontStyle { type ComputedValue = computed::FontStyle; fn to_computed_value(&self, _: &Context) -> Self::ComputedValue { match *self { generics::FontStyle::Normal => generics::FontStyle::Normal, generics::FontStyle::Italic => generics::FontStyle::Italic, generics::FontStyle::Oblique(ref angle) => { generics::FontStyle::Oblique(FontStyleAngle(Self::compute_angle(angle))) } } } fn from_computed_value(computed: &Self::ComputedValue) -> Self { match *computed { generics::FontStyle::Normal => generics::FontStyle::Normal, generics::FontStyle::Italic => generics::FontStyle::Italic, generics::FontStyle::Oblique(ref angle) => { generics::FontStyle::Oblique(Angle::from_computed_value(&angle.0)) } } } } /// The default angle for `font-style: oblique`. /// /// NOTE(emilio): As of right now this diverges from the spec, which specifies /// 20, because it's not updated yet to account for the resolution in: /// /// https://github.com/w3c/csswg-drafts/issues/2295 pub const DEFAULT_FONT_STYLE_OBLIQUE_ANGLE_DEGREES: f32 = 14.; /// From https://drafts.csswg.org/css-fonts-4/#valdef-font-style-oblique-angle: /// /// Values less than -90deg or values greater than 90deg are /// invalid and are treated as parse errors. /// /// The maximum angle value that `font-style: oblique` should compute to. pub const FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES: f32 = 90.; /// The minimum angle value that `font-style: oblique` should compute to. pub const FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES: f32 = -90.; impl SpecifiedFontStyle { /// Gets a clamped angle from a specified Angle. pub fn compute_angle(angle: &Angle) -> ComputedAngle { ComputedAngle::Deg( angle.degrees() .max(FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES) .min(FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES) ) } /// Parse a suitable angle for font-style: oblique. pub fn parse_angle<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Angle, ParseError<'i>> { let angle = Angle::parse(context, input)?; if angle.was_calc() { return Ok(angle); } let degrees = angle.degrees(); if degrees < FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES || degrees > FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES { return Err(input.new_custom_error( StyleParseErrorKind::UnspecifiedError )); } return Ok(angle) } /// The default angle for `font-style: oblique`. pub fn default_angle() -> Angle { Angle::from_degrees( DEFAULT_FONT_STYLE_OBLIQUE_ANGLE_DEGREES, /* was_calc = */ false, ) } } /// The specified value of the `font-style` property. 
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] #[allow(missing_docs)] pub enum FontStyle { Specified(SpecifiedFontStyle), #[css(skip)] System(SystemFont), } impl FontStyle { /// Return the `normal` value. #[inline] pub fn normal() -> Self { FontStyle::Specified(generics::FontStyle::Normal) } system_font_methods!(FontStyle, font_style); } impl ToComputedValue for FontStyle { type ComputedValue = computed::FontStyle; fn to_computed_value(&self, context: &Context) -> Self::ComputedValue { match *self { FontStyle::Specified(ref specified) => specified.to_computed_value(context), FontStyle::System(..) => self.compute_system(context), } } fn from_computed_value(computed: &Self::ComputedValue) -> Self { FontStyle::Specified(SpecifiedFontStyle::from_computed_value(computed)) } } impl Parse for FontStyle { fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { Ok(FontStyle::Specified(SpecifiedFontStyle::parse(context, input)?)) } } /// A value for the `font-stretch` property. /// /// https://drafts.csswg.org/css-fonts-4/#font-stretch-prop #[allow(missing_docs)] #[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] pub enum FontStretch { Stretch(Percentage), Keyword(FontStretchKeyword), #[css(skip)] System(SystemFont), } /// A keyword value for `font-stretch`. #[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss)] #[allow(missing_docs)] pub enum FontStretchKeyword { Normal, Condensed, UltraCondensed, ExtraCondensed, SemiCondensed, SemiExpanded, Expanded, ExtraExpanded, UltraExpanded, } impl FontStretchKeyword { /// Resolves the value of the keyword as specified in: /// /// https://drafts.csswg.org/css-fonts-4/#font-stretch-prop pub fn compute(&self) -> ComputedPercentage { use self::FontStretchKeyword::*; ComputedPercentage(match *self { UltraCondensed => 0.5, ExtraCondensed => 0.625, Condensed => 0.75, SemiCondensed => 0.875, Normal => 1., SemiExpanded => 1.125, Expanded => 1.25, ExtraExpanded => 1.5, UltraExpanded => 2., }) } /// Does the opposite operation to `compute`, in order to serialize keywords /// if possible. pub fn from_percentage(percentage: f32) -> Option<Self> { use self::FontStretchKeyword::*; // NOTE(emilio): Can't use `match` because of rust-lang/rust#41620. if percentage == 0.5 { return Some(UltraCondensed); } if percentage == 0.625 { return Some(ExtraCondensed); } if percentage == 0.75 { return Some(Condensed); } if percentage == 0.875 { return Some(SemiCondensed); } if percentage == 1. { return Some(Normal); } if percentage == 1.125 { return Some(SemiExpanded); } if percentage == 1.25 { return Some(Expanded); } if percentage == 1.5 { return Some(ExtraExpanded); } if percentage == 2. { return Some(UltraExpanded); } None } } impl FontStretch { /// `normal`. pub fn normal() -> Self { FontStretch::Keyword(FontStretchKeyword::Normal) } system_font_methods!(FontStretch, font_stretch); } impl Parse for FontStretch { fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { // From https://drafts.csswg.org/css-fonts-4/#font-stretch-prop: // // Values less than 0% are not allowed and are treated as parse // errors. 
if let Ok(percentage) = input.try(|input| Percentage::parse_non_negative(context, input)) { return Ok(FontStretch::Stretch(percentage)); } Ok(FontStretch::Keyword(FontStretchKeyword::parse(input)?)) } } impl ToComputedValue for FontStretch { type ComputedValue = NonNegative<ComputedPercentage>; fn to_computed_value(&self, context: &Context) -> Self::ComputedValue { match *self { FontStretch::Stretch(ref percentage) => { NonNegative(percentage.to_computed_value(context)) }, FontStretch::Keyword(ref kw) => { NonNegative(kw.compute()) }, FontStretch::System(_) => self.compute_system(context), } } fn from_computed_value(computed: &Self::ComputedValue) -> Self { FontStretch::Stretch(Percentage::from_computed_value(&computed.0)) } } #[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] /// A specified font-size value pub enum FontSize { /// A length; e.g. 10px. Length(LengthOrPercentage), /// A keyword value, along with a ratio and absolute offset. /// The ratio in any specified keyword value /// will be 1 (with offset 0), but we cascade keywordness even /// after font-relative (percent and em) values /// have been applied, which is where the ratio /// comes in. The offset comes in if we cascaded a calc value, /// where the font-relative portion (em and percentage) will /// go into the ratio, and the remaining units all computed together /// will go into the offset. /// See bug 1355707. Keyword(KeywordInfo), /// font-size: smaller Smaller, /// font-size: larger Larger, /// Derived from a specified system font. #[css(skip)] System(SystemFont), } impl From<LengthOrPercentage> for FontSize { fn from(other: LengthOrPercentage) -> Self { FontSize::Length(other) } } /// Specifies a prioritized list of font family names or generic family names. #[derive(Clone, Debug, Eq, Hash, PartialEq, ToCss)] pub enum FontFamily { /// List of `font-family` #[css(comma)] Values(#[css(iterable)] FontFamilyList), /// System font #[css(skip)] System(SystemFont), } impl FontFamily { system_font_methods!(FontFamily, font_family); /// Parse a specified font-family value pub fn parse_specified<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> { input .parse_comma_separated(|input| SingleFontFamily::parse(input)) .map(|v| FontFamily::Values(FontFamilyList::new(v.into_boxed_slice()))) } #[cfg(feature = "gecko")] /// Return the generic ID if it is a single generic font pub fn single_generic(&self) -> Option<u8> { match *self { FontFamily::Values(ref values) => values.single_generic(), _ => None, } } } impl ToComputedValue for FontFamily { type ComputedValue = computed::FontFamily; fn to_computed_value(&self, context: &Context) -> Self::ComputedValue { match *self { FontFamily::Values(ref v) => computed::FontFamily(v.clone()), FontFamily::System(_) => self.compute_system(context), } } fn from_computed_value(other: &computed::FontFamily) -> Self { FontFamily::Values(other.0.clone()) } } #[cfg(feature = "gecko")] impl MallocSizeOf for FontFamily { fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { match *self { FontFamily::Values(ref v) => { // Although a SharedFontList object is refcounted, we always // attribute its size to the specified value. 
unsafe { bindings::Gecko_SharedFontList_SizeOfIncludingThis(v.0.get()) } }, FontFamily::System(_) => 0, } } } impl Parse for FontFamily { /// <family-name># /// <family-name> = <string> | [ <ident>+ ] /// TODO: <generic-family> fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontFamily, ParseError<'i>> { FontFamily::parse_specified(input) } } impl SpecifiedValueInfo for FontFamily {} /// `FamilyName::parse` is based on `SingleFontFamily::parse` and not the other way around /// because we want the former to exclude generic family keywords. impl Parse for FamilyName { fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { match SingleFontFamily::parse(input) { Ok(SingleFontFamily::FamilyName(name)) => Ok(name), Ok(SingleFontFamily::Generic(_)) => { Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) }, Err(e) => Err(e), } } } #[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] /// Preserve the readability of text when font fallback occurs pub enum FontSizeAdjust { /// None variant None, /// Number variant Number(Number), /// system font #[css(skip)] System(SystemFont), } impl FontSizeAdjust { #[inline] /// Default value of font-size-adjust pub fn none() -> Self { FontSizeAdjust::None } system_font_methods!(FontSizeAdjust, font_size_adjust); } impl ToComputedValue for FontSizeAdjust { type ComputedValue = computed::FontSizeAdjust; fn to_computed_value(&self, context: &Context) -> Self::ComputedValue { match *self { FontSizeAdjust::None => computed::FontSizeAdjust::None, FontSizeAdjust::Number(ref n) => { computed::FontSizeAdjust::Number(n.to_computed_value(context)) }, FontSizeAdjust::System(_) => self.compute_system(context), } } fn from_computed_value(computed: &computed::FontSizeAdjust) -> Self { match *computed { computed::FontSizeAdjust::None => FontSizeAdjust::None, computed::FontSizeAdjust::Number(ref v) => { FontSizeAdjust::Number(Number::from_computed_value(v)) }, } } } impl Parse for FontSizeAdjust { /// none | <number> fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontSizeAdjust, ParseError<'i>> { if input .try(|input| input.expect_ident_matching("none")) .is_ok() { return Ok(FontSizeAdjust::None); } Ok(FontSizeAdjust::Number(Number::parse_non_negative( context, input, )?)) } } /// Additional information for specified keyword-derived font sizes. pub type KeywordInfo = generics::KeywordInfo<NonNegativeLength>; impl KeywordInfo { /// Computes the final size for this font-size keyword, accounting for /// text-zoom. pub fn to_computed_value(&self, context: &Context) -> NonNegativeLength { let base = context.maybe_zoom_text(self.kw.to_computed_value(context)); base.scale_by(self.factor) + context.maybe_zoom_text(self.offset) } /// Given a parent keyword info (self), apply an additional factor/offset to it pub fn compose(self, factor: f32, offset: NonNegativeLength) -> Self { KeywordInfo { kw: self.kw, factor: self.factor * factor, offset: self.offset.scale_by(factor) + offset, } } } /// This is the ratio applied for font-size: larger /// and smaller by both Firefox and Chrome const LARGER_FONT_SIZE_RATIO: f32 = 1.2; /// The default font size. 
pub const FONT_MEDIUM_PX: i32 = 16; #[cfg(feature = "servo")] impl ToComputedValue for KeywordSize { type ComputedValue = NonNegativeLength; #[inline] fn to_computed_value(&self, _: &Context) -> NonNegativeLength { // https://drafts.csswg.org/css-fonts-3/#font-size-prop match *self { KeywordSize::XXSmall => Au::from_px(FONT_MEDIUM_PX) * 3 / 5, KeywordSize::XSmall => Au::from_px(FONT_MEDIUM_PX) * 3 / 4, KeywordSize::Small => Au::from_px(FONT_MEDIUM_PX) * 8 / 9, KeywordSize::Medium => Au::from_px(FONT_MEDIUM_PX), KeywordSize::Large => Au::from_px(FONT_MEDIUM_PX) * 6 / 5, KeywordSize::XLarge => Au::from_px(FONT_MEDIUM_PX) * 3 / 2, KeywordSize::XXLarge => Au::from_px(FONT_MEDIUM_PX) * 2, KeywordSize::XXXLarge => Au::from_px(FONT_MEDIUM_PX) * 3, }.into() } #[inline] fn from_computed_value(_: &NonNegativeLength) -> Self { unreachable!() } } #[cfg(feature = "gecko")] impl ToComputedValue for KeywordSize { type ComputedValue = NonNegativeLength; #[inline] fn to_computed_value(&self, cx: &Context) -> NonNegativeLength { use context::QuirksMode; use values::specified::length::au_to_int_px; // Data from nsRuleNode.cpp in Gecko // Mapping from base size and HTML size to pixels // The first index is (base_size - 9), the second is the // HTML size. "0" is CSS keyword xx-small, not HTML size 0, // since HTML size 0 is the same as 1. // // xxs xs s m l xl xxl - // - 0/1 2 3 4 5 6 7 static FONT_SIZE_MAPPING: [[i32; 8]; 8] = [ [9, 9, 9, 9, 11, 14, 18, 27], [9, 9, 9, 10, 12, 15, 20, 30], [9, 9, 10, 11, 13, 17, 22, 33], [9, 9, 10, 12, 14, 18, 24, 36], [9, 10, 12, 13, 16, 20, 26, 39], [9, 10, 12, 14, 17, 21, 28, 42], [9, 10, 13, 15, 18, 23, 30, 45], [9, 10, 13, 16, 18, 24, 32, 48], ]; // Data from nsRuleNode.cpp in Gecko // (https://dxr.mozilla.org/mozilla-central/rev/35fbf14b9/layout/style/nsRuleNode.cpp#3303) // // This table gives us compatibility with WinNav4 for the default fonts only. // In WinNav4, the default fonts were: // // Times/12pt == Times/16px at 96ppi // Courier/10pt == Courier/13px at 96ppi // // xxs xs s m l xl xxl - // - 1 2 3 4 5 6 7 static QUIRKS_FONT_SIZE_MAPPING: [[i32; 8]; 8] = [ [9, 9, 9, 9, 11, 14, 18, 28], [9, 9, 9, 10, 12, 15, 20, 31], [9, 9, 9, 11, 13, 17, 22, 34], [9, 9, 10, 12, 14, 18, 24, 37], [9, 9, 10, 13, 16, 20, 26, 40], [9, 9, 11, 14, 17, 21, 28, 42], [9, 10, 12, 15, 17, 23, 30, 45], [9, 10, 13, 16, 18, 24, 32, 48], ]; static FONT_SIZE_FACTORS: [i32; 8] = [60, 75, 89, 100, 120, 150, 200, 300]; let ref gecko_font = cx.style().get_font().gecko(); let base_size = unsafe { Atom::with(gecko_font.mLanguage.mRawPtr, |atom| { cx.font_metrics_provider .get_size(atom, gecko_font.mGenericID) .0 }) }; let base_size_px = au_to_int_px(base_size as f32); let html_size = self.html_size() as usize; if base_size_px >= 9 && base_size_px <= 16 { let mapping = if cx.quirks_mode == QuirksMode::Quirks { QUIRKS_FONT_SIZE_MAPPING } else { FONT_SIZE_MAPPING }; Au::from_px(mapping[(base_size_px - 9) as usize][html_size]).into() } else { Au(FONT_SIZE_FACTORS[html_size] * base_size / 100).into() } } #[inline] fn from_computed_value(_: &NonNegativeLength) -> Self { unreachable!() } } impl FontSize { /// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-a-legacy-font-size> pub fn from_html_size(size: u8) -> Self { FontSize::Keyword( match size { // If value is less than 1, let it be 1. 0 | 1 => KeywordSize::XSmall, 2 => KeywordSize::Small, 3 => KeywordSize::Medium, 4 => KeywordSize::Large, 5 => KeywordSize::XLarge, 6 => KeywordSize::XXLarge, // If value is greater than 7, let it be 7. 
_ => KeywordSize::XXXLarge, }.into(), ) } /// Compute it against a given base font size pub fn to_computed_value_against( &self, context: &Context, base_size: FontBaseSize, ) -> computed::FontSize { use values::specified::length::FontRelativeLength; let compose_keyword = |factor| { context .style() .get_parent_font() .clone_font_size() .keyword_info .map(|i| i.compose(factor, Au(0).into())) }; let mut info = None; let size = match *self { FontSize::Length(LengthOrPercentage::Length(NoCalcLength::FontRelative(value))) => { if let FontRelativeLength::Em(em) = value { // If the parent font was keyword-derived, this is too. // Tack the em unit onto the factor info = compose_keyword(em); } value.to_computed_value(context, base_size).into() }, FontSize::Length(LengthOrPercentage::Length(NoCalcLength::ServoCharacterWidth( value, ))) => value.to_computed_value(base_size.resolve(context)).into(), FontSize::Length(LengthOrPercentage::Length(NoCalcLength::Absolute(ref l))) => { context.maybe_zoom_text(l.to_computed_value(context).into()) }, FontSize::Length(LengthOrPercentage::Length(ref l)) => { l.to_computed_value(context).into() }, FontSize::Length(LengthOrPercentage::Percentage(pc)) => { // If the parent font was keyword-derived, this is too. // Tack the % onto the factor info = compose_keyword(pc.0); base_size.resolve(context).scale_by(pc.0).into() }, FontSize::Length(LengthOrPercentage::Calc(ref calc)) => { let parent = context.style().get_parent_font().clone_font_size(); // if we contain em/% units and the parent was keyword derived, this is too // Extract the ratio/offset and compose it if (calc.em.is_some() || calc.percentage.is_some()) && parent.keyword_info.is_some() { let ratio = calc.em.unwrap_or(0.) + calc.percentage.map_or(0., |pc| pc.0); // Compute it, but shave off the font-relative part (em, %). // // This will mean that other font-relative units like ex and // ch will be computed against the old parent font even when // the font changes. // // There's no particular "right answer" for what to do here, // Gecko recascades as if the font had changed, we instead // track the changes and reapply, which means that we carry // over old computed ex/ch values whilst Gecko recomputes // new ones. // // This is enough of an edge case to not really matter. let abs = calc.to_computed_value_zoomed( context, FontBaseSize::InheritedStyleButStripEmUnits, ).length_component(); info = parent.keyword_info.map(|i| i.compose(ratio, abs.into())); } let calc = calc.to_computed_value_zoomed(context, base_size); calc.to_used_value(Some(base_size.resolve(context))) .unwrap() .into() }, FontSize::Keyword(i) => { // As a specified keyword, this is keyword derived info = Some(i); i.to_computed_value(context) }, FontSize::Smaller => { info = compose_keyword(1. / LARGER_FONT_SIZE_RATIO); FontRelativeLength::Em(1. 
/ LARGER_FONT_SIZE_RATIO) .to_computed_value(context, base_size) .into() }, FontSize::Larger => { info = compose_keyword(LARGER_FONT_SIZE_RATIO); FontRelativeLength::Em(LARGER_FONT_SIZE_RATIO) .to_computed_value(context, base_size) .into() }, FontSize::System(_) => { #[cfg(feature = "servo")] { unreachable!() } #[cfg(feature = "gecko")] { context.cached_system_font.as_ref().unwrap().font_size.size } }, }; computed::FontSize { size: size, keyword_info: info, } } } impl ToComputedValue for FontSize { type ComputedValue = computed::FontSize; #[inline] fn to_computed_value(&self, context: &Context) -> computed::FontSize { self.to_computed_value_against(context, FontBaseSize::InheritedStyle) } #[inline] fn from_computed_value(computed: &computed::FontSize) -> Self { FontSize::Length(LengthOrPercentage::Length( ToComputedValue::from_computed_value(&computed.size.0), )) } } impl FontSize { system_font_methods!(FontSize); /// Get initial value for specified font size. #[inline] pub fn medium() -> Self { FontSize::Keyword(KeywordInfo::medium()) } /// Parses a font-size, with quirks. pub fn parse_quirky<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, allow_quirks: AllowQuirks, ) -> Result<FontSize, ParseError<'i>> { if let Ok(lop) = input.try(|i| LengthOrPercentage::parse_non_negative_quirky(context, i, allow_quirks)) { return Ok(FontSize::Length(lop)); } if let Ok(kw) = input.try(KeywordSize::parse) { return Ok(FontSize::Keyword(kw.into())); } try_match_ident_ignore_ascii_case! { input, "smaller" => Ok(FontSize::Smaller), "larger" => Ok(FontSize::Larger), } } #[allow(unused_mut)] /// Cascade `font-size` with specified value pub fn cascade_specified_font_size( context: &mut Context, specified_value: &FontSize, mut computed: computed::FontSize, ) { // we could use clone_language and clone_font_family() here but that's // expensive. Do it only in gecko mode for now. #[cfg(feature = "gecko")] { // if the language or generic changed, we need to recalculate // the font size from the stored font-size origin information. if context.builder.get_font().gecko().mLanguage.mRawPtr != context.builder.get_parent_font().gecko().mLanguage.mRawPtr || context.builder.get_font().gecko().mGenericID != context.builder.get_parent_font().gecko().mGenericID { if let Some(info) = computed.keyword_info { computed.size = info.to_computed_value(context); } } } let device = context.builder.device; let mut font = context.builder.take_font(); let parent_unconstrained = { let parent_font = context.builder.get_parent_font(); font.apply_font_size(computed, parent_font, device) }; context.builder.put_font(font); if let Some(parent) = parent_unconstrained { let new_unconstrained = specified_value .to_computed_value_against(context, FontBaseSize::Custom(Au::from(parent))); context .builder .mutate_font() .apply_unconstrained_font_size(new_unconstrained.size); } } } impl Parse for FontSize { /// <length> | <percentage> | <absolute-size> | <relative-size> fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontSize, ParseError<'i>> { FontSize::parse_quirky(context, input, AllowQuirks::No) } } bitflags! 
{ #[cfg_attr(feature = "servo", derive(MallocSizeOf))] /// Flags of variant alternates in bit struct VariantAlternatesParsingFlags: u8 { /// None of variant alternates enabled const NORMAL = 0; /// Historical forms const HISTORICAL_FORMS = 0x01; /// Stylistic Alternates const STYLISTIC = 0x02; /// Stylistic Sets const STYLESET = 0x04; /// Character Variant const CHARACTER_VARIANT = 0x08; /// Swash glyphs const SWASH = 0x10; /// Ornaments glyphs const ORNAMENTS = 0x20; /// Annotation forms const ANNOTATION = 0x40; } } #[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] /// Set of variant alternates pub enum VariantAlternates { /// Enables display of stylistic alternates #[css(function)] Stylistic(CustomIdent), /// Enables display with stylistic sets #[css(comma, function)] Styleset(#[css(iterable)] Box<[CustomIdent]>), /// Enables display of specific character variants #[css(comma, function)] CharacterVariant(#[css(iterable)] Box<[CustomIdent]>), /// Enables display of swash glyphs #[css(function)] Swash(CustomIdent), /// Enables replacement of default glyphs with ornaments #[css(function)] Ornaments(CustomIdent), /// Enables display of alternate annotation forms #[css(function)] Annotation(CustomIdent), /// Enables display of historical forms HistoricalForms, } #[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] /// List of Variant Alternates pub struct VariantAlternatesList( #[css(if_empty = "normal", iterable)] pub Box<[VariantAlternates]>, ); impl VariantAlternatesList { /// Returns the length of all variant alternates. pub fn len(&self) -> usize { self.0.iter().fold(0, |acc, alternate| match *alternate { VariantAlternates::Swash(_) | VariantAlternates::Stylistic(_) | VariantAlternates::Ornaments(_) | VariantAlternates::Annotation(_) => acc + 1, VariantAlternates::Styleset(ref slice) | VariantAlternates::CharacterVariant(ref slice) => acc + slice.len(), _ => acc, }) } } #[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] /// Control over the selection of these alternate glyphs pub enum FontVariantAlternates { /// Use alternative glyph from value Value(VariantAlternatesList), /// Use system font glyph #[css(skip)] System(SystemFont), } impl FontVariantAlternates { #[inline] /// Get initial specified value with VariantAlternatesList pub fn get_initial_specified_value() -> Self { FontVariantAlternates::Value(VariantAlternatesList(vec![].into_boxed_slice())) } system_font_methods!(FontVariantAlternates, font_variant_alternates); } impl ToComputedValue for FontVariantAlternates { type ComputedValue = computed::FontVariantAlternates; fn to_computed_value(&self, context: &Context) -> computed::FontVariantAlternates { match *self { FontVariantAlternates::Value(ref v) => v.clone(), FontVariantAlternates::System(_) => self.compute_system(context), } } fn from_computed_value(other: &computed::FontVariantAlternates) -> Self { FontVariantAlternates::Value(other.clone()) } } impl Parse for FontVariantAlternates { /// normal | /// [ stylistic(<feature-value-name>) || /// historical-forms || /// styleset(<feature-value-name> #) || /// character-variant(<feature-value-name> #) || /// swash(<feature-value-name>) || /// ornaments(<feature-value-name>) || /// annotation(<feature-value-name>) ] fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontVariantAlternates, ParseError<'i>> { let mut alternates = Vec::new(); if input .try(|input| input.expect_ident_matching("normal")) .is_ok() { return 
Ok(FontVariantAlternates::Value(VariantAlternatesList( alternates.into_boxed_slice(), ))); } let mut parsed_alternates = VariantAlternatesParsingFlags::empty(); macro_rules! check_if_parsed( ($input:expr, $flag:path) => ( if parsed_alternates.contains($flag) { return Err($input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } parsed_alternates |= $flag; ) ); while let Ok(_) = input.try(|input| { // FIXME: remove clone() when lifetimes are non-lexical match input.next()?.clone() { Token::Ident(ref value) if value.eq_ignore_ascii_case("historical-forms") => { check_if_parsed!(input, VariantAlternatesParsingFlags::HISTORICAL_FORMS); alternates.push(VariantAlternates::HistoricalForms); Ok(()) }, Token::Function(ref name) => input.parse_nested_block(|i| { match_ignore_ascii_case! { &name, "swash" => { check_if_parsed!(i, VariantAlternatesParsingFlags::SWASH); let location = i.current_source_location(); let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?; alternates.push(VariantAlternates::Swash(ident)); Ok(()) }, "stylistic" => { check_if_parsed!(i, VariantAlternatesParsingFlags::STYLISTIC); let location = i.current_source_location(); let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?; alternates.push(VariantAlternates::Stylistic(ident)); Ok(()) }, "ornaments" => { check_if_parsed!(i, VariantAlternatesParsingFlags::ORNAMENTS); let location = i.current_source_location(); let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?; alternates.push(VariantAlternates::Ornaments(ident)); Ok(()) }, "annotation" => { check_if_parsed!(i, VariantAlternatesParsingFlags::ANNOTATION); let location = i.current_source_location(); let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?; alternates.push(VariantAlternates::Annotation(ident)); Ok(()) }, "styleset" => { check_if_parsed!(i, VariantAlternatesParsingFlags::STYLESET); let idents = i.parse_comma_separated(|i| { let location = i.current_source_location(); CustomIdent::from_ident(location, i.expect_ident()?, &[]) })?; alternates.push(VariantAlternates::Styleset(idents.into_boxed_slice())); Ok(()) }, "character-variant" => { check_if_parsed!(i, VariantAlternatesParsingFlags::CHARACTER_VARIANT); let idents = i.parse_comma_separated(|i| { let location = i.current_source_location(); CustomIdent::from_ident(location, i.expect_ident()?, &[]) })?; alternates.push(VariantAlternates::CharacterVariant(idents.into_boxed_slice())); Ok(()) }, _ => return Err(i.new_custom_error(StyleParseErrorKind::UnspecifiedError)), } }), _ => Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)), } }) {} if parsed_alternates.is_empty() { return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)); } Ok(FontVariantAlternates::Value(VariantAlternatesList( alternates.into_boxed_slice(), ))) } } macro_rules! impl_variant_east_asian { { $( $(#[$($meta:tt)+])* $ident:ident / $css:expr => $gecko:ident = $value:expr, )+ } => { bitflags! 
{ #[derive(MallocSizeOf)] /// Variants for East Asian text pub struct VariantEastAsian: u16 { /// None of the features const NORMAL = 0; $( $(#[$($meta)+])* const $ident = $value; )+ } } impl ToCss for VariantEastAsian { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { if self.is_empty() { return dest.write_str("normal"); } let mut writer = SequenceWriter::new(dest, " "); $( if self.intersects(VariantEastAsian::$ident) { writer.raw_item($css)?; } )+ Ok(()) } } /// Asserts that all variant-east-asian matches its NS_FONT_VARIANT_EAST_ASIAN_* value. #[cfg(feature = "gecko")] #[inline] pub fn assert_variant_east_asian_matches() { use gecko_bindings::structs; $( debug_assert_eq!(structs::$gecko as u16, VariantEastAsian::$ident.bits()); )+ } impl SpecifiedValueInfo for VariantEastAsian { fn collect_completion_keywords(f: KeywordsCollectFn) { f(&["normal", $($css,)+]); } } } } impl_variant_east_asian! { /// Enables rendering of JIS78 forms (OpenType feature: jp78). JIS78 / "jis78" => NS_FONT_VARIANT_EAST_ASIAN_JIS78 = 0x01, /// Enables rendering of JIS83 forms (OpenType feature: jp83). JIS83 / "jis83" => NS_FONT_VARIANT_EAST_ASIAN_JIS83 = 0x02, /// Enables rendering of JIS90 forms (OpenType feature: jp90). JIS90 / "jis90" => NS_FONT_VARIANT_EAST_ASIAN_JIS90 = 0x04, /// Enables rendering of JIS2004 forms (OpenType feature: jp04). JIS04 / "jis04" => NS_FONT_VARIANT_EAST_ASIAN_JIS04 = 0x08, /// Enables rendering of simplified forms (OpenType feature: smpl). SIMPLIFIED / "simplified" => NS_FONT_VARIANT_EAST_ASIAN_SIMPLIFIED = 0x10, /// Enables rendering of traditional forms (OpenType feature: trad). TRADITIONAL / "traditional" => NS_FONT_VARIANT_EAST_ASIAN_TRADITIONAL = 0x20, /// Enables rendering of full-width variants (OpenType feature: fwid). FULL_WIDTH / "full-width" => NS_FONT_VARIANT_EAST_ASIAN_FULL_WIDTH = 0x40, /// Enables rendering of proportionally-spaced variants (OpenType feature: pwid). PROPORTIONAL_WIDTH / "proportional-width" => NS_FONT_VARIANT_EAST_ASIAN_PROP_WIDTH = 0x80, /// Enables display of ruby variant glyphs (OpenType feature: ruby). RUBY / "ruby" => NS_FONT_VARIANT_EAST_ASIAN_RUBY = 0x100, } #[cfg(feature = "gecko")] impl VariantEastAsian { /// Obtain a specified value from a Gecko keyword value /// /// Intended for use with presentation attributes, not style structs pub fn from_gecko_keyword(kw: u16) -> Self { Self::from_bits_truncate(kw) } /// Transform into gecko keyword pub fn to_gecko_keyword(self) -> u16 { self.bits() } } #[cfg(feature = "gecko")] impl_gecko_keyword_conversions!(VariantEastAsian, u16); #[cfg_attr(feature = "gecko", derive(MallocSizeOf))] #[derive(Clone, Debug, PartialEq, SpecifiedValueInfo, ToCss)] /// Allows control of glyph substitution and sizing in East Asian text.
pub enum FontVariantEastAsian { /// Value variant with `variant-east-asian` Value(VariantEastAsian), /// System font variant #[css(skip)] System(SystemFont), } impl FontVariantEastAsian { #[inline] /// Get default `font-variant-east-asian` with `empty` variant pub fn empty() -> Self { FontVariantEastAsian::Value(VariantEastAsian::empty()) } system_font_methods!(FontVariantEastAsian, font_variant_east_asian); } impl ToComputedValue for FontVariantEastAsian { type ComputedValue = computed::FontVariantEastAsian; fn to_computed_value(&self, context: &Context) -> computed::FontVariantEastAsian { match *self { FontVariantEastAsian::Value(ref v) => v.clone(), FontVariantEastAsian::System(_) => self.compute_system(context), } } fn from_computed_value(other: &computed::FontVariantEastAsian) -> Self { FontVariantEastAsian::Value(other.clone()) } } impl Parse for FontVariantEastAsian { /// normal | [ <east-asian-variant-values> || <east-asian-width-values> || ruby ] /// <east-asian-variant-values> = [ jis78 | jis83 | jis90 | jis04 | simplified | traditional ] /// <east-asian-width-values> = [ full-width | proportional-width ] fn parse<'i, 't>( _context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontVariantEastAsian, ParseError<'i>> { let mut result = VariantEastAsian::empty(); if input .try(|input| input.expect_ident_matching("normal")) .is_ok() { return Ok(FontVariantEastAsian::Value(result));<|fim▁hole|> Ok( match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?, "jis78" => exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 | VariantEastAsian::JIS90 | VariantEastAsian::JIS04 | VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL ) => VariantEastAsian::JIS78), "jis83" => exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 | VariantEastAsian::JIS90 | VariantEastAsian::JIS04 | VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL ) => VariantEastAsian::JIS83), "jis90" => exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 | VariantEastAsian::JIS90 | VariantEastAsian::JIS04 | VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL ) => VariantEastAsian::JIS90), "jis04" => exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 | VariantEastAsian::JIS90 | VariantEastAsian::JIS04 | VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL ) => VariantEastAsian::JIS04), "simplified" => exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 | VariantEastAsian::JIS90 | VariantEastAsian::JIS04 | VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL ) => VariantEastAsian::SIMPLIFIED), "traditional" => exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 | VariantEastAsian::JIS90 | VariantEastAsian::JIS04 | VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL ) => VariantEastAsian::TRADITIONAL), "full-width" => exclusive_value!((result, VariantEastAsian::FULL_WIDTH | VariantEastAsian::PROPORTIONAL_WIDTH ) => VariantEastAsian::FULL_WIDTH), "proportional-width" => exclusive_value!((result, VariantEastAsian::FULL_WIDTH | VariantEastAsian::PROPORTIONAL_WIDTH ) => VariantEastAsian::PROPORTIONAL_WIDTH), "ruby" => exclusive_value!((result, VariantEastAsian::RUBY) => VariantEastAsian::RUBY), _ => return Err(()), }, ) }) { result.insert(flag); } if !result.is_empty() { Ok(FontVariantEastAsian::Value(result)) } else { Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } } } macro_rules! 
impl_variant_ligatures { { $( $(#[$($meta:tt)+])* $ident:ident / $css:expr => $gecko:ident = $value:expr, )+ } => { bitflags! { #[derive(MallocSizeOf)] /// Variants of ligatures pub struct VariantLigatures: u16 { /// Specifies that common default features are enabled const NORMAL = 0; $( $(#[$($meta)+])* const $ident = $value; )+ } } impl ToCss for VariantLigatures { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { if self.is_empty() { return dest.write_str("normal"); } if self.contains(VariantLigatures::NONE) { return dest.write_str("none"); } let mut writer = SequenceWriter::new(dest, " "); $( if self.intersects(VariantLigatures::$ident) { writer.raw_item($css)?; } )+ Ok(()) } } /// Asserts that all variant-east-asian matches its NS_FONT_VARIANT_EAST_ASIAN_* value. #[cfg(feature = "gecko")] #[inline] pub fn assert_variant_ligatures_matches() { use gecko_bindings::structs; $( debug_assert_eq!(structs::$gecko as u16, VariantLigatures::$ident.bits()); )+ } impl SpecifiedValueInfo for VariantLigatures { fn collect_completion_keywords(f: KeywordsCollectFn) { f(&["normal", $($css,)+]); } } } } impl_variant_ligatures! { /// Specifies that all types of ligatures and contextual forms /// covered by this property are explicitly disabled NONE / "none" => NS_FONT_VARIANT_LIGATURES_NONE = 0x01, /// Enables display of common ligatures COMMON_LIGATURES / "common-ligatures" => NS_FONT_VARIANT_LIGATURES_COMMON = 0x02, /// Disables display of common ligatures NO_COMMON_LIGATURES / "no-common-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_COMMON = 0x04, /// Enables display of discretionary ligatures DISCRETIONARY_LIGATURES / "discretionary-ligatures" => NS_FONT_VARIANT_LIGATURES_DISCRETIONARY = 0x08, /// Disables display of discretionary ligatures NO_DISCRETIONARY_LIGATURES / "no-discretionary-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_DISCRETIONARY = 0x10, /// Enables display of historical ligatures HISTORICAL_LIGATURES / "historical-ligatures" => NS_FONT_VARIANT_LIGATURES_HISTORICAL = 0x20, /// Disables display of historical ligatures NO_HISTORICAL_LIGATURES / "no-historical-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_HISTORICAL = 0x40, /// Enables display of contextual alternates CONTEXTUAL / "contextual" => NS_FONT_VARIANT_LIGATURES_CONTEXTUAL = 0x80, /// Disables display of contextual alternates NO_CONTEXTUAL / "no-contextual" => NS_FONT_VARIANT_LIGATURES_NO_CONTEXTUAL = 0x100, } #[cfg(feature = "gecko")] impl VariantLigatures { /// Obtain a specified value from a Gecko keyword value /// /// Intended for use with presentation attributes, not style structs pub fn from_gecko_keyword(kw: u16) -> Self { Self::from_bits_truncate(kw) } /// Transform into gecko keyword pub fn to_gecko_keyword(self) -> u16 { self.bits() } } #[cfg(feature = "gecko")] impl_gecko_keyword_conversions!(VariantLigatures, u16); #[cfg_attr(feature = "gecko", derive(MallocSizeOf))] #[derive(Clone, Debug, PartialEq, SpecifiedValueInfo, ToCss)] /// Ligatures and contextual forms are ways of combining glyphs /// to produce more harmonized forms pub enum FontVariantLigatures { /// Value variant with `variant-ligatures` Value(VariantLigatures), /// System font variant #[css(skip)] System(SystemFont), } impl FontVariantLigatures { system_font_methods!(FontVariantLigatures, font_variant_ligatures); /// Default value of `font-variant-ligatures` as `empty` #[inline] pub fn empty() -> FontVariantLigatures { FontVariantLigatures::Value(VariantLigatures::empty()) } #[inline] /// Get `none` variant of 
`font-variant-ligatures` pub fn none() -> FontVariantLigatures { FontVariantLigatures::Value(VariantLigatures::NONE) } } impl ToComputedValue for FontVariantLigatures { type ComputedValue = computed::FontVariantLigatures; fn to_computed_value(&self, context: &Context) -> computed::FontVariantLigatures { match *self { FontVariantLigatures::Value(ref v) => v.clone(), FontVariantLigatures::System(_) => self.compute_system(context), } } fn from_computed_value(other: &computed::FontVariantLigatures) -> Self { FontVariantLigatures::Value(other.clone()) } } impl Parse for FontVariantLigatures { /// normal | none | /// [ <common-lig-values> || /// <discretionary-lig-values> || /// <historical-lig-values> || /// <contextual-alt-values> ] /// <common-lig-values> = [ common-ligatures | no-common-ligatures ] /// <discretionary-lig-values> = [ discretionary-ligatures | no-discretionary-ligatures ] /// <historical-lig-values> = [ historical-ligatures | no-historical-ligatures ] /// <contextual-alt-values> = [ contextual | no-contextual ] fn parse<'i, 't>( _context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontVariantLigatures, ParseError<'i>> { let mut result = VariantLigatures::empty(); if input .try(|input| input.expect_ident_matching("normal")) .is_ok() { return Ok(FontVariantLigatures::Value(result)); } if input .try(|input| input.expect_ident_matching("none")) .is_ok() { return Ok(FontVariantLigatures::Value(VariantLigatures::NONE)); } while let Ok(flag) = input.try(|input| { Ok( match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?, "common-ligatures" => exclusive_value!((result, VariantLigatures::COMMON_LIGATURES | VariantLigatures::NO_COMMON_LIGATURES ) => VariantLigatures::COMMON_LIGATURES), "no-common-ligatures" => exclusive_value!((result, VariantLigatures::COMMON_LIGATURES | VariantLigatures::NO_COMMON_LIGATURES ) => VariantLigatures::NO_COMMON_LIGATURES), "discretionary-ligatures" => exclusive_value!((result, VariantLigatures::DISCRETIONARY_LIGATURES | VariantLigatures::NO_DISCRETIONARY_LIGATURES ) => VariantLigatures::DISCRETIONARY_LIGATURES), "no-discretionary-ligatures" => exclusive_value!((result, VariantLigatures::DISCRETIONARY_LIGATURES | VariantLigatures::NO_DISCRETIONARY_LIGATURES ) => VariantLigatures::NO_DISCRETIONARY_LIGATURES), "historical-ligatures" => exclusive_value!((result, VariantLigatures::HISTORICAL_LIGATURES | VariantLigatures::NO_HISTORICAL_LIGATURES ) => VariantLigatures::HISTORICAL_LIGATURES), "no-historical-ligatures" => exclusive_value!((result, VariantLigatures::HISTORICAL_LIGATURES | VariantLigatures::NO_HISTORICAL_LIGATURES ) => VariantLigatures::NO_HISTORICAL_LIGATURES), "contextual" => exclusive_value!((result, VariantLigatures::CONTEXTUAL | VariantLigatures::NO_CONTEXTUAL ) => VariantLigatures::CONTEXTUAL), "no-contextual" => exclusive_value!((result, VariantLigatures::CONTEXTUAL | VariantLigatures::NO_CONTEXTUAL ) => VariantLigatures::NO_CONTEXTUAL), _ => return Err(()), }, ) }) { result.insert(flag); } if !result.is_empty() { Ok(FontVariantLigatures::Value(result)) } else { Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } } } macro_rules! impl_variant_numeric { { $( $(#[$($meta:tt)+])* $ident:ident / $css:expr => $gecko:ident = $value:expr, )+ } => { bitflags! { #[derive(MallocSizeOf)] /// Vairants of numeric values pub struct VariantNumeric: u8 { /// None of other variants are enabled. 
const NORMAL = 0; $( $(#[$($meta)+])* const $ident = $value; )+ } } impl ToCss for VariantNumeric { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { if self.is_empty() { return dest.write_str("normal"); } let mut writer = SequenceWriter::new(dest, " "); $( if self.intersects(VariantNumeric::$ident) { writer.raw_item($css)?; } )+ Ok(()) } } /// Asserts that all variant-east-asian matches its NS_FONT_VARIANT_EAST_ASIAN_* value. #[cfg(feature = "gecko")] #[inline] pub fn assert_variant_numeric_matches() { use gecko_bindings::structs; $( debug_assert_eq!(structs::$gecko as u8, VariantNumeric::$ident.bits()); )+ } impl SpecifiedValueInfo for VariantNumeric { fn collect_completion_keywords(f: KeywordsCollectFn) { f(&["normal", $($css,)+]); } } } } impl_variant_numeric! { /// Enables display of lining numerals. LINING_NUMS / "lining-nums" => NS_FONT_VARIANT_NUMERIC_LINING = 0x01, /// Enables display of old-style numerals. OLDSTYLE_NUMS / "oldstyle-nums" => NS_FONT_VARIANT_NUMERIC_OLDSTYLE = 0x02, /// Enables display of proportional numerals. PROPORTIONAL_NUMS / "proportional-nums" => NS_FONT_VARIANT_NUMERIC_PROPORTIONAL = 0x04, /// Enables display of tabular numerals. TABULAR_NUMS / "tabular-nums" => NS_FONT_VARIANT_NUMERIC_TABULAR = 0x08, /// Enables display of lining diagonal fractions. DIAGONAL_FRACTIONS / "diagonal-fractions" => NS_FONT_VARIANT_NUMERIC_DIAGONAL_FRACTIONS = 0x10, /// Enables display of lining stacked fractions. STACKED_FRACTIONS / "stacked-fractions" => NS_FONT_VARIANT_NUMERIC_STACKED_FRACTIONS = 0x20, /// Enables display of letter forms used with ordinal numbers. ORDINAL / "ordinal" => NS_FONT_VARIANT_NUMERIC_ORDINAL = 0x80, /// Enables display of slashed zeros. SLASHED_ZERO / "slashed-zero" => NS_FONT_VARIANT_NUMERIC_SLASHZERO = 0x40, } #[cfg(feature = "gecko")] impl VariantNumeric { /// Obtain a specified value from a Gecko keyword value /// /// Intended for use with presentation attributes, not style structs pub fn from_gecko_keyword(kw: u8) -> Self { Self::from_bits_truncate(kw) } /// Transform into gecko keyword pub fn to_gecko_keyword(self) -> u8 { self.bits() } } #[cfg(feature = "gecko")] impl_gecko_keyword_conversions!(VariantNumeric, u8); #[cfg_attr(feature = "gecko", derive(MallocSizeOf))] #[derive(Clone, Debug, PartialEq, SpecifiedValueInfo, ToCss)] /// Specifies control over numerical forms. 
pub enum FontVariantNumeric { /// Value variant with `variant-numeric` Value(VariantNumeric), /// System font #[css(skip)] System(SystemFont), } impl FontVariantNumeric { #[inline] /// Default value of `font-variant-numeric` as `empty` pub fn empty() -> FontVariantNumeric { FontVariantNumeric::Value(VariantNumeric::empty()) } system_font_methods!(FontVariantNumeric, font_variant_numeric); } impl ToComputedValue for FontVariantNumeric { type ComputedValue = computed::FontVariantNumeric; fn to_computed_value(&self, context: &Context) -> computed::FontVariantNumeric { match *self { FontVariantNumeric::Value(ref v) => v.clone(), FontVariantNumeric::System(_) => self.compute_system(context), } } fn from_computed_value(other: &computed::FontVariantNumeric) -> Self { FontVariantNumeric::Value(other.clone()) } } impl Parse for FontVariantNumeric { /// normal | /// [ <numeric-figure-values> || /// <numeric-spacing-values> || /// <numeric-fraction-values> || /// ordinal || /// slashed-zero ] /// <numeric-figure-values> = [ lining-nums | oldstyle-nums ] /// <numeric-spacing-values> = [ proportional-nums | tabular-nums ] /// <numeric-fraction-values> = [ diagonal-fractions | stacked-fractions ] fn parse<'i, 't>( _context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontVariantNumeric, ParseError<'i>> { let mut result = VariantNumeric::empty(); if input .try(|input| input.expect_ident_matching("normal")) .is_ok() { return Ok(FontVariantNumeric::Value(result)); } while let Ok(flag) = input.try(|input| { Ok( match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?, "ordinal" => exclusive_value!((result, VariantNumeric::ORDINAL) => VariantNumeric::ORDINAL), "slashed-zero" => exclusive_value!((result, VariantNumeric::SLASHED_ZERO) => VariantNumeric::SLASHED_ZERO), "lining-nums" => exclusive_value!((result, VariantNumeric::LINING_NUMS | VariantNumeric::OLDSTYLE_NUMS ) => VariantNumeric::LINING_NUMS), "oldstyle-nums" => exclusive_value!((result, VariantNumeric::LINING_NUMS | VariantNumeric::OLDSTYLE_NUMS ) => VariantNumeric::OLDSTYLE_NUMS), "proportional-nums" => exclusive_value!((result, VariantNumeric::PROPORTIONAL_NUMS | VariantNumeric::TABULAR_NUMS ) => VariantNumeric::PROPORTIONAL_NUMS), "tabular-nums" => exclusive_value!((result, VariantNumeric::PROPORTIONAL_NUMS | VariantNumeric::TABULAR_NUMS ) => VariantNumeric::TABULAR_NUMS), "diagonal-fractions" => exclusive_value!((result, VariantNumeric::DIAGONAL_FRACTIONS | VariantNumeric::STACKED_FRACTIONS ) => VariantNumeric::DIAGONAL_FRACTIONS), "stacked-fractions" => exclusive_value!((result, VariantNumeric::DIAGONAL_FRACTIONS | VariantNumeric::STACKED_FRACTIONS ) => VariantNumeric::STACKED_FRACTIONS), _ => return Err(()), }, ) }) { result.insert(flag); } if !result.is_empty() { Ok(FontVariantNumeric::Value(result)) } else { Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } } } /// This property provides low-level control over OpenType or TrueType font features. pub type SpecifiedFontFeatureSettings = FontSettings<FeatureTagValue<Integer>>; /// Define initial settings that apply when the font defined by an @font-face /// rule is rendered. 
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] pub enum FontFeatureSettings { /// Value of `FontSettings` Value(SpecifiedFontFeatureSettings), /// System font #[css(skip)] System(SystemFont), } impl FontFeatureSettings { #[inline] /// Get default value of `font-feature-settings` as normal pub fn normal() -> FontFeatureSettings { FontFeatureSettings::Value(FontSettings::normal()) } system_font_methods!(FontFeatureSettings, font_feature_settings); } impl ToComputedValue for FontFeatureSettings { type ComputedValue = computed::FontFeatureSettings; fn to_computed_value(&self, context: &Context) -> computed::FontFeatureSettings { match *self { FontFeatureSettings::Value(ref v) => v.to_computed_value(context), FontFeatureSettings::System(_) => self.compute_system(context), } } fn from_computed_value(other: &computed::FontFeatureSettings) -> Self { FontFeatureSettings::Value(ToComputedValue::from_computed_value(other)) } } impl Parse for FontFeatureSettings { /// normal | <feature-tag-value># fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontFeatureSettings, ParseError<'i>> { SpecifiedFontFeatureSettings::parse(context, input).map(FontFeatureSettings::Value) } } #[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)] /// Whether user agents are allowed to synthesize bold or oblique font faces /// when a font family lacks bold or italic faces pub struct FontSynthesis { /// If a `font-weight` is requested that the font family does not contain, /// the user agent may synthesize the requested weight from the weights /// that do exist in the font family. #[value_info(represents_keyword)] pub weight: bool, /// If a font-style is requested that the font family does not contain, /// the user agent may synthesize the requested style from the normal face in the font family. #[value_info(represents_keyword)] pub style: bool, } impl FontSynthesis { #[inline] /// Get the default value of font-synthesis pub fn get_initial_value() -> Self { FontSynthesis { weight: true, style: true, } } } impl Parse for FontSynthesis { fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontSynthesis, ParseError<'i>> { let mut result = FontSynthesis { weight: false, style: false, }; try_match_ident_ignore_ascii_case! 
{ input, "none" => Ok(result), "weight" => { result.weight = true; if input.try(|input| input.expect_ident_matching("style")).is_ok() { result.style = true; } Ok(result) }, "style" => { result.style = true; if input.try(|input| input.expect_ident_matching("weight")).is_ok() { result.weight = true; } Ok(result) }, } } } impl ToCss for FontSynthesis { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { if self.weight && self.style { dest.write_str("weight style") } else if self.style { dest.write_str("style") } else if self.weight { dest.write_str("weight") } else { dest.write_str("none") } } } #[cfg(feature = "gecko")] impl From<u8> for FontSynthesis { fn from(bits: u8) -> FontSynthesis { use gecko_bindings::structs; FontSynthesis { weight: bits & structs::NS_FONT_SYNTHESIS_WEIGHT as u8 != 0, style: bits & structs::NS_FONT_SYNTHESIS_STYLE as u8 != 0, } } } #[cfg(feature = "gecko")] impl From<FontSynthesis> for u8 { fn from(v: FontSynthesis) -> u8 { use gecko_bindings::structs; let mut bits: u8 = 0; if v.weight { bits |= structs::NS_FONT_SYNTHESIS_WEIGHT as u8; } if v.style { bits |= structs::NS_FONT_SYNTHESIS_STYLE as u8; } bits } } #[derive(Clone, Debug, Eq, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] /// Allows authors to explicitly specify the language system of the font, /// overriding the language system implied by the content language pub enum FontLanguageOverride { /// When rendering with OpenType fonts, /// the content language of the element is /// used to infer the OpenType language system Normal, /// Single three-letter case-sensitive OpenType language system tag, /// specifies the OpenType language system to be used instead of /// the language system implied by the language of the element Override(Box<str>), /// Use system font #[css(skip)] System(SystemFont), } impl FontLanguageOverride { #[inline] /// Get default value with `normal` pub fn normal() -> FontLanguageOverride { FontLanguageOverride::Normal } system_font_methods!(FontLanguageOverride, font_language_override); } impl ToComputedValue for FontLanguageOverride { type ComputedValue = computed::FontLanguageOverride; #[inline] fn to_computed_value(&self, context: &Context) -> computed::FontLanguageOverride { match *self { FontLanguageOverride::Normal => computed::FontLanguageOverride(0), FontLanguageOverride::Override(ref lang) => { if lang.is_empty() || lang.len() > 4 || !lang.is_ascii() { return computed::FontLanguageOverride(0); } let mut computed_lang = lang.to_string(); while computed_lang.len() < 4 { computed_lang.push(' '); } let bytes = computed_lang.into_bytes(); computed::FontLanguageOverride(BigEndian::read_u32(&bytes)) }, FontLanguageOverride::System(_) => self.compute_system(context), } } #[inline] fn from_computed_value(computed: &computed::FontLanguageOverride) -> Self { if computed.0 == 0 { return FontLanguageOverride::Normal; } let mut buf = [0; 4]; BigEndian::write_u32(&mut buf, computed.0); FontLanguageOverride::Override( if cfg!(debug_assertions) { String::from_utf8(buf.to_vec()).unwrap() } else { unsafe { String::from_utf8_unchecked(buf.to_vec()) } }.into_boxed_str(), ) } } impl Parse for FontLanguageOverride { /// normal | <string> fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontLanguageOverride, ParseError<'i>> { if input .try(|input| input.expect_ident_matching("normal")) .is_ok() { return Ok(FontLanguageOverride::Normal); } let string = input.expect_string()?; Ok(FontLanguageOverride::Override( 
string.as_ref().to_owned().into_boxed_str(), )) } } /// This property provides low-level control over OpenType or TrueType font /// variations. pub type SpecifiedFontVariationSettings = FontSettings<VariationValue<Number>>; /// Define initial settings that apply when the font defined by an @font-face /// rule is rendered. #[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)] pub enum FontVariationSettings { /// Value of `FontSettings` Value(SpecifiedFontVariationSettings), /// System font #[css(skip)] System(SystemFont), } impl FontVariationSettings { #[inline] /// Get default value of `font-variation-settings` as normal pub fn normal() -> FontVariationSettings { FontVariationSettings::Value(FontSettings::normal()) } system_font_methods!(FontVariationSettings, font_variation_settings); } impl ToComputedValue for FontVariationSettings { type ComputedValue = computed::FontVariationSettings; fn to_computed_value(&self, context: &Context) -> computed::FontVariationSettings { match *self { FontVariationSettings::Value(ref v) => v.to_computed_value(context), FontVariationSettings::System(_) => self.compute_system(context), } } fn from_computed_value(other: &computed::FontVariationSettings) -> Self { FontVariationSettings::Value(ToComputedValue::from_computed_value(other)) } } impl Parse for FontVariationSettings { /// normal | <variation-tag-value># fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<FontVariationSettings, ParseError<'i>> { SpecifiedFontVariationSettings::parse(context, input).map(FontVariationSettings::Value) } } fn parse_one_feature_value<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Integer, ParseError<'i>> { if let Ok(integer) = input.try(|i| Integer::parse_non_negative(context, i)) { return Ok(integer); } try_match_ident_ignore_ascii_case! { input, "on" => Ok(Integer::new(1)), "off" => Ok(Integer::new(0)), } } impl Parse for FeatureTagValue<Integer> { /// https://drafts.csswg.org/css-fonts-4/#feature-tag-value fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { let tag = FontTag::parse(context, input)?; let value = input .try(|i| parse_one_feature_value(context, i)) .unwrap_or_else(|_| Integer::new(1)); Ok(Self { tag, value }) } } impl Parse for VariationValue<Number> { /// This is the `<string> <number>` part of the font-variation-settings /// syntax. fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { let tag = FontTag::parse(context, input)?; let value = Number::parse(context, input)?; Ok(Self { tag, value }) } } #[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)] /// text-zoom. Enable if true, disable if false pub struct XTextZoom(#[css(skip)] pub bool); impl Parse for XTextZoom { fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<XTextZoom, ParseError<'i>> { debug_assert!( false, "Should be set directly by presentation attributes only." 
); Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } } #[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)] /// Internal property that reflects the lang attribute pub struct XLang(#[css(skip)] pub Atom); impl XLang { #[inline] /// Get default value for `-x-lang` pub fn get_initial_value() -> XLang { XLang(atom!("")) } } impl Parse for XLang { fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<XLang, ParseError<'i>> { debug_assert!( false, "Should be set directly by presentation attributes only." ); Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } } #[cfg_attr(feature = "gecko", derive(MallocSizeOf))] #[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss)] /// Specifies the minimum font size allowed due to changes in scriptlevel. /// Ref: https://wiki.mozilla.org/MathML:mstyle pub struct MozScriptMinSize(pub NoCalcLength); impl MozScriptMinSize { #[inline] /// Calculate initial value of -moz-script-min-size. pub fn get_initial_value() -> Length { Length::new(DEFAULT_SCRIPT_MIN_SIZE_PT as f32 * (AU_PER_PT / AU_PER_PX)) } } impl Parse for MozScriptMinSize { fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<MozScriptMinSize, ParseError<'i>> { debug_assert!( false, "Should be set directly by presentation attributes only." ); Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } } #[cfg_attr(feature = "gecko", derive(MallocSizeOf))] #[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss)] /// Changes the scriptlevel in effect for the children. /// Ref: https://wiki.mozilla.org/MathML:mstyle /// /// The main effect of scriptlevel is to control the font size. /// https://www.w3.org/TR/MathML3/chapter3.html#presm.scriptlevel pub enum MozScriptLevel { /// Change `font-size` relatively. Relative(i32), /// Change `font-size` absolutely. /// /// Should only be serialized by presentation attributes, so even though /// serialization for this would look the same as for the `Relative` /// variant, it is unexposed, so no big deal. #[css(function)] MozAbsolute(i32), /// Change `font-size` automatically. Auto, } impl Parse for MozScriptLevel { fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<MozScriptLevel, ParseError<'i>> { // We don't bother to handle calc here. if let Ok(i) = input.try(|i| i.expect_integer()) { return Ok(MozScriptLevel::Relative(i)); } input.expect_ident_matching("auto")?; Ok(MozScriptLevel::Auto) } } #[cfg_attr(feature = "gecko", derive(MallocSizeOf))] #[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)] /// Specifies the multiplier to be used to adjust font size /// due to changes in scriptlevel. /// /// Ref: https://www.w3.org/TR/MathML3/chapter3.html#presm.mstyle.attrs pub struct MozScriptSizeMultiplier(pub f32); impl MozScriptSizeMultiplier { #[inline] /// Get default value of `-moz-script-size-multiplier` pub fn get_initial_value() -> MozScriptSizeMultiplier { MozScriptSizeMultiplier(DEFAULT_SCRIPT_SIZE_MULTIPLIER as f32) } } impl Parse for MozScriptSizeMultiplier { fn parse<'i, 't>( _: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<MozScriptSizeMultiplier, ParseError<'i>> { debug_assert!( false, "Should be set directly by presentation attributes only." 
); Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } } impl From<f32> for MozScriptSizeMultiplier { fn from(v: f32) -> Self { MozScriptSizeMultiplier(v) } } impl From<MozScriptSizeMultiplier> for f32 { fn from(v: MozScriptSizeMultiplier) -> f32 { v.0 } }<|fim▁end|>
} while let Ok(flag) = input.try(|input| {
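Every font-variant-* longhand in the Servo sample above repeats one parsing pattern: each keyword maps to a single bit, and exclusive_value! rejects a keyword whose group mask already intersects the accumulated flags. Below is a standalone sketch of that pattern — plain u16 constants instead of Servo's bitflags!/cssparser machinery, with the keyword set trimmed to four entries for brevity; it is an illustration, not the crate's actual code.

const LINING_NUMS: u16 = 0x01;
const OLDSTYLE_NUMS: u16 = 0x02;
const PROPORTIONAL_NUMS: u16 = 0x04;
const TABULAR_NUMS: u16 = 0x08;

// Returns (flag for the keyword, mask of the group it is exclusive within).
fn keyword_flag(kw: &str) -> Option<(u16, u16)> {
    match kw {
        "lining-nums" => Some((LINING_NUMS, LINING_NUMS | OLDSTYLE_NUMS)),
        "oldstyle-nums" => Some((OLDSTYLE_NUMS, LINING_NUMS | OLDSTYLE_NUMS)),
        "proportional-nums" => Some((PROPORTIONAL_NUMS, PROPORTIONAL_NUMS | TABULAR_NUMS)),
        "tabular-nums" => Some((TABULAR_NUMS, PROPORTIONAL_NUMS | TABULAR_NUMS)),
        _ => None,
    }
}

fn parse_variant(input: &str) -> Result<u16, String> {
    let mut result: u16 = 0;
    for kw in input.split_whitespace() {
        let (flag, group) = keyword_flag(kw).ok_or(format!("unknown keyword: {}", kw))?;
        // The same check exclusive_value! performs: bail out if any bit of the
        // keyword's exclusivity group has already been set.
        if result & group != 0 {
            return Err(format!("{} conflicts with an earlier keyword", kw));
        }
        result |= flag;
    }
    // An empty result means no recognized keyword was seen, which the real
    // parsers report as UnspecifiedError.
    if result == 0 { Err("empty value".to_string()) } else { Ok(result) }
}

fn main() {
    assert!(parse_variant("lining-nums tabular-nums").is_ok());  // different groups
    assert!(parse_variant("lining-nums oldstyle-nums").is_err()); // same group
}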
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Docker integration using pkg/kubelet/api/v1alpha1/runtime/v1.pb.go. package containerdshim<|fim▁end|>
you may not use this file except in compliance with the License. You may obtain a copy of the License at
<|file_name|>sf_times.py<|end_file_name|><|fim▁begin|>from __future__ import division from libtbx.test_utils import approx_equal from libtbx.utils import Usage from libtbx import easy_run import libtbx.load_env import platform import time import sys, os op = os.path __this_script__ = "cctbx_project/fable/test/sf_times.py" # based on cctbx_project/compcomm/newsletter09/sf_times.py setup_dir = "/net/cci/setup/Linux" ifort_versions = ["intel121.sh", "intel111.sh", "ifort91.sh"] icc_versions = [ "intel121.sh", "intel111.sh", "icc101.sh", "icc91.sh"] gcc_versions = [ "gcc-4.6.1_fc8.sh", "gcc-4.5.3_fc8.sh", "gcc-4.4.6_fc8.sh", "gcc-4.3.6_fc8.sh", "gcc-4.2.4_fc8.sh"] fortran_template = r"""C %(this_script)s subroutine cos_wrapper(result, arg) REAL result REAL arg result = COS(arg) return end subroutine exp_wrapper(result, arg) REAL result REAL arg result = EXP(arg) return end subroutine sf(abcss, n_scatt, xyz, b_iso, n_refl, hkl, f_calc) implicit none REAL abcss(3) integer n_scatt REAL xyz(3, *) REAL b_iso(*) integer n_refl integer hkl(3, *) REAL f_calc(2, *) integer i_refl, i_scatt, j, h REAL phi, cphi, sphi, dss, ldw, dw, a, b DO i_refl=1,n_refl a = 0 b = 0 DO i_scatt=1,n_scatt phi = 0 DO j=1,3 phi = phi + hkl(j,i_refl) * xyz(j,i_scatt) enddo phi = phi * 2 * 3.1415926535897931 call cos_wrapper(cphi, phi) call cos_wrapper(sphi, phi - 3.1415926535897931*0.5) dss = 0 DO j=1,3 h = hkl(j,i_refl) dss = dss + h*h * abcss(j) enddo ldw = -0.25 * dss * b_iso(i_scatt) call exp_wrapper(dw, ldw) a = a + dw * cphi b = b + dw * sphi enddo f_calc(1, i_refl) = a f_calc(2, i_refl) = b enddo return end program run implicit none REAL abcss(3) integer n_scatt parameter(n_scatt=%(n_scatt)s) REAL xyz(3, n_scatt) REAL b_iso(n_scatt) integer n_refl parameter(n_refl=%(n_refl)s) integer hkl(3, n_refl) REAL f_calc(2, n_refl) integer i, j, jr REAL a, b, max_a, max_b abcss(1) = 1/(11.0*11.0) abcss(2) = 1/(12.0*12.0) abcss(3) = 1/(13.0*13.0) jr = 0 DO i=1,n_scatt DO j=1,3 jr = mod(jr*1366+150889, 714025) xyz(j,i) = (mod(jr, 20000) - 10000) / 10000.0 enddo enddo DO i=1,n_scatt jr = mod(jr*1366+150889, 714025) b_iso(i) = mod(jr, 10000) / 100.0 enddo if (n_scatt .le. 10) then DO i=1,n_scatt write(6, '(4(1x,f9.6))') & xyz(1,i), xyz(2,i), xyz(3, i), b_iso(i) enddo endif DO i=1,n_refl DO j=1,3 jr = mod(jr*1366+150889, 714025) hkl(j,i) = mod(jr, 10) - 5 enddo enddo call sf(abcss, n_scatt, xyz, b_iso, n_refl, hkl, f_calc) if (n_refl .le. 100) then DO i=1,n_refl write(6, '(3(1x,i3),1x,f12.6,1x,f12.6)') & hkl(1,i), hkl(2,i), hkl(3,i), & f_calc(1,i), f_calc(2,i) enddo else max_a = 0 max_b = 0 DO i=1,n_refl a = f_calc(1,i) b = f_calc(2,i) if (max_a .lt. a) max_a = a if (max_b .lt. 
b) max_b = b enddo write(6, '(2(1x,f12.6))') max_a, max_b endif end """ def compare_with_cctbx_structure_factors(n_scatt, n_refl, output_lines): from cctbx import xray from cctbx import miller from cctbx import crystal from cctbx.array_family import flex crystal_symmetry = crystal.symmetry( unit_cell=(11,12,13,90,90,90), space_group_symbol="P1") scatterers = flex.xray_scatterer() miller_indices = flex.miller_index() f_calc = flex.complex_double() for line in output_lines: flds = line.split() assert len(flds) in [4,5] if (len(flds) == 4): x,y,z,b_iso = [float(s) for s in flds] scatterers.append( xray.scatterer(site=(x,y,z), b=b_iso, scattering_type="const")) else: miller_indices.append([int(s) for s in flds[:3]]) f_calc.append(complex(float(flds[3]), float(flds[4]))) assert scatterers.size() == n_scatt assert miller_indices.size() == n_refl xs = xray.structure( crystal_symmetry=crystal_symmetry, scatterers=scatterers) fc = miller_array = miller.set( crystal_symmetry=crystal_symmetry, indices=miller_indices, anomalous_flag=False).array(data=f_calc) fc2 = fc.structure_factors_from_scatterers( xray_structure=xs, algorithm="direct", cos_sin_table=False).f_calc() for f1,f2 in zip(fc.data(), fc2.data()): assert approx_equal(f1, f2, eps=1e-5) def build_run( setup_cmd, ld_preload_flag, n_scatt, n_refl, build_cmd, check_max_a_b): if (op.isfile("a.out")): os.remove("a.out") assert not op.isfile("a.out") print build_cmd buffers = easy_run.fully_buffered(command=build_cmd) msg = buffers.format_errors_if_any() if (msg is not None): if (0): print build_cmd print print msg print STOP() return None assert op.isfile("a.out") run_cmd = setup_cmd if (ld_preload_flag): run_cmd += 'env LD_PRELOAD='\ '"/net/marbles/raid1/rwgk/dist/opt_resources/linux64/libimf.so:"'\ '"/net/marbles/raid1/rwgk/dist/opt_resources/linux64/libirc.so" ' utimes = [] run_cmd += '/usr/bin/time -p ./a.out' def run_once(): buffers = easy_run.fully_buffered(command=run_cmd) if (len(buffers.stderr_lines) != 3): print "v"*79 print "\n".join(buffers.stderr_lines) print "^"*79 raise RuntimeError( "Unexpected number of output lines" " (3 expected; acutal output see above).") if (n_scatt == 0): pass elif (n_scatt <= 10 and n_refl <= 100): assert len(buffers.stdout_lines) == n_scatt + n_refl else: assert len(buffers.stdout_lines) == 1 max_a, max_b = [float(s) for s in buffers.stdout_lines[0].split()] if (check_max_a_b): if (n_scatt == 2000 and n_refl == 20000): assert approx_equal(max_a, 35.047157, eps=1e-4) assert approx_equal(max_b, 25.212738, eps=1e-4) elif (n_scatt == 100 and n_refl == 1000): assert approx_equal(max_a, 4.493645, eps=1e-4) assert approx_equal(max_b, 10.515532, eps=1e-4) elif (n_scatt <= 10 and n_refl <= 100): if (libtbx.env.has_module(name="cctbx")): compare_with_cctbx_structure_factors( n_scatt=n_scatt, n_refl=n_refl, output_lines=buffers.stdout_lines) else: raise RuntimeError, (max_a, max_b) utime = float(buffers.stderr_lines[1].split()[1]) utimes.append(utime) print "sample utime: %.2f" % utime sys.stdout.flush() for _ in xrange(8): run_once() return min(utimes) def finalize_cpp_build_cmd(source_cpp): from fable import simple_compilation comp_env = simple_compilation.environment() return comp_env.assemble_include_search_paths(no_quotes=False) \ + " " + source_cpp def write_build_run( setup_cmd, ld_preload_flag, n_scatt, n_refl, real, lang, build_cmd, replace_cos, replace_exp): this_script = __this_script__ for_txt = fortran_template % vars() if (replace_cos): for_txt = for_txt.replace( "COS(arg)", "arg / (abs(arg)+1.0)") if 
(replace_exp): for_txt = for_txt.replace( "EXP(arg)", "max(0.0, 1.0 - arg*arg)") for_txt = for_txt.replace("REAL", real) open("tmp.f", "w").write(for_txt) from fable import cout cpp_txt = cout.process( file_names=["tmp.f"], namespace="sf_test", fem_do_safe=False, inline_all=True) open("tmp.cpp", "w").write("\n".join(cpp_txt)+"\n") if (lang.lower() == "f"): build_cmd += " tmp.f" elif (lang.lower() == "c"): build_cmd += finalize_cpp_build_cmd("tmp.cpp") else: raise RuntimeError('Unknown lang: "%s"' % lang) return build_run( setup_cmd=setup_cmd, ld_preload_flag=ld_preload_flag, n_scatt=n_scatt, n_refl=n_refl, build_cmd=build_cmd, check_max_a_b=(not (replace_cos or replace_exp))) def run_combinations( compiler_versions, all_utimes, n_scatt, n_refl, compiler_build_opts_list, real_list): for lang,setup_sh_list,compiler,build_opts in compiler_build_opts_list: for setup_sh in setup_sh_list: if (setup_sh is None): setup_cmd = "" else: setup_cmd = ". %s/%s; " % (setup_dir, setup_sh) compiler_version = easy_run.fully_buffered( command=setup_cmd+compiler+" --version", join_stdout_stderr=True).stdout_lines[0] if (lang in ["f", "c"]): ld_preload_flags = [False, True] else: ld_preload_flags = [False] for ld_preload_flag in ld_preload_flags: iml = ["", " Intel Math Lib"][int(ld_preload_flag)] compiler_versions.append(compiler_version + iml) build_cmd = " ".join([setup_cmd+compiler, build_opts]) print build_cmd utimes = [] if (n_scatt != 0): for real in real_list: print " %s" % real for replace_cos in [False, True]: print " replace_cos", replace_cos for replace_exp in [False, True]: print " replace_exp", replace_exp sys.stdout.flush() if (compiler_version != "n/a"): utime = write_build_run( setup_cmd=setup_cmd, ld_preload_flag=ld_preload_flag, n_scatt=n_scatt, n_refl=n_refl, real=real, lang=lang, build_cmd=build_cmd, replace_cos=replace_cos, replace_exp=replace_exp) if (utime is not None): print " %4.2f" % utime else: utime = -1.0 print " err" else: utime = -1.0 print " n/a" utimes.append(utime) sys.stdout.flush() else: if (lang.lower() == "f"): f_source = libtbx.env.find_in_repositories( relative_path="lapack_fem/dsyev_test.f", test=op.isfile, optional=False) build_cmd_compl = build_cmd + " " + f_source else: cpp_source = libtbx.env.find_in_repositories( relative_path="lapack_fem/dsyev_test.cpp", test=op.isfile, optional=False) build_cmd_compl = build_cmd + finalize_cpp_build_cmd(cpp_source) utime = build_run( setup_cmd=setup_cmd, ld_preload_flag=ld_preload_flag, n_scatt=n_scatt, n_refl=n_refl, build_cmd=build_cmd_compl, check_max_a_b=False) if (utime is None): print "err" utime = -1.0 else: print "min utime: %.2f" % utime sys.stdout.flush() utimes.append(utime) all_utimes.append((utimes, build_cmd + iml)) def usage(): raise Usage("fable.python sf_times.py unit_test|quick|production") def run(args): if (len(args) != 1): usage() t_start = time.time() build_platform = platform.platform() build_node = platform.node()<|fim▁hole|> n_scatt, n_refl = 10, 100 elif (args[0] == "quick"): n_scatt, n_refl = 100, 1000 elif (args[0] == "production"): n_scatt, n_refl = 2000, 20000 elif (args[0] == "dsyev"): n_scatt, n_refl = 0, 0 else: usage() gcc_sh = gcc_versions + [None] icc_sh = icc_versions if (args[0] == "quick"): gcc_sh = gcc_sh[:2] icc_sh = icc_sh[:1] all_utimes = [] run_combinations( compiler_versions, all_utimes, n_scatt=n_scatt, n_refl=n_refl, compiler_build_opts_list=[ ("F", ifort_versions, "ifort", "-O"), ("f", gcc_sh, "gfortran", "-O3 -ffast-math"), ("f", gcc_sh, "gfortran", "-O3 -ffast-math 
-march=native"), ("C", icc_sh, "icpc", "-O"), ("c", gcc_sh, "g++", "-O3 -ffast-math"), ("c", gcc_sh, "g++", "-O3 -ffast-math -march=native"), ("c", [None], "clang++", "-O3 -U__GXX_WEAK__ -Wno-logical-op-parentheses -ffast-math"), ("c", [None], "clang++", "-O3 -U__GXX_WEAK__ -Wno-logical-op-parentheses -ffast-math" " -march=native")], real_list=["real*4", "real*8"]) print print "current_platform:", platform.platform() print "current_node:", platform.node() print "build_platform:", build_platform print "build_node:", build_node for compiler_version in compiler_versions: print "compiler:", compiler_version if (n_scatt != 0): print "n_scatt * n_refl: %d * %d" % (n_scatt, n_refl) print '''\ "s" or "d": single-precision or double-precision floating-point variables "E" or "e": using the library exp(arg) function or "max(0.0, 1.0 - arg*arg)" "C" or "c": using the library cos(arg) function or "arg / (abs(arg)+1.0)"''' print " sEC seC sEc sec dEC deC dEc dec" else: print "dsyev times:" useful_utimes = [] for utimes,build_cmd in all_utimes: if (max(utimes) != -1.0): print " ".join(["%6.2f" % u for u in utimes]), build_cmd useful_utimes.append((utimes,build_cmd)) if (len(useful_utimes) > 1): print "Relative to first:" for utimes,build_cmd in useful_utimes: print " ".join(["%6.2f" % (u/max(u0,0.01)) for u,u0 in zip(utimes,useful_utimes[0][0])]), build_cmd print "Wall clock time: %.2f s" % (time.time()-t_start) if (__name__ == "__main__"): run(args=sys.argv[1:])<|fim▁end|>
compiler_versions = [] if (args[0] == "unit_test"):
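sf_times.py keeps its benchmark inputs reproducible by embedding the multiplicative congruential generator jr = (jr*1366 + 150889) mod 714025 in the Fortran template, so every compiler build factors identical scatterer data and the max |F| values stay comparable. A sketch of the same recurrence in Rust (used here purely for illustration; the constants and the coordinate mapping are copied from the template above):

// jr = (jr * 1366 + 150889) mod 714025 — the template's deterministic LCG.
struct Lcg {
    state: u64,
}

impl Lcg {
    fn next(&mut self) -> u64 {
        self.state = (self.state * 1366 + 150889) % 714025;
        self.state
    }

    // Fractional coordinate in [-1.0, 1.0), as in the template:
    // (mod(jr, 20000) - 10000) / 10000.0
    fn coord(&mut self) -> f32 {
        ((self.next() % 20000) as i64 - 10000) as f32 / 10000.0
    }
}

fn main() {
    let mut rng = Lcg { state: 0 };
    // First scatterer site; identical on every run, which is what lets the
    // benchmark assert fixed max_a/max_b values across compiler builds.
    let site = [rng.coord(), rng.coord(), rng.coord()];
    println!("{:?}", site);
}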
<|file_name|>wikipages.rs<|end_file_name|><|fim▁begin|>use structs::common::{Taiga}; pub struct WikiPagesProxy<'a> { pub taiga_client: &'a Taiga, pub project_id: Option<i64>, } pub struct WikiPageProxy<'a> { pub taiga_client: &'a Taiga, pub wikipage_id: i64, } #[derive(Serialize, Deserialize, Debug)]<|fim▁hole|> pub content: String, pub is_watcher: bool, pub total_watchers: i64, pub editions: i64, pub version: i64, pub project: i64, pub owner: Option<i64>, pub last_modifier: Option<i64>, pub created_date: String, pub modified_date: String, } #[derive(Serialize, Deserialize, Debug)] pub struct WikiPageDetail { pub id: i64, pub slug: String, pub html: String, pub content: String, pub is_watcher: bool, pub total_watchers: i64, pub editions: i64, pub version: i64, pub project: i64, pub owner: Option<i64>, pub last_modifier: Option<i64>, pub created_date: String, pub modified_date: String, }<|fim▁end|>
pub struct WikiPageListItem { pub id: i64, pub slug: String, pub html: String,
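wikipages.rs only declares the proxy types and serde-derived DTOs. A hypothetical usage sketch of what those Serialize/Deserialize derives buy the client — it assumes the modern serde and serde_json crates rather than the older serde codegen this file was written against, and MiniWikiPage, its fields, and the sample JSON are all illustrative, not part of the Taiga API:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct MiniWikiPage {
    id: i64,
    slug: String,
    content: String,
    version: i64,
}

fn main() -> Result<(), serde_json::Error> {
    // Deserialize an API-style payload into the typed struct...
    let json = r#"{"id": 7, "slug": "home", "content": "hello", "version": 1}"#;
    let page: MiniWikiPage = serde_json::from_str(json)?;
    assert_eq!(page.slug, "home");
    // ...and serialize it back, e.g. for a PUT/PATCH request body.
    println!("{}", serde_json::to_string(&page)?);
    Ok(())
}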
<|file_name|>views.go<|end_file_name|><|fim▁begin|>package user import ( "errors" "github.com/ungerik/go-start/view" ) // The confirmation code will be passed in the GET parameter "code" func EmailConfirmationView(profileURL view.URL) view.View { return view.DynamicView( func(ctx *view.Context) (view.View, error) { confirmationCode, ok := ctx.Request.Params["code"] if !ok {<|fim▁hole|> } userID, email, confirmed, err := ConfirmEmail(confirmationCode) if !confirmed { return view.DIV("error", view.HTML("Invalid email confirmation code!")), err } LoginID(ctx.Session, userID) return view.Views{ view.DIV("success", view.Printf("Email address %s confirmed!", email)), &view.If{ Condition: profileURL != nil, Content: view.P( view.HTML("Continue to your "), view.A(profileURL, "profile..."), ), }, }, nil }, ) } func NewLoginForm(buttonText, class, errorMessageClass, successMessageClass string, redirectURL view.URL) view.View { return view.DynamicView( func(ctx *view.Context) (v view.View, err error) { if from, ok := ctx.Request.Params["from"]; ok { redirectURL = view.StringURL(from) } model := &LoginFormModel{} if email, ok := ctx.Request.Params["email"]; ok { model.Email.Set(email) } form := &view.Form{ Class: class, ErrorMessageClass: errorMessageClass, SuccessMessageClass: successMessageClass, SuccessMessage: "Login successful", SubmitButtonText: buttonText, FormID: "gostart_user_login", GetModel: view.FormModel(model), OnSubmit: func(form *view.Form, formModel interface{}, ctx *view.Context) (string, view.URL, error) { m := formModel.(*LoginFormModel) ok, err := LoginEmailPassword(ctx.Session, m.Email.Get(), m.Password.Get()) if err != nil { if view.Config.Debug.Mode { return "", nil, err } else { return "", nil, errors.New("An internal error ocoured") } } if !ok { return "", nil, errors.New("Wrong email and password combination") } return "", redirectURL, nil }, } return form, nil }, ) } // If redirect is nil, the redirect will go to "/" func LogoutView(redirect view.URL) view.View { return view.RenderView( func(ctx *view.Context) (err error) { Logout(ctx.Session) if redirect != nil { return view.Redirect(redirect.URL(ctx)) } return view.Redirect("/") }, ) } // confirmationPage must have the confirmation code as first URL parameter func NewSignupForm(buttonText, class, errorMessageClass, successMessageClass string, confirmationURL, redirectURL view.URL) *view.Form { return &view.Form{ Class: class, ErrorMessageClass: errorMessageClass, SuccessMessageClass: successMessageClass, SuccessMessage: Config.ConfirmationMessage.Sent, SubmitButtonText: buttonText, FormID: "gostart_user_signup", GetModel: func(form *view.Form, ctx *view.Context) (interface{}, error) { return &EmailPasswordFormModel{}, nil }, OnSubmit: func(form *view.Form, formModel interface{}, ctx *view.Context) (string, view.URL, error) { m := formModel.(*EmailPasswordFormModel) email := m.Email.Get() password := m.Password1.Get() var user User found, err := WithEmail(email, &user) if err != nil { return "", nil, err } if found { if user.EmailPasswordConfirmed() { return "", nil, errors.New("A user with that email and a password already exists") } user.Password.SetHashed(password) } else { // Config.Collection.InitDocument(&user) err = user.SetEmailPassword(email, password) if err != nil { return "", nil, err } } err = <-user.Email[0].SendConfirmationEmail(ctx, confirmationURL) if err != nil { return "", nil, err } if found { err = Config.Collection.UpdateSubDocumentWithID(user.ID, "", &user) } else { err = 
Config.Collection.InitAndSaveDocument(&user) } return "", redirectURL, err }, } }<|fim▁end|>
return view.DIV("error", view.HTML("Invalid email confirmation code!")), nil
<|file_name|>iDibo_tests.py<|end_file_name|><|fim▁begin|>from nose.tools import * import iDibo def setup(): print "SETUP!" def teardown(): print "TEAR DOWN!"<|fim▁hole|> print "I RAN!"<|fim▁end|>
def test_basic():
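nose discovers the module-level setup and teardown above and runs them around each test. Rust's built-in harness has no such hooks, so an analogous sketch (run with cargo test; the Fixture type is invented for illustration) leans on an RAII guard, which also fires teardown when a test panics:

struct Fixture;

impl Fixture {
    fn set_up() -> Fixture {
        println!("SETUP!");
        Fixture
    }
}

impl Drop for Fixture {
    // Drop runs during unwinding too, so teardown survives a failed assert.
    fn drop(&mut self) {
        println!("TEAR DOWN!");
    }
}

#[cfg(test)]
mod tests {
    use super::Fixture;

    #[test]
    fn test_basic() {
        let _fixture = Fixture::set_up();
        println!("I RAN!");
    }
}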
<|file_name|>musplayer_qt.cpp<|end_file_name|><|fim▁begin|>#ifndef MUSPLAY_USE_WINAPI #include <QtDebug> #include <QFileDialog> #include <QMessageBox> #include <QSlider> #include <QSettings> #include <QMenu> #include <QDesktopServices> #include <QUrl> #include "ui_mainwindow.h" #include "musplayer_qt.h" #include "../Player/mus_player.h" #include "../AssocFiles/assoc_files.h" #include "../Effects/reverb.h" #include <math.h> #include "../version.h" MusPlayer_Qt::MusPlayer_Qt(QWidget *parent) : QMainWindow(parent), MusPlayerBase(), ui(new Ui::MainWindow) { ui->setupUi(this); PGE_MusicPlayer::setMainWindow(this); #ifdef Q_OS_MAC this->setWindowIcon(QIcon(":/cat_musplay.icns")); #endif #ifdef Q_OS_WIN this->setWindowIcon(QIcon(":/cat_musplay.ico")); #endif ui->fmbank->clear(); int totalBakns = MIX_ADLMIDI_getTotalBanks(); const char *const *names = MIX_ADLMIDI_getBankNames(); for(int i = 0; i < totalBakns; i++) ui->fmbank->addItem(QString("%1 = %2").arg(i).arg(names[i])); ui->centralWidget->window()->setWindowFlags( Qt::WindowTitleHint | Qt::WindowSystemMenuHint | Qt::WindowCloseButtonHint | Qt::WindowMinimizeButtonHint | Qt::MSWindowsFixedSizeDialogHint); connect(&m_blinker, SIGNAL(timeout()), this, SLOT(_blink_red())); connect(this, SIGNAL(customContextMenuRequested(QPoint)), this, SLOT(contextMenu(QPoint))); connect(ui->volume, &QSlider::valueChanged, [this](int x) { on_volume_valueChanged(x); }); connect(ui->fmbank, static_cast<void(QComboBox::*)(int)>(&QComboBox::currentIndexChanged), this, [this](int x) { on_fmbank_currentIndexChanged(x); }); connect(ui->volumeModel, static_cast<void(QComboBox::*)(int)>(&QComboBox::currentIndexChanged), this, [this](int x) { on_volumeModel_currentIndexChanged(x); }); connect(ui->tremolo, &QCheckBox::clicked, this, [this](int x) { on_tremolo_clicked(x); }); connect(ui->vibrato, &QCheckBox::clicked, this, [this](int x) { on_vibrato_clicked(x); }); connect(ui->modulation, &QCheckBox::clicked, this, [this](int x) { on_modulation_clicked(x); }); connect(ui->adlibMode, &QCheckBox::clicked, this, [this](int x) { on_adlibMode_clicked(x); }); connect(ui->logVolumes, &QCheckBox::clicked, this, [this](int x) { on_logVolumes_clicked(x); }); connect(ui->playListPush, &QPushButton::clicked, this, &MusPlayer_Qt::playList_pushCurrent); connect(ui->playListPop, &QPushButton::clicked, this, &MusPlayer_Qt::playList_popCurrent); QApplication::setOrganizationName(_COMPANY); QApplication::setOrganizationDomain(_PGE_URL); QApplication::setApplicationName("PGE Music Player"); ui->playList->setModel(&playList); ui->playList->setVisible(false); ui->playListPush->setVisible(false); ui->playListPop->setVisible(false); ui->sfx_testing->setVisible(false); QSettings setup; restoreGeometry(setup.value("Window-Geometry").toByteArray()); ui->mididevice->setCurrentIndex(setup.value("MIDI-Device", 0).toInt()); ui->opnmidi_extra->setVisible(ui->mididevice->currentIndex() == 3); ui->adlmidi_xtra->setVisible(ui->mididevice->currentIndex() == 0); switch(ui->mididevice->currentIndex()) { case 0: MIX_SetMidiDevice(MIDI_ADLMIDI); break; case 1: MIX_SetMidiDevice(MIDI_Timidity); break; case 2: MIX_SetMidiDevice(MIDI_Native); break; case 3: MIX_SetMidiDevice(MIDI_OPNMIDI); break; case 4: MIX_SetMidiDevice(MIDI_Fluidsynth); break; default: MIX_SetMidiDevice(MIDI_ADLMIDI); break; } ui->fmbank->setCurrentIndex(setup.value("ADLMIDI-Bank-ID", 58).toInt()); MIX_ADLMIDI_setBankID(ui->fmbank->currentIndex()); ui->volumeModel->setCurrentIndex(setup.value("ADLMIDI-VolumeModel", 0).toInt()); 
MIX_ADLMIDI_setVolumeModel(ui->volumeModel->currentIndex()); ui->tremolo->setChecked(setup.value("ADLMIDI-Tremolo", true).toBool()); MIX_ADLMIDI_setTremolo(static_cast<int>(ui->tremolo->isChecked())); ui->vibrato->setChecked(setup.value("ADLMIDI-Vibrato", true).toBool()); MIX_ADLMIDI_setVibrato(static_cast<int>(ui->vibrato->isChecked())); ui->adlibMode->setChecked(setup.value("ADLMIDI-AdLib-Drums-Mode", false).toBool()); MIX_ADLMIDI_setAdLibMode(static_cast<int>(ui->adlibMode->isChecked())); ui->modulation->setChecked(setup.value("ADLMIDI-Scalable-Modulation", false).toBool()); MIX_ADLMIDI_setScaleMod(static_cast<int>(ui->modulation->isChecked())); ui->logVolumes->setChecked(setup.value("ADLMIDI-LogarithmicVolumes", false).toBool()); MIX_ADLMIDI_setScaleMod(static_cast<int>(ui->logVolumes->isChecked())); ui->volume->setValue(setup.value("Volume", 128).toInt()); m_prevTrackID = ui->trackID->value(); ui->adlmidi_xtra->setVisible(false); ui->opnmidi_extra->setVisible(false); ui->midi_setup->setVisible(false); ui->gme_setup->setVisible(false); currentMusic = setup.value("RecentMusic", "").toString(); m_testSfxDir = setup.value("RecentSfxDir", "").toString(); adjustSize(); } MusPlayer_Qt::~MusPlayer_Qt() { on_stop_clicked(); if(m_testSfx) Mix_FreeChunk(m_testSfx); m_testSfx = nullptr; Mix_CloseAudio(); QSettings setup; setup.setValue("Window-Geometry", saveGeometry()); setup.setValue("MIDI-Device", ui->mididevice->currentIndex()); setup.setValue("ADLMIDI-Bank-ID", ui->fmbank->currentIndex()); setup.setValue("ADLMIDI-VolumeModel", ui->volumeModel->currentIndex()); setup.setValue("ADLMIDI-Tremolo", ui->tremolo->isChecked()); setup.setValue("ADLMIDI-Vibrato", ui->vibrato->isChecked()); setup.setValue("ADLMIDI-AdLib-Drums-Mode", ui->adlibMode->isChecked()); setup.setValue("ADLMIDI-Scalable-Modulation", ui->modulation->isChecked()); setup.setValue("ADLMIDI-LogarithmicVolumes", ui->logVolumes->isChecked()); setup.setValue("Volume", ui->volume->value()); setup.setValue("RecentMusic", currentMusic); setup.setValue("RecentSfxDir", m_testSfxDir); delete ui; } void MusPlayer_Qt::dropEvent(QDropEvent *e) { this->raise(); this->setFocus(Qt::ActiveWindowFocusReason); if(ui->recordWav->isChecked()) return; for(const QUrl &url : e->mimeData()->urls()) { const QString &fileName = url.toLocalFile(); currentMusic = fileName; } ui->recordWav->setEnabled(!currentMusic.endsWith(".wav", Qt::CaseInsensitive));//Avoid self-trunkling! PGE_MusicPlayer::MUS_stopMusic(); on_play_clicked(); this->raise(); e->accept(); } void MusPlayer_Qt::dragEnterEvent(QDragEnterEvent *e) { if(e->mimeData()->hasUrls()) e->acceptProposedAction(); } void MusPlayer_Qt::contextMenu(const QPoint &pos) { QMenu x; QAction *open = x.addAction("Open"); QAction *playpause = x.addAction("Play/Pause"); QAction *stop = x.addAction("Stop"); x.addSeparator(); QAction *reverb = x.addAction("Reverb"); reverb->setCheckable(true); reverb->setChecked(PGE_MusicPlayer::reverbEnabled); QAction *assoc_files = x.addAction("Associate files"); QAction *play_list = x.addAction("Play-list mode [WIP]"); play_list->setCheckable(true); play_list->setChecked(playListMode); QAction *sfx_testing = x.addAction("SFX testing"); sfx_testing->setCheckable(true); sfx_testing->setChecked(ui->sfx_testing->isVisible()); x.addSeparator(); QMenu *about = x.addMenu("About"); QAction *version = about->addAction("SDL Mixer X Music Player v." 
_FILE_VERSION); version->setEnabled(false); QAction *license = about->addAction("Licensed under GNU GPLv3 license"); about->addSeparator(); QAction *source = about->addAction("Get source code"); QAction *ret = x.exec(this->mapToGlobal(pos)); if(open == ret) on_open_clicked(); else if(playpause == ret) on_play_clicked(); else if(stop == ret) on_stop_clicked(); else if(reverb == ret) { PGE_MusicPlayer::reverbEnabled = reverb->isChecked(); if(PGE_MusicPlayer::reverbEnabled) Mix_RegisterEffect(MIX_CHANNEL_POST, reverbEffect, reverbEffectDone, NULL); else Mix_UnregisterEffect(MIX_CHANNEL_POST, reverbEffect); } else if(assoc_files == ret) { AssocFiles af(this); af.setWindowModality(Qt::WindowModal); af.exec(); } else if(ret == play_list) { setPlayListMode(!playListMode); } else if(ret == sfx_testing) { ui->sfx_testing->setVisible(!ui->sfx_testing->isVisible()); updateGeometry(); adjustSize(); } else if(ret == license) QDesktopServices::openUrl(QUrl("http://www.gnu.org/licenses/gpl")); else if(ret == source) QDesktopServices::openUrl(QUrl("https://github.com/WohlSoft/PGE-Project")); } void MusPlayer_Qt::openMusicByArg(QString musPath) { if(ui->recordWav->isChecked()) return; currentMusic = musPath; //ui->recordWav->setEnabled(!currentMusic.endsWith(".wav", Qt::CaseInsensitive));//Avoid self-trunkling! PGE_MusicPlayer::MUS_stopMusic(); on_play_clicked(); } void MusPlayer_Qt::setPlayListMode(bool plMode) { on_stop_clicked(); playListMode = plMode; if(!plMode) { playList.clear(); } else { playList_pushCurrent(); } ui->playList->setVisible(plMode); ui->playListPush->setVisible(plMode); ui->playListPop->setVisible(plMode); if(ui->recordWav->isChecked()) ui->recordWav->click(); ui->recordWav->setVisible(!plMode); PGE_MusicPlayer::setPlayListMode(playListMode); adjustSize(); } void MusPlayer_Qt::playList_pushCurrent(bool) { PlayListEntry e; e.name = ui->musTitle->text(); e.fullPath = currentMusic; e.gme_trackNum = ui->trackID->value(); e.midi_device = ui->mididevice->currentIndex(); e.adl_bankNo = ui->fmbank->currentIndex(); e.adl_cmfVolumes = ui->volumeModel->currentIndex(); e.adl_tremolo = ui->tremolo->isChecked(); e.adl_vibrato = ui->vibrato->isChecked(); e.adl_adlibDrums = ui->adlibMode->isChecked(); e.adl_modulation = ui->modulation->isChecked(); e.adl_cmfVolumes = ui->logVolumes->isChecked(); playList.insertEntry(e); } void MusPlayer_Qt::playList_popCurrent(bool) { playList.removeEntry(); } void MusPlayer_Qt::playListNext() { PlayListEntry e = playList.nextEntry(); currentMusic = e.fullPath; switchMidiDevice(e.midi_device); ui->trackID->setValue(e.gme_trackNum); ui->fmbank->setCurrentIndex(e.adl_bankNo); ui->volumeModel->setCurrentIndex(e.adl_volumeModel); ui->tremolo->setChecked(e.adl_tremolo); ui->vibrato->setChecked(e.adl_vibrato); ui->adlibMode->setChecked(e.adl_adlibDrums); ui->modulation->setChecked(e.adl_modulation); ui->logVolumes->setChecked(e.adl_cmfVolumes); MIX_ADLMIDI_setBankID(e.adl_bankNo); MIX_ADLMIDI_setVolumeModel(e.adl_volumeModel); MIX_ADLMIDI_setTremolo(static_cast<int>(ui->tremolo->isChecked())); MIX_ADLMIDI_setVibrato(static_cast<int>(ui->vibrato->isChecked())); MIX_ADLMIDI_setAdLibMode(static_cast<int>(ui->adlibMode->isChecked())); MIX_ADLMIDI_setScaleMod(static_cast<int>(ui->modulation->isChecked())); MIX_ADLMIDI_setLogarithmicVolumes(static_cast<int>(ui->logVolumes->isChecked())); PGE_MusicPlayer::MUS_stopMusic(); on_play_clicked(); } void MusPlayer_Qt::switchMidiDevice(int index) { ui->midi_setup->setVisible(false); ui->adlmidi_xtra->setVisible(false); 
ui->opnmidi_extra->setVisible(false); ui->midi_setup->setVisible(true); switch(index) { case 0: MIX_SetMidiDevice(MIDI_ADLMIDI); ui->adlmidi_xtra->setVisible(true); break; case 1: MIX_SetMidiDevice(MIDI_Timidity); break; case 2: MIX_SetMidiDevice(MIDI_Native); break; case 3: MIX_SetMidiDevice(MIDI_OPNMIDI); ui->opnmidi_extra->setVisible(true); break; case 4: MIX_SetMidiDevice(MIDI_Fluidsynth); break; default: MIX_SetMidiDevice(MIDI_ADLMIDI); ui->adlmidi_xtra->setVisible(true); break; } } void MusPlayer_Qt::on_open_clicked() { QString file = QFileDialog::getOpenFileName(this, tr("Open music file"), (currentMusic.isEmpty() ? QApplication::applicationDirPath() : currentMusic), "All (*.*)"); if(file.isEmpty()) return; currentMusic = file; //ui->recordWav->setEnabled(!currentMusic.endsWith(".wav", Qt::CaseInsensitive));//Avoid self-trunkling! PGE_MusicPlayer::MUS_stopMusic(); on_play_clicked(); } void MusPlayer_Qt::on_stop_clicked() { PGE_MusicPlayer::MUS_stopMusic(); ui->play->setText(tr("Play")); if(ui->recordWav->isChecked()) { ui->recordWav->setChecked(false); PGE_MusicPlayer::stopWavRecording(); ui->open->setEnabled(true); ui->play->setEnabled(true); ui->frame->setEnabled(true); m_blinker.stop(); ui->recordWav->setStyleSheet(""); } } void MusPlayer_Qt::on_play_clicked() { if(Mix_PlayingMusic()) { if(Mix_PausedMusic()) { Mix_ResumeMusic(); ui->play->setText(tr("Pause")); return; } else { Mix_PauseMusic(); ui->play->setText(tr("Resume")); return; } } ui->play->setText(tr("Play")); m_prevTrackID = ui->trackID->value(); if(PGE_MusicPlayer::MUS_openFile(currentMusic + "|" + ui->trackID->text())) { PGE_MusicPlayer::MUS_changeVolume(ui->volume->value()); PGE_MusicPlayer::MUS_playMusic(); ui->play->setText(tr("Pause")); } ui->musTitle->setText(PGE_MusicPlayer::MUS_getMusTitle()); ui->musArtist->setText(PGE_MusicPlayer::MUS_getMusArtist()); ui->musAlbum->setText(PGE_MusicPlayer::MUS_getMusAlbum()); ui->musCopyright->setText(PGE_MusicPlayer::MUS_getMusCopy()); ui->gme_setup->setVisible(false); ui->adlmidi_xtra->setVisible(false); ui->opnmidi_extra->setVisible(false); ui->midi_setup->setVisible(false); ui->frame->setVisible(false); ui->frame->setVisible(true); ui->smallInfo->setText(PGE_MusicPlayer::musicType()); ui->gridLayout->update(); switch(PGE_MusicPlayer::type) { case MUS_MID: ui->adlmidi_xtra->setVisible(ui->mididevice->currentIndex() == 0); ui->opnmidi_extra->setVisible(ui->mididevice->currentIndex() == 3); ui->midi_setup->setVisible(true); ui->frame->setVisible(true); break; case MUS_SPC: ui->gme_setup->setVisible(true); ui->frame->setVisible(true); break; default: break; } adjustSize(); } void MusPlayer_Qt::on_mididevice_currentIndexChanged(int index) { switchMidiDevice(index); adjustSize(); if(Mix_PlayingMusic()) { if(PGE_MusicPlayer::type == MUS_MID) { PGE_MusicPlayer::MUS_stopMusic(); on_play_clicked(); } } } void MusPlayer_Qt::on_trackID_editingFinished() { if(Mix_PlayingMusic()) { if((PGE_MusicPlayer::type == MUS_SPC) && (m_prevTrackID != ui->trackID->value())) { PGE_MusicPlayer::MUS_stopMusic(); on_play_clicked(); } } } void MusPlayer_Qt::on_recordWav_clicked(bool checked) { if(checked) { PGE_MusicPlayer::MUS_stopMusic(); ui->play->setText(tr("Play")); QFileInfo twav(currentMusic); PGE_MusicPlayer::stopWavRecording(); QString wavPathBase = twav.absoluteDir().absolutePath() + "/" + twav.baseName(); QString wavPath = wavPathBase + ".wav"; int count = 1; while(QFile::exists(wavPath)) wavPath = wavPathBase + QString("-%1.wav").arg(count++); PGE_MusicPlayer::startWavRecording(wavPath); 
on_play_clicked(); ui->open->setEnabled(false); ui->play->setEnabled(false); ui->frame->setEnabled(false); m_blinker.start(500); } else { on_stop_clicked(); PGE_MusicPlayer::stopWavRecording(); ui->open->setEnabled(true); ui->play->setEnabled(true); ui->frame->setEnabled(true); m_blinker.stop(); ui->recordWav->setStyleSheet(""); } } void MusPlayer_Qt::on_resetDefaultADLMIDI_clicked() { ui->fmbank->setCurrentIndex(58); ui->tremolo->setChecked(true); ui->vibrato->setChecked(true); ui->adlibMode->setChecked(false);<|fim▁hole|> ui->modulation->setChecked(false); ui->logVolumes->setChecked(false); MIX_ADLMIDI_setTremolo(static_cast<int>(ui->tremolo->isChecked())); MIX_ADLMIDI_setVibrato(static_cast<int>(ui->vibrato->isChecked())); MIX_ADLMIDI_setAdLibMode(static_cast<int>(ui->adlibMode->isChecked())); MIX_ADLMIDI_setScaleMod(static_cast<int>(ui->modulation->isChecked())); MIX_ADLMIDI_setLogarithmicVolumes(static_cast<int>(ui->logVolumes->isChecked())); on_volumeModel_currentIndexChanged(ui->volumeModel->currentIndex()); on_fmbank_currentIndexChanged(ui->fmbank->currentIndex()); } void MusPlayer_Qt::_blink_red() { m_blink_state = !m_blink_state; if(m_blink_state) ui->recordWav->setStyleSheet("background-color : red; color : black;"); else ui->recordWav->setStyleSheet("background-color : black; color : red;"); } void MusPlayer_Qt::on_sfx_open_clicked() { QString file = QFileDialog::getOpenFileName(this, tr("Open SFX file"), (m_testSfxDir.isEmpty() ? QApplication::applicationDirPath() : m_testSfxDir), "All (*.*)"); if(file.isEmpty()) return; if(m_testSfx) { Mix_HaltChannel(0); Mix_FreeChunk(m_testSfx); m_testSfx = nullptr; } m_testSfx = Mix_LoadWAV(file.toUtf8().data()); if(!m_testSfx) QMessageBox::warning(this, "SFX open error!", QString("Mix_LoadWAV: ") + Mix_GetError()); else { QFileInfo f(file); m_testSfxDir = f.absoluteDir().absolutePath(); ui->sfx_file->setText(f.fileName()); } } void MusPlayer_Qt::on_sfx_play_clicked() { if(!m_testSfx) return; if(Mix_PlayChannelTimedVolume(0, m_testSfx, ui->sfx_loops->value(), ui->sfx_timed->value(), ui->sfx_volume->value()) == -1) { QMessageBox::warning(this, "SFX play error!", QString("Mix_PlayChannelTimedVolume: ") + Mix_GetError()); } } void MusPlayer_Qt::on_sfx_fadeIn_clicked() { if(!m_testSfx) return; if(Mix_FadeInChannelTimedVolume(0, m_testSfx, ui->sfx_loops->value(), ui->sfx_fadems->value(), ui->sfx_timed->value(), ui->sfx_volume->value()) == -1) { QMessageBox::warning(this, "SFX play error!", QString("Mix_PlayChannelTimedVolume: ") + Mix_GetError()); } } void MusPlayer_Qt::on_sfx_stop_clicked() { if(!m_testSfx) return; Mix_HaltChannel(0); } void MusPlayer_Qt::on_sfx_fadeout_clicked() { if(!m_testSfx) return; Mix_FadeOutChannel(0, ui->sfx_fadems->value()); } #endif<|fim▁end|>
<|file_name|>cutoffs.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************** ** Filename: cutoffs.c ** Purpose: Routines to manipulate an array of class cutoffs. ** Author: Dan Johnson ** History: Wed Feb 20 09:28:51 1991, DSJ, Created. ** ** (c) Copyright Hewlett-Packard Company, 1988. ** Licensed under the Apache License, Version 2.0 (the "License"); ** you may not use this file except in compliance with the License. ** You may obtain a copy of the License at ** http://www.apache.org/licenses/LICENSE-2.0 ** Unless required by applicable law or agreed to in writing, software ** distributed under the License is distributed on an "AS IS" BASIS, ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ** See the License for the specific language governing permissions and ** limitations under the License. ******************************************************************************/ /*----------------------------------------------------------------------------<|fim▁hole|>#include <stdio.h> #include BOSS_TESSERACT_U_classify_h //original-code:"classify.h" #include BOSS_TESSERACT_U_efio_h //original-code:"efio.h" #include BOSS_TESSERACT_U_globals_h //original-code:"globals.h" #include BOSS_TESSERACT_U_helpers_h //original-code:"helpers.h" #include BOSS_TESSERACT_U_scanutils_h //original-code:"scanutils.h" #include BOSS_TESSERACT_U_serialis_h //original-code:"serialis.h" #include BOSS_TESSERACT_U_unichar_h //original-code:"unichar.h" #define REALLY_QUOTE_IT(x) QUOTE_IT(x) #define MAX_CUTOFF 1000 namespace tesseract { /** * Open Filename, read in all of the class-id/cutoff pairs * and insert them into the Cutoffs array. Cutoffs are * indexed in the array by class id. Unused entries in the * array are set to an arbitrarily high cutoff value. * @param CutoffFile name of file containing cutoff definitions * @param Cutoffs array to put cutoffs into * @param swap * @param end_offset * @return none * @note Globals: none * @note Exceptions: none * @note History: Wed Feb 20 09:38:26 1991, DSJ, Created. */ void Classify::ReadNewCutoffs(FILE *CutoffFile, bool swap, inT64 end_offset, CLASS_CUTOFF_ARRAY Cutoffs) { char Class[UNICHAR_LEN + 1]; CLASS_ID ClassId; int Cutoff; int i; if (shape_table_ != NULL) { if (!shapetable_cutoffs_.DeSerialize(swap, CutoffFile)) { tprintf("Error during read of shapetable pffmtable!\n"); } } for (i = 0; i < MAX_NUM_CLASSES; i++) Cutoffs[i] = MAX_CUTOFF; while ((end_offset < 0 || ftell(CutoffFile) < end_offset) && tfscanf(CutoffFile, "%" REALLY_QUOTE_IT(UNICHAR_LEN) "s %d", Class, &Cutoff) == 2) { if (strcmp(Class, "NULL") == 0) { ClassId = unicharset.unichar_to_id(" "); } else { ClassId = unicharset.unichar_to_id(Class); } Cutoffs[ClassId] = Cutoff; SkipNewline(CutoffFile); } } } // namespace tesseract<|fim▁end|>
Include Files and Type Defines ----------------------------------------------------------------------------*/ #include "cutoffs.h"
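ReadNewCutoffs above consumes a plain-text file of "<unichar> <cutoff>" pairs, defaults every unused class to MAX_CUTOFF, and maps the literal token NULL to the space character. A minimal Python sketch of that parse, assuming a caller-supplied unichar_to_id lookup; the line format is inferred from the tfscanf pattern in the C++:

MAX_CUTOFF = 1000  # same constant as in the C++ above

def read_cutoffs(path, unichar_to_id, num_classes):
    # Unused entries stay at an arbitrarily high cutoff, as in the original.
    cutoffs = [MAX_CUTOFF] * num_classes
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) != 2:
                continue  # skip malformed lines
            char, cutoff = parts[0], int(parts[1])
            class_id = unichar_to_id(" " if char == "NULL" else char)
            cutoffs[class_id] = cutoff
    return cutoffs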
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict'; var generators = require('yeoman-generator'); var chalk = require('chalk'); var path = require('path'); var extend = require('deep-extend'); var guid = require('uuid'); module.exports = generators.Base.extend({ /** * Setup the generator */ constructor: function () { generators.Base.apply(this, arguments); this.option('skip-install', { type: Boolean, required: false, defaults: false, desc: 'Skip running package managers (NPM, bower, etc) post scaffolding' }); this.option('name', { type: String, desc: 'Title of the Office Add-in', required: false }); this.option('root-path', { type: String, desc: 'Relative path where the Add-in should be created (blank = current directory)', required: false }); this.option('tech', { type: String, desc: 'Technology to use for the Add-in (html = HTML; ng = Angular)', required: false }); // create global config object on this generator this.genConfig = {}; }, // constructor() /** * Prompt users for options */ prompting: { askFor: function () { var done = this.async(); var prompts = [ // friendly name of the generator { name: 'name', message: 'Project name (display name):', default: 'My Office Add-in', when: this.options.name === undefined }, // root path where the addin should be created; should go in current folder where // generator is being executed, or within a subfolder? { name: 'root-path', message: 'Root folder of project?' + ' Default to current directory\n (' + this.destinationRoot() + '), or specify relative path\n' + ' from current (src / public): ', default: 'current folder', when: this.options['root-path'] === undefined, filter: /* istanbul ignore next */ function (response) { if (response === 'current folder') return ''; else return response; } }, // technology used to create the addin (html / angular / etc) { name: 'tech', message: 'Technology to use:', type: 'list', when: this.options.tech === undefined, choices: [ { name: 'HTML, CSS & JavaScript', value: 'html' }, { name: 'Angular', value: 'ng' }, { name: 'Manifest.xml only (no application source files)', value: 'manifest-only' }] }]; // trigger prompts this.prompt(prompts, function (responses) { this.genConfig = extend(this.genConfig, this.options); this.genConfig = extend(this.genConfig, responses); done(); }.bind(this)); }, // askFor() /** * If user specified tech:manifest-only, prompt for start page. */ askForStartPage: function () { if (this.genConfig.tech !== 'manifest-only') return; var done = this.async(); var prompts = [ // if tech = manifest only, prompt for start page { name: 'startPage', message: 'Add-in start URL:', when: this.options.startPage === undefined, }]; // trigger prompts this.prompt(prompts, function (responses) { this.genConfig = extend(this.genConfig, responses); done(); }.bind(this)); } // askForStartPage() }, // prompting() /** * save configurations & config project */ configuring: function () { // add the result of the question to the generator configuration object this.genConfig.projectnternalName = this.genConfig.name.toLowerCase().replace(/ /g, "-"); this.genConfig.projectDisplayName = this.genConfig.name; this.genConfig.rootPath = this.genConfig['root-path']; }, // configuring() /** * write generator specific files */ writing: { /** * If there is already a package.json in the root of this project, * get the name of the project from that file as that should be used * in bower.json & update packages. 
*/ upsertPackage: function () { if (this.genConfig.tech !== 'manifest-only') { var done = this.async(); // default name for the root project = addin project this.genConfig.rootProjectName = this.genConfig.projectnternalName; // path to package.json var pathToPackageJson = this.destinationPath('package.json'); // if package.json doesn't exist if (!this.fs.exists(pathToPackageJson)) { // copy package.json to target this.fs.copyTpl(this.templatePath('common/_package.json'), this.destinationPath('package.json'), this.genConfig); } else { // load package.json var packageJson = this.fs.readJSON(pathToPackageJson, 'utf8'); // .. get it's name property this.genConfig.rootProjectName = packageJson.name; // update devDependencies /* istanbul ignore else */ if (!packageJson.devDependencies) { packageJson.devDependencies = {} } /* istanbul ignore else */ if (!packageJson.devDependencies['gulp']) { packageJson.devDependencies['gulp'] = "^3.9.0" } /* istanbul ignore else */ if (!packageJson.devDependencies['gulp-webserver']) { packageJson.devDependencies['gulp-webserver'] = "^0.9.1" } // overwrite existing package.json this.log(chalk.yellow('Adding additional packages to package.json')); this.fs.writeJSON(pathToPackageJson, packageJson); } done(); } }, // upsertPackage() /** * If bower.json already exists in the root of this project, update it * with the necessary addin packages. */ upsertBower: function () { if (this.genConfig.tech !== 'manifest-only') { var done = this.async(); var pathToBowerJson = this.destinationPath('bower.json'); // if doesn't exist... if (!this.fs.exists(pathToBowerJson)) { // copy bower.json => project switch (this.genConfig.tech) { case "ng": this.fs.copyTpl(this.templatePath('ng/_bower.json'), this.destinationPath('bower.json'), this.genConfig); break; case "html": this.fs.copyTpl(this.templatePath('html/_bower.json'), this.destinationPath('bower.json'), this.genConfig); break; } } else { // verify the necessary package references are present in bower.json... // if not, add them var bowerJson = this.fs.readJSON(pathToBowerJson, 'utf8'); // all addins need these if (!bowerJson.dependencies["microsoft.office.js"]) { bowerJson.dependencies["microsoft.office.js"] = "*"; } if (!bowerJson.dependencies["jquery"]) { bowerJson.dependencies["jquery"] = "~1.9.1"; } switch (this.genConfig.tech) { // if angular... 
case "ng": if (!bowerJson.dependencies["angular"]) { bowerJson.dependencies["angular"] = "~1.4.4"; } if (!bowerJson.dependencies["angular-route"]) { bowerJson.dependencies["angular-route"] = "~1.4.4"; } if (!bowerJson.dependencies["angular-sanitize"]) { bowerJson.dependencies["angular-sanitize"] = "~1.4.4"; } break; } // overwrite existing bower.json this.log(chalk.yellow('Adding additional packages to bower.json')); this.fs.writeJSON(pathToBowerJson, bowerJson); } done(); } }, // upsertBower() app: function () { // helper function to build path to the file off root path this._parseTargetPath = function (file) { return path.join(this.genConfig['root-path'], file); }; var done = this.async(); // create a new ID for the project this.genConfig.projectId = guid.v4(); if (this.genConfig.tech === 'manifest-only') { // create the manifest file this.fs.copyTpl(this.templatePath('common/manifest.xml'), this.destinationPath('manifest.xml'), this.genConfig); } else { // copy .bowerrc => project<|fim▁hole|> // create common assets this.fs.copy(this.templatePath('common/gulpfile.js'), this.destinationPath('gulpfile.js')); this.fs.copy(this.templatePath('common/content/Office.css'), this.destinationPath(this._parseTargetPath('content/Office.css'))); this.fs.copy(this.templatePath('common/images/close.png'), this.destinationPath(this._parseTargetPath('images/close.png'))); this.fs.copy(this.templatePath('common/scripts/MicrosoftAjax.js'), this.destinationPath(this._parseTargetPath('scripts/MicrosoftAjax.js'))); switch (this.genConfig.tech) { case 'html': // determine startpage for addin this.genConfig.startPage = 'https://localhost:8443/app/home/home.html'; // create the manifest file this.fs.copyTpl(this.templatePath('common/manifest.xml'), this.destinationPath('manifest.xml'), this.genConfig); // copy addin files this.fs.copy(this.templatePath('html/app.css'), this.destinationPath(this._parseTargetPath('app/app.css'))); this.fs.copy(this.templatePath('html/app.js'), this.destinationPath(this._parseTargetPath('app/app.js'))); this.fs.copy(this.templatePath('html/home/home.html'), this.destinationPath(this._parseTargetPath('app/home/home.html'))); this.fs.copy(this.templatePath('html/home/home.css'), this.destinationPath(this._parseTargetPath('app/home/home.css'))); this.fs.copy(this.templatePath('html/home/home.js'), this.destinationPath(this._parseTargetPath('app/home/home.js'))); break; case 'ng': // determine startpage for addin this.genConfig.startPage = 'https://localhost:8443/index.html'; // create the manifest file this.fs.copyTpl(this.templatePath('common/manifest.xml'), this.destinationPath('manifest.xml'), this.genConfig); // copy addin files this.genConfig.startPage = '{https-addin-host-site}/index.html'; this.fs.copy(this.templatePath('ng/index.html'), this.destinationPath(this._parseTargetPath('index.html'))); this.fs.copy(this.templatePath('ng/app.module.js'), this.destinationPath(this._parseTargetPath('app/app.module.js'))); this.fs.copy(this.templatePath('ng/app.routes.js'), this.destinationPath(this._parseTargetPath('app/app.routes.js'))); this.fs.copy(this.templatePath('ng/home/home.controller.js'), this.destinationPath(this._parseTargetPath('app/home/home.controller.js'))); this.fs.copy(this.templatePath('ng/home/home.html'), this.destinationPath(this._parseTargetPath('app/home/home.html'))); this.fs.copy(this.templatePath('ng/services/data.service.js'), this.destinationPath(this._parseTargetPath('app/services/data.service.js'))); break; } } done(); } // app() }, // writing() /** * 
conflict resolution */ // conflicts: { }, /** * run installations (bower, npm, tsd, etc) */ install: function () { if (!this.options['skip-install'] && this.genConfig.tech !== 'manifest-only') { this.npmInstall(); this.bowerInstall(); } } // install () /** * last cleanup, goodbye, etc */ // end: { } });<|fim▁end|>
this.fs.copyTpl( this.templatePath('common/_bowerrc'), this.destinationPath('.bowerrc'), this.genConfig);
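Both upsertPackage and upsertBower follow one upsert pattern: copy a template when the JSON file is missing, otherwise load it, add only the dependencies that are not already present, and write it back. The same merge restated as a Python sketch; the file name and version pins come from the generator above, and the helper name is ours:

import json
import os

def upsert_dev_dependencies(path, wanted):
    # Fill in missing packages only; never overwrite an existing pin.
    data = {}
    if os.path.exists(path):
        with open(path) as f:
            data = json.load(f)
    deps = data.setdefault("devDependencies", {})
    for name, version in wanted.items():
        deps.setdefault(name, version)
    with open(path, "w") as f:
        json.dump(data, f, indent=2)

upsert_dev_dependencies("package.json",
                        {"gulp": "^3.9.0", "gulp-webserver": "^0.9.1"})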
<|file_name|>fuzzy-match-fb-places-step_2.js<|end_file_name|><|fim▁begin|>var Connection = require("./db/db-connection").Connection; var Tables = require("./db/tables"); const R = require("ramda"); var QUERY_MATCHES = { $or: [ {percent_name: {$gt: 0.9}}, { distance: {$lt: 1} }, { percent_name: {$gt: 0.8}, distance: {$lt: 30} }, { percent_name: {$gt: 0.8}, percent_address_phonetic: 1, distance: {$lt: 50} } ] }; var connection = new Connection(); connection.connect() .then(connection.collection.bind(connection, Tables.FUZZY_MATCHES_FB)) .then(coll=> { return coll.find(QUERY_MATCHES).toArray() }) .then(matches=> { var promises = matches.map(match=> { return Promise.resolve(R.filter(item=>(item.id_p1 == match.id_p1 || item.id_p2 == match.id_p1), matches)).then(response=> { return { id: match.id_p1.toString(), name: match.name_p1, response: R.compose( R.sortBy(R.identity), R.uniq, R.flatten, R.map(place=> { return [place.id_p1.toString(), place.id_p2.toString()] }) )(response) } }) }); return Promise.all(promises); }) .then(matches=> { matches = R.groupBy(item=>item.id, matches); matches = Object.keys(matches).map((key)=> { var get_ids = R.compose( R.reduce((prev, id)=> { prev.id_facebook.push(id); return prev; }, {id_facebook: [], id_google: [], id_opendata: [], id_imprese: []}), R.sortBy(R.identity), R.uniq, R.flatten , R.map(R.prop("response")) ); var ids = get_ids(matches[key]); return Object.assign({ name: matches[key][0].name }, ids) }); var uniq_matches = []; for (var i = 0; i < matches.length; i++) { var match = matches[i]; var found = R.find(nm=> { var has_fb = match.id_facebook.filter(id=>nm.id_facebook.includes(id)).length; return has_fb; })(uniq_matches); if (!found) { uniq_matches.push(match); }else { found.id_facebook=R.uniq(found.id_facebook.concat(match.id_facebook)); }<|fim▁hole|> return fuzzyMatchColl.drop().then(_=>{ return fuzzyMatchColl.insertMany(uniq_matches); }) }) .then(_=> { connection.db.close(); console.log("DONE"); process.exit(0); }) .catch(_=> { console.log(_); connection.db.close(); process.exit(1); });<|fim▁end|>
} var fuzzyMatchColl = connection.db.collection(Tables.FUZZY_MATCHES_FB_ONE_TO_MANY);
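QUERY_MATCHES accepts a candidate pair when any one of four branches holds: a near-identical name, a near-zero distance, or a good name score combined with proximity, optionally backed by a phonetic address match. The predicate restated in Python for readability; the field names match the Mongo query above:

def is_match(percent_name, distance, percent_address_phonetic):
    return (percent_name > 0.9
            or distance < 1
            or (percent_name > 0.8 and distance < 30)
            or (percent_name > 0.8
                and percent_address_phonetic == 1
                and distance < 50))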
<|file_name|>stackmachine.py<|end_file_name|><|fim▁begin|>import operator import math class Machine(): """Simple stack based machine designed for genetic programming (GP) experiments. Easy to use and forgiving with nonfatal errors. See README and tests for examples. """ def __init__(self, debug=False): self.stack = [] self.debug = debug self.code = "" self.stack_safety = False self.has_errors = False self._max_runlines = 1000 self._max_stack = 1000 self.instructions = { 'CLR': self._clr, 'PUSH': self._push, # takes 1 value 'POP': self._pop, 'SWP': self._swp, 'ROT': self._rot, 'DUP': self._dup, 'INC': self._inc, 'MUL': lambda: self._operator2(operator.mul), 'DIV': lambda: self._operator2(operator.div), 'MOD': lambda: self._operator2(operator.mod), 'ADD': lambda: self._operator2(operator.add), 'SUB': lambda: self._operator2(operator.sub), 'EXP': lambda: self._operator2(operator.pow), 'MIN': lambda: self._operator2(min), 'MAX': lambda: self._operator2(max), 'LOG': lambda: self._operator1(math.log), 'TRUNC':lambda: self._operator1(math.trunc), 'JMP': self._jmp, # all jumps take an offset value 'JZ': self._jz, 'JE': self._je, 'JNE': self._jne, 'JLT': self._jlt, 'JGT': self._jgt, 'END': None } def _operator1(self, operator): if self.stack: self.stack.append(operator(self.stack.pop())) def _operator2(self, operator): if len(self.stack) < 2: self.stack = [0] else: val = operator(self.stack[-1], self.stack[-2]) self.stack = self.stack[:-2] self.stack.append(val) def _clr(self): self.stack = [] def _push(self, a): try: a = float(a) self.stack.append(a) except: pass def _pop(self): if self.stack: self.stack.pop() def _inc(self): if self.stack: self.stack[-1] += 1 def _swp(self): if len(self.stack) > 1: self.stack[-2], self.stack[-1] = self.stack[-1], self.stack[-2] def _rot(self): if len(self.stack) > 1: self.stack = self.stack[1:] + self.stack[:1] def _dup(self): if self.stack: self.stack.append(self.stack[-1]) def _jmp(self, a): n = self._curline + int(a) if n == self._curline or n < 0 or n > len(self.lines) - 1: return self._curline = n-1 def _jz(self, a): if self.stack: if self.stack.pop() == 0:<|fim▁hole|> def _je(self, a): if len(self.stack) > 1: if self.stack.pop() == self.stack.pop(): self._jmp(a) def _jne(self, a): if len(self.stack) > 1: if self.stack.pop() != self.stack.pop(): self._jmp(a) def _jlt(self, a): if len(self.stack) > 1: if self.stack.pop() < self.stack.pop(): self._jmp(a) def _jgt(self, a): if len(self.stack) > 1: if self.stack.pop() > self.stack.pop(): self._jmp(a) def verify_stack(self): if len(self.stack) > self._max_stack: return False allowed_types = [int, float, long] return all([type(v) in allowed_types for v in self.stack]) def code_listing(self): self.lines = self.code.split('\n') for num, line in enumerate(self.lines): line = line.strip().upper() print num, '\t', line def evaluate(self, line): if line: debug = self.debug if debug: print self._curline, '> ', line tokens = line.split() instr = tokens[0] if instr == 'END': return False if len(tokens) > 1: values = tokens [1:] else: values = [] try: self.instructions[instr](*values) except Exception as e: if debug: print "Error:", e self.has_errors = True if debug: print self.stack, '\n' self._curline += 1 return True def run(self): # Note: some class members are duplicated with locals for faster comparisons in the main loop self._curline = 0 self.has_errors = False self._lines_executed = 0 lines_exec = 0 max_exec = self._max_runlines lines = [line.split(';')[0].strip().upper() for line in self.code.split('\n')] 
self.lines = lines if self.stack_safety and not self.verify_stack(): if self.debug: print "Invalid stack, must only contain ints, longs, and floats" return while(self.evaluate(self.lines[self._curline])): lines_exec += 1 if lines_exec > max_exec: if self.debug: print "Reached maximum runlines:", self._max_runlines self.has_errors = True break if self._curline >= len(self.lines): break self._lines_executed = lines_exec return self.has_errors<|fim▁end|>
self._jmp(a)
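The docstring points at the README and tests for examples, which are not reproduced here; below is a minimal usage sketch based only on the API visible in the file (the code attribute, run(), stack), written in the module's own Python 2 style:

m = Machine()
m.code = "\n".join([
    "PUSH 2",
    "PUSH 3",
    "ADD",   # pops both, pushes 5.0
    "DUP",
    "MUL",   # 5 * 5
    "END",
])
m.run()
print m.stack   # expected: [25.0]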
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- from azure.cli.core import AzCommandsLoader import azure.cli.command_modules.sql._help # pylint: disable=unused-import class SqlCommandsLoader(AzCommandsLoader): def __init__(self, cli_ctx=None): from azure.cli.core.commands import CliCommandType from azure.cli.core.profiles import ResourceType sql_custom = CliCommandType(operations_tmpl='azure.cli.command_modules.sql.custom#{}') super(SqlCommandsLoader, self).__init__(cli_ctx=cli_ctx, custom_command_type=sql_custom, resource_type=ResourceType.MGMT_SQL) <|fim▁hole|> def load_command_table(self, args): from azure.cli.command_modules.sql.commands import load_command_table load_command_table(self, args) return self.command_table def load_arguments(self, command): from azure.cli.command_modules.sql._params import load_arguments load_arguments(self, command) COMMAND_LOADER_CLS = SqlCommandsLoader<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ This plugin is 3rd party and not it is not part of the plexus-streams addon Torrent-tv.ru (All categories) """<|fim▁hole|>core_dir = current_dir.replace(basename,'').replace('parsers','') sys.path.append(core_dir) from utils.webutils import * from utils.pluginxbmc import * from utils.directoryhandle import * from utils.timeutils import translate_months base_url = "http://super-pomoyka.us.to/trash/ttv-list/ttv.m3u" def module_tree(name,url,iconimage,mode,parser,parserfunction): if not parserfunction: torrenttv() elif parserfunction == 'channels': torrenttv_play(name,url) def torrenttv(): dict_torrent = {} html_source = get_page_source(base_url) match = re.compile('#EXTINF:-1,(.+?)\n(.*)').findall(html_source) for title, acehash in match: channel_name = re.compile('(.+?) \(').findall(title) match_cat = re.compile('\((.+?)\)').findall(title) for i in xrange(0,len(match_cat)): if match_cat[i] == "Для взрослых" and settings.getSetting('hide_porn') == "true": pass elif match_cat[i] == "Ночной канал" and settings.getSetting('hide_porn') == "true": pass else: if settings.getSetting('russian_translation') == "true": categorie = russiandictionary(match_cat[i]) else: categorie=match_cat[i] if categorie not in dict_torrent.keys(): try: dict_torrent[categorie] = [(channel_name[0],acehash)] except: pass else: try: dict_torrent[categorie].append((channel_name[0],acehash)) except: pass for categories in dict_torrent.keys(): addDir(categories,str(dict_torrent),401,os.path.join(current_dir,"icon.png"),401,True,parser="torrenttvruall",parserfunction="channels") def torrenttv_play(name,url): dict_torrent=eval(url) for channel in dict_torrent[name]: try: addDir(channel[0],channel[1],1,os.path.join(current_dir,"icon.png"),2,False) except:pass def russiandictionary(string): if string == "Eng": return translate(40077) elif string == "Спорт": return translate(40078) elif string == "Новостные": return translate(40079) elif string == "Свадебный": return translate(40080) elif string == "Общие": return translate(40081) elif string == "Познавательные": return translate(40082) elif string == "СНГ": return translate(40083) elif string == "Мужские": return translate(40084) elif string == "Ukraine": return translate(40085) elif string == "резерв": return translate(40086) elif string == "Донецк": return translate(40087) elif string == "Региональные": return translate(40088) elif string == "Для взрослых": return translate(40089) elif string == "TV21": return translate(40090) elif string == "Украина": return translate(40091) elif string == "Детские": return translate(40092) elif string == "Фильмы": return translate(40093) elif string == "Ночной канал": return translate(40094) elif string == "Европа": return translate(40095) elif string == "укр": return translate(40096) elif string == "Музыка": return translate(40097) elif string == "Религиозные": return translate(40098) elif string == "Развлекательные": return translate(40099) elif string == "украина": return translate(40151) elif string == "Казахстан": return "Kazakstan" else: return string<|fim▁end|>
import sys,os current_dir = os.path.dirname(os.path.realpath(__file__)) basename = os.path.basename(current_dir)
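torrenttv() takes the M3U playlist apart with three regexes: one per #EXTINF entry, one for the channel name before the first parenthesis, and one for every parenthesised category. A standalone Python illustration on a made-up playlist line; the channel name and hash below are invented for the example:

import re

sample = "#EXTINF:-1,Discovery Channel (Documentary)\nacestream://0123456789abcdef"
for title, acehash in re.compile('#EXTINF:-1,(.+?)\n(.*)').findall(sample):
    name = re.compile('(.+?) \(').findall(title)[0]
    categories = re.compile('\((.+?)\)').findall(title)
    print name, categories, acehash
# Discovery Channel ['Documentary'] acestream://0123456789abcdef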
<|file_name|>borrowck-preserve-box-sometimes-needed.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // exec-env:RUST_POISON_ON_FREE=1 fn switcher(x: Option<@int>) { let mut x = x; match x { Some(@y) => { y.clone(); x = None; } None => { } } } pub fn main() {<|fim▁hole|><|fim▁end|>
switcher(None); switcher(Some(@3)); }
<|file_name|>issue-4517.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn bar(int_param: int) {} <|fim▁hole|> // (expected int, found vector) }<|fim▁end|>
fn main() { let foo: [u8, ..4] = [1u8, ..4u]; bar(foo); //~^ ERROR mismatched types: expected `int`, found `[u8, .. 4]`
<|file_name|>archive_org_plugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import (unicode_literals, division, absolute_import, print_function) store_version = 1 # Needed for dynamic plugin loading __license__ = 'GPL 3' __copyright__ = '2011, John Schember <[email protected]>' __docformat__ = 'restructuredtext en' from calibre.gui2.store.basic_config import BasicStoreConfig<|fim▁hole|>class ArchiveOrgStore(BasicStoreConfig, OpenSearchOPDSStore): open_search_url = 'http://bookserver.archive.org/catalog/opensearch.xml' web_url = 'http://www.archive.org/details/texts' # http://bookserver.archive.org/catalog/ def search(self, query, max_results=10, timeout=60): for s in OpenSearchOPDSStore.search(self, query, max_results, timeout): s.detail_item = 'http://www.archive.org/details/' + s.detail_item.split(':')[-1] s.price = '$0.00' s.drm = SearchResult.DRM_UNLOCKED yield s def get_details(self, search_result, timeout): ''' The opensearch feed only returns a subset of formats that are available. We want to get a list of all formats that the user can get. ''' from calibre import browser from contextlib import closing from lxml import html br = browser() with closing(br.open(search_result.detail_item, timeout=timeout)) as nf: idata = html.fromstring(nf.read()) formats = ', '.join(idata.xpath('//p[@id="dl" and @class="content"]//a/text()')) search_result.formats = formats.upper() return True<|fim▁end|>
from calibre.gui2.store.opensearch_store import OpenSearchOPDSStore from calibre.gui2.store.search_result import SearchResult
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ #![deny(unsafe_code)] #![crate_name = "servo_url"] #![crate_type = "rlib"] #[macro_use] extern crate malloc_size_of; #[macro_use] extern crate malloc_size_of_derive; #[macro_use] extern crate serde; pub mod origin; pub use crate::origin::{ImmutableOrigin, MutableOrigin, OpaqueOrigin}; use std::collections::hash_map::DefaultHasher; use std::fmt; use std::hash::Hasher; use std::net::IpAddr; use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo}; use std::path::Path; use std::sync::Arc; use to_shmem::{SharedMemoryBuilder, ToShmem}; use url::{Position, Url}; pub use url::Host; #[derive(Clone, Deserialize, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd, Serialize)] pub struct ServoUrl(#[ignore_malloc_size_of = "Arc"] Arc<Url>); impl ToShmem for ServoUrl { fn to_shmem(&self, _builder: &mut SharedMemoryBuilder) -> to_shmem::Result<Self> { unimplemented!("If servo wants to share stylesheets across processes, ToShmem for Url must be implemented") } } impl ServoUrl { pub fn from_url(url: Url) -> Self { ServoUrl(Arc::new(url)) } pub fn parse_with_base(base: Option<&Self>, input: &str) -> Result<Self, url::ParseError> { Url::options() .base_url(base.map(|b| &*b.0)) .parse(input) .map(Self::from_url) } pub fn into_string(self) -> String { Arc::try_unwrap(self.0) .unwrap_or_else(|s| (*s).clone()) .into_string() } pub fn into_url(self) -> Url { Arc::try_unwrap(self.0).unwrap_or_else(|s| (*s).clone()) } pub fn as_url(&self) -> &Url { &self.0 } pub fn parse(input: &str) -> Result<Self, url::ParseError> { Url::parse(input).map(Self::from_url) } pub fn cannot_be_a_base(&self) -> bool { self.0.cannot_be_a_base() } pub fn domain(&self) -> Option<&str> { self.0.domain() } pub fn fragment(&self) -> Option<&str> { self.0.fragment() } pub fn path(&self) -> &str { self.0.path() } pub fn origin(&self) -> ImmutableOrigin { ImmutableOrigin::new(self.0.origin()) } pub fn scheme(&self) -> &str { self.0.scheme() } pub fn is_secure_scheme(&self) -> bool { let scheme = self.scheme(); scheme == "https" || scheme == "wss" } /// <https://fetch.spec.whatwg.org/#local-scheme> pub fn is_local_scheme(&self) -> bool { let scheme = self.scheme(); scheme == "about" || scheme == "blob" || scheme == "data" } pub fn is_chrome(&self) -> bool { self.scheme() == "chrome" } pub fn as_str(&self) -> &str { self.0.as_str() } pub fn as_mut_url(&mut self) -> &mut Url { Arc::make_mut(&mut self.0) } pub fn set_username(&mut self, user: &str) -> Result<(), ()> { self.as_mut_url().set_username(user) } pub fn set_ip_host(&mut self, addr: IpAddr) -> Result<(), ()> { self.as_mut_url().set_ip_host(addr) } pub fn set_password(&mut self, pass: Option<&str>) -> Result<(), ()> { self.as_mut_url().set_password(pass) } pub fn set_fragment(&mut self, fragment: Option<&str>) { self.as_mut_url().set_fragment(fragment) } pub fn username(&self) -> &str { self.0.username() } pub fn password(&self) -> Option<&str> { self.0.password() } pub fn to_file_path(&self) -> Result<::std::path::PathBuf, ()> { self.0.to_file_path() } pub fn host(&self) -> Option<url::Host<&str>> { self.0.host() } pub fn host_str(&self) -> Option<&str> { self.0.host_str() } pub fn port(&self) -> Option<u16> { self.0.port() } pub fn port_or_known_default(&self) -> Option<u16> { self.0.port_or_known_default() } <|fim▁hole|> pub fn 
join(&self, input: &str) -> Result<ServoUrl, url::ParseError> { self.0.join(input).map(Self::from_url) } pub fn path_segments(&self) -> Option<::std::str::Split<char>> { self.0.path_segments() } pub fn query(&self) -> Option<&str> { self.0.query() } pub fn from_file_path<P: AsRef<Path>>(path: P) -> Result<Self, ()> { Ok(Self::from_url(Url::from_file_path(path)?)) } /// <https://w3c.github.io/webappsec-secure-contexts/#potentially-trustworthy-url> pub fn is_potentially_trustworthy(&self) -> bool { // Step 1 if self.as_str() == "about:blank" || self.as_str() == "about:srcdoc" { return true; } // Step 2 if self.scheme() == "data" { return true; } // Step 3 self.is_origin_trustworthy() } /// <https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy> pub fn is_origin_trustworthy(&self) -> bool { // Step 1 if !self.origin().is_tuple() { return false; } // Step 3 if self.scheme() == "https" || self.scheme() == "wss" { true // Steps 4-5 } else if self.host().is_some() { let host = self.host_str().unwrap(); // Step 4 if let Ok(ip_addr) = host.parse::<IpAddr>() { ip_addr.is_loopback() // Step 5 } else { host == "localhost" || host.ends_with(".localhost") } // Step 6 } else { self.scheme() == "file" } } } impl fmt::Display for ServoUrl { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(formatter) } } impl fmt::Debug for ServoUrl { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { if self.0.as_str().len() > 40 { let mut hasher = DefaultHasher::new(); hasher.write(self.0.as_str().as_bytes()); let truncated: String = self.0.as_str().chars().take(40).collect(); let result = format!("{}... ({:x})", truncated, hasher.finish()); return result.fmt(formatter); } self.0.fmt(formatter) } } impl Index<RangeFull> for ServoUrl { type Output = str; fn index(&self, _: RangeFull) -> &str { &self.0[..] } } impl Index<RangeFrom<Position>> for ServoUrl { type Output = str; fn index(&self, range: RangeFrom<Position>) -> &str { &self.0[range] } } impl Index<RangeTo<Position>> for ServoUrl { type Output = str; fn index(&self, range: RangeTo<Position>) -> &str { &self.0[range] } } impl Index<Range<Position>> for ServoUrl { type Output = str; fn index(&self, range: Range<Position>) -> &str { &self.0[range] } } impl From<Url> for ServoUrl { fn from(url: Url) -> Self { ServoUrl::from_url(url) } }<|fim▁end|>
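is_origin_trustworthy follows the W3C secure-contexts steps: https and wss pass, a host that parses as a loopback IP passes, localhost and *.localhost pass, and otherwise only file: remains. The same decision tree as a Python sketch; the opaque-origin precondition from step 1 is taken as already checked:

import ipaddress

def is_origin_trustworthy(scheme, host):
    if scheme in ("https", "wss"):
        return True
    if host is not None:
        try:
            return ipaddress.ip_address(host).is_loopback
        except ValueError:
            return host == "localhost" or host.endswith(".localhost")
    return scheme == "file"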
<|file_name|>tests.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from __future__ import unicode_literals import django from django.core.exceptions import FieldError from django.test import SimpleTestCase, TestCase from .models import ( AdvancedUserStat, Child1, Child2, Child3, Child4, Image, LinkedList, Parent1, Parent2, Product, StatDetails, User, UserProfile, UserStat, UserStatResult, ) class ReverseSelectRelatedTestCase(TestCase): def setUp(self): user = User.objects.create(username="test") UserProfile.objects.create(user=user, state="KS", city="Lawrence") results = UserStatResult.objects.create(results='first results') userstat = UserStat.objects.create(user=user, posts=150, results=results) StatDetails.objects.create(base_stats=userstat, comments=259) user2 = User.objects.create(username="bob") results2 = UserStatResult.objects.create(results='moar results') advstat = AdvancedUserStat.objects.create(user=user2, posts=200, karma=5, results=results2) StatDetails.objects.create(base_stats=advstat, comments=250) p1 = Parent1(name1="Only Parent1") p1.save() c1 = Child1(name1="Child1 Parent1", name2="Child1 Parent2", value=1) c1.save() p2 = Parent2(name2="Child2 Parent2") p2.save() c2 = Child2(name1="Child2 Parent1", parent2=p2, value=2) c2.save() def test_basic(self): with self.assertNumQueries(1): u = User.objects.select_related("userprofile").get(username="test") self.assertEqual(u.userprofile.state, "KS") def test_follow_next_level(self): with self.assertNumQueries(1): u = User.objects.select_related("userstat__results").get(username="test") self.assertEqual(u.userstat.posts, 150) self.assertEqual(u.userstat.results.results, 'first results') def test_follow_two(self): with self.assertNumQueries(1): u = User.objects.select_related("userprofile", "userstat").get(username="test") self.assertEqual(u.userprofile.state, "KS") self.assertEqual(u.userstat.posts, 150) def test_follow_two_next_level(self): with self.assertNumQueries(1): u = User.objects.select_related("userstat__results", "userstat__statdetails").get(username="test") self.assertEqual(u.userstat.results.results, 'first results') self.assertEqual(u.userstat.statdetails.comments, 259) def test_forward_and_back(self): with self.assertNumQueries(1): stat = UserStat.objects.select_related("user__userprofile").get(user__username="test") self.assertEqual(stat.user.userprofile.state, 'KS') self.assertEqual(stat.user.userstat.posts, 150) def test_back_and_forward(self): with self.assertNumQueries(1): u = User.objects.select_related("userstat").get(username="test") self.assertEqual(u.userstat.user.username, 'test') def test_not_followed_by_default(self): with self.assertNumQueries(2): u = User.objects.select_related().get(username="test") self.assertEqual(u.userstat.posts, 150) def test_follow_from_child_class(self): with self.assertNumQueries(1): stat = AdvancedUserStat.objects.select_related('user', 'statdetails').get(posts=200) self.assertEqual(stat.statdetails.comments, 250) self.assertEqual(stat.user.username, 'bob') def test_follow_inheritance(self): with self.assertNumQueries(1): stat = UserStat.objects.select_related('user', 'advanceduserstat').get(posts=200) self.assertEqual(stat.advanceduserstat.posts, 200) self.assertEqual(stat.user.username, 'bob') with self.assertNumQueries(1): self.assertEqual(stat.advanceduserstat.user.username, 'bob') def test_nullable_relation(self): im = Image.objects.create(name="imag1") p1 = Product.objects.create(name="Django Plushie", image=im) p2 = Product.objects.create(name="Talking Django Plushie") 
with self.assertNumQueries(1): result = sorted(Product.objects.select_related("image"), key=lambda x: x.name) self.assertEqual([p.name for p in result], ["Django Plushie", "Talking Django Plushie"]) self.assertEqual(p1.image, im) # Check for ticket #13839 self.assertIsNone(p2.image) def test_missing_reverse(self): """ Ticket #13839: select_related() should NOT cache None for missing objects on a reverse 1-1 relation. """ with self.assertNumQueries(1): user = User.objects.select_related('userprofile').get(username='bob') with self.assertRaises(UserProfile.DoesNotExist): user.userprofile def test_nullable_missing_reverse(self): """ Ticket #13839: select_related() should NOT cache None for missing objects on a reverse 0-1 relation. """ Image.objects.create(name="imag1") with self.assertNumQueries(1): image = Image.objects.select_related('product').get() with self.assertRaises(Product.DoesNotExist): image.product def test_parent_only(self): with self.assertNumQueries(1): p = Parent1.objects.select_related('child1').get(name1="Only Parent1") with self.assertNumQueries(0): with self.assertRaises(Child1.DoesNotExist): p.child1 def test_multiple_subclass(self): with self.assertNumQueries(1): p = Parent1.objects.select_related('child1').get(name1="Child1 Parent1") self.assertEqual(p.child1.name2, 'Child1 Parent2') def test_onetoone_with_subclass(self): with self.assertNumQueries(1): p = Parent2.objects.select_related('child2').get(name2="Child2 Parent2") self.assertEqual(p.child2.name1, 'Child2 Parent1') def test_onetoone_with_two_subclasses(self): with self.assertNumQueries(1): p = Parent2.objects.select_related('child2', "child2__child3").get(name2="Child2 Parent2") self.assertEqual(p.child2.name1, 'Child2 Parent1') with self.assertRaises(Child3.DoesNotExist): p.child2.child3 p3 = Parent2(name2="Child3 Parent2") p3.save() c2 = Child3(name1="Child3 Parent1", parent2=p3, value=2, value3=3) c2.save() with self.assertNumQueries(1): p = Parent2.objects.select_related('child2', "child2__child3").get(name2="Child3 Parent2") self.assertEqual(p.child2.name1, 'Child3 Parent1') self.assertEqual(p.child2.child3.value3, 3) self.assertEqual(p.child2.child3.value, p.child2.value) self.assertEqual(p.child2.name1, p.child2.child3.name1) def test_multiinheritance_two_subclasses(self): with self.assertNumQueries(1): p = Parent1.objects.select_related('child1', 'child1__child4').get(name1="Child1 Parent1") self.assertEqual(p.child1.name2, 'Child1 Parent2') self.assertEqual(p.child1.name1, p.name1) with self.assertRaises(Child4.DoesNotExist): p.child1.child4 Child4(name1='n1', name2='n2', value=1, value4=4).save() with self.assertNumQueries(1): p = Parent2.objects.select_related('child1', 'child1__child4').get(name2="n2") self.assertEqual(p.name2, 'n2') self.assertEqual(p.child1.name1, 'n1') self.assertEqual(p.child1.name2, p.name2) self.assertEqual(p.child1.value, 1) self.assertEqual(p.child1.child4.name1, p.child1.name1) self.assertEqual(p.child1.child4.name2, p.child1.name2) self.assertEqual(p.child1.child4.value, p.child1.value) self.assertEqual(p.child1.child4.value4, 4) def test_inheritance_deferred(self): if django.VERSION < (1, 10, 0): self.skipTest('does not work on older version of Django') c = Child4.objects.create(name1='n1', name2='n2', value=1, value4=4) with self.assertNumQueries(1): p = Parent2.objects.select_related('child1').only( 'id2', 'child1__value').get(name2="n2") self.assertEqual(p.id2, c.id2) self.assertEqual(p.child1.value, 1) p = Parent2.objects.select_related('child1').only( 'id2', 
'child1__value').get(name2="n2") with self.assertNumQueries(1): self.assertEqual(p.name2, 'n2') p = Parent2.objects.select_related('child1').only( 'id2', 'child1__value').get(name2="n2") with self.assertNumQueries(1): self.assertEqual(p.child1.name2, 'n2') def test_inheritance_deferred2(self): if django.VERSION < (1, 10, 0): self.skipTest('does not work on older version of Django') c = Child4.objects.create(name1='n1', name2='n2', value=1, value4=4) qs = Parent2.objects.select_related('child1', 'child1__child4').only( 'id2', 'child1__value', 'child1__child4__value4') with self.assertNumQueries(1): p = qs.get(name2="n2") self.assertEqual(p.id2, c.id2) self.assertEqual(p.child1.value, 1) self.assertEqual(p.child1.child4.value4, 4) self.assertEqual(p.child1.child4.id2, c.id2) p = qs.get(name2="n2") with self.assertNumQueries(1): self.assertEqual(p.child1.name2, 'n2') p = qs.get(name2="n2") with self.assertNumQueries(0): self.assertEqual(p.child1.name1, 'n1') self.assertEqual(p.child1.child4.name1, 'n1') def test_self_relation(self): if django.VERSION < (1, 11, 0): self.skipTest("does not work on older version of Django") item1 = LinkedList.objects.create(name='item1') LinkedList.objects.create(name='item2', previous_item=item1) with self.assertNumQueries(1): item1_db = LinkedList.objects.select_related('next_item').get(name='item1') self.assertEqual(item1_db.next_item.name, 'item2') class ReverseSelectRelatedValidationTests(SimpleTestCase): """ Rverse related fields should be listed in the validation message when an invalid field is given in select_related(). """ non_relational_error = "Non-relational field given in select_related: '%s'. Choices are: %s" invalid_error = "Invalid field name(s) given in select_related: '%s'. Choices are: %s" def test_reverse_related_validation(self): fields = 'userprofile, userstat' with self.assertRaisesMessage(FieldError, self.invalid_error % ('foobar', fields)): list(User.objects.select_related('foobar')) with self.assertRaisesMessage(FieldError, self.non_relational_error % ('username', fields)): list(User.objects.select_related('username'))<|fim▁end|>
<|file_name|>noise.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Layers for regularization models via the addition of noise. """ from __future__ import absolute_import<|fim▁hole|>import numpy as np from tensorflow.contrib.keras.python.keras import backend as K from tensorflow.contrib.keras.python.keras.engine import Layer class GaussianNoise(Layer): """Apply additive zero-centered Gaussian noise. This is useful to mitigate overfitting (you could see it as a form of random data augmentation). Gaussian Noise (GS) is a natural choice as corruption process for real valued inputs. As it is a regularization layer, it is only active at training time. Arguments: stddev: float, standard deviation of the noise distribution. Input shape: Arbitrary. Use the keyword argument `input_shape` (tuple of integers, does not include the samples axis) when using this layer as the first layer in a model. Output shape: Same shape as input. """ def __init__(self, stddev, **kwargs): super(GaussianNoise, self).__init__(**kwargs) self.supports_masking = True self.stddev = stddev def call(self, inputs, training=None): def noised(): return inputs + K.random_normal( shape=K.shape(inputs), mean=0., stddev=self.stddev) return K.in_train_phase(noised, inputs, training=training) def get_config(self): config = {'stddev': self.stddev} base_config = super(GaussianNoise, self).get_config() return dict(list(base_config.items()) + list(config.items())) class GaussianDropout(Layer): """Apply multiplicative 1-centered Gaussian noise. As it is a regularization layer, it is only active at training time. Arguments: rate: float, drop probability (as with `Dropout`). The multiplicative noise will have standard deviation `sqrt(rate / (1 - rate))`. Input shape: Arbitrary. Use the keyword argument `input_shape` (tuple of integers, does not include the samples axis) when using this layer as the first layer in a model. Output shape: Same shape as input. References: - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting Srivastava, Hinton, et al. 2014](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf) """ def __init__(self, rate, **kwargs): super(GaussianDropout, self).__init__(**kwargs) self.supports_masking = True self.rate = rate def call(self, inputs, training=None): if 0 < self.rate < 1: def noised(): stddev = np.sqrt(self.rate / (1.0 - self.rate)) return inputs * K.random_normal( shape=K.shape(inputs), mean=1.0, stddev=stddev) return K.in_train_phase(noised, inputs, training=training) return inputs def get_config(self): config = {'rate': self.rate} base_config = super(GaussianDropout, self).get_config() return dict(list(base_config.items()) + list(config.items())) class AlphaDropout(Layer): """Applies Alpha Dropout to the input. 
Alpha Dropout is a `Dropout` that keeps mean and variance of inputs to their original values, in order to ensure the self-normalizing property even after this dropout. Alpha Dropout fits well to Scaled Exponential Linear Units by randomly setting activations to the negative saturation value. Arguments: rate: float, drop probability (as with `Dropout`). The multiplicative noise will have standard deviation `sqrt(rate / (1 - rate))`. seed: A Python integer to use as random seed. Input shape: Arbitrary. Use the keyword argument `input_shape` (tuple of integers, does not include the samples axis) when using this layer as the first layer in a model. Output shape: Same shape as input. References: - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515) """ def __init__(self, rate, noise_shape=None, seed=None, **kwargs): super(AlphaDropout, self).__init__(**kwargs) self.rate = rate self.noise_shape = noise_shape self.seed = seed self.supports_masking = True def _get_noise_shape(self, inputs): return self.noise_shape if self.noise_shape else K.shape(inputs) def call(self, inputs, training=None): if 0. < self.rate < 1.: noise_shape = self._get_noise_shape(inputs) alpha = 1.6732632423543772848170429916717 scale = 1.0507009873554804934193349852946 def dropped_inputs(inputs=inputs, rate=self.rate, seed=self.seed): alpha_p = -alpha * scale kept_idx = K.greater_equal(K.random_uniform(noise_shape, seed=seed), rate) kept_idx = K.cast(kept_idx, K.floatx()) a = ((1 - rate) * (1 + rate * alpha_p ** 2)) ** -0.5 b = -a * alpha_p * rate x = inputs * kept_idx + alpha_p * (1 - kept_idx) return a * x + b return K.in_train_phase(dropped_inputs, inputs, training=training) return inputs def get_config(self): config = {'rate': self.rate} base_config = super(AlphaDropout, self).get_config() return dict(list(base_config.items()) + list(config.items()))<|fim▁end|>
from __future__ import division from __future__ import print_function
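All three layers are ordinary Keras layers and drop into a model like any other; the noise paths are active only in the training phase via K.in_train_phase. A usage sketch; the Sequential and Dense import paths are assumed to sit in the same contrib.keras tree as this file and may differ by TensorFlow version:

# Assumed sibling imports; adjust to your TF/Keras version if they differ.
from tensorflow.contrib.keras.python.keras.models import Sequential
from tensorflow.contrib.keras.python.keras.layers import Dense

model = Sequential([
    Dense(64, activation='relu', input_shape=(20,)),
    GaussianNoise(0.1),   # additive noise, training only
    Dense(64, activation='selu'),
    AlphaDropout(0.1),    # keeps the SELU self-normalizing property
    Dense(1),
])
model.compile(optimizer='adam', loss='mse')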
<|file_name|>argumentschema.py<|end_file_name|><|fim▁begin|># Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. from awscli.customizations.emr import helptext from awscli.customizations.emr.createdefaultroles import EC2_ROLE_NAME INSTANCE_GROUPS_SCHEMA = { "type": "array", "items": { "type": "object", "properties": { "Name": { "type": "string", "description": "Friendly name given to the instance group." }, "InstanceGroupType": { "type": "string", "description": "The type of the instance group in the cluster.", "enum": ["MASTER", "CORE", "TASK"], "required": True }, "BidPrice": { "type": "string", "description": "Bid price for each Amazon EC2 instance in the " "instance group when launching nodes as Spot Instances, " "expressed in USD." }, "InstanceType": { "type": "string", "description": "The Amazon EC2 instance type for all instances " "in the instance group.", "required": True }, "InstanceCount": { "type": "integer", "description": "Target number of Amazon EC2 instances " "for the instance group", "required": True }, "EbsConfiguration": { "type": "object", "description": "EBS configuration that will be associated with the instance group.", "properties": { "EbsOptimized": { "type": "boolean", "description": "Boolean flag used to tag EBS-optimized instances.", }, "EbsBlockDeviceConfigs": { "type": "array", "items": { "type": "object", "properties": { "VolumeSpecification" : { "type": "object", "description": "The EBS volume specification that will be created and attached to every instance in this instance group.", "properties": { "VolumeType": { "type": "string", "description": "The EBS volume type that is attached to all the instances in the instance group. Valid types are: gp2, io1, and standard.", "required": True }, "SizeInGB": { "type": "integer", "description": "The EBS volume size, in GB, that is attached to all the instances in the instance group.", "required": True }, "Iops": { "type": "integer", "description": "The IOPS of the EBS volume that is attached to all the instances in the instance group.", } } }, "VolumesPerInstance": { "type": "integer", "description": "The number of EBS volumes that will be created and attached to each instance in the instance group.", } } } } } }, "AutoScalingPolicy": { "type": "object", "description": "Auto Scaling policy that will be associated with the instance group.", "properties": { "Constraints": { "type": "object", "description": "The Constraints that will be associated to an Auto Scaling policy.", "properties": { "MinCapacity": { "type": "integer", "description": "The minimum value for the instances to scale in" " to in response to scaling activities." }, "MaxCapacity": { "type": "integer", "description": "The maximum value for the instances to scale out to in response" " to scaling activities" } } }, "Rules": { "type": "array", "description": "The Rules associated to an Auto Scaling policy.", "items": { "type": "object", "properties": { "Name": { "type": "string", "description": "Name of the Auto Scaling rule." 
}, "Description": { "type": "string", "description": "Description of the Auto Scaling rule." }, "Action": { "type": "object", "description": "The Action associated to an Auto Scaling rule.", "properties": { "Market": { # Required for Instance Fleets "type": "string", "description": "Market type of the Amazon EC2 instances used to create a " "cluster node by Auto Scaling action.", "enum": ["ON_DEMAND", "SPOT"] }, "SimpleScalingPolicyConfiguration": { "type": "object", "description": "The Simple scaling configuration that will be associated" "to Auto Scaling action.", "properties": { "AdjustmentType": { "type": "string", "description": "Specifies how the ScalingAdjustment parameter is " "interpreted.", "enum": ["CHANGE_IN_CAPACITY", "PERCENT_CHANGE_IN_CAPACITY", "EXACT_CAPACITY"] }, "ScalingAdjustment": { "type": "integer", "description": "The amount by which to scale, based on the " "specified adjustment type." }, "CoolDown": { "type": "integer", "description": "The amount of time, in seconds, after a scaling " "activity completes and before the next scaling " "activity can start." } } } } }, "Trigger": { "type": "object", "description": "The Trigger associated to an Auto Scaling rule.", "properties": { "CloudWatchAlarmDefinition": { "type": "object", "description": "The Alarm to be registered with CloudWatch, to trigger" " scaling activities.", "properties": { "ComparisonOperator": { "type": "string", "description": "The arithmetic operation to use when comparing the" " specified Statistic and Threshold." }, "EvaluationPeriods": { "type": "integer", "description": "The number of periods over which data is compared" " to the specified threshold." }, "MetricName": { "type": "string", "description": "The name for the alarm's associated metric." }, "Namespace": { "type": "string", "description": "The namespace for the alarm's associated metric." }, "Period": { "type": "integer", "description": "The period in seconds over which the specified " "statistic is applied." }, "Statistic": { "type": "string", "description": "The statistic to apply to the alarm's associated " "metric." }, "Threshold": { "type": "double", "description": "The value against which the specified statistic is " "compared." }, "Unit": { "type": "string", "description": "The statistic's unit of measure." }, "Dimensions": { "type": "array", "description": "The dimensions for the alarm's associated metric.", "items": { "type": "object", "properties": { "Key": { "type": "string", "description": "Dimension Key." }, "Value": { "type": "string", "description": "Dimension Value." } } } } } } } } } } } } } } } } EC2_ATTRIBUTES_SCHEMA = { "type": "object", "properties": { "KeyName": { "type": "string", "description": "The name of the Amazon EC2 key pair that can " "be used to ssh to the master node as the user 'hadoop'." }, "SubnetId": { "type": "string", "description": "To launch the cluster in Amazon " "Virtual Private Cloud (Amazon VPC), set this parameter to " "the identifier of the Amazon VPC subnet where you want " "the cluster to launch. If you do not specify this value, " "the cluster is launched in the normal Amazon Web Services " "cloud, outside of an Amazon VPC. " }, "AvailabilityZone": { "type": "string", "description": "The Availability Zone the cluster will run in." }, "InstanceProfile": { "type": "string", "description": "An IAM role for the cluster. The EC2 instances of the cluster" " assume this role. The default role is " + EC2_ROLE_NAME + ". 
In order to use the default" " role, you must have already created it using the " "<code>create-default-roles</code> command. " }, "EmrManagedMasterSecurityGroup": { "type": "string", "description": helptext.EMR_MANAGED_MASTER_SECURITY_GROUP }, "EmrManagedSlaveSecurityGroup": { "type": "string", "description": helptext.EMR_MANAGED_SLAVE_SECURITY_GROUP }, "ServiceAccessSecurityGroup": { "type": "string", "description": helptext.SERVICE_ACCESS_SECURITY_GROUP }, "AdditionalMasterSecurityGroups": { "type": "array", "description": helptext.ADDITIONAL_MASTER_SECURITY_GROUPS, "items": { "type": "string" } }, "AdditionalSlaveSecurityGroups": { "type": "array", "description": helptext.ADDITIONAL_SLAVE_SECURITY_GROUPS, "items": { "type": "string" } } } } APPLICATIONS_SCHEMA = { "type": "array", "items": { "type": "object", "properties": { "Name": { "type": "string", "description": "Application name.", "enum": ["MapR", "HUE", "HIVE", "PIG", "HBASE", "IMPALA", "GANGLIA", "HADOOP", "SPARK"], "required": True }, "Args": { "type": "array", "description": "A list of arguments to pass to the application.", "items": { "type": "string" } } } } } BOOTSTRAP_ACTIONS_SCHEMA = { "type": "array", "items": { "type": "object", "properties": { "Name": { "type": "string", "default": "Bootstrap Action" }, "Path": { "type": "string", "description": "Location of the script to run during a bootstrap action. " "Can be either a location in Amazon S3 or " "on a local file system.", "required": True }, "Args": { "type": "array", "description": "A list of command line arguments to pass to " "the bootstrap action script", "items": { "type": "string" } } } } } STEPS_SCHEMA = { "type": "array", "items": { "type": "object", "properties": { "Type": { "type": "string", "description": "The type of a step to be added to the cluster.", "default": "custom_jar", "enum": ["CUSTOM_JAR", "STREAMING", "HIVE", "PIG", "IMPALA"], }, "Name": { "type": "string", "description": "The name of the step. ", }, "ActionOnFailure": { "type": "string", "description": "The action to take if the cluster step fails.", "enum": ["TERMINATE_CLUSTER", "CANCEL_AND_WAIT", "CONTINUE"], "default": "CONTINUE" }, "Jar": { "type": "string", "description": "A path to a JAR file run during the step.", }, "Args": { "type": "array", "description": "A list of command line arguments to pass to the step.", "items": { "type": "string" } }, "MainClass": { "type": "string", "description": "The name of the main class in the specified " "Java file. If not specified, the JAR file should " "specify a Main-Class in its manifest file." }, "Properties": { "type": "string", "description": "A list of Java properties that are set when the step " "runs. You can use these properties to pass key value "<|fim▁hole|> } } HBASE_RESTORE_FROM_BACKUP_SCHEMA = { "type": "object", "properties": { "Dir": { "type": "string", "description": helptext.HBASE_BACKUP_DIR }, "BackupVersion": { "type": "string", "description": helptext.HBASE_BACKUP_VERSION } } } EMR_FS_SCHEMA = { "type": "object", "properties": { "Consistent": { "type": "boolean", "description": "Enable EMRFS consistent view." }, "SSE": { "type": "boolean", "description": "Enable Amazon S3 server-side encryption on files " "written to S3 by EMRFS." }, "RetryCount": { "type": "integer", "description": "The maximum number of times to retry upon S3 inconsistency." }, "RetryPeriod": { "type": "integer", "description": "The amount of time (in seconds) until the first " "retry. Subsequent retries use an exponential " "back-off." 
}, "Args": { "type": "array", "description": "A list of arguments to pass for additional " "EMRFS configuration.", "items": { "type": "string" } }, "Encryption": { "type": "string", "description": "EMRFS encryption type.", "enum": ["SERVERSIDE", "CLIENTSIDE"] }, "ProviderType": { "type": "string", "description": "EMRFS client-side encryption provider type.", "enum": ["KMS", "CUSTOM"] }, "KMSKeyId": { "type": "string", "description": "AWS KMS's customer master key identifier", }, "CustomProviderLocation": { "type": "string", "description": "Custom encryption provider JAR location." }, "CustomProviderClass": { "type": "string", "description": "Custom encryption provider full class name." } } } TAGS_SCHEMA = { "type": "array", "items": { "type": "string" } }<|fim▁end|>
"pairs to your main function." } }
<|file_name|>test10.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|>for(int i=0;i<2000000000;i++); printf("done\n"); scanf(" "); }<|fim▁end|>
#include<stdio.h> int main(){
<|file_name|>files_2.js<|end_file_name|><|fim▁begin|>var searchData= [<|fim▁hole|><|fim▁end|>
['error_2eh',['error.h',['../error_8h.html',1,'']]] ];
<|file_name|>stop_signs.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Google LLC //<|fim▁hole|>// // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use ModifiedStopSign; use geom::GeomMap; use map_model::{IntersectionID, Map, TurnID}; use std::collections::HashMap; #[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Copy, PartialOrd)] pub enum TurnPriority { Stop, Yield, Priority, } // This represents a single intersection controlled by a stop sign-like policy. The turns are // partitioned into three groups: // // 1) Priority turns - these must be non-conflicting, and cars don't have to stop before doing this // turn. // 2) Yields - cars can do this immediately if there are no previously accepted conflicting turns. // should maybe check that these turns originate from roads with priority turns. // 3) Stops - cars must stop before doing this turn, and they are accepted with the lowest priority #[derive(Debug)] pub struct ControlStopSign { intersection: IntersectionID, turns: HashMap<TurnID, TurnPriority>, changed: bool, } impl ControlStopSign { pub fn new(map: &Map, intersection: IntersectionID) -> ControlStopSign { assert!(!map.get_i(intersection).has_traffic_signal); ControlStopSign::all_way_stop(map, intersection) } fn all_way_stop(map: &Map, intersection: IntersectionID) -> ControlStopSign { let mut ss = ControlStopSign { intersection, turns: HashMap::new(), changed: false, }; for t in &map.get_i(intersection).turns { ss.turns.insert(*t, TurnPriority::Stop); } ss } pub fn get_priority(&self, turn: TurnID) -> TurnPriority { self.turns[&turn] } pub fn set_priority(&mut self, turn: TurnID, priority: TurnPriority, geom_map: &GeomMap) { if priority == TurnPriority::Priority { assert!(self.could_be_priority_turn(turn, geom_map)); } self.turns.insert(turn, priority); self.changed = true; } pub fn could_be_priority_turn(&self, id: TurnID, geom_map: &GeomMap) -> bool { for (t, pri) in &self.turns { if *pri == TurnPriority::Priority && geom_map.get_t(id).conflicts_with(geom_map.get_t(*t)) { return false; } } true } pub fn changed(&self) -> bool { // TODO detect edits that've been undone, equivalent to original self.changed } pub fn get_savestate(&self) -> Option<ModifiedStopSign> { if !self.changed() { return None; } Some(ModifiedStopSign { turns: self.turns.clone(), }) } pub fn load_savestate(&mut self, state: &ModifiedStopSign) { self.changed = true; self.turns = state.turns.clone(); } // TODO need to color turn icons } #[cfg(test)] mod tests { #[test] fn ordering() { use stop_signs::TurnPriority; assert!(TurnPriority::Priority > TurnPriority::Yield); } }<|fim▁end|>
// Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at
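// Illustrative stand-alone sketch (added for clarity, not part of the
// original crate): ControlStopSign relies on TurnPriority deriving
// PartialOrd, which orders variants by declaration order, so
// Stop < Yield < Priority holds exactly as the unit test above asserts.
// The enum is re-declared here only so the snippet compiles on its own.
#[derive(Debug, PartialEq, PartialOrd, Clone, Copy)]
enum TurnPriority {
    Stop,
    Yield,
    Priority,
}

fn main() {
    assert!(TurnPriority::Priority > TurnPriority::Yield);
    assert!(TurnPriority::Yield > TurnPriority::Stop);
}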
<|file_name|>umeditor.config.js<|end_file_name|><|fim▁begin|>/** * Complete umeditor configuration<|fim▁hole|>/**************************NOTE******************************** * Every option commented out below shows the UEditor default value. * Before changing a default, make sure you fully understand what the parameter really does. * There are two ways to override an option: uncomment it here and change the value, or pass the corresponding parameter when instantiating the editor. * When upgrading the editor you can simply replace the new config file with your old one; an old config file will not cause script errors just because it lacks the parameters needed by new features. **************************NOTE********************************/ (function () { /** * Root path of the editor's resource files. Its meaning: taking the page that instantiates the editor as the current path, it points at the editor resource files (the dialog folder and the like). * Given the many path problems people run into when using the editor, we strongly recommend configuring this as a path relative to the site root. * "Relative to the site root" means a path starting with a slash, such as "/myProject/umeditor/". * If several pages at different levels of the site instantiate editors that share one UEditor install, a single URL here may not fit every page's editor. * For that case UEditor lets each page configure its own root path: put the following code at the very top of the page that instantiates the editor, and make sure the URL here matches that configuration. * window.UMEDITOR_HOME_URL = "/xxxx/xxxx/"; */ var URL = window.UMEDITOR_HOME_URL || (function(){ function PathStack() { this.documentURL = self.document.URL || self.location.href; this.separator = '/'; this.separatorPattern = /\\|\//g; this.currentDir = './'; this.currentDirPattern = /^[.]\/]/; this.path = this.documentURL; this.stack = []; this.push( this.documentURL ); } PathStack.isParentPath = function( path ){ return path === '..'; }; PathStack.hasProtocol = function( path ){ return !!PathStack.getProtocol( path ); }; PathStack.getProtocol = function( path ){ var protocol = /^[^:]*:\/*/.exec( path ); return protocol ? protocol[0] : null; }; PathStack.prototype = { push: function( path ){ this.path = path; update.call( this ); parse.call( this ); return this; }, getPath: function(){ return this + ""; }, toString: function(){ return this.protocol + ( this.stack.concat( [''] ) ).join( this.separator ); } }; function update() { var protocol = PathStack.getProtocol( this.path || '' ); if( protocol ) { //root protocol this.protocol = protocol; //local this.localSeparator = /\\|\//.exec( this.path.replace( protocol, '' ) )[0]; this.stack = []; } else { protocol = /\\|\//.exec( this.path ); protocol && (this.localSeparator = protocol[0]); } } function parse(){ var parsedStack = this.path.replace( this.currentDirPattern, '' ); if( PathStack.hasProtocol( this.path ) ) { parsedStack = parsedStack.replace( this.protocol , ''); } parsedStack = parsedStack.split( this.localSeparator ); parsedStack.length = parsedStack.length - 1; for(var i= 0,tempPath,l=parsedStack.length,root = this.stack;i<l;i++){ tempPath = parsedStack[i]; if(tempPath){ if( PathStack.isParentPath( tempPath ) ) { root.pop(); } else { root.push( tempPath ); } } } } var currentPath = document.getElementsByTagName('script'); currentPath = currentPath[ currentPath.length -1 ].src; return new PathStack().push( currentPath ) + ""; })(); /** * Main body of the configuration. Note: do not leave out the URL variable in any option below that involves a path. */ window.UMEDITOR_CONFIG = { //Adds a path for the editor instance; this line must not be commented out UMEDITOR_HOME_URL : URL //Image upload settings ,imageUrl:"/Advice/imgUpload" //URL that image uploads are submitted to ,imagePath:'http://www.test.com/' //Prefix used to fix up image URLs (the fixedImagePath); adjust it if you have special needs ,imageFieldName:"upfile" //Key of the image data; if you change it here, change the matching parameter in the backend handler as well //All function buttons and dropdowns on the toolbar; you can redefine the set you need when new-ing an editor instance ,toolbar:[ 'source | undo redo | bold italic underline strikethrough | superscript subscript | forecolor backcolor | removeformat |', 'insertorderedlist insertunorderedlist | selectall cleardoc paragraph | fontfamily fontsize' , '| justifyleft justifycenter justifyright justifyjustify |', 'link unlink | emotion image video | map', '| horizontal print preview fullscreen', 'drafts', 'formula' ] //Language option, default zh-cn. If needed, you can switch languages automatically as shown below, provided the matching language file exists in the lang folder: //The lang value can also be obtained automatically: (navigator.language||navigator.browserLanguage 
||navigator.userLanguage).toLowerCase() //,lang:"zh-cn" //,langPath:URL +"lang/" //Automatic link detection under IE //,autourldetectinie:false //Theme option, default "default". If needed, themes can be switched automatically in the same way, provided the matching theme file exists in the themes folder: //Available skins: default //,theme:'default' //,themePath:URL +"themes/" //For the getAllHtml method: this charset setting is added to the corresponding head tag. //,charset:"utf-8" //Common options //,isShow : true //Show the editor by default //,initialContent:'Welcome to UMEDITOR!' //Initial editor content; it can also be supplied through a textarea/script element, see the examples on the official site //,initialFrameWidth:500 //Initial editor width, default 500 //,initialFrameHeight:500 //Initial editor height, default 500 //,autoClearinitialContent:true //Whether to clear the editor's initial content automatically. Note: if the focus option is set to true and this is true as well, the clearing fires as soon as the editor starts, so the initial content is never visible //,textarea:'editorValue' // Parameter name the server uses to read the submitted editor content. With multiple instances you can give each container a name attribute, whose value then serves as the key for that instance, so you do not have to set this on every instantiation //,focus:false //Whether the editor gets focus on initialization, true or false //,autoClearEmptyNode : true //Whether getContent removes empty inlineElement nodes (including nested ones) //,fullscreen : false //Whether to go fullscreen right at initialization, off by default //,readonly : false //Whether the editing area is read-only once initialization finishes, default false //,zIndex : 900 //Base z-index of the editor layers, default 900 //If you customize styles, give the p tag a line-height like the one below, otherwise the text jumps while typing Chinese //Note: styles added here are best placed under .edui-editor-body .edui-body-container to avoid clashes with the page's CSS //,initialStyle:'.edui-editor-body .edui-body-container p{line-height:1em}' //,autoSyncData:true //Automatically sync the data the editor is going to submit //,emotionLocalization:false //Whether to serve emoticons locally, off by default. To enable it, make sure the emotion folder contains the images emoticon folder provided on the official site //,allHtmlEnabled:false //Whether the data submitted to the backend contains the whole html string //fontfamily //Font family settings // ,'fontfamily':[ // { name: 'songti', val: '宋体,SimSun'}, // ] //fontsize //Font sizes //,'fontsize':[10, 11, 12, 14, 16, 18, 20, 24, 36] //paragraph //Paragraph formats. Empty values enable automatic multi-language detection; configured values take precedence //,'paragraph':{'p':'', 'h1':'', 'h2':'', 'h3':'', 'h4':'', 'h5':'', 'h6':''} //undo //Maximum number of undo steps, default 20 //,maxUndoCount:20 //A snapshot is saved whenever the number of typed characters exceeds this value //,maxInputCount:1 //imageScaleEnabled // Whether images can be resized by click-dragging, default true //,imageScaleEnabled:true //dropFileEnabled // Whether images can be dragged into the editing area, uploaded and inserted, default true //,dropFileEnabled:true //pasteImageEnabled // Whether pasted QQ screenshots are uploaded and inserted, default true //,pasteImageEnabled:true //autoHeightEnabled // Whether the editor grows taller automatically, default true //,autoHeightEnabled:true //autoFloatEnabled //Whether the toolbar keeps its position fixed while scrolling, default true //,autoFloatEnabled:true //Distance between the floating toolbar and the top of the browser, for pages with a fixed header //,topOffset:30 //Filter rules go here //,filterRules: {} }; })();<|fim▁end|>
* All the editor's features can be configured here */
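// Illustrative sketch (added for clarity, not shipped with the config): the
// second override route mentioned in the NOTE block above -- passing options
// at instantiation time instead of editing this file. UM.getEditor is the
// factory used in UMeditor's own examples; the container id and option
// values here are made up.
// var um = UM.getEditor('myEditorContainer', {
//     initialFrameWidth: 700,
//     initialFrameHeight: 400
// });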
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>""" Some scripts define objects that we want to import via yaml files that we pass to the script, so this directory must be a python module, rather than just a directory full of scripts. """<|fim▁end|>
<|file_name|>configparser.go<|end_file_name|><|fim▁begin|>/* Copyright (c) 2013, Alex Yu <[email protected]> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the <organization> nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // Package configparser provides a simple parser for reading/writing configuration (INI) files. // // Supports reading/writing the INI file format in addition to: // // - Reading/writing duplicate section names (ex: MySQL NDB engine's config.ini) // - Options without values (ex: can be used to group a set of hostnames) // - Options without a named section (ex: a simple option=value file) // - Find sections with regexp pattern matching on section names, ex: dc1.east.webservers where regex is '.webservers' // - # or ; as comment delimiter // - = or : as value delimiter // package configparser import ( "bufio" "container/list" "errors" "fmt" "os" "path" "regexp" "strings" "sync" ) // Configuration represents a configuration file with its sections and options. type Configuration struct { filePath string // configuration file sections map[string]*list.List // fully qualified section name as key orderedSections []string // track the order of section names as they are parsed mutex sync.RWMutex } // Section represents a single named section in a configuration. type Section struct { fqn string options map[string]string orderedOptions []string // track the order of the options as they are parsed mutex sync.RWMutex } // NewConfiguration returns a new Configuration instance with an empty file path. func NewConfiguration() *Configuration { return newConfiguration("") } // Read parses the specified configuration file and returns a Configuration instance. 
func Read(filePath string) (*Configuration, error) { filePath = path.Clean(filePath) file, err := os.Open(filePath) if err != nil { return nil, err } defer file.Close() config := newConfiguration(filePath) activeSection := config.addSection("global") scanner := bufio.NewScanner(bufio.NewReader(file)) for scanner.Scan() { line := scanner.Text() if !(strings.HasPrefix(line, "#") || strings.HasPrefix(line, ";")) && len(line) > 0 { if isSection(line) { fqn := strings.Trim(line, " []") activeSection = config.addSection(fqn) continue } else { addOption(activeSection, line) } } } if err := scanner.Err(); err != nil { return nil, err } return config, nil } // Save the Configuration to file. Creates a backup (.bak) if file already exists. func Save(c *Configuration, filePath string) (err error) { c.mutex.Lock() err = os.Rename(filePath, filePath+".bak") if err != nil { if !os.IsNotExist(err) { // fine if the file does not exist // release the lock before the early return so the mutex is not left held c.mutex.Unlock() return err } } f, err := os.Create(filePath) if err != nil { // release the lock before the early return so the mutex is not left held c.mutex.Unlock() return err } defer func() {<|fim▁hole|> err = f.Close() }() w := bufio.NewWriter(f) defer func() { err = w.Flush() }() c.mutex.Unlock() s, err := c.AllSections() if err != nil { return err } c.mutex.Lock() defer c.mutex.Unlock() for _, v := range s { w.WriteString(v.String()) w.WriteString("\n") } return err } // NewSection creates and adds a new Section with the specified name. func (c *Configuration) NewSection(fqn string) *Section { return c.addSection(fqn) } // FilePath returns the configuration file path. func (c *Configuration) FilePath() string { return c.filePath } // SetFilePath sets the Configuration file path. func (c *Configuration) SetFilePath(filePath string) { c.mutex.Lock() defer c.mutex.Unlock() c.filePath = filePath } // StringValue returns the string value for the specified section and option. func (c *Configuration) StringValue(section, option string) (value string, err error) { s, err := c.Section(section) if err != nil { return } value = s.ValueOf(option) return } // Delete deletes the sections matched by the regex name and returns the deleted sections. func (c *Configuration) Delete(regex string) (sections []*Section, err error) { sections, err = c.Find(regex) c.mutex.Lock() defer c.mutex.Unlock() if err == nil { for _, s := range sections { delete(c.sections, s.fqn) } // remove also from ordered list var matched bool for i, name := range c.orderedSections { if matched, err = regexp.MatchString(regex, name); matched { c.orderedSections = append(c.orderedSections[:i], c.orderedSections[i+1:]...) } else { if err != nil { return nil, err } } } } return sections, err } // Section returns the first section matching the fully qualified section name. func (c *Configuration) Section(fqn string) (*Section, error) { c.mutex.RLock() defer c.mutex.RUnlock() if l, ok := c.sections[fqn]; ok { for e := l.Front(); e != nil; e = e.Next() { s := e.Value.(*Section) return s, nil } } return nil, errors.New("Unable to find " + fqn) } // AllSections returns a slice of all sections available. func (c *Configuration) AllSections() ([]*Section, error) { return c.Sections("") } // Sections returns a slice of Sections matching the fully qualified section name. func (c *Configuration) Sections(fqn string) ([]*Section, error) { c.mutex.RLock() defer c.mutex.RUnlock() var sections []*Section f := func(lst *list.List) { for e := lst.Front(); e != nil; e = e.Next() { s := e.Value.(*Section) sections = append(sections, s) } } if fqn == "" { // Get all sections. 
for _, fqn := range c.orderedSections { if lst, ok := c.sections[fqn]; ok { f(lst) } } } else { if lst, ok := c.sections[fqn]; ok { f(lst) } else { return nil, errors.New("Unable to find " + fqn) } } return sections, nil } // Find returns a slice of Sections matching the regexp against the section name. func (c *Configuration) Find(regex string) ([]*Section, error) { c.mutex.RLock() defer c.mutex.RUnlock() var sections []*Section for key, lst := range c.sections { if matched, err := regexp.MatchString(regex, key); matched { for e := lst.Front(); e != nil; e = e.Next() { s := e.Value.(*Section) sections = append(sections, s) } } else { if err != nil { return nil, err } } } return sections, nil } // PrintSection prints a text representation of all sections matching the fully qualified section name. func (c *Configuration) PrintSection(fqn string) { c.mutex.RLock() defer c.mutex.RUnlock() sections, err := c.Sections(fqn) if err == nil { for _, section := range sections { fmt.Print(section) } } else { fmt.Printf("Unable to find section %v\n", err) } } // String returns the text representation of a parsed configuration file. func (c *Configuration) String() string { c.mutex.RLock() defer c.mutex.RUnlock() var parts []string for _, fqn := range c.orderedSections { sections, _ := c.Sections(fqn) for _, section := range sections { parts = append(parts, section.String()) } } return strings.Join(parts, "") } // Exists returns true if the option exists func (s *Section) Exists(option string) (ok bool) { s.mutex.RLock() defer s.mutex.RUnlock() _, ok = s.options[option] return } // ValueOf returns the value of specified option. func (s *Section) ValueOf(option string) string { s.mutex.Lock() defer s.mutex.Unlock() return s.options[option] } // SetValueFor sets the value for the specified option and returns the old value. func (s *Section) SetValueFor(option string, value string) string { s.mutex.Lock() defer s.mutex.Unlock() var oldValue string oldValue, s.options[option] = s.options[option], value return oldValue } // Add adds a new option to the section. Adding and existing option will overwrite the old one. // The old value is returned func (s *Section) Add(option string, value string) (oldValue string) { s.mutex.Lock() defer s.mutex.Unlock() var ok bool if oldValue, ok = s.options[option]; !ok { s.orderedOptions = append(s.orderedOptions, option) } s.options[option] = value return oldValue } // Delete removes the specified option from the section and returns the deleted option's value. func (s *Section) Delete(option string) (value string) { s.mutex.Lock() defer s.mutex.Unlock() value = s.options[option] delete(s.options, option) for i, opt := range s.orderedOptions { if opt == option { s.orderedOptions = append(s.orderedOptions[:i], s.orderedOptions[i+1:]...) } } return value } // Options returns a map of options for the section. func (s *Section) Options() map[string]string { return s.options } // OptionNames returns a slice of option names in the same order as they were parsed. func (s *Section) OptionNames() []string { return s.orderedOptions } // String returns the text representation of a section with its options. 
func (s *Section) String() string { s.mutex.RLock() defer s.mutex.RUnlock() var parts []string s_name := "[" + s.fqn + "]\n" if s.fqn == "global" { s_name = "" } parts = append(parts, s_name) for _, opt := range s.orderedOptions { value := s.options[opt] if value != "" { parts = append(parts, opt, "=", value, "\n") } else { parts = append(parts, opt, "\n") } } return strings.Join(parts, "") } // // Private // // newConfiguration creates a new Configuration instance. func newConfiguration(filePath string) *Configuration { return &Configuration{ filePath: filePath, sections: make(map[string]*list.List), } } func isSection(section string) bool { return strings.HasPrefix(section, "[") } func addOption(s *Section, option string) { var opt, value string if opt, value = parseOption(option); value != "" { s.options[opt] = value } else { // only insert keys. ex list of hosts s.options[opt] = "" } s.orderedOptions = append(s.orderedOptions, opt) } func parseOption(option string) (opt, value string) { split := func(i int, delim string) (opt, value string) { // strings.Split cannot handle wsrep_provider_options settings opt = strings.Trim(option[:i], " ") value = strings.Trim(option[i+1:], " ") return } if i := strings.Index(option, "="); i != -1 { opt, value = split(i, "=") } else if i := strings.Index(option, ":"); i != -1 { opt, value = split(i, ":") } else { opt = option } return } func (c *Configuration) addSection(fqn string) *Section { section := &Section{fqn: fqn, options: make(map[string]string)} var lst *list.List if lst = c.sections[fqn]; lst == nil { lst = list.New() c.sections[fqn] = lst c.orderedSections = append(c.orderedSections, fqn) } lst.PushBack(section) return section }<|fim▁end|>
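// Illustrative usage sketch (added for clarity, not part of the package):
// it exercises only the exported API defined above -- Read, Section, ValueOf,
// Add and Save. The import path, file path and option names are assumptions.
package main

import (
	"fmt"
	"log"

	"github.com/alyu/configparser" // hypothetical import path; adjust to where the package lives
)

func main() {
	config, err := configparser.Read("/etc/myapp.ini")
	if err != nil {
		log.Fatal(err)
	}
	// Options that appear before any [section] header land in the
	// implicit "global" section created by Read.
	global, err := config.Section("global")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("loglevel =", global.ValueOf("loglevel"))
	global.Add("loglevel", "debug")
	// Save writes the file back and keeps a .bak copy of the old version.
	if err := configparser.Save(config, "/etc/myapp.ini"); err != nil {
		log.Fatal(err)
	}
}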
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main import ( "github.com/kataras/iris" ) // same as embedded-single-page-application but without go-bindata; the files are physically stored in the // current system directory. var page = struct { Title string }{"Welcome"} func newApp() *iris.Application { app := iris.New() app.RegisterView(iris.HTML("./public", ".html")) app.Get("/", func(ctx iris.Context) { ctx.ViewData("Page", page) ctx.View("index.html") }) // or just serve index.html as it is: // app.Get("/", func(ctx iris.Context) { // ctx.ServeFile("index.html", false) // }) assetHandler := app.StaticHandler("./public", false, false) // as an alternative to SPA you can take a look at the /routing/dynamic-path/root-wildcard // example too app.SPA(assetHandler) return app<|fim▁hole|>} func main() { app := newApp() // http://localhost:8080 // http://localhost:8080/index.html // http://localhost:8080/app.js // http://localhost:8080/css/main.css app.Run(iris.Addr(":8080")) }<|fim▁end|>
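// Hypothetical test sketch (added for clarity, not part of the iris example):
// assuming iris's companion package github.com/kataras/iris/httptest, the SPA
// wiring above can be exercised without binding a real port.
package main

import (
	"testing"

	"github.com/kataras/iris/httptest"
)

func TestSPARoutes(t *testing.T) {
	e := httptest.New(t, newApp())
	// Both the rendered view and a static asset should answer 200 OK.
	e.GET("/").Expect().Status(200)
	e.GET("/css/main.css").Expect().Status(200)
}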
<|file_name|>gohi.go<|end_file_name|><|fim▁begin|>package main<|fim▁hole|> "fmt" "net/http" "runtime" ) func main() { runtime.GOMAXPROCS(runtime.NumCPU()) http.HandleFunc("/", hello) fmt.Println("listening...") err := http.ListenAndServe(":8000", nil) if err != nil { panic(err) } } func hello(res http.ResponseWriter, req *http.Request) { fmt.Fprintln(res, "the forms are go!") }<|fim▁end|>
import (
<|file_name|>effects.rs<|end_file_name|><|fim▁begin|>use super::{Effect, EffectState, EffectStats}; use super::EffectState::*; pub struct DrainEffect { remaining_duration: usize, damage: usize, health_regeneration: usize, } impl DrainEffect { pub fn new(duration: usize, damage: usize, health_regeneration: usize) -> Self { DrainEffect { remaining_duration: duration, damage: damage, health_regeneration: health_regeneration, } } } impl Effect for DrainEffect { fn cause_effect(&mut self) -> EffectState { if self.remaining_duration > 0 { self.remaining_duration = self.remaining_duration.saturating_sub(1); Active(EffectStats::new(self.remaining_duration,<|fim▁hole|> self.damage, 0, self.health_regeneration, 0)) } else { Finished } } } pub struct ShieldEffect { remaining_duration: usize, defense: usize, } impl ShieldEffect { pub fn new(duration: usize, defense: usize) -> Self { ShieldEffect { remaining_duration: duration, defense: defense, } } } impl Effect for ShieldEffect { fn cause_effect(&mut self) -> EffectState { if self.remaining_duration > 0 { self.remaining_duration = self.remaining_duration.saturating_sub(1); Active(EffectStats::new(self.remaining_duration, 0, self.defense, 0, 0)) } else { Finished } } } pub struct DamagingEffect { remaining_duration: usize, attack: usize, } impl DamagingEffect { pub fn new(duration: usize, attack: usize) -> Self { DamagingEffect { remaining_duration: duration, attack: attack, } } } impl Effect for DamagingEffect { fn cause_effect(&mut self) -> EffectState { if self.remaining_duration > 0 { self.remaining_duration = self.remaining_duration.saturating_sub(1); Active(EffectStats::new(self.remaining_duration, self.attack, 0, 0, 0)) } else { Finished } } } pub struct HealingEffect { remaining_duration: usize, health_regeneration: usize, } impl HealingEffect { pub fn new(duration: usize, health_regeneration: usize) -> Self { HealingEffect { remaining_duration: duration, health_regeneration: health_regeneration, } } } impl Effect for HealingEffect { fn cause_effect(&mut self) -> EffectState { if self.remaining_duration > 0 { self.remaining_duration = self.remaining_duration.saturating_sub(1); Active(EffectStats::new(self.remaining_duration, 0, 0, self.health_regeneration, 0)) } else { Finished } } } pub struct RechargingEffect { remaining_duration: usize, mana_regeneration: usize, } impl RechargingEffect { pub fn new(duration: usize, mana_regeneration: usize) -> Self { RechargingEffect { remaining_duration: duration, mana_regeneration: mana_regeneration, } } } impl Effect for RechargingEffect { fn cause_effect(&mut self) -> EffectState { if self.remaining_duration > 0 { self.remaining_duration = self.remaining_duration.saturating_sub(1); Active(EffectStats::new(self.remaining_duration, 0, 0, 0, self.mana_regeneration)) } else { Finished } } }<|fim▁end|>
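// Illustrative stand-alone sketch (added for clarity, not part of the
// original module): all of the effects above share one polling contract --
// call cause_effect once per game tick and apply the returned stats until
// the effect reports Finished. Minimal stand-in types are re-declared here
// (field names are assumptions based on the constructor argument order used
// above) so the snippet compiles on its own.
struct EffectStats {
    attack: usize,
}

enum EffectState {
    Active(EffectStats),
    Finished,
}

trait Effect {
    fn cause_effect(&mut self) -> EffectState;
}

struct DamagingEffect {
    remaining_duration: usize,
    attack: usize,
}

impl Effect for DamagingEffect {
    fn cause_effect(&mut self) -> EffectState {
        if self.remaining_duration > 0 {
            self.remaining_duration -= 1;
            EffectState::Active(EffectStats { attack: self.attack })
        } else {
            EffectState::Finished
        }
    }
}

fn main() {
    let mut effect = DamagingEffect { remaining_duration: 3, attack: 5 };
    let mut total_damage = 0;
    // Poll the effect once per tick until it expires.
    loop {
        match effect.cause_effect() {
            EffectState::Active(stats) => total_damage += stats.attack,
            EffectState::Finished => break,
        }
    }
    assert_eq!(total_damage, 15); // 3 ticks x 5 attack
}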
<|file_name|>about_iteration.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- from runner.koan import * class AboutIteration(Koan): def test_iterators_are_a_type(self): it = iter(range(1,6)) fib = 0 for num in it: fib += num self.assertEqual(__ , fib) def test_iterating_with_next(self): stages = iter(['alpha','beta','gamma']) try: self.assertEqual(__, next(stages)) next(stages) self.assertEqual(__, next(stages)) next(stages) except StopIteration as ex: err_msg = 'Ran out of iterations' self.assertRegexpMatches(err_msg, __) # ------------------------------------------------------------------ def add_ten(self, item): return item + 10 def test_map_transforms_elements_of_a_list(self): seq = [1, 2, 3] mapped_seq = list() mapping = map(self.add_ten, seq) self.assertNotEqual(list, type(mapping).__name__) self.assertEqual(__, type(mapping).__name__) # In Python 3 built-in iterator funcs return iterable view objects # instead of lists for item in mapping: mapped_seq.append(item) self.assertEqual(__, mapped_seq) # None, iterator methods actually return objects of iter type in # python 3. In python 2 map() would give you a list. def test_filter_selects_certain_items_from_a_list(self): def is_even(item):<|fim▁hole|> for item in filter(is_even, seq): even_numbers.append(item) self.assertEqual(__, even_numbers) def test_just_return_first_item_found(self): def is_big_name(item): return len(item) > 4 names = ["Jim", "Bill", "Clarence", "Doug", "Eli"] name = None iterator = filter(is_big_name, names) try: name = next(iterator) except StopIteration: msg = 'Ran out of big names' self.assertEqual(__, name) # ------------------------------------------------------------------ def add(self,accum,item): return accum + item def multiply(self,accum,item): return accum * item def test_reduce_will_blow_your_mind(self): import functools # As of Python 3 reduce() has been demoted from a builtin function # to the functools module. result = functools.reduce(self.add, [2, 3, 4]) self.assertEqual(__, type(result).__name__) # Reduce() syntax is same as Python 2 self.assertEqual(__, result) result2 = functools.reduce(self.multiply, [2, 3, 4], 1) self.assertEqual(__, result2) # Extra Credit: # Describe in your own words what reduce does. # ------------------------------------------------------------------ def test_creating_lists_with_list_comprehensions(self): feast = ['lambs', 'sloths', 'orangutans', 'breakfast cereals', 'fruit bats'] comprehension = [delicacy.capitalize() for delicacy in feast] self.assertEqual(__, comprehension[0]) self.assertEqual(__, comprehension[2]) def test_use_pass_for_iterations_with_no_body(self): for num in range(1,5): pass self.assertEqual(__, num) # ------------------------------------------------------------------ def test_all_iteration_methods_work_on_any_sequence_not_just_lists(self): # Ranges are an iterable sequence result = map(self.add_ten, range(1,4)) self.assertEqual(__, list(result)) try: # Files act like a collection of lines file = open("example_file.txt") def make_upcase(line): return line.strip().upper() upcase_lines = map(make_upcase, file.readlines()) self.assertEqual(__, list(upcase_lines)) # NOTE: You can create your own collections that work with each, # map, select, etc. finally: # Arg, this is ugly. # We will figure out how to fix this later. if file: file.close()<|fim▁end|>
return (item % 2) == 0 seq = [1, 2, 3, 4, 5, 6] even_numbers = list()
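# Worked example (added for clarity, not part of the koan file): reduce folds
# a sequence left-to-right into a single value, so reduce(add, [2, 3, 4])
# computes add(add(2, 3), 4) == 9, and the optional third argument seeds the
# fold, as in the multiply case below.
import functools

print(functools.reduce(lambda acc, item: acc + item, [2, 3, 4]))      # 9
print(functools.reduce(lambda acc, item: acc * item, [2, 3, 4], 1))   # 24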
<|file_name|>erlang_rust_port.rs<|end_file_name|><|fim▁begin|>// see erlang_rust_port.erl extern crate erl_ext; extern crate getopts; <|fim▁hole|>use std::io; use std::env; fn main() { let args: Vec<String> = env::args().collect(); let mut opts = Options::new(); opts.optflag("u", "utf8-atoms", "Use utf-8 atoms feature"); opts.optflag("s", "small-atoms", "Use small atoms feature"); opts.optflag("f", "fair-new-fun", "Fairly calculate NEW_FUN size (requires extra memory)"); let matches = match opts.parse(&args[1..]) { Ok(m) => { m } Err(f) => { panic!(f.to_string()) } }; let mut in_f = io::stdin(); let mut out_f = io::stdout(); // let mut out_writer = std::io::BufferedWriter::with_capacity(20480, // out_f.unwrap()); let decoder = Decoder::new(&mut in_f); let encoder = Encoder::new(&mut out_f, matches.opt_present("u"), matches.opt_present("s"), matches.opt_present("f")); match read_write_loop(decoder, encoder) { Err(Error::ByteorderUnexpectedEOF) => (), // port was closed Err(ref err) => panic!("Error: {}", err), Ok(()) => () // unreachable in this example }; } fn read_write_loop<R: io::Read>(mut decoder: Decoder<R>, mut encoder: Encoder) -> Result<(), Error> { loop { assert!(true == try!(decoder.read_prelude())); let term = try!(decoder.decode_term()); try!(encoder.write_prelude()); try!(encoder.encode_term(term)); try!(encoder.flush()); } }<|fim▁end|>
use getopts::Options; use erl_ext::{Decoder,Encoder,Error};
<|file_name|>gles2_implementation_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Tests for GLES2Implementation. #include "gpu/command_buffer/client/gles2_implementation.h" #include <GLES2/gl2ext.h> #include "gpu/command_buffer/client/client_test_helper.h" #include "gpu/command_buffer/client/transfer_buffer.h" #include "gpu/command_buffer/common/command_buffer.h" #include "gpu/command_buffer/common/compiler_specific.h" #include "testing/gtest/include/gtest/gtest.h" #include "testing/gmock/include/gmock/gmock.h" #if !defined(GLES2_SUPPORT_CLIENT_SIDE_ARRAYS) #define GLES2_SUPPORT_CLIENT_SIDE_ARRAYS #endif using testing::_; using testing::AnyNumber; using testing::DoAll; using testing::InSequence; using testing::Invoke; using testing::Mock; using testing::Sequence; using testing::StrictMock; using testing::Truly; using testing::Return; namespace gpu { namespace gles2 { ACTION_P2(SetMemory, dst, obj) { memcpy(dst, &obj, sizeof(obj)); } ACTION_P3(SetMemoryFromArray, dst, array, size) { memcpy(dst, array, size); } // Used to help set the transfer buffer result to SizedResult of a single value. template <typename T> class SizedResultHelper { public: explicit SizedResultHelper(T result) : size_(sizeof(result)), result_(result) { } private: uint32 size_; T result_; }; // Struct to make it easy to pass a vec4 worth of floats. struct FourFloats { FourFloats(float _x, float _y, float _z, float _w) : x(_x), y(_y), z(_z), w(_w) { } float x; float y; float z; float w; }; #pragma pack(push, 1) // Struct that holds 7 characters. struct Str7 { char str[7]; }; #pragma pack(pop) class MockTransferBuffer : public TransferBufferInterface { public: struct ExpectedMemoryInfo { uint32 offset; int32 id; uint8* ptr; }; MockTransferBuffer( CommandBuffer* command_buffer, unsigned int size, unsigned int result_size, unsigned int alignment) : command_buffer_(command_buffer), size_(size), result_size_(result_size), alignment_(alignment), actual_buffer_index_(0), expected_buffer_index_(0), last_alloc_(NULL), expected_offset_(result_size), actual_offset_(result_size) { // We have to allocate the buffers here because // we need to know their address before GLES2Implementation::Initialize // is called. 
for (int ii = 0; ii < kNumBuffers; ++ii) { buffer_ids_[ii] = command_buffer_->CreateTransferBuffer( size_ + ii * alignment_, -1); EXPECT_NE(-1, buffer_ids_[ii]); buffers_[ii] = command_buffer_->GetTransferBuffer(buffer_ids_[ii]); } } virtual ~MockTransferBuffer() { } bool Initialize( unsigned int starting_buffer_size, unsigned int result_size, unsigned int /* min_buffer_size */, unsigned int /* max_buffer_size */, unsigned int alignment, unsigned int size_to_flush) OVERRIDE; virtual int GetShmId() OVERRIDE; virtual void* GetResultBuffer() OVERRIDE; virtual int GetResultOffset() OVERRIDE; virtual void Free() OVERRIDE; virtual bool HaveBuffer() const OVERRIDE; virtual void* AllocUpTo( unsigned int size, unsigned int* size_allocated) OVERRIDE; virtual void* Alloc(unsigned int size) OVERRIDE; virtual RingBuffer::Offset GetOffset(void* pointer) const OVERRIDE; virtual void FreePendingToken(void* p, unsigned int /* token */) OVERRIDE; size_t MaxTransferBufferSize() { return size_ - result_size_; } unsigned int RoundToAlignment(unsigned int size) { return (size + alignment_ - 1) & ~(alignment_ - 1); } bool InSync() { return expected_buffer_index_ == actual_buffer_index_; } ExpectedMemoryInfo GetExpectedMemory(size_t size) { ExpectedMemoryInfo mem; mem.offset = AllocateExpectedTransferBuffer(size); mem.id = GetExpectedTransferBufferId(); mem.ptr = static_cast<uint8*>( GetExpectedTransferAddressFromOffset(mem.offset, size)); return mem; } ExpectedMemoryInfo GetExpectedResultMemory(size_t size) { ExpectedMemoryInfo mem; mem.offset = GetExpectedResultBufferOffset(); mem.id = GetExpectedResultBufferId(); mem.ptr = static_cast<uint8*>( GetExpectedTransferAddressFromOffset(mem.offset, size)); return mem; } private: static const int kNumBuffers = 2; uint8* actual_buffer() const { return static_cast<uint8*>(buffers_[actual_buffer_index_].ptr); } uint8* expected_buffer() const { return static_cast<uint8*>(buffers_[expected_buffer_index_].ptr); } uint32 AllocateExpectedTransferBuffer(size_t size) { EXPECT_LE(size, MaxTransferBufferSize()); // Toggle which buffer we get each time to simulate the buffer being // reallocated. expected_buffer_index_ = (expected_buffer_index_ + 1) % kNumBuffers; if (expected_offset_ + size > size_) { expected_offset_ = result_size_; } uint32 offset = expected_offset_; expected_offset_ += RoundToAlignment(size); // Make sure each buffer has a different offset. return offset + expected_buffer_index_ * alignment_; } void* GetExpectedTransferAddressFromOffset(uint32 offset, size_t size) { EXPECT_GE(offset, expected_buffer_index_ * alignment_); EXPECT_LE(offset + size, size_ + expected_buffer_index_ * alignment_); return expected_buffer() + offset; } int GetExpectedResultBufferId() { return buffer_ids_[expected_buffer_index_]; } uint32 GetExpectedResultBufferOffset() { return expected_buffer_index_ * alignment_; } int GetExpectedTransferBufferId() { return buffer_ids_[expected_buffer_index_]; } CommandBuffer* command_buffer_; size_t size_; size_t result_size_; uint32 alignment_; int buffer_ids_[kNumBuffers]; gpu::Buffer buffers_[kNumBuffers]; int actual_buffer_index_; int expected_buffer_index_; void* last_alloc_; uint32 expected_offset_; uint32 actual_offset_; DISALLOW_COPY_AND_ASSIGN(MockTransferBuffer); }; bool MockTransferBuffer::Initialize( unsigned int starting_buffer_size, unsigned int result_size, unsigned int /* min_buffer_size */, unsigned int /* max_buffer_size */, unsigned int alignment, unsigned int /* size_to_flush */) { // Just check they match. 
return size_ == starting_buffer_size && result_size_ == result_size && alignment_ == alignment; }; int MockTransferBuffer::GetShmId() { return buffer_ids_[actual_buffer_index_]; } void* MockTransferBuffer::GetResultBuffer() { return actual_buffer() + actual_buffer_index_ * alignment_; } int MockTransferBuffer::GetResultOffset() { return actual_buffer_index_ * alignment_; } void MockTransferBuffer::Free() { GPU_NOTREACHED(); } bool MockTransferBuffer::HaveBuffer() const { return true; } void* MockTransferBuffer::AllocUpTo( unsigned int size, unsigned int* size_allocated) { EXPECT_TRUE(size_allocated != NULL); EXPECT_TRUE(last_alloc_ == NULL); // Toggle which buffer we get each time to simulate the buffer being // reallocated. actual_buffer_index_ = (actual_buffer_index_ + 1) % kNumBuffers; size = std::min(static_cast<size_t>(size), MaxTransferBufferSize()); if (actual_offset_ + size > size_) { actual_offset_ = result_size_; } uint32 offset = actual_offset_; actual_offset_ += RoundToAlignment(size); *size_allocated = size; // Make sure each buffer has a different offset. last_alloc_ = actual_buffer() + offset + actual_buffer_index_ * alignment_; return last_alloc_; } void* MockTransferBuffer::Alloc(unsigned int size) { EXPECT_LE(size, MaxTransferBufferSize()); unsigned int temp = 0; void* p = AllocUpTo(size, &temp); EXPECT_EQ(temp, size); return p; } RingBuffer::Offset MockTransferBuffer::GetOffset(void* pointer) const { // Make sure each buffer has a different offset. return static_cast<uint8*>(pointer) - actual_buffer(); } void MockTransferBuffer::FreePendingToken(void* p, unsigned int /* token */) { EXPECT_EQ(last_alloc_, p); last_alloc_ = NULL; } class GLES2ImplementationTest : public testing::Test { protected: static const uint8 kInitialValue = 0xBD; static const int32 kNumCommandEntries = 500; static const int32 kCommandBufferSizeBytes = kNumCommandEntries * sizeof(CommandBufferEntry); static const size_t kTransferBufferSize = 256; static const GLint kMaxCombinedTextureImageUnits = 8; static const GLint kMaxCubeMapTextureSize = 64; static const GLint kMaxFragmentUniformVectors = 16; static const GLint kMaxRenderbufferSize = 64; static const GLint kMaxTextureImageUnits = 8; static const GLint kMaxTextureSize = 128; static const GLint kMaxVaryingVectors = 8; static const GLint kMaxVertexAttribs = 8; static const GLint kMaxVertexTextureImageUnits = 0; static const GLint kMaxVertexUniformVectors = 128; static const GLint kNumCompressedTextureFormats = 0; static const GLint kNumShaderBinaryFormats = 0; static const GLuint kStartId = 1024; static const GLuint kBuffersStartId = GLES2Implementation::kClientSideArrayId + 2; static const GLuint kFramebuffersStartId = 1; static const GLuint kProgramsAndShadersStartId = 1; static const GLuint kRenderbuffersStartId = 1; static const GLuint kTexturesStartId = 1; static const GLuint kQueriesStartId = 1; typedef MockTransferBuffer::ExpectedMemoryInfo ExpectedMemoryInfo; GLES2ImplementationTest() : commands_(NULL), token_(0) { } virtual void SetUp() OVERRIDE; virtual void TearDown() OVERRIDE; bool NoCommandsWritten() { return static_cast<const uint8*>(static_cast<const void*>(commands_))[0] == kInitialValue; } QueryTracker::Query* GetQuery(GLuint id) { return gl_->query_tracker_->GetQuery(id); } void Initialize(bool shared_resources, bool bind_generates_resource) { command_buffer_.reset(new StrictMock<MockClientCommandBuffer>()); ASSERT_TRUE(command_buffer_->Initialize()); transfer_buffer_.reset(new MockTransferBuffer( command_buffer(), 
kTransferBufferSize, GLES2Implementation::kStartingOffset, GLES2Implementation::kAlignment)); helper_.reset(new GLES2CmdHelper(command_buffer())); helper_->Initialize(kCommandBufferSizeBytes); GLES2Implementation::GLState state; state.max_combined_texture_image_units = kMaxCombinedTextureImageUnits; state.max_cube_map_texture_size = kMaxCubeMapTextureSize; state.max_fragment_uniform_vectors = kMaxFragmentUniformVectors; state.max_renderbuffer_size = kMaxRenderbufferSize; state.max_texture_image_units = kMaxTextureImageUnits; state.max_texture_size = kMaxTextureSize; state.max_varying_vectors = kMaxVaryingVectors; state.max_vertex_attribs = kMaxVertexAttribs; state.max_vertex_texture_image_units = kMaxVertexTextureImageUnits; state.max_vertex_uniform_vectors = kMaxVertexUniformVectors; state.num_compressed_texture_formats = kNumCompressedTextureFormats; state.num_shader_binary_formats = kNumShaderBinaryFormats; // This just happens to work for now because GLState has 1 GLint per // state. If GLState gets more complicated this code will need to get // more complicated. ExpectedMemoryInfo mem1 = GetExpectedMemory(sizeof(state) * 2); { InSequence sequence; EXPECT_CALL(*command_buffer(), OnFlush()) .WillOnce(SetMemory(mem1.ptr + sizeof(state), state)) .RetiresOnSaturation(); GetNextToken(); // eat the token that starting up will use. gl_.reset(new GLES2Implementation( helper_.get(), NULL, transfer_buffer_.get(), shared_resources, bind_generates_resource)); ASSERT_TRUE(gl_->Initialize( kTransferBufferSize, kTransferBufferSize, kTransferBufferSize)); } EXPECT_CALL(*command_buffer(), OnFlush()) .Times(1) .RetiresOnSaturation(); helper_->CommandBufferHelper::Finish(); ::testing::Mock::VerifyAndClearExpectations(gl_.get()); Buffer ring_buffer = helper_->get_ring_buffer(); commands_ = static_cast<CommandBufferEntry*>(ring_buffer.ptr) + command_buffer()->GetState().put_offset; ClearCommands(); EXPECT_TRUE(transfer_buffer_->InSync()); ::testing::Mock::VerifyAndClearExpectations(command_buffer()); } MockClientCommandBuffer* command_buffer() const { return command_buffer_.get(); } int GetNextToken() { return ++token_; } const void* GetPut() { return helper_->GetSpace(0); } void ClearCommands() { Buffer ring_buffer = helper_->get_ring_buffer(); memset(ring_buffer.ptr, kInitialValue, ring_buffer.size); } size_t MaxTransferBufferSize() { return transfer_buffer_->MaxTransferBufferSize(); } ExpectedMemoryInfo GetExpectedMemory(size_t size) { return transfer_buffer_->GetExpectedMemory(size); } ExpectedMemoryInfo GetExpectedResultMemory(size_t size) { return transfer_buffer_->GetExpectedResultMemory(size); } int CheckError() { ExpectedMemoryInfo result = GetExpectedResultMemory(sizeof(GetError::Result)); EXPECT_CALL(*command_buffer(), OnFlush()) .WillOnce(SetMemory(result.ptr, GLuint(GL_NO_ERROR))) .RetiresOnSaturation(); return gl_->GetError(); } bool GetBucketContents(uint32 bucket_id, std::vector<int8>* data) { return gl_->GetBucketContents(bucket_id, data); } Sequence sequence_; scoped_ptr<MockClientCommandBuffer> command_buffer_; scoped_ptr<GLES2CmdHelper> helper_; scoped_ptr<MockTransferBuffer> transfer_buffer_; scoped_ptr<GLES2Implementation> gl_; CommandBufferEntry* commands_; int token_; }; void GLES2ImplementationTest::SetUp() { Initialize(false, true); } void GLES2ImplementationTest::TearDown() { Mock::VerifyAndClear(gl_.get()); EXPECT_CALL(*command_buffer(), OnFlush()).Times(AnyNumber()); gl_.reset(); } class GLES2ImplementationStrictSharedTest : public GLES2ImplementationTest { protected: virtual 
void SetUp() OVERRIDE; }; void GLES2ImplementationStrictSharedTest::SetUp() { Initialize(true, false); } // GCC requires these declarations, but MSVC requires they not be present #ifndef _MSC_VER const uint8 GLES2ImplementationTest::kInitialValue; const int32 GLES2ImplementationTest::kNumCommandEntries; const int32 GLES2ImplementationTest::kCommandBufferSizeBytes; const size_t GLES2ImplementationTest::kTransferBufferSize; const GLint GLES2ImplementationTest::kMaxCombinedTextureImageUnits; const GLint GLES2ImplementationTest::kMaxCubeMapTextureSize; const GLint GLES2ImplementationTest::kMaxFragmentUniformVectors; const GLint GLES2ImplementationTest::kMaxRenderbufferSize; const GLint GLES2ImplementationTest::kMaxTextureImageUnits; const GLint GLES2ImplementationTest::kMaxTextureSize; const GLint GLES2ImplementationTest::kMaxVaryingVectors; const GLint GLES2ImplementationTest::kMaxVertexAttribs; const GLint GLES2ImplementationTest::kMaxVertexTextureImageUnits; const GLint GLES2ImplementationTest::kMaxVertexUniformVectors; const GLint GLES2ImplementationTest::kNumCompressedTextureFormats; const GLint GLES2ImplementationTest::kNumShaderBinaryFormats; const GLuint GLES2ImplementationTest::kStartId; const GLuint GLES2ImplementationTest::kBuffersStartId; const GLuint GLES2ImplementationTest::kFramebuffersStartId; const GLuint GLES2ImplementationTest::kProgramsAndShadersStartId; const GLuint GLES2ImplementationTest::kRenderbuffersStartId; const GLuint GLES2ImplementationTest::kTexturesStartId; const GLuint GLES2ImplementationTest::kQueriesStartId; #endif TEST_F(GLES2ImplementationTest, Basic) { EXPECT_TRUE(gl_->share_group() != NULL); } TEST_F(GLES2ImplementationTest, GetBucketContents) { const uint32 kBucketId = GLES2Implementation::kResultBucketId; const uint32 kTestSize = MaxTransferBufferSize() + 32; scoped_array<uint8> buf(new uint8 [kTestSize]); uint8* expected_data = buf.get(); for (uint32 ii = 0; ii < kTestSize; ++ii) { expected_data[ii] = ii * 3; } struct Cmds { cmd::GetBucketStart get_bucket_start; cmd::SetToken set_token1; cmd::GetBucketData get_bucket_data; cmd::SetToken set_token2; cmd::SetBucketSize set_bucket_size2; }; ExpectedMemoryInfo mem1 = GetExpectedMemory(MaxTransferBufferSize()); ExpectedMemoryInfo result1 = GetExpectedResultMemory(sizeof(uint32)); ExpectedMemoryInfo mem2 = GetExpectedMemory( kTestSize - MaxTransferBufferSize()); Cmds expected; expected.get_bucket_start.Init( kBucketId, result1.id, result1.offset, MaxTransferBufferSize(), mem1.id, mem1.offset); expected.set_token1.Init(GetNextToken()); expected.get_bucket_data.Init( kBucketId, MaxTransferBufferSize(), kTestSize - MaxTransferBufferSize(), mem2.id, mem2.offset); expected.set_bucket_size2.Init(kBucketId, 0); expected.set_token2.Init(GetNextToken()); EXPECT_CALL(*command_buffer(), OnFlush()) .WillOnce(DoAll( SetMemory(result1.ptr, kTestSize), SetMemoryFromArray( mem1.ptr, expected_data, MaxTransferBufferSize()))) .WillOnce(SetMemoryFromArray( mem2.ptr, expected_data + MaxTransferBufferSize(), kTestSize - MaxTransferBufferSize())) .RetiresOnSaturation(); std::vector<int8> data; GetBucketContents(kBucketId, &data); EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected))); ASSERT_EQ(kTestSize, data.size()); EXPECT_EQ(0, memcmp(expected_data, &data[0], data.size())); } TEST_F(GLES2ImplementationTest, ShaderSource) { const uint32 kBucketId = GLES2Implementation::kResultBucketId; const GLuint kShaderId = 456; const char* kString1 = "foobar"; const char* kString2 = "barfoo"; const size_t kString1Size = 
strlen(kString1); const size_t kString2Size = strlen(kString2); const size_t kString3Size = 1; // Want the NULL; const size_t kSourceSize = kString1Size + kString2Size + kString3Size; const size_t kPaddedString1Size = transfer_buffer_->RoundToAlignment(kString1Size); const size_t kPaddedString2Size = transfer_buffer_->RoundToAlignment(kString2Size); const size_t kPaddedString3Size = transfer_buffer_->RoundToAlignment(kString3Size); struct Cmds { cmd::SetBucketSize set_bucket_size; cmd::SetBucketData set_bucket_data1; cmd::SetToken set_token1; cmd::SetBucketData set_bucket_data2; cmd::SetToken set_token2; cmd::SetBucketData set_bucket_data3; cmd::SetToken set_token3; ShaderSourceBucket shader_source_bucket; cmd::SetBucketSize clear_bucket_size; }; ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedString1Size); ExpectedMemoryInfo mem2 = GetExpectedMemory(kPaddedString2Size); ExpectedMemoryInfo mem3 = GetExpectedMemory(kPaddedString3Size); Cmds expected; expected.set_bucket_size.Init(kBucketId, kSourceSize); expected.set_bucket_data1.Init( kBucketId, 0, kString1Size, mem1.id, mem1.offset); expected.set_token1.Init(GetNextToken()); expected.set_bucket_data2.Init( kBucketId, kString1Size, kString2Size, mem2.id, mem2.offset); expected.set_token2.Init(GetNextToken()); expected.set_bucket_data3.Init( kBucketId, kString1Size + kString2Size, kString3Size, mem3.id, mem3.offset); expected.set_token3.Init(GetNextToken()); expected.shader_source_bucket.Init(kShaderId, kBucketId); expected.clear_bucket_size.Init(kBucketId, 0); const char* strings[] = { kString1, kString2, }; gl_->ShaderSource(kShaderId, 2, strings, NULL); EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected))); } TEST_F(GLES2ImplementationTest, GetShaderSource) { const uint32 kBucketId = GLES2Implementation::kResultBucketId; const GLuint kShaderId = 456; const Str7 kString = {"foobar"}; const char kBad = 0x12; struct Cmds { cmd::SetBucketSize set_bucket_size1; GetShaderSource get_shader_source; cmd::GetBucketStart get_bucket_start; cmd::SetToken set_token1; cmd::SetBucketSize set_bucket_size2; }; ExpectedMemoryInfo mem1 = GetExpectedMemory(MaxTransferBufferSize()); ExpectedMemoryInfo result1 = GetExpectedResultMemory(sizeof(uint32)); Cmds expected; expected.set_bucket_size1.Init(kBucketId, 0); expected.get_shader_source.Init(kShaderId, kBucketId); expected.get_bucket_start.Init( kBucketId, result1.id, result1.offset, MaxTransferBufferSize(), mem1.id, mem1.offset); expected.set_token1.Init(GetNextToken()); expected.set_bucket_size2.Init(kBucketId, 0); char buf[sizeof(kString) + 1]; memset(buf, kBad, sizeof(buf)); EXPECT_CALL(*command_buffer(), OnFlush()) .WillOnce(DoAll(SetMemory(result1.ptr, uint32(sizeof(kString))), SetMemory(mem1.ptr, kString))) .RetiresOnSaturation(); GLsizei length = 0; gl_->GetShaderSource(kShaderId, sizeof(buf), &length, buf); EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected))); EXPECT_EQ(sizeof(kString) - 1, static_cast<size_t>(length)); EXPECT_STREQ(kString.str, buf); EXPECT_EQ(buf[sizeof(kString)], kBad); } #if defined(GLES2_SUPPORT_CLIENT_SIDE_ARRAYS) TEST_F(GLES2ImplementationTest, DrawArraysClientSideBuffers) { static const float verts[][4] = { { 12.0f, 23.0f, 34.0f, 45.0f, }, { 56.0f, 67.0f, 78.0f, 89.0f, }, { 13.0f, 24.0f, 35.0f, 46.0f, }, }; struct Cmds { EnableVertexAttribArray enable1; EnableVertexAttribArray enable2; BindBuffer bind_to_emu; BufferData set_size; BufferSubData copy_data1; cmd::SetToken set_token1; VertexAttribPointer set_pointer1; BufferSubData copy_data2; cmd::SetToken 
set_token2; VertexAttribPointer set_pointer2; DrawArrays draw; BindBuffer restore; }; const GLuint kEmuBufferId = GLES2Implementation::kClientSideArrayId; const GLuint kAttribIndex1 = 1; const GLuint kAttribIndex2 = 3; const GLint kNumComponents1 = 3; const GLint kNumComponents2 = 2; const GLsizei kClientStride = sizeof(verts[0]); const GLint kFirst = 1; const GLsizei kCount = 2; const GLsizei kSize1 = arraysize(verts) * kNumComponents1 * sizeof(verts[0][0]); const GLsizei kSize2 = arraysize(verts) * kNumComponents2 * sizeof(verts[0][0]); const GLsizei kEmuOffset1 = 0; const GLsizei kEmuOffset2 = kSize1; const GLsizei kTotalSize = kSize1 + kSize2; ExpectedMemoryInfo mem1 = GetExpectedMemory(kSize1); ExpectedMemoryInfo mem2 = GetExpectedMemory(kSize2); Cmds expected; expected.enable1.Init(kAttribIndex1); expected.enable2.Init(kAttribIndex2); expected.bind_to_emu.Init(GL_ARRAY_BUFFER, kEmuBufferId); expected.set_size.Init(GL_ARRAY_BUFFER, kTotalSize, 0, 0, GL_DYNAMIC_DRAW); expected.copy_data1.Init( GL_ARRAY_BUFFER, kEmuOffset1, kSize1, mem1.id, mem1.offset); expected.set_token1.Init(GetNextToken()); expected.set_pointer1.Init( kAttribIndex1, kNumComponents1, GL_FLOAT, GL_FALSE, 0, kEmuOffset1); expected.copy_data2.Init( GL_ARRAY_BUFFER, kEmuOffset2, kSize2, mem2.id, mem2.offset); expected.set_token2.Init(GetNextToken()); expected.set_pointer2.Init( kAttribIndex2, kNumComponents2, GL_FLOAT, GL_FALSE, 0, kEmuOffset2); expected.draw.Init(GL_POINTS, kFirst, kCount); expected.restore.Init(GL_ARRAY_BUFFER, 0); gl_->EnableVertexAttribArray(kAttribIndex1); gl_->EnableVertexAttribArray(kAttribIndex2); gl_->VertexAttribPointer( kAttribIndex1, kNumComponents1, GL_FLOAT, GL_FALSE, kClientStride, verts); gl_->VertexAttribPointer( kAttribIndex2, kNumComponents2, GL_FLOAT, GL_FALSE, kClientStride, verts); gl_->DrawArrays(GL_POINTS, kFirst, kCount); EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected))); } TEST_F(GLES2ImplementationTest, DrawArraysInstancedANGLEClientSideBuffers) { static const float verts[][4] = { { 12.0f, 23.0f, 34.0f, 45.0f, }, { 56.0f, 67.0f, 78.0f, 89.0f, }, { 13.0f, 24.0f, 35.0f, 46.0f, }, }; struct Cmds { EnableVertexAttribArray enable1; EnableVertexAttribArray enable2; VertexAttribDivisorANGLE divisor; BindBuffer bind_to_emu; BufferData set_size; BufferSubData copy_data1; cmd::SetToken set_token1; VertexAttribPointer set_pointer1; BufferSubData copy_data2; cmd::SetToken set_token2; VertexAttribPointer set_pointer2; DrawArraysInstancedANGLE draw; BindBuffer restore; }; const GLuint kEmuBufferId = GLES2Implementation::kClientSideArrayId; const GLuint kAttribIndex1 = 1; const GLuint kAttribIndex2 = 3; const GLint kNumComponents1 = 3; const GLint kNumComponents2 = 2; const GLsizei kClientStride = sizeof(verts[0]); const GLint kFirst = 1; const GLsizei kCount = 2; const GLuint kDivisor = 1; const GLsizei kSize1 = arraysize(verts) * kNumComponents1 * sizeof(verts[0][0]); const GLsizei kSize2 = 1 * kNumComponents2 * sizeof(verts[0][0]); const GLsizei kEmuOffset1 = 0; const GLsizei kEmuOffset2 = kSize1; const GLsizei kTotalSize = kSize1 + kSize2; ExpectedMemoryInfo mem1 = GetExpectedMemory(kSize1); ExpectedMemoryInfo mem2 = GetExpectedMemory(kSize2); Cmds expected; expected.enable1.Init(kAttribIndex1); expected.enable2.Init(kAttribIndex2); expected.divisor.Init(kAttribIndex2, kDivisor); expected.bind_to_emu.Init(GL_ARRAY_BUFFER, kEmuBufferId); expected.set_size.Init(GL_ARRAY_BUFFER, kTotalSize, 0, 0, GL_DYNAMIC_DRAW); expected.copy_data1.Init( GL_ARRAY_BUFFER, kEmuOffset1, kSize1, 
mem1.id, mem1.offset); expected.set_token1.Init(GetNextToken()); expected.set_pointer1.Init( kAttribIndex1, kNumComponents1, GL_FLOAT, GL_FALSE, 0, kEmuOffset1); expected.copy_data2.Init( GL_ARRAY_BUFFER, kEmuOffset2, kSize2, mem2.id, mem2.offset); expected.set_token2.Init(GetNextToken()); expected.set_pointer2.Init( kAttribIndex2, kNumComponents2, GL_FLOAT, GL_FALSE, 0, kEmuOffset2); expected.draw.Init(GL_POINTS, kFirst, kCount, 1); expected.restore.Init(GL_ARRAY_BUFFER, 0); gl_->EnableVertexAttribArray(kAttribIndex1); gl_->EnableVertexAttribArray(kAttribIndex2); gl_->VertexAttribPointer( kAttribIndex1, kNumComponents1, GL_FLOAT, GL_FALSE, kClientStride, verts); gl_->VertexAttribPointer( kAttribIndex2, kNumComponents2, GL_FLOAT, GL_FALSE, kClientStride, verts); gl_->VertexAttribDivisorANGLE(kAttribIndex2, kDivisor); gl_->DrawArraysInstancedANGLE(GL_POINTS, kFirst, kCount, 1); EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected))); } TEST_F(GLES2ImplementationTest, DrawElementsClientSideBuffers) { static const float verts[][4] = { { 12.0f, 23.0f, 34.0f, 45.0f, }, { 56.0f, 67.0f, 78.0f, 89.0f, }, { 13.0f, 24.0f, 35.0f, 46.0f, }, }; static const uint16 indices[] = { 1, 2, }; struct Cmds { EnableVertexAttribArray enable1; EnableVertexAttribArray enable2; BindBuffer bind_to_index_emu; BufferData set_index_size; BufferSubData copy_data0; cmd::SetToken set_token0; BindBuffer bind_to_emu; BufferData set_size; BufferSubData copy_data1; cmd::SetToken set_token1; VertexAttribPointer set_pointer1; BufferSubData copy_data2; cmd::SetToken set_token2; VertexAttribPointer set_pointer2; DrawElements draw; BindBuffer restore; BindBuffer restore_element; }; const GLsizei kIndexSize = sizeof(indices); const GLuint kEmuBufferId = GLES2Implementation::kClientSideArrayId; const GLuint kEmuIndexBufferId = GLES2Implementation::kClientSideElementArrayId; const GLuint kAttribIndex1 = 1; const GLuint kAttribIndex2 = 3; const GLint kNumComponents1 = 3; const GLint kNumComponents2 = 2; const GLsizei kClientStride = sizeof(verts[0]); const GLsizei kCount = 2; const GLsizei kSize1 = arraysize(verts) * kNumComponents1 * sizeof(verts[0][0]); const GLsizei kSize2 = arraysize(verts) * kNumComponents2 * sizeof(verts[0][0]); const GLsizei kEmuOffset1 = 0; const GLsizei kEmuOffset2 = kSize1; const GLsizei kTotalSize = kSize1 + kSize2; ExpectedMemoryInfo mem1 = GetExpectedMemory(kIndexSize); ExpectedMemoryInfo mem2 = GetExpectedMemory(kSize1); ExpectedMemoryInfo mem3 = GetExpectedMemory(kSize2); Cmds expected; expected.enable1.Init(kAttribIndex1); expected.enable2.Init(kAttribIndex2); expected.bind_to_index_emu.Init(GL_ELEMENT_ARRAY_BUFFER, kEmuIndexBufferId); expected.set_index_size.Init( GL_ELEMENT_ARRAY_BUFFER, kIndexSize, 0, 0, GL_DYNAMIC_DRAW); expected.copy_data0.Init( GL_ELEMENT_ARRAY_BUFFER, 0, kIndexSize, mem1.id, mem1.offset); expected.set_token0.Init(GetNextToken()); expected.bind_to_emu.Init(GL_ARRAY_BUFFER, kEmuBufferId); expected.set_size.Init(GL_ARRAY_BUFFER, kTotalSize, 0, 0, GL_DYNAMIC_DRAW); expected.copy_data1.Init( GL_ARRAY_BUFFER, kEmuOffset1, kSize1, mem2.id, mem2.offset); expected.set_token1.Init(GetNextToken()); expected.set_pointer1.Init( kAttribIndex1, kNumComponents1, GL_FLOAT, GL_FALSE, 0, kEmuOffset1); expected.copy_data2.Init( GL_ARRAY_BUFFER, kEmuOffset2, kSize2, mem3.id, mem3.offset); expected.set_token2.Init(GetNextToken()); expected.set_pointer2.Init(kAttribIndex2, kNumComponents2, GL_FLOAT, GL_FALSE, 0, kEmuOffset2); expected.draw.Init(GL_POINTS, kCount, GL_UNSIGNED_SHORT, 0); 
  expected.restore.Init(GL_ARRAY_BUFFER, 0);
  expected.restore_element.Init(GL_ELEMENT_ARRAY_BUFFER, 0);

  gl_->EnableVertexAttribArray(kAttribIndex1);
  gl_->EnableVertexAttribArray(kAttribIndex2);
  gl_->VertexAttribPointer(kAttribIndex1, kNumComponents1, GL_FLOAT,
                           GL_FALSE, kClientStride, verts);
  gl_->VertexAttribPointer(kAttribIndex2, kNumComponents2, GL_FLOAT,
                           GL_FALSE, kClientStride, verts);
  gl_->DrawElements(GL_POINTS, kCount, GL_UNSIGNED_SHORT, indices);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

TEST_F(GLES2ImplementationTest,
       DrawElementsClientSideBuffersServiceSideIndices) {
  static const float verts[][4] = {
    { 12.0f, 23.0f, 34.0f, 45.0f, },
    { 56.0f, 67.0f, 78.0f, 89.0f, },
    { 13.0f, 24.0f, 35.0f, 46.0f, },
  };
  struct Cmds {
    EnableVertexAttribArray enable1;
    EnableVertexAttribArray enable2;
    BindBuffer bind_to_index;
    GetMaxValueInBufferCHROMIUM get_max;
    BindBuffer bind_to_emu;
    BufferData set_size;
    BufferSubData copy_data1;
    cmd::SetToken set_token1;
    VertexAttribPointer set_pointer1;
    BufferSubData copy_data2;
    cmd::SetToken set_token2;
    VertexAttribPointer set_pointer2;
    DrawElements draw;
    BindBuffer restore;
  };
  const GLuint kEmuBufferId = GLES2Implementation::kClientSideArrayId;
  const GLuint kClientIndexBufferId = 0x789;
  const GLuint kIndexOffset = 0x40;
  const GLuint kMaxIndex = 2;
  const GLuint kAttribIndex1 = 1;
  const GLuint kAttribIndex2 = 3;
  const GLint kNumComponents1 = 3;
  const GLint kNumComponents2 = 2;
  const GLsizei kClientStride = sizeof(verts[0]);
  const GLsizei kCount = 2;
  const GLsizei kSize1 =
      arraysize(verts) * kNumComponents1 * sizeof(verts[0][0]);
  const GLsizei kSize2 =
      arraysize(verts) * kNumComponents2 * sizeof(verts[0][0]);
  const GLsizei kEmuOffset1 = 0;
  const GLsizei kEmuOffset2 = kSize1;
  const GLsizei kTotalSize = kSize1 + kSize2;

  ExpectedMemoryInfo mem1 = GetExpectedResultMemory(sizeof(uint32));
  ExpectedMemoryInfo mem2 = GetExpectedMemory(kSize1);
  ExpectedMemoryInfo mem3 = GetExpectedMemory(kSize2);

  Cmds expected;
  expected.enable1.Init(kAttribIndex1);
  expected.enable2.Init(kAttribIndex2);
  expected.bind_to_index.Init(GL_ELEMENT_ARRAY_BUFFER, kClientIndexBufferId);
  expected.get_max.Init(kClientIndexBufferId, kCount, GL_UNSIGNED_SHORT,
                        kIndexOffset, mem1.id, mem1.offset);
  expected.bind_to_emu.Init(GL_ARRAY_BUFFER, kEmuBufferId);
  expected.set_size.Init(GL_ARRAY_BUFFER, kTotalSize, 0, 0, GL_DYNAMIC_DRAW);
  expected.copy_data1.Init(
      GL_ARRAY_BUFFER, kEmuOffset1, kSize1, mem2.id, mem2.offset);
  expected.set_token1.Init(GetNextToken());
  expected.set_pointer1.Init(kAttribIndex1, kNumComponents1, GL_FLOAT,
                             GL_FALSE, 0, kEmuOffset1);
  expected.copy_data2.Init(
      GL_ARRAY_BUFFER, kEmuOffset2, kSize2, mem3.id, mem3.offset);
  expected.set_token2.Init(GetNextToken());
  expected.set_pointer2.Init(kAttribIndex2, kNumComponents2, GL_FLOAT,
                             GL_FALSE, 0, kEmuOffset2);
  expected.draw.Init(GL_POINTS, kCount, GL_UNSIGNED_SHORT, kIndexOffset);
  expected.restore.Init(GL_ARRAY_BUFFER, 0);

  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(mem1.ptr, kMaxIndex))
      .RetiresOnSaturation();

  gl_->EnableVertexAttribArray(kAttribIndex1);
  gl_->EnableVertexAttribArray(kAttribIndex2);
  gl_->BindBuffer(GL_ELEMENT_ARRAY_BUFFER, kClientIndexBufferId);
  gl_->VertexAttribPointer(kAttribIndex1, kNumComponents1, GL_FLOAT,
                           GL_FALSE, kClientStride, verts);
  gl_->VertexAttribPointer(kAttribIndex2, kNumComponents2, GL_FLOAT,
                           GL_FALSE, kClientStride, verts);
  gl_->DrawElements(GL_POINTS, kCount, GL_UNSIGNED_SHORT,
                    reinterpret_cast<const void*>(kIndexOffset));
  EXPECT_EQ(0, memcmp(&expected, commands_,
                      sizeof(expected)));
}

TEST_F(GLES2ImplementationTest, DrawElementsInstancedANGLEClientSideBuffers) {
  static const float verts[][4] = {
    { 12.0f, 23.0f, 34.0f, 45.0f, },
    { 56.0f, 67.0f, 78.0f, 89.0f, },
    { 13.0f, 24.0f, 35.0f, 46.0f, },
  };
  static const uint16 indices[] = {
    1, 2,
  };
  struct Cmds {
    EnableVertexAttribArray enable1;
    EnableVertexAttribArray enable2;
    VertexAttribDivisorANGLE divisor;
    BindBuffer bind_to_index_emu;
    BufferData set_index_size;
    BufferSubData copy_data0;
    cmd::SetToken set_token0;
    BindBuffer bind_to_emu;
    BufferData set_size;
    BufferSubData copy_data1;
    cmd::SetToken set_token1;
    VertexAttribPointer set_pointer1;
    BufferSubData copy_data2;
    cmd::SetToken set_token2;
    VertexAttribPointer set_pointer2;
    DrawElementsInstancedANGLE draw;
    BindBuffer restore;
    BindBuffer restore_element;
  };
  const GLsizei kIndexSize = sizeof(indices);
  const GLuint kEmuBufferId = GLES2Implementation::kClientSideArrayId;
  const GLuint kEmuIndexBufferId =
      GLES2Implementation::kClientSideElementArrayId;
  const GLuint kAttribIndex1 = 1;
  const GLuint kAttribIndex2 = 3;
  const GLint kNumComponents1 = 3;
  const GLint kNumComponents2 = 2;
  const GLsizei kClientStride = sizeof(verts[0]);
  const GLsizei kCount = 2;
  const GLsizei kSize1 =
      arraysize(verts) * kNumComponents1 * sizeof(verts[0][0]);
  const GLsizei kSize2 =
      1 * kNumComponents2 * sizeof(verts[0][0]);
  const GLuint kDivisor = 1;
  const GLsizei kEmuOffset1 = 0;
  const GLsizei kEmuOffset2 = kSize1;
  const GLsizei kTotalSize = kSize1 + kSize2;

  ExpectedMemoryInfo mem1 = GetExpectedMemory(kIndexSize);
  ExpectedMemoryInfo mem2 = GetExpectedMemory(kSize1);
  ExpectedMemoryInfo mem3 = GetExpectedMemory(kSize2);

  Cmds expected;
  expected.enable1.Init(kAttribIndex1);
  expected.enable2.Init(kAttribIndex2);
  expected.divisor.Init(kAttribIndex2, kDivisor);
  expected.bind_to_index_emu.Init(GL_ELEMENT_ARRAY_BUFFER, kEmuIndexBufferId);
  expected.set_index_size.Init(
      GL_ELEMENT_ARRAY_BUFFER, kIndexSize, 0, 0, GL_DYNAMIC_DRAW);
  expected.copy_data0.Init(
      GL_ELEMENT_ARRAY_BUFFER, 0, kIndexSize, mem1.id, mem1.offset);
  expected.set_token0.Init(GetNextToken());
  expected.bind_to_emu.Init(GL_ARRAY_BUFFER, kEmuBufferId);
  expected.set_size.Init(GL_ARRAY_BUFFER, kTotalSize, 0, 0, GL_DYNAMIC_DRAW);
  expected.copy_data1.Init(
      GL_ARRAY_BUFFER, kEmuOffset1, kSize1, mem2.id, mem2.offset);
  expected.set_token1.Init(GetNextToken());
  expected.set_pointer1.Init(
      kAttribIndex1, kNumComponents1, GL_FLOAT, GL_FALSE, 0, kEmuOffset1);
  expected.copy_data2.Init(
      GL_ARRAY_BUFFER, kEmuOffset2, kSize2, mem3.id, mem3.offset);
  expected.set_token2.Init(GetNextToken());
  expected.set_pointer2.Init(kAttribIndex2, kNumComponents2, GL_FLOAT,
                             GL_FALSE, 0, kEmuOffset2);
  expected.draw.Init(GL_POINTS, kCount, GL_UNSIGNED_SHORT, 0, 1);
  expected.restore.Init(GL_ARRAY_BUFFER, 0);
  expected.restore_element.Init(GL_ELEMENT_ARRAY_BUFFER, 0);

  gl_->EnableVertexAttribArray(kAttribIndex1);
  gl_->EnableVertexAttribArray(kAttribIndex2);
  gl_->VertexAttribPointer(kAttribIndex1, kNumComponents1, GL_FLOAT,
                           GL_FALSE, kClientStride, verts);
  gl_->VertexAttribPointer(kAttribIndex2, kNumComponents2, GL_FLOAT,
                           GL_FALSE, kClientStride, verts);
  gl_->VertexAttribDivisorANGLE(kAttribIndex2, kDivisor);
  gl_->DrawElementsInstancedANGLE(
      GL_POINTS, kCount, GL_UNSIGNED_SHORT, indices, 1);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

TEST_F(GLES2ImplementationTest, GetVertexBufferPointerv) {
  static const float verts[1] = { 0.0f, };
  const GLuint kAttribIndex1 = 1;
  const GLuint kAttribIndex2 = 3;
  const GLint kNumComponents1 = 3;
  const GLint
      kNumComponents2 = 2;
  const GLsizei kStride1 = 12;
  const GLsizei kStride2 = 0;
  const GLuint kBufferId = 0x123;
  const GLint kOffset2 = 0x456;

  // Only one set and one get because the client side buffer's info is stored
  // on the client side.
  struct Cmds {
    BindBuffer bind;
    VertexAttribPointer set_pointer;
    GetVertexAttribPointerv get_pointer;
  };

  ExpectedMemoryInfo mem1 = GetExpectedResultMemory(16);

  Cmds expected;
  expected.bind.Init(GL_ARRAY_BUFFER, kBufferId);
  expected.set_pointer.Init(kAttribIndex2, kNumComponents2, GL_FLOAT,
                            GL_FALSE, kStride2, kOffset2);
  expected.get_pointer.Init(kAttribIndex2, GL_VERTEX_ATTRIB_ARRAY_POINTER,
                            mem1.id, mem1.offset);

  // One call to flush to wait for GetVertexAttribPointerv
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(mem1.ptr, SizedResultHelper<uint32>(kOffset2)))
      .RetiresOnSaturation();

  // Set one client side buffer.
  gl_->VertexAttribPointer(kAttribIndex1, kNumComponents1, GL_FLOAT,
                           GL_FALSE, kStride1, verts);
  // Set one VBO
  gl_->BindBuffer(GL_ARRAY_BUFFER, kBufferId);
  gl_->VertexAttribPointer(kAttribIndex2, kNumComponents2, GL_FLOAT,
                           GL_FALSE, kStride2,
                           reinterpret_cast<const void*>(kOffset2));
  // Now get them both.
  void* ptr1 = NULL;
  void* ptr2 = NULL;
  gl_->GetVertexAttribPointerv(
      kAttribIndex1, GL_VERTEX_ATTRIB_ARRAY_POINTER, &ptr1);
  gl_->GetVertexAttribPointerv(
      kAttribIndex2, GL_VERTEX_ATTRIB_ARRAY_POINTER, &ptr2);

  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_TRUE(static_cast<const void*>(&verts) == ptr1);
  // Because the service is not running, ptr2 is not read.
  EXPECT_TRUE(ptr2 == reinterpret_cast<void*>(kOffset2));
}

TEST_F(GLES2ImplementationTest, GetVertexAttrib) {
  static const float verts[1] = { 0.0f, };
  const GLuint kAttribIndex1 = 1;
  const GLuint kAttribIndex2 = 3;
  const GLint kNumComponents1 = 3;
  const GLint kNumComponents2 = 2;
  const GLsizei kStride1 = 12;
  const GLsizei kStride2 = 0;
  const GLuint kBufferId = 0x123;
  const GLint kOffset2 = 0x456;

  // Only one set and one get because the client side buffer's info is stored
  // on the client side.
  struct Cmds {
    EnableVertexAttribArray enable;
    BindBuffer bind;
    VertexAttribPointer set_pointer;
    GetVertexAttribiv get1;  // for getting the buffer from attrib2
    GetVertexAttribfv get2;  // for getting the value from attrib1
  };

  ExpectedMemoryInfo mem1 = GetExpectedResultMemory(16);
  ExpectedMemoryInfo mem2 = GetExpectedResultMemory(16);

  Cmds expected;
  expected.enable.Init(kAttribIndex1);
  expected.bind.Init(GL_ARRAY_BUFFER, kBufferId);
  expected.set_pointer.Init(kAttribIndex2, kNumComponents2, GL_FLOAT,
                            GL_FALSE, kStride2, kOffset2);
  expected.get1.Init(kAttribIndex2, GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING,
                     mem1.id, mem1.offset);
  expected.get2.Init(kAttribIndex1, GL_CURRENT_VERTEX_ATTRIB,
                     mem2.id, mem2.offset);

  FourFloats current_attrib(1.2f, 3.4f, 5.6f, 7.8f);

  // One call to flush to wait for GetVertexAttribiv
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(
          mem1.ptr, SizedResultHelper<GLuint>(kBufferId)))
      .WillOnce(SetMemory(
          mem2.ptr, SizedResultHelper<FourFloats>(current_attrib)))
      .RetiresOnSaturation();

  gl_->EnableVertexAttribArray(kAttribIndex1);
  // Set one client side buffer.
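  // (Attrib 1 references client memory, so its state is presumably tracked
  // entirely on the client side; only the queries that need service-side
  // data below should generate commands.)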
  gl_->VertexAttribPointer(kAttribIndex1, kNumComponents1, GL_FLOAT,
                           GL_FALSE, kStride1, verts);
  // Set one VBO
  gl_->BindBuffer(GL_ARRAY_BUFFER, kBufferId);
  gl_->VertexAttribPointer(kAttribIndex2, kNumComponents2, GL_FLOAT,
                           GL_FALSE, kStride2,
                           reinterpret_cast<const void*>(kOffset2));
  // First get the service side once to see that we make a command.
  GLint buffer_id = 0;
  GLint enabled = 0;
  GLint size = 0;
  GLint stride = 0;
  GLint type = 0;
  GLint normalized = 1;
  float current[4] = { 0.0f, };
  gl_->GetVertexAttribiv(
      kAttribIndex2, GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING, &buffer_id);
  EXPECT_EQ(kBufferId, static_cast<GLuint>(buffer_id));
  gl_->GetVertexAttribiv(
      kAttribIndex1, GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING, &buffer_id);
  gl_->GetVertexAttribiv(
      kAttribIndex1, GL_VERTEX_ATTRIB_ARRAY_ENABLED, &enabled);
  gl_->GetVertexAttribiv(
      kAttribIndex1, GL_VERTEX_ATTRIB_ARRAY_SIZE, &size);
  gl_->GetVertexAttribiv(
      kAttribIndex1, GL_VERTEX_ATTRIB_ARRAY_STRIDE, &stride);
  gl_->GetVertexAttribiv(
      kAttribIndex1, GL_VERTEX_ATTRIB_ARRAY_TYPE, &type);
  gl_->GetVertexAttribiv(
      kAttribIndex1, GL_VERTEX_ATTRIB_ARRAY_NORMALIZED, &normalized);
  gl_->GetVertexAttribfv(
      kAttribIndex1, GL_CURRENT_VERTEX_ATTRIB, &current[0]);

  EXPECT_EQ(0, buffer_id);
  EXPECT_EQ(GL_TRUE, enabled);
  EXPECT_EQ(kNumComponents1, size);
  EXPECT_EQ(kStride1, stride);
  EXPECT_EQ(GL_FLOAT, type);
  EXPECT_EQ(GL_FALSE, normalized);
  EXPECT_EQ(0, memcmp(&current_attrib, &current, sizeof(current_attrib)));
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

TEST_F(GLES2ImplementationTest, ReservedIds) {
  // Only the get error command should be issued.
  struct Cmds {
    GetError get;
  };
  Cmds expected;

  ExpectedMemoryInfo mem1 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  expected.get.Init(mem1.id, mem1.offset);

  // One call to flush to wait for GetError
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(mem1.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  gl_->BindBuffer(
      GL_ARRAY_BUFFER, GLES2Implementation::kClientSideArrayId);
  gl_->BindBuffer(
      GL_ARRAY_BUFFER, GLES2Implementation::kClientSideElementArrayId);
  GLenum err = gl_->GetError();
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_OPERATION), err);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

#endif  // defined(GLES2_SUPPORT_CLIENT_SIDE_ARRAYS)

TEST_F(GLES2ImplementationTest, ReadPixels2Reads) {
  struct Cmds {
    ReadPixels read1;
    cmd::SetToken set_token1;
    ReadPixels read2;
    cmd::SetToken set_token2;
  };
  const GLint kBytesPerPixel = 4;
  const GLint kWidth =
      (kTransferBufferSize - GLES2Implementation::kStartingOffset) /
      kBytesPerPixel;
  const GLint kHeight = 2;
  const GLenum kFormat = GL_RGBA;
  const GLenum kType = GL_UNSIGNED_BYTE;

  ExpectedMemoryInfo mem1 =
      GetExpectedMemory(kWidth * kHeight / 2 * kBytesPerPixel);
  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(ReadPixels::Result));
  ExpectedMemoryInfo mem2 =
      GetExpectedMemory(kWidth * kHeight / 2 * kBytesPerPixel);
  ExpectedMemoryInfo result2 =
      GetExpectedResultMemory(sizeof(ReadPixels::Result));

  Cmds expected;
  expected.read1.Init(
      0, 0, kWidth, kHeight / 2, kFormat, kType,
      mem1.id, mem1.offset, result1.id, result1.offset);
  expected.set_token1.Init(GetNextToken());
  expected.read2.Init(
      0, kHeight / 2, kWidth, kHeight / 2, kFormat, kType,
      mem2.id, mem2.offset, result2.id, result2.offset);
  expected.set_token2.Init(GetNextToken());
  scoped_array<int8> buffer(new int8[kWidth * kHeight * kBytesPerPixel]);

  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(result1.ptr, static_cast<uint32>(1)))
      .WillOnce(SetMemory(result2.ptr, static_cast<uint32>(1)))
      .RetiresOnSaturation();

  gl_->ReadPixels(0, 0, kWidth, kHeight, kFormat, kType, buffer.get());
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

TEST_F(GLES2ImplementationTest, ReadPixelsBadFormatType) {
  struct Cmds {
    ReadPixels read;
    cmd::SetToken set_token;
  };
  const GLint kBytesPerPixel = 4;
  const GLint kWidth = 2;
  const GLint kHeight = 2;
  const GLenum kFormat = 0;
  const GLenum kType = 0;

  ExpectedMemoryInfo mem1 =
      GetExpectedMemory(kWidth * kHeight * kBytesPerPixel);
  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(ReadPixels::Result));

  Cmds expected;
  expected.read.Init(
      0, 0, kWidth, kHeight, kFormat, kType,
      mem1.id, mem1.offset, result1.id, result1.offset);
  expected.set_token.Init(GetNextToken());
  scoped_array<int8> buffer(new int8[kWidth * kHeight * kBytesPerPixel]);

  EXPECT_CALL(*command_buffer(), OnFlush())
      .Times(1)
      .RetiresOnSaturation();

  gl_->ReadPixels(0, 0, kWidth, kHeight, kFormat, kType, buffer.get());
}

TEST_F(GLES2ImplementationTest, FreeUnusedSharedMemory) {
  struct Cmds {
    BufferSubData buf;
    cmd::SetToken set_token;
  };
  const GLenum kTarget = GL_ELEMENT_ARRAY_BUFFER;
  const GLintptr kOffset = 15;
  const GLsizeiptr kSize = 16;

  ExpectedMemoryInfo mem1 = GetExpectedMemory(kSize);

  Cmds expected;
  expected.buf.Init(
      kTarget, kOffset, kSize, mem1.id, mem1.offset);
  expected.set_token.Init(GetNextToken());

  void* mem = gl_->MapBufferSubDataCHROMIUM(
      kTarget, kOffset, kSize, GL_WRITE_ONLY);
  ASSERT_TRUE(mem != NULL);
  gl_->UnmapBufferSubDataCHROMIUM(mem);
  EXPECT_CALL(*command_buffer(), DestroyTransferBuffer(_))
      .Times(1)
      .RetiresOnSaturation();
  gl_->FreeUnusedSharedMemory();
}

TEST_F(GLES2ImplementationTest, MapUnmapBufferSubDataCHROMIUM) {
  struct Cmds {
    BufferSubData buf;
    cmd::SetToken set_token;
  };
  const GLenum kTarget = GL_ELEMENT_ARRAY_BUFFER;
  const GLintptr kOffset = 15;
  const GLsizeiptr kSize = 16;

  uint32 offset = 0;
  Cmds expected;
  expected.buf.Init(
      kTarget, kOffset, kSize,
      command_buffer()->GetNextFreeTransferBufferId(), offset);
  expected.set_token.Init(GetNextToken());

  void* mem = gl_->MapBufferSubDataCHROMIUM(
      kTarget, kOffset, kSize, GL_WRITE_ONLY);
  ASSERT_TRUE(mem != NULL);
  gl_->UnmapBufferSubDataCHROMIUM(mem);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

TEST_F(GLES2ImplementationTest, MapUnmapBufferSubDataCHROMIUMBadArgs) {
  const GLenum kTarget = GL_ELEMENT_ARRAY_BUFFER;
  const GLintptr kOffset = 15;
  const GLsizeiptr kSize = 16;

  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result2 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result3 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result4 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  // Calls to flush to wait for GetError
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(result1.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result2.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result3.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result4.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  void* mem;
  mem = gl_->MapBufferSubDataCHROMIUM(kTarget, -1, kSize, GL_WRITE_ONLY);
  ASSERT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  mem = gl_->MapBufferSubDataCHROMIUM(kTarget, kOffset, -1, GL_WRITE_ONLY);
  ASSERT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  mem = gl_->MapBufferSubDataCHROMIUM(kTarget, kOffset, kSize, GL_READ_ONLY);
  ASSERT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_ENUM), gl_->GetError());
  const char* kPtr = "something";
  gl_->UnmapBufferSubDataCHROMIUM(kPtr);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
}

TEST_F(GLES2ImplementationTest, MapUnmapTexSubImage2DCHROMIUM) {
  struct Cmds {
    TexSubImage2D tex;
    cmd::SetToken set_token;
  };
  const GLint kLevel = 1;
  const GLint kXOffset = 2;
  const GLint kYOffset = 3;
  const GLint kWidth = 4;
  const GLint kHeight = 5;
  const GLenum kFormat = GL_RGBA;
  const GLenum kType = GL_UNSIGNED_BYTE;

  uint32 offset = 0;
  Cmds expected;
  expected.tex.Init(<|fim▁hole|>
      GL_TEXTURE_2D, kLevel, kXOffset, kYOffset, kWidth, kHeight, kFormat,
      kType,
      command_buffer()->GetNextFreeTransferBufferId(), offset, GL_FALSE);
  expected.set_token.Init(GetNextToken());

  void* mem = gl_->MapTexSubImage2DCHROMIUM(
      GL_TEXTURE_2D,
      kLevel,
      kXOffset,
      kYOffset,
      kWidth,
      kHeight,
      kFormat,
      kType,
      GL_WRITE_ONLY);
  ASSERT_TRUE(mem != NULL);
  gl_->UnmapTexSubImage2DCHROMIUM(mem);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

TEST_F(GLES2ImplementationTest, MapUnmapTexSubImage2DCHROMIUMBadArgs) {
  const GLint kLevel = 1;
  const GLint kXOffset = 2;
  const GLint kYOffset = 3;
  const GLint kWidth = 4;
  const GLint kHeight = 5;
  const GLenum kFormat = GL_RGBA;
  const GLenum kType = GL_UNSIGNED_BYTE;

  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result2 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result3 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result4 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result5 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result6 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result7 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  // Calls to flush to wait for GetError
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(result1.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result2.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result3.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result4.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result5.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result6.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result7.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  void* mem;
  mem = gl_->MapTexSubImage2DCHROMIUM(
      GL_TEXTURE_2D, -1, kXOffset, kYOffset, kWidth, kHeight, kFormat, kType,
      GL_WRITE_ONLY);
  EXPECT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  mem = gl_->MapTexSubImage2DCHROMIUM(
      GL_TEXTURE_2D, kLevel, -1, kYOffset, kWidth, kHeight, kFormat, kType,
      GL_WRITE_ONLY);
  EXPECT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  mem = gl_->MapTexSubImage2DCHROMIUM(
      GL_TEXTURE_2D, kLevel, kXOffset, -1, kWidth, kHeight, kFormat, kType,
      GL_WRITE_ONLY);
  EXPECT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  mem = gl_->MapTexSubImage2DCHROMIUM(
      GL_TEXTURE_2D, kLevel, kXOffset, kYOffset, -1, kHeight, kFormat, kType,
      GL_WRITE_ONLY);
  EXPECT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  mem = gl_->MapTexSubImage2DCHROMIUM(
      GL_TEXTURE_2D, kLevel, kXOffset, kYOffset, kWidth, -1, kFormat, kType,
      GL_WRITE_ONLY);
  EXPECT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  mem = gl_->MapTexSubImage2DCHROMIUM(
      GL_TEXTURE_2D, kLevel, kXOffset,
      kYOffset, kWidth, kHeight, kFormat, kType, GL_READ_ONLY);
  EXPECT_TRUE(mem == NULL);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_ENUM), gl_->GetError());
  const char* kPtr = "something";
  gl_->UnmapTexSubImage2DCHROMIUM(kPtr);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
}

TEST_F(GLES2ImplementationTest, GetMultipleIntegervCHROMIUMValidArgs) {
  const GLenum pnames[] = {
    GL_DEPTH_WRITEMASK,
    GL_COLOR_WRITEMASK,
    GL_STENCIL_WRITEMASK,
  };
  const GLint num_results = 6;
  GLint results[num_results + 1];
  struct Cmds {
    GetMultipleIntegervCHROMIUM get_multiple;
    cmd::SetToken set_token;
  };
  const GLsizei kNumPnames = arraysize(pnames);
  const GLsizeiptr kResultsSize = num_results * sizeof(results[0]);
  const size_t kPNamesSize = kNumPnames * sizeof(pnames[0]);

  ExpectedMemoryInfo mem1 = GetExpectedMemory(kPNamesSize + kResultsSize);
  ExpectedMemoryInfo result1 = GetExpectedResultMemory(
      sizeof(GetError::Result));

  const uint32 kPnamesOffset = mem1.offset;
  const uint32 kResultsOffset = mem1.offset + kPNamesSize;
  Cmds expected;
  expected.get_multiple.Init(
      mem1.id, kPnamesOffset, kNumPnames,
      mem1.id, kResultsOffset, kResultsSize);
  expected.set_token.Init(GetNextToken());

  const GLint kSentinel = 0x12345678;
  memset(results, 0, sizeof(results));
  results[num_results] = kSentinel;
  const GLint returned_results[] = {
    1, 0, 1, 0, 1, -1,
  };
  // One call to flush to wait for results
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemoryFromArray(mem1.ptr + kPNamesSize,
                                   returned_results,
                                   sizeof(returned_results)))
      .WillOnce(SetMemory(result1.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  gl_->GetMultipleIntegervCHROMIUM(
      &pnames[0], kNumPnames, &results[0], kResultsSize);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_EQ(0, memcmp(&returned_results, results, sizeof(returned_results)));
  EXPECT_EQ(kSentinel, results[num_results]);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), gl_->GetError());
}

TEST_F(GLES2ImplementationTest, GetMultipleIntegervCHROMIUMBadArgs) {
  GLenum pnames[] = {
    GL_DEPTH_WRITEMASK,
    GL_COLOR_WRITEMASK,
    GL_STENCIL_WRITEMASK,
  };
  const GLint num_results = 6;
  GLint results[num_results + 1];
  const GLsizei kNumPnames = arraysize(pnames);
  const GLsizeiptr kResultsSize = num_results * sizeof(results[0]);

  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result2 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result3 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result4 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  // Calls to flush to wait for GetError
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(result1.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result2.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result3.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result4.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  const GLint kSentinel = 0x12345678;
  memset(results, 0, sizeof(results));
  results[num_results] = kSentinel;
  // try bad size.
  gl_->GetMultipleIntegervCHROMIUM(
      &pnames[0], kNumPnames, &results[0], kResultsSize + 1);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  EXPECT_EQ(0, results[0]);
  EXPECT_EQ(kSentinel, results[num_results]);
  // try bad size.
  ClearCommands();
  gl_->GetMultipleIntegervCHROMIUM(
      &pnames[0], kNumPnames, &results[0], kResultsSize - 1);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  EXPECT_EQ(0, results[0]);
  EXPECT_EQ(kSentinel, results[num_results]);
  // try uncleared results.
  ClearCommands();
  results[2] = 1;
  gl_->GetMultipleIntegervCHROMIUM(
      &pnames[0], kNumPnames, &results[0], kResultsSize);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  EXPECT_EQ(0, results[0]);
  EXPECT_EQ(kSentinel, results[num_results]);
  // try bad enum results.
  ClearCommands();
  results[2] = 0;
  pnames[1] = GL_TRUE;
  gl_->GetMultipleIntegervCHROMIUM(
      &pnames[0], kNumPnames, &results[0], kResultsSize);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_ENUM), gl_->GetError());
  EXPECT_EQ(0, results[0]);
  EXPECT_EQ(kSentinel, results[num_results]);
}

TEST_F(GLES2ImplementationTest, GetProgramInfoCHROMIUMGoodArgs) {
  const uint32 kBucketId = GLES2Implementation::kResultBucketId;
  const GLuint kProgramId = 123;
  const char kBad = 0x12;
  GLsizei size = 0;
  const Str7 kString = {"foobar"};
  char buf[20];

  ExpectedMemoryInfo mem1 =
      GetExpectedMemory(MaxTransferBufferSize());
  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(cmd::GetBucketStart::Result));
  ExpectedMemoryInfo result2 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  memset(buf, kBad, sizeof(buf));
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(DoAll(SetMemory(result1.ptr, uint32(sizeof(kString))),
                      SetMemory(mem1.ptr, kString)))
      .WillOnce(SetMemory(result2.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  struct Cmds {
    cmd::SetBucketSize set_bucket_size1;
    GetProgramInfoCHROMIUM get_program_info;
    cmd::GetBucketStart get_bucket_start;
    cmd::SetToken set_token1;
    cmd::SetBucketSize set_bucket_size2;
  };
  Cmds expected;
  expected.set_bucket_size1.Init(kBucketId, 0);
  expected.get_program_info.Init(kProgramId, kBucketId);
  expected.get_bucket_start.Init(
      kBucketId, result1.id, result1.offset,
      MaxTransferBufferSize(), mem1.id, mem1.offset);
  expected.set_token1.Init(GetNextToken());
  expected.set_bucket_size2.Init(kBucketId, 0);
  gl_->GetProgramInfoCHROMIUM(kProgramId, sizeof(buf), &size, &buf);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), gl_->GetError());
  EXPECT_EQ(sizeof(kString), static_cast<size_t>(size));
  EXPECT_STREQ(kString.str, buf);
  EXPECT_EQ(buf[sizeof(kString)], kBad);
}

TEST_F(GLES2ImplementationTest, GetProgramInfoCHROMIUMBadArgs) {
  const uint32 kBucketId = GLES2Implementation::kResultBucketId;
  const GLuint kProgramId = 123;
  GLsizei size = 0;
  const Str7 kString = {"foobar"};
  char buf[20];

  ExpectedMemoryInfo mem1 = GetExpectedMemory(MaxTransferBufferSize());
  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(cmd::GetBucketStart::Result));
  ExpectedMemoryInfo result2 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result3 =
      GetExpectedResultMemory(sizeof(GetError::Result));
  ExpectedMemoryInfo result4 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(DoAll(SetMemory(result1.ptr, uint32(sizeof(kString))),
                      SetMemory(mem1.ptr, kString)))
      .WillOnce(SetMemory(result2.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result3.ptr, GLuint(GL_NO_ERROR)))
      .WillOnce(SetMemory(result4.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  // try bufsize not big enough.
  struct Cmds {
    cmd::SetBucketSize set_bucket_size1;
    GetProgramInfoCHROMIUM get_program_info;
    cmd::GetBucketStart get_bucket_start;
    cmd::SetToken set_token1;
    cmd::SetBucketSize set_bucket_size2;
  };
  Cmds expected;
  expected.set_bucket_size1.Init(kBucketId, 0);
  expected.get_program_info.Init(kProgramId, kBucketId);
  expected.get_bucket_start.Init(
      kBucketId, result1.id, result1.offset,
      MaxTransferBufferSize(), mem1.id, mem1.offset);
  expected.set_token1.Init(GetNextToken());
  expected.set_bucket_size2.Init(kBucketId, 0);
  gl_->GetProgramInfoCHROMIUM(kProgramId, 6, &size, &buf);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_OPERATION), gl_->GetError());
  ClearCommands();

  // try bad bufsize
  gl_->GetProgramInfoCHROMIUM(kProgramId, -1, &size, &buf);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
  ClearCommands();
  // try no size ptr.
  gl_->GetProgramInfoCHROMIUM(kProgramId, sizeof(buf), NULL, &buf);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_VALUE), gl_->GetError());
}

// Test that things are cached
TEST_F(GLES2ImplementationTest, GetIntegerCacheRead) {
  struct PNameValue {
    GLenum pname;
    GLint expected;
  };
  const PNameValue pairs[] = {
    { GL_ACTIVE_TEXTURE, GL_TEXTURE0, },
    { GL_TEXTURE_BINDING_2D, 0, },
    { GL_TEXTURE_BINDING_CUBE_MAP, 0, },
    { GL_FRAMEBUFFER_BINDING, 0, },
    { GL_RENDERBUFFER_BINDING, 0, },
    { GL_ARRAY_BUFFER_BINDING, 0, },
    { GL_ELEMENT_ARRAY_BUFFER_BINDING, 0, },
    { GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, kMaxCombinedTextureImageUnits, },
    { GL_MAX_CUBE_MAP_TEXTURE_SIZE, kMaxCubeMapTextureSize, },
    { GL_MAX_FRAGMENT_UNIFORM_VECTORS, kMaxFragmentUniformVectors, },
    { GL_MAX_RENDERBUFFER_SIZE, kMaxRenderbufferSize, },
    { GL_MAX_TEXTURE_IMAGE_UNITS, kMaxTextureImageUnits, },
    { GL_MAX_TEXTURE_SIZE, kMaxTextureSize, },
    { GL_MAX_VARYING_VECTORS, kMaxVaryingVectors, },
    { GL_MAX_VERTEX_ATTRIBS, kMaxVertexAttribs, },
    { GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, kMaxVertexTextureImageUnits, },
    { GL_MAX_VERTEX_UNIFORM_VECTORS, kMaxVertexUniformVectors, },
    { GL_NUM_COMPRESSED_TEXTURE_FORMATS, kNumCompressedTextureFormats, },
    { GL_NUM_SHADER_BINARY_FORMATS, kNumShaderBinaryFormats, },
  };
  size_t num_pairs = sizeof(pairs) / sizeof(pairs[0]);
  for (size_t ii = 0; ii < num_pairs; ++ii) {
    const PNameValue& pv = pairs[ii];
    GLint v = -1;
    gl_->GetIntegerv(pv.pname, &v);
    EXPECT_TRUE(NoCommandsWritten());
    EXPECT_EQ(pv.expected, v);
  }

  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(result1.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), gl_->GetError());
}

TEST_F(GLES2ImplementationTest, GetIntegerCacheWrite) {
  struct PNameValue {
    GLenum pname;
    GLint expected;
  };
  gl_->ActiveTexture(GL_TEXTURE4);
  gl_->BindBuffer(GL_ARRAY_BUFFER, 2);
  gl_->BindBuffer(GL_ELEMENT_ARRAY_BUFFER, 3);
  gl_->BindFramebuffer(GL_FRAMEBUFFER, 4);
  gl_->BindRenderbuffer(GL_RENDERBUFFER, 5);
  gl_->BindTexture(GL_TEXTURE_2D, 6);
  gl_->BindTexture(GL_TEXTURE_CUBE_MAP, 7);

  const PNameValue pairs[] = {
    { GL_ACTIVE_TEXTURE, GL_TEXTURE4, },
    { GL_ARRAY_BUFFER_BINDING, 2, },
    { GL_ELEMENT_ARRAY_BUFFER_BINDING, 3, },
    { GL_FRAMEBUFFER_BINDING, 4, },
    { GL_RENDERBUFFER_BINDING, 5, },
    { GL_TEXTURE_BINDING_2D, 6, },
    { GL_TEXTURE_BINDING_CUBE_MAP, 7, },
  };
  size_t num_pairs = sizeof(pairs) / sizeof(pairs[0]);
  for (size_t ii = 0; ii < num_pairs; ++ii) {
    const PNameValue& pv = pairs[ii];
    GLint v = -1;
    gl_->GetIntegerv(pv.pname, &v);
    EXPECT_EQ(pv.expected, v);
  }

  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(result1.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), gl_->GetError());
}

static bool CheckRect(
    int width, int height, GLenum format, GLenum type, int alignment,
    bool flip_y, const uint8* r1, const uint8* r2) {
  uint32 size = 0;
  uint32 unpadded_row_size = 0;
  uint32 padded_row_size = 0;
  if (!GLES2Util::ComputeImageDataSizes(
      width, height, format, type, alignment,
      &size, &unpadded_row_size, &padded_row_size)) {
    return false;
  }

  int r2_stride = flip_y ?
      -static_cast<int>(padded_row_size) :
      static_cast<int>(padded_row_size);
  r2 = flip_y ? (r2 + (height - 1) * padded_row_size) : r2;

  for (int y = 0; y < height; ++y) {
    if (memcmp(r1, r2, unpadded_row_size) != 0) {
      return false;
    }
    r1 += padded_row_size;
    r2 += r2_stride;
  }
  return true;
}

ACTION_P8(CheckRectAction, width, height, format, type, alignment, flip_y,
          r1, r2) {
  EXPECT_TRUE(CheckRect(
      width, height, format, type, alignment, flip_y, r1, r2));
}

// Test TexImage2D with and without flip_y
TEST_F(GLES2ImplementationTest, TexImage2D) {
  struct Cmds {
    TexImage2D tex_image_2d;
    cmd::SetToken set_token;
  };
  struct Cmds2 {
    TexImage2D tex_image_2d;
    cmd::SetToken set_token;
  };
  const GLenum kTarget = GL_TEXTURE_2D;
  const GLint kLevel = 0;
  const GLenum kFormat = GL_RGB;
  const GLsizei kWidth = 3;
  const GLsizei kHeight = 4;
  const GLint kBorder = 0;
  const GLenum kType = GL_UNSIGNED_BYTE;
  const GLint kPixelStoreUnpackAlignment = 4;
  static uint8 pixels[] = {
    11, 12, 13, 13, 14, 15, 15, 16, 17, 101, 102, 103,
    21, 22, 23, 23, 24, 25, 25, 26, 27, 201, 202, 203,
    31, 32, 33, 33, 34, 35, 35, 36, 37, 123, 124, 125,
    41, 42, 43, 43, 44, 45, 45, 46, 47,
  };

  ExpectedMemoryInfo mem1 = GetExpectedMemory(sizeof(pixels));

  Cmds expected;
  expected.tex_image_2d.Init(
      kTarget, kLevel, kFormat, kWidth, kHeight, kBorder, kFormat, kType,
      mem1.id, mem1.offset);
  expected.set_token.Init(GetNextToken());
  gl_->TexImage2D(
      kTarget, kLevel, kFormat, kWidth, kHeight, kBorder, kFormat, kType,
      pixels);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_TRUE(CheckRect(
      kWidth, kHeight, kFormat, kType, kPixelStoreUnpackAlignment, false,
      pixels, mem1.ptr));

  ClearCommands();
  gl_->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, GL_TRUE);

  ExpectedMemoryInfo mem2 = GetExpectedMemory(sizeof(pixels));
  Cmds2 expected2;
  expected2.tex_image_2d.Init(
      kTarget, kLevel, kFormat, kWidth, kHeight, kBorder, kFormat, kType,
      mem2.id, mem2.offset);
  expected2.set_token.Init(GetNextToken());
  const void* commands2 = GetPut();
  gl_->TexImage2D(
      kTarget, kLevel, kFormat, kWidth, kHeight, kBorder, kFormat, kType,
      pixels);
  EXPECT_EQ(0, memcmp(&expected2, commands2, sizeof(expected2)));
  EXPECT_TRUE(CheckRect(
      kWidth, kHeight, kFormat, kType, kPixelStoreUnpackAlignment, true,
      pixels, mem2.ptr));
}

// Test TexImage2D with 2 writes
TEST_F(GLES2ImplementationTest, TexImage2D2Writes) {
  struct Cmds {
    TexImage2D tex_image_2d;
    TexSubImage2D tex_sub_image_2d1;
    cmd::SetToken set_token1;
    TexSubImage2D tex_sub_image_2d2;
    cmd::SetToken set_token2;
  };
  const GLenum kTarget = GL_TEXTURE_2D;
  const GLint kLevel = 0;
  const GLenum kFormat = GL_RGB;
  const GLint kBorder = 0;
  const GLenum kType = GL_UNSIGNED_BYTE;
  const GLint kPixelStoreUnpackAlignment = 4;
  const GLsizei kWidth = 3;

  uint32 size = 0;
  uint32 unpadded_row_size = 0;
  uint32 padded_row_size = 0;
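  // kHeight below is picked so the image is twice what fits in the transfer
  // buffer, which presumably forces the upload to split into the two
  // TexSubImage2D writes this test expects.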
  ASSERT_TRUE(GLES2Util::ComputeImageDataSizes(
      kWidth, 2, kFormat, kType, kPixelStoreUnpackAlignment,
      &size, &unpadded_row_size, &padded_row_size));
  const GLsizei kHeight = (MaxTransferBufferSize() / padded_row_size) * 2;
  ASSERT_TRUE(GLES2Util::ComputeImageDataSizes(
      kWidth, kHeight, kFormat, kType, kPixelStoreUnpackAlignment,
      &size, NULL, NULL));
  uint32 half_size = 0;
  ASSERT_TRUE(GLES2Util::ComputeImageDataSizes(
      kWidth, kHeight / 2, kFormat, kType, kPixelStoreUnpackAlignment,
      &half_size, NULL, NULL));

  scoped_array<uint8> pixels(new uint8[size]);
  for (uint32 ii = 0; ii < size; ++ii) {
    pixels[ii] = static_cast<uint8>(ii);
  }

  ExpectedMemoryInfo mem1 = GetExpectedMemory(half_size);
  ExpectedMemoryInfo mem2 = GetExpectedMemory(half_size);

  Cmds expected;
  expected.tex_image_2d.Init(
      kTarget, kLevel, kFormat, kWidth, kHeight, kBorder, kFormat, kType,
      0, 0);
  expected.tex_sub_image_2d1.Init(
      kTarget, kLevel, 0, 0, kWidth, kHeight / 2, kFormat, kType,
      mem1.id, mem1.offset, true);
  expected.set_token1.Init(GetNextToken());
  expected.tex_sub_image_2d2.Init(
      kTarget, kLevel, 0, kHeight / 2, kWidth, kHeight / 2, kFormat, kType,
      mem2.id, mem2.offset, true);
  expected.set_token2.Init(GetNextToken());

  // TODO(gman): Make it possible to run this test
  // EXPECT_CALL(*command_buffer(), OnFlush())
  //     .WillOnce(CheckRectAction(
  //         kWidth, kHeight / 2, kFormat, kType, kPixelStoreUnpackAlignment,
  //         false, pixels.get(),
  //         GetExpectedTransferAddressFromOffsetAs<uint8>(offset1,
  //                                                       half_size)))
  //     .RetiresOnSaturation();

  gl_->TexImage2D(
      kTarget, kLevel, kFormat, kWidth, kHeight, kBorder, kFormat, kType,
      pixels.get());
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_TRUE(CheckRect(
      kWidth, kHeight / 2, kFormat, kType, kPixelStoreUnpackAlignment, false,
      pixels.get() + kHeight / 2 * padded_row_size, mem2.ptr));

  ClearCommands();
  gl_->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, GL_TRUE);
  const void* commands2 = GetPut();
  ExpectedMemoryInfo mem3 = GetExpectedMemory(half_size);
  ExpectedMemoryInfo mem4 = GetExpectedMemory(half_size);
  expected.tex_image_2d.Init(
      kTarget, kLevel, kFormat, kWidth, kHeight, kBorder, kFormat, kType,
      0, 0);
  expected.tex_sub_image_2d1.Init(
      kTarget, kLevel, 0, kHeight / 2, kWidth, kHeight / 2, kFormat, kType,
      mem3.id, mem3.offset, true);
  expected.set_token1.Init(GetNextToken());
  expected.tex_sub_image_2d2.Init(
      kTarget, kLevel, 0, 0, kWidth, kHeight / 2, kFormat, kType,
      mem4.id, mem4.offset, true);
  expected.set_token2.Init(GetNextToken());

  // TODO(gman): Make it possible to run this test
  // EXPECT_CALL(*command_buffer(), OnFlush())
  //     .WillOnce(CheckRectAction(
  //         kWidth, kHeight / 2, kFormat, kType, kPixelStoreUnpackAlignment,
  //         true, pixels.get(),
  //         GetExpectedTransferAddressFromOffsetAs<uint8>(offset3,
  //                                                       half_size)))
  //     .RetiresOnSaturation();

  gl_->TexImage2D(
      kTarget, kLevel, kFormat, kWidth, kHeight, kBorder, kFormat, kType,
      pixels.get());
  EXPECT_EQ(0, memcmp(&expected, commands2, sizeof(expected)));
  EXPECT_TRUE(CheckRect(
      kWidth, kHeight / 2, kFormat, kType, kPixelStoreUnpackAlignment, true,
      pixels.get() + kHeight / 2 * padded_row_size, mem4.ptr));
}

// Test TexSubImage2D with GL_PACK_FLIP_Y set and partial multirow transfers
TEST_F(GLES2ImplementationTest, TexSubImage2DFlipY) {
  const GLsizei kTextureWidth = MaxTransferBufferSize() / 4;
  const GLsizei kTextureHeight = 7;
  const GLsizei kSubImageWidth = MaxTransferBufferSize() / 8;
  const GLsizei kSubImageHeight = 4;
  const GLint kSubImageXOffset = 1;
  const GLint kSubImageYOffset = 2;
  const GLenum kFormat = GL_RGBA;
  const GLenum kType =
      GL_UNSIGNED_BYTE;
  const GLenum kTarget = GL_TEXTURE_2D;
  const GLint kLevel = 0;
  const GLint kBorder = 0;
  const GLint kPixelStoreUnpackAlignment = 4;

  struct Cmds {
    PixelStorei pixel_store_i1;
    TexImage2D tex_image_2d;
    PixelStorei pixel_store_i2;
    TexSubImage2D tex_sub_image_2d1;
    cmd::SetToken set_token1;
    TexSubImage2D tex_sub_image_2d2;
    cmd::SetToken set_token2;
  };

  uint32 sub_2_high_size = 0;
  ASSERT_TRUE(GLES2Util::ComputeImageDataSizes(
      kSubImageWidth, 2, kFormat, kType, kPixelStoreUnpackAlignment,
      &sub_2_high_size, NULL, NULL));

  ExpectedMemoryInfo mem1 = GetExpectedMemory(sub_2_high_size);
  ExpectedMemoryInfo mem2 = GetExpectedMemory(sub_2_high_size);

  Cmds expected;
  expected.pixel_store_i1.Init(GL_UNPACK_ALIGNMENT,
                               kPixelStoreUnpackAlignment);
  expected.tex_image_2d.Init(
      kTarget, kLevel, kFormat, kTextureWidth, kTextureHeight, kBorder,
      kFormat, kType, 0, 0);
  expected.pixel_store_i2.Init(GL_UNPACK_FLIP_Y_CHROMIUM, GL_TRUE);
  expected.tex_sub_image_2d1.Init(kTarget, kLevel, kSubImageXOffset,
      kSubImageYOffset + 2, kSubImageWidth, 2, kFormat, kType,
      mem1.id, mem1.offset, false);
  expected.set_token1.Init(GetNextToken());
  expected.tex_sub_image_2d2.Init(kTarget, kLevel, kSubImageXOffset,
      kSubImageYOffset, kSubImageWidth, 2, kFormat, kType,
      mem2.id, mem2.offset, false);
  expected.set_token2.Init(GetNextToken());

  gl_->PixelStorei(GL_UNPACK_ALIGNMENT, kPixelStoreUnpackAlignment);
  gl_->TexImage2D(
      kTarget, kLevel, kFormat, kTextureWidth, kTextureHeight, kBorder,
      kFormat, kType, NULL);
  gl_->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, GL_TRUE);
  scoped_array<uint32> pixels(new uint32[kSubImageWidth * kSubImageHeight]);
  for (int y = 0; y < kSubImageHeight; ++y) {
    for (int x = 0; x < kSubImageWidth; ++x) {
      pixels.get()[kSubImageWidth * y + x] = x | (y << 16);
    }
  }
  gl_->TexSubImage2D(
      GL_TEXTURE_2D, 0, kSubImageXOffset, kSubImageYOffset, kSubImageWidth,
      kSubImageHeight, GL_RGBA, GL_UNSIGNED_BYTE, pixels.get());

  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_TRUE(CheckRect(
      kSubImageWidth, 2, kFormat, kType, kPixelStoreUnpackAlignment, true,
      reinterpret_cast<uint8*>(pixels.get() + 2 * kSubImageWidth),
      mem2.ptr));
}

TEST_F(GLES2ImplementationTest, SubImageUnpack) {
  static const GLint unpack_alignments[] = { 1, 2, 4, 8 };

  static const GLenum kFormat = GL_RGB;
  static const GLenum kType = GL_UNSIGNED_BYTE;
  static const GLint kLevel = 0;
  static const GLint kBorder = 0;
  // We're testing using the unpack params to pull a subimage out of a larger
  // source of pixels. Here we specify the subimage by its border rows /
  // columns.
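  // As a worked example of the padding involved (numbers implied by the
  // constants below, not additional test data): the sub-image is 9 pixels
  // wide and GL_RGB/GL_UNSIGNED_BYTE is 3 bytes per pixel, so an unpadded
  // row is 27 bytes; GL_UNPACK_ALIGNMENT 1 leaves it at 27, alignments 2
  // and 4 pad it to 28, and alignment 8 pads it to 32.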
  static const GLint kSrcWidth = 33;
  static const GLint kSrcSubImageX0 = 11;
  static const GLint kSrcSubImageX1 = 20;
  static const GLint kSrcSubImageY0 = 18;
  static const GLint kSrcSubImageY1 = 23;
  static const GLint kSrcSubImageWidth = kSrcSubImageX1 - kSrcSubImageX0;
  static const GLint kSrcSubImageHeight = kSrcSubImageY1 - kSrcSubImageY0;

  // these are only used in the texsubimage tests
  static const GLint kTexWidth = 1023;
  static const GLint kTexHeight = 511;
  static const GLint kTexSubXOffset = 419;
  static const GLint kTexSubYOffset = 103;

  struct {
    PixelStorei pixel_store_i;
    PixelStorei pixel_store_i2;
    TexImage2D tex_image_2d;
  } texImageExpected;

  struct  {
    PixelStorei pixel_store_i;
    PixelStorei pixel_store_i2;
    TexImage2D tex_image_2d;
    TexSubImage2D tex_sub_image_2d;
  } texSubImageExpected;

  uint32 src_size;
  ASSERT_TRUE(GLES2Util::ComputeImageDataSizes(
      kSrcWidth, kSrcSubImageY1, kFormat, kType, 8, &src_size, NULL, NULL));
  scoped_array<uint8> src_pixels;
  src_pixels.reset(new uint8[src_size]);
  for (size_t i = 0; i < src_size; ++i) {
    src_pixels[i] = static_cast<int8>(i);
  }

  for (int sub = 0; sub < 2; ++sub) {
    for (int flip_y = 0; flip_y < 2; ++flip_y) {
      for (size_t a = 0; a < arraysize(unpack_alignments); ++a) {
        GLint alignment = unpack_alignments[a];
        uint32 size;
        uint32 unpadded_row_size;
        uint32 padded_row_size;
        ASSERT_TRUE(GLES2Util::ComputeImageDataSizes(
            kSrcSubImageWidth, kSrcSubImageHeight, kFormat, kType, alignment,
            &size, &unpadded_row_size, &padded_row_size));
        ASSERT_TRUE(size <= MaxTransferBufferSize());
        ExpectedMemoryInfo mem = GetExpectedMemory(size);

        const void* commands = GetPut();
        gl_->PixelStorei(GL_UNPACK_ALIGNMENT, alignment);
        gl_->PixelStorei(GL_UNPACK_ROW_LENGTH, kSrcWidth);
        gl_->PixelStorei(GL_UNPACK_SKIP_PIXELS, kSrcSubImageX0);
        gl_->PixelStorei(GL_UNPACK_SKIP_ROWS, kSrcSubImageY0);
        gl_->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
        if (sub) {
          gl_->TexImage2D(
              GL_TEXTURE_2D, kLevel, kFormat, kTexWidth, kTexHeight, kBorder,
              kFormat, kType, NULL);
          gl_->TexSubImage2D(
              GL_TEXTURE_2D, kLevel, kTexSubXOffset, kTexSubYOffset,
              kSrcSubImageWidth, kSrcSubImageHeight, kFormat, kType,
              src_pixels.get());
          texSubImageExpected.pixel_store_i.Init(
              GL_UNPACK_ALIGNMENT, alignment);
          texSubImageExpected.pixel_store_i2.Init(
              GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
          texSubImageExpected.tex_image_2d.Init(
              GL_TEXTURE_2D, kLevel, kFormat, kTexWidth, kTexHeight, kBorder,
              kFormat, kType, 0, 0);
          texSubImageExpected.tex_sub_image_2d.Init(
              GL_TEXTURE_2D, kLevel, kTexSubXOffset, kTexSubYOffset,
              kSrcSubImageWidth, kSrcSubImageHeight, kFormat, kType, mem.id,
              mem.offset, GL_FALSE);
          EXPECT_EQ(0, memcmp(
              &texSubImageExpected, commands, sizeof(texSubImageExpected)));
        } else {
          gl_->TexImage2D(
              GL_TEXTURE_2D, kLevel, kFormat,
              kSrcSubImageWidth, kSrcSubImageHeight, kBorder, kFormat, kType,
              src_pixels.get());
          texImageExpected.pixel_store_i.Init(GL_UNPACK_ALIGNMENT, alignment);
          texImageExpected.pixel_store_i2.Init(
              GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
          texImageExpected.tex_image_2d.Init(
              GL_TEXTURE_2D, kLevel, kFormat, kSrcSubImageWidth,
              kSrcSubImageHeight, kBorder, kFormat, kType, mem.id,
              mem.offset);
          EXPECT_EQ(0, memcmp(
              &texImageExpected, commands, sizeof(texImageExpected)));
        }
        uint32 src_padded_row_size;
        ASSERT_TRUE(GLES2Util::ComputeImagePaddedRowSize(
            kSrcWidth, kFormat, kType, alignment, &src_padded_row_size));
        uint32 bytes_per_group = GLES2Util::ComputeImageGroupSize(
            kFormat, kType);
        for (int y = 0; y < kSrcSubImageHeight; ++y) {
          GLint src_sub_y = flip_y ?
              kSrcSubImageHeight - y - 1 : y;
          const uint8* src_row = src_pixels.get() +
              (kSrcSubImageY0 + src_sub_y) * src_padded_row_size +
              bytes_per_group * kSrcSubImageX0;
          const uint8* dst_row = mem.ptr + y * padded_row_size;
          EXPECT_EQ(0, memcmp(src_row, dst_row, unpadded_row_size));
        }
        ClearCommands();
      }
    }
  }
}

// Binds cannot be cached with bind_generates_resource = false because
// our id might not be valid. More specifically, if you bind on contextA and
// then delete on contextB, the resource is still bound on contextA but
// GetIntegerv won't return an id.
TEST_F(GLES2ImplementationStrictSharedTest, BindsNotCached) {
  struct PNameValue {
    GLenum pname;
    GLint expected;
  };
  const PNameValue pairs[] = {
    { GL_TEXTURE_BINDING_2D, 1, },
    { GL_TEXTURE_BINDING_CUBE_MAP, 2, },
    { GL_FRAMEBUFFER_BINDING, 3, },
    { GL_RENDERBUFFER_BINDING, 4, },
    { GL_ARRAY_BUFFER_BINDING, 5, },
    { GL_ELEMENT_ARRAY_BUFFER_BINDING, 6, },
  };
  size_t num_pairs = sizeof(pairs) / sizeof(pairs[0]);
  for (size_t ii = 0; ii < num_pairs; ++ii) {
    const PNameValue& pv = pairs[ii];
    GLint v = -1;
    ExpectedMemoryInfo result1 =
        GetExpectedResultMemory(sizeof(GetIntegerv::Result));
    EXPECT_CALL(*command_buffer(), OnFlush())
        .WillOnce(SetMemory(result1.ptr,
                            SizedResultHelper<GLuint>(pv.expected)))
        .RetiresOnSaturation();
    gl_->GetIntegerv(pv.pname, &v);
    EXPECT_EQ(pv.expected, v);
  }
}

TEST_F(GLES2ImplementationTest, CreateStreamTextureCHROMIUM) {
  const GLuint kTextureId = 123;
  const GLuint kResult = 456;

  struct Cmds {
    CreateStreamTextureCHROMIUM create_stream;
  };

  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(CreateStreamTextureCHROMIUM::Result));
  ExpectedMemoryInfo result2 =
      GetExpectedResultMemory(sizeof(GetError::Result));

  Cmds expected;
  expected.create_stream.Init(kTextureId, result1.id, result1.offset);

  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(result1.ptr, kResult))
      .WillOnce(SetMemory(result2.ptr, GLuint(GL_NO_ERROR)))
      .RetiresOnSaturation();

  GLuint handle = gl_->CreateStreamTextureCHROMIUM(kTextureId);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_EQ(handle, kResult);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), gl_->GetError());
}

TEST_F(GLES2ImplementationTest, GetString) {
  const uint32 kBucketId = GLES2Implementation::kResultBucketId;
  const Str7 kString = {"foobar"};
  // GL_CHROMIUM_map_sub GL_CHROMIUM_flipy are hard coded into
  // GLES2Implementation.
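  // (So the string checked below is presumably the service's "foobar" with
  // the client-side extensions the implementation itself advertises
  // appended.)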
  const char* expected_str =
      "foobar "
      "GL_CHROMIUM_map_sub "
      "GL_CHROMIUM_flipy "
      "GL_EXT_unpack_subimage";
  const char kBad = 0x12;
  struct Cmds {
    cmd::SetBucketSize set_bucket_size1;
    GetString get_string;
    cmd::GetBucketStart get_bucket_start;
    cmd::SetToken set_token1;
    cmd::SetBucketSize set_bucket_size2;
  };

  ExpectedMemoryInfo mem1 = GetExpectedMemory(MaxTransferBufferSize());
  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(cmd::GetBucketStart::Result));

  Cmds expected;
  expected.set_bucket_size1.Init(kBucketId, 0);
  expected.get_string.Init(GL_EXTENSIONS, kBucketId);
  expected.get_bucket_start.Init(
      kBucketId, result1.id, result1.offset,
      MaxTransferBufferSize(), mem1.id, mem1.offset);
  expected.set_token1.Init(GetNextToken());
  expected.set_bucket_size2.Init(kBucketId, 0);
  char buf[sizeof(kString) + 1];
  memset(buf, kBad, sizeof(buf));

  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(DoAll(SetMemory(result1.ptr, uint32(sizeof(kString))),
                      SetMemory(mem1.ptr, kString)))
      .RetiresOnSaturation();

  const GLubyte* result = gl_->GetString(GL_EXTENSIONS);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_STREQ(expected_str, reinterpret_cast<const char*>(result));
}

TEST_F(GLES2ImplementationTest, PixelStoreiGLPackReverseRowOrderANGLE) {
  const uint32 kBucketId = GLES2Implementation::kResultBucketId;
  const Str7 kString = {"foobar"};
  struct Cmds {
    cmd::SetBucketSize set_bucket_size1;
    GetString get_string;
    cmd::GetBucketStart get_bucket_start;
    cmd::SetToken set_token1;
    cmd::SetBucketSize set_bucket_size2;
    PixelStorei pixel_store;
  };

  ExpectedMemoryInfo mem1 = GetExpectedMemory(MaxTransferBufferSize());
  ExpectedMemoryInfo result1 =
      GetExpectedResultMemory(sizeof(cmd::GetBucketStart::Result));

  Cmds expected;
  expected.set_bucket_size1.Init(kBucketId, 0);
  expected.get_string.Init(GL_EXTENSIONS, kBucketId);
  expected.get_bucket_start.Init(
      kBucketId, result1.id, result1.offset,
      MaxTransferBufferSize(), mem1.id, mem1.offset);
  expected.set_token1.Init(GetNextToken());
  expected.set_bucket_size2.Init(kBucketId, 0);
  expected.pixel_store.Init(GL_PACK_REVERSE_ROW_ORDER_ANGLE, 1);

  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(DoAll(SetMemory(result1.ptr, uint32(sizeof(kString))),
                      SetMemory(mem1.ptr, kString)))
      .RetiresOnSaturation();

  gl_->PixelStorei(GL_PACK_REVERSE_ROW_ORDER_ANGLE, 1);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

TEST_F(GLES2ImplementationTest, CreateProgram) {
  struct Cmds {
    CreateProgram cmd;
  };

  Cmds expected;
  expected.cmd.Init(kProgramsAndShadersStartId);
  GLuint id = gl_->CreateProgram();
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_EQ(kProgramsAndShadersStartId, id);
}

TEST_F(GLES2ImplementationTest, BufferDataLargerThanTransferBuffer) {
  struct Cmds {
    BufferData set_size;
    BufferSubData copy_data1;
    cmd::SetToken set_token1;
    BufferSubData copy_data2;
    cmd::SetToken set_token2;
  };
  const unsigned kUsableSize =
      kTransferBufferSize - GLES2Implementation::kStartingOffset;
  uint8 buf[kUsableSize * 2] = { 0, };

  ExpectedMemoryInfo mem1 = GetExpectedMemory(kUsableSize);
  ExpectedMemoryInfo mem2 = GetExpectedMemory(kUsableSize);

  Cmds expected;
  expected.set_size.Init(
      GL_ARRAY_BUFFER, arraysize(buf), 0, 0, GL_DYNAMIC_DRAW);
  expected.copy_data1.Init(
      GL_ARRAY_BUFFER, 0, kUsableSize, mem1.id, mem1.offset);
  expected.set_token1.Init(GetNextToken());
  expected.copy_data2.Init(
      GL_ARRAY_BUFFER, kUsableSize, kUsableSize, mem2.id, mem2.offset);
  expected.set_token2.Init(GetNextToken());
  gl_->BufferData(GL_ARRAY_BUFFER, arraysize(buf), buf, GL_DYNAMIC_DRAW);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}

TEST_F(GLES2ImplementationTest, BeginEndQueryEXT) {
  // Test GetQueryivEXT returns 0 if no current query.
  GLint param = -1;
  gl_->GetQueryivEXT(GL_ANY_SAMPLES_PASSED_EXT, GL_CURRENT_QUERY_EXT, &param);
  EXPECT_EQ(0, param);

  GLuint expected_ids[2] = { 1, 2 };  // These must match what's actually genned.
  struct GenCmds {
    GenQueriesEXTImmediate gen;
    GLuint data[2];
  };
  GenCmds expected_gen_cmds;
  expected_gen_cmds.gen.Init(arraysize(expected_ids), &expected_ids[0]);
  GLuint ids[arraysize(expected_ids)] = { 0, };
  gl_->GenQueriesEXT(arraysize(expected_ids), &ids[0]);
  EXPECT_EQ(0, memcmp(
      &expected_gen_cmds, commands_, sizeof(expected_gen_cmds)));
  GLuint id1 = ids[0];
  GLuint id2 = ids[1];
  ClearCommands();

  // Test BeginQueryEXT fails if id = 0.
  gl_->BeginQueryEXT(GL_ANY_SAMPLES_PASSED_EXT, 0);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(GL_INVALID_OPERATION, CheckError());

  // Test BeginQueryEXT fails if id not GENed.
  // TODO(gman):

  // Test BeginQueryEXT inserts command.
  struct BeginCmds {
    BeginQueryEXT begin_query;
  };
  BeginCmds expected_begin_cmds;
  const void* commands = GetPut();
  gl_->BeginQueryEXT(GL_ANY_SAMPLES_PASSED_EXT, id1);
  QueryTracker::Query* query = GetQuery(id1);
  ASSERT_TRUE(query != NULL);
  expected_begin_cmds.begin_query.Init(
      GL_ANY_SAMPLES_PASSED_EXT, id1, query->shm_id(), query->shm_offset());
  EXPECT_EQ(0, memcmp(
      &expected_begin_cmds, commands, sizeof(expected_begin_cmds)));
  ClearCommands();

  // Test GetQueryivEXT returns id.
  param = -1;
  gl_->GetQueryivEXT(GL_ANY_SAMPLES_PASSED_EXT, GL_CURRENT_QUERY_EXT, &param);
  EXPECT_EQ(id1, static_cast<GLuint>(param));
  gl_->GetQueryivEXT(
      GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT, GL_CURRENT_QUERY_EXT, &param);
  EXPECT_EQ(0, param);

  // Test BeginQueryEXT fails if between Begin/End.
  gl_->BeginQueryEXT(GL_ANY_SAMPLES_PASSED_EXT, id2);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(GL_INVALID_OPERATION, CheckError());

  // Test EndQueryEXT fails if target not same as current query.
  ClearCommands();
  gl_->EndQueryEXT(GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(GL_INVALID_OPERATION, CheckError());

  // Test EndQueryEXT sends command
  struct EndCmds {
    EndQueryEXT end_query;
  };
  EndCmds expected_end_cmds;
  expected_end_cmds.end_query.Init(
      GL_ANY_SAMPLES_PASSED_EXT, query->submit_count());
  commands = GetPut();
  gl_->EndQueryEXT(GL_ANY_SAMPLES_PASSED_EXT);
  EXPECT_EQ(0, memcmp(
      &expected_end_cmds, commands, sizeof(expected_end_cmds)));

  // Test EndQueryEXT fails if no current query.
  ClearCommands();
  gl_->EndQueryEXT(GL_ANY_SAMPLES_PASSED_EXT);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(GL_INVALID_OPERATION, CheckError());

  // Test 2nd Begin/End increments count.
  uint32 old_submit_count = query->submit_count();
  gl_->BeginQueryEXT(GL_ANY_SAMPLES_PASSED_EXT, id1);
  EXPECT_NE(old_submit_count, query->submit_count());
  expected_end_cmds.end_query.Init(
      GL_ANY_SAMPLES_PASSED_EXT, query->submit_count());
  commands = GetPut();
  gl_->EndQueryEXT(GL_ANY_SAMPLES_PASSED_EXT);
  EXPECT_EQ(0, memcmp(
      &expected_end_cmds, commands, sizeof(expected_end_cmds)));

  // Test BeginQueryEXT fails if target changed.
  ClearCommands();
  gl_->BeginQueryEXT(GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT, id1);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(GL_INVALID_OPERATION, CheckError());

  // Test GetQueryObjectuivEXT fails if unused id
  GLuint available = 0xBDu;
  ClearCommands();
  gl_->GetQueryObjectuivEXT(id2, GL_QUERY_RESULT_AVAILABLE_EXT, &available);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(0xBDu, available);
  EXPECT_EQ(GL_INVALID_OPERATION, CheckError());

  // Test GetQueryObjectuivEXT fails if bad id
  ClearCommands();
  gl_->GetQueryObjectuivEXT(4567, GL_QUERY_RESULT_AVAILABLE_EXT, &available);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(0xBDu, available);
  EXPECT_EQ(GL_INVALID_OPERATION, CheckError());

  // Test GetQueryObjectuivEXT CheckResultsAvailable
  ClearCommands();
  gl_->GetQueryObjectuivEXT(id1, GL_QUERY_RESULT_AVAILABLE_EXT, &available);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(0u, available);
}

#include "gpu/command_buffer/client/gles2_implementation_unittest_autogen.h"

} // namespace gles2
} // namespace gpu<|fim▁end|>
<|file_name|>persistent_factories.py<|end_file_name|><|fim▁begin|>"""Provides factories for Split."""

from xmodule.modulestore import ModuleStoreEnum
from xmodule.course_module import CourseDescriptor
from xmodule.x_module import XModuleDescriptor
import factory
from factory.helpers import lazy_attribute
from opaque_keys.edx.keys import UsageKey

# Factories don't have __init__ methods, and are self documenting
# pylint: disable=W0232, C0111


class SplitFactory(factory.Factory):
    """
    Abstracted superclass which defines modulestore so that there's no
    dependency on django if the caller passes modulestore in kwargs
    """
    @lazy_attribute
    def modulestore(self):
        # Delayed import so that we only depend on django if the caller
        # hasn't provided their own modulestore
        from xmodule.modulestore.django import modulestore
        return modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split)


class PersistentCourseFactory(SplitFactory):
    """
    Create a new course (not a new version of a course, but a whole new
    index entry).

    keywords: any xblock field plus (note, the below are filtered out; so, if
    they become legitimate xblock fields, they won't be settable via this
    factory)
    * org: defaults to 'testX'
    * master_branch: (optional) defaults to ModuleStoreEnum.BranchName.draft
    * user_id: (optional) defaults to 'test_user'
    * display_name (xblock field): will default to 'Robot Super Course'
      unless provided
    """
    FACTORY_FOR = CourseDescriptor

    # pylint: disable=W0613
    @classmethod
    def _create(cls, target_class, course='999', run='run', org='testX',
                user_id=ModuleStoreEnum.UserID.test,
                master_branch=ModuleStoreEnum.BranchName.draft, **kwargs):
        modulestore = kwargs.pop('modulestore')
        root_block_id = kwargs.pop('root_block_id', 'course')

        # Write the data to the mongo datastore
        new_course = modulestore.create_course(
            org, course, run, user_id, fields=kwargs,
            master_branch=master_branch, root_block_id=root_block_id
        )
        return new_course

    @classmethod
    def _build(cls, target_class, *args, **kwargs):
        raise NotImplementedError()


class ItemFactory(SplitFactory):
    FACTORY_FOR = XModuleDescriptor

    display_name = factory.LazyAttributeSequence(
        lambda o, n: "{} {}".format(o.category, n)
    )

    # pylint: disable=W0613
    @classmethod
    def _create(cls, target_class, parent_location, category='chapter',
                user_id=ModuleStoreEnum.UserID.test, definition_locator=None,
                force=False, continue_version=False, **kwargs):
        """
        passes *kwargs* as the new item's field values:

        :param parent_location: (required) the location of the course &
            possibly parent

        :param category: (defaults to 'chapter')

        :param definition_locator (optional): the DescriptorLocator for the
            definition this uses or branches
        """
        modulestore = kwargs.pop('modulestore')<|fim▁hole|>
            return modulestore.create_child(
                user_id, parent_location, category,
                definition_locator=definition_locator,
                force=force, continue_version=continue_version, **kwargs
            )
        else:
            return modulestore.create_item(
                user_id, parent_location, category,
                definition_locator=definition_locator,
                force=force, continue_version=continue_version, **kwargs
            )

    @classmethod
    def _build(cls, target_class, *args, **kwargs):
        raise NotImplementedError()<|fim▁end|>
if isinstance(parent_location, UsageKey):
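A minimal usage sketch for the factories in the row above — a hedged illustration, not part of the dataset row. The org/course/run values echo the factory defaults; the configured split modulestore and the edx-platform test context are assumptions.
# Sketch: assumes an edx-platform test environment where SplitFactory can
# resolve the split modulestore (or one is passed in via kwargs).
course = PersistentCourseFactory.create(
    org='testX', course='999', run='run',
    display_name='Robot Super Course',
)
# parent_location is a UsageKey here, so ItemFactory routes to create_child().
chapter = ItemFactory.create(
    parent_location=course.location,
    category='chapter',
)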
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># $Id: __init__.py 7646 2013-04-17 14:17:37Z milde $ # Author: David Goodger <[email protected]> # Copyright: This module has been placed in the public domain. """ This package contains Docutils parser modules. """ __docformat__ = 'reStructuredText' import sys from docutils import Component if sys.version_info < (2,5): from docutils._compat import __import__ class Parser(Component): component_type = 'parser' config_section = 'parsers' def parse(self, inputstring, document): """Override to parse `inputstring` into document tree `document`.""" raise NotImplementedError('subclass must override this method') def setup_parse(self, inputstring, document): """Initial parse setup. Call at start of `self.parse()`.""" self.inputstring = inputstring self.document = document document.reporter.attach_observer(document.note_parse_message) def finish_parse(self): """Finalize parse details. Call at end of `self.parse()`.""" self.document.reporter.detach_observer( self.document.note_parse_message) _parser_aliases = { 'restructuredtext': 'rst', 'rest': 'rst', 'restx': 'rst', 'rtxt': 'rst',}<|fim▁hole|>def get_parser_class(parser_name): """Return the Parser class from the `parser_name` module.""" parser_name = parser_name.lower() if parser_name in _parser_aliases: parser_name = _parser_aliases[parser_name] try: module = __import__(parser_name, globals(), locals(), level=1) except ImportError: module = __import__(parser_name, globals(), locals(), level=0) return module.Parser<|fim▁end|>
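A short sketch of the Parser contract documented in the row above; `SimpleParser` and its body are illustrative assumptions, while `get_parser_class` and the alias table come straight from the module.
from docutils import parsers

class SimpleParser(parsers.Parser):
    """Toy subclass showing the setup/parse/finish protocol."""
    def parse(self, inputstring, document):
        self.setup_parse(inputstring, document)  # attaches the reporter observer
        # ... populate `document` with nodes here ...
        self.finish_parse()                      # detaches the observer

# Aliases resolve first, so 'rest' loads the same module as 'rst'.
rst_class = parsers.get_parser_class('rest')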
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import { app } from "../src/app"<|fim▁hole|>import * as Mongoose from "mongoose" (<any>Mongoose).Promise = global.Promise Mongoose.connect("mongodb://localhost/simple-restful-api") .catch(e => console.log(e)) app.listen(5000) console.log("http://localhost:5000")<|fim▁end|>
<|file_name|>test_compliancebuilding.py<|end_file_name|><|fim▁begin|>import os import tempfile import unittest import logging from pyidf import ValidationLevel import pyidf<|fim▁hole|> class TestComplianceBuilding(unittest.TestCase): def setUp(self): self.fd, self.path = tempfile.mkstemp() def tearDown(self): os.remove(self.path) def test_create_compliancebuilding(self): pyidf.validation_level = ValidationLevel.error obj = ComplianceBuilding() # real var_building_rotation_for_appendix_g = 1.1 obj.building_rotation_for_appendix_g = var_building_rotation_for_appendix_g idf = IDF() idf.add(obj) idf.save(self.path, check=False) with open(self.path, mode='r') as f: for line in f: log.debug(line.strip()) idf2 = IDF(self.path) self.assertAlmostEqual(idf2.compliancebuildings[0].building_rotation_for_appendix_g, var_building_rotation_for_appendix_g)<|fim▁end|>
from pyidf.idf import IDF from pyidf.compliance_objects import ComplianceBuilding log = logging.getLogger(__name__)
<|file_name|>docker_test.go<|end_file_name|><|fim▁begin|>package docker import ( "testing" "time" "github.com/influxdata/telegraf/testutil" "github.com/docker/docker/api/types" "github.com/stretchr/testify/require" ) func TestDockerGatherContainerStats(t *testing.T) { var acc testutil.Accumulator stats := testStats() tags := map[string]string{ "container_name": "redis", "container_image": "redis/image", } gatherContainerStats(stats, &acc, tags, "123456789", true, true) // test docker_container_net measurement netfields := map[string]interface{}{ "rx_dropped": uint64(1), "rx_bytes": uint64(2), "rx_errors": uint64(3), "tx_packets": uint64(4), "tx_dropped": uint64(1), "rx_packets": uint64(2), "tx_errors": uint64(3), "tx_bytes": uint64(4), "container_id": "123456789", } nettags := copyTags(tags) nettags["network"] = "eth0" acc.AssertContainsTaggedFields(t, "docker_container_net", netfields, nettags) netfields = map[string]interface{}{ "rx_dropped": uint64(6), "rx_bytes": uint64(8), "rx_errors": uint64(10), "tx_packets": uint64(12), "tx_dropped": uint64(6), "rx_packets": uint64(8), "tx_errors": uint64(10), "tx_bytes": uint64(12), "container_id": "123456789", } nettags = copyTags(tags) nettags["network"] = "total" acc.AssertContainsTaggedFields(t, "docker_container_net", netfields, nettags) // test docker_blkio measurement blkiotags := copyTags(tags) blkiotags["device"] = "6:0" blkiofields := map[string]interface{}{ "io_service_bytes_recursive_read": uint64(100), "io_serviced_recursive_write": uint64(101), "container_id": "123456789", } acc.AssertContainsTaggedFields(t, "docker_container_blkio", blkiofields, blkiotags) blkiotags = copyTags(tags) blkiotags["device"] = "total" blkiofields = map[string]interface{}{ "io_service_bytes_recursive_read": uint64(100), "io_serviced_recursive_write": uint64(302), "container_id": "123456789", } acc.AssertContainsTaggedFields(t, "docker_container_blkio", blkiofields, blkiotags) // test docker_container_mem measurement memfields := map[string]interface{}{ "max_usage": uint64(1001), "usage": uint64(1111), "fail_count": uint64(1), "limit": uint64(2000), "total_pgmafault": uint64(0), "cache": uint64(0), "mapped_file": uint64(0), "total_inactive_file": uint64(0), "pgpgout": uint64(0), "rss": uint64(0), "total_mapped_file": uint64(0), "writeback": uint64(0), "unevictable": uint64(0), "pgpgin": uint64(0), "total_unevictable": uint64(0), "pgmajfault": uint64(0), "total_rss": uint64(44), "total_rss_huge": uint64(444), "total_writeback": uint64(55), "total_inactive_anon": uint64(0), "rss_huge": uint64(0), "hierarchical_memory_limit": uint64(0), "total_pgfault": uint64(0), "total_active_file": uint64(0), "active_anon": uint64(0), "total_active_anon": uint64(0), "total_pgpgout": uint64(0), "total_cache": uint64(0), "inactive_anon": uint64(0), "active_file": uint64(1), "pgfault": uint64(2), "inactive_file": uint64(3), "total_pgpgin": uint64(4), "usage_percent": float64(55.55), "container_id": "123456789", } acc.AssertContainsTaggedFields(t, "docker_container_mem", memfields, tags) // test docker_container_cpu measurement cputags := copyTags(tags) cputags["cpu"] = "cpu-total" cpufields := map[string]interface{}{ "usage_total": uint64(500), "usage_in_usermode": uint64(100), "usage_in_kernelmode": uint64(200), "usage_system": uint64(100), "throttling_periods": uint64(1), "throttling_throttled_periods": uint64(0), "throttling_throttled_time": uint64(0), "usage_percent": float64(400.0), "container_id": "123456789", } acc.AssertContainsTaggedFields(t, "docker_container_cpu", 
cpufields, cputags) cputags["cpu"] = "cpu0" cpu0fields := map[string]interface{}{ "usage_total": uint64(1), "container_id": "123456789", } acc.AssertContainsTaggedFields(t, "docker_container_cpu", cpu0fields, cputags) cputags["cpu"] = "cpu1" cpu1fields := map[string]interface{}{ "usage_total": uint64(1002), "container_id": "123456789", } acc.AssertContainsTaggedFields(t, "docker_container_cpu", cpu1fields, cputags) } func testStats() *types.StatsJSON { stats := &types.StatsJSON{} stats.Read = time.Now() stats.Networks = make(map[string]types.NetworkStats) stats.CPUStats.CPUUsage.PercpuUsage = []uint64{1, 1002} stats.CPUStats.CPUUsage.UsageInUsermode = 100 stats.CPUStats.CPUUsage.TotalUsage = 500 stats.CPUStats.CPUUsage.UsageInKernelmode = 200 stats.CPUStats.SystemUsage = 100 stats.CPUStats.ThrottlingData.Periods = 1 stats.PreCPUStats.CPUUsage.TotalUsage = 400 stats.PreCPUStats.SystemUsage = 50 stats.MemoryStats.Stats = make(map[string]uint64) stats.MemoryStats.Stats["total_pgmajfault"] = 0 stats.MemoryStats.Stats["cache"] = 0 stats.MemoryStats.Stats["mapped_file"] = 0 stats.MemoryStats.Stats["total_inactive_file"] = 0 stats.MemoryStats.Stats["pagpgout"] = 0 stats.MemoryStats.Stats["rss"] = 0 stats.MemoryStats.Stats["total_mapped_file"] = 0 stats.MemoryStats.Stats["writeback"] = 0 stats.MemoryStats.Stats["unevictable"] = 0 stats.MemoryStats.Stats["pgpgin"] = 0 stats.MemoryStats.Stats["total_unevictable"] = 0 stats.MemoryStats.Stats["pgmajfault"] = 0 stats.MemoryStats.Stats["total_rss"] = 44 stats.MemoryStats.Stats["total_rss_huge"] = 444 stats.MemoryStats.Stats["total_write_back"] = 55 stats.MemoryStats.Stats["total_inactive_anon"] = 0 stats.MemoryStats.Stats["rss_huge"] = 0 stats.MemoryStats.Stats["hierarchical_memory_limit"] = 0 stats.MemoryStats.Stats["total_pgfault"] = 0 stats.MemoryStats.Stats["total_active_file"] = 0 stats.MemoryStats.Stats["active_anon"] = 0 stats.MemoryStats.Stats["total_active_anon"] = 0 stats.MemoryStats.Stats["total_pgpgout"] = 0 stats.MemoryStats.Stats["total_cache"] = 0 stats.MemoryStats.Stats["inactive_anon"] = 0 stats.MemoryStats.Stats["active_file"] = 1 stats.MemoryStats.Stats["pgfault"] = 2 stats.MemoryStats.Stats["inactive_file"] = 3 stats.MemoryStats.Stats["total_pgpgin"] = 4 stats.MemoryStats.MaxUsage = 1001 stats.MemoryStats.Usage = 1111 stats.MemoryStats.Failcnt = 1 stats.MemoryStats.Limit = 2000 stats.Networks["eth0"] = types.NetworkStats{ RxDropped: 1, RxBytes: 2, RxErrors: 3, TxPackets: 4, TxDropped: 1, RxPackets: 2, TxErrors: 3, TxBytes: 4, } stats.Networks["eth1"] = types.NetworkStats{ RxDropped: 5, RxBytes: 6, RxErrors: 7, TxPackets: 8, TxDropped: 5, RxPackets: 6, TxErrors: 7, TxBytes: 8, } sbr := types.BlkioStatEntry{ Major: 6, Minor: 0, Op: "read", Value: 100, } sr := types.BlkioStatEntry{ Major: 6, Minor: 0, Op: "write", Value: 101, } sr2 := types.BlkioStatEntry{ Major: 6, Minor: 1, Op: "write", Value: 201, } stats.BlkioStats.IoServiceBytesRecursive = append( stats.BlkioStats.IoServiceBytesRecursive, sbr) stats.BlkioStats.IoServicedRecursive = append( stats.BlkioStats.IoServicedRecursive, sr) stats.BlkioStats.IoServicedRecursive = append( stats.BlkioStats.IoServicedRecursive, sr2) return stats } var gatherLabelsTests = []struct { include []string exclude []string expected []string notexpected []string }{ {[]string{}, []string{}, []string{"label1", "label2"}, []string{}}, {[]string{"*"}, []string{}, []string{"label1", "label2"}, []string{}}, {[]string{"lab*"}, []string{}, []string{"label1", "label2"}, []string{}}, {[]string{"label1"}, 
[]string{}, []string{"label1"}, []string{"label2"}}, {[]string{"label1*"}, []string{}, []string{"label1"}, []string{"label2"}}, {[]string{}, []string{"*"}, []string{}, []string{"label1", "label2"}}, {[]string{}, []string{"lab*"}, []string{}, []string{"label1", "label2"}}, {[]string{}, []string{"label1"}, []string{"label2"}, []string{"label1"}}, {[]string{"*"}, []string{"*"}, []string{}, []string{"label1", "label2"}}, } func TestDockerGatherLabels(t *testing.T) { for _, tt := range gatherLabelsTests { var acc testutil.Accumulator d := Docker{ client: nil, testing: true, } for _, label := range tt.include { d.LabelInclude = append(d.LabelInclude, label) } for _, label := range tt.exclude { d.LabelExclude = append(d.LabelExclude, label) } err := d.Gather(&acc) require.NoError(t, err) for _, label := range tt.expected { if !acc.HasTag("docker_container_cpu", label) { t.Errorf("Didn't get expected label of %s. Test was: Include: %s Exclude %s", label, tt.include, tt.exclude) } } for _, label := range tt.notexpected { if acc.HasTag("docker_container_cpu", label) { t.Errorf("Got unexpected label of %s. Test was: Include: %s Exclude %s",<|fim▁hole|> } } } } var gatherContainerNames = []struct { include []string exclude []string expected []string notexpected []string }{ {[]string{}, []string{}, []string{"etcd", "etcd2"}, []string{}}, {[]string{"*"}, []string{}, []string{"etcd", "etcd2"}, []string{}}, {[]string{"etc*"}, []string{}, []string{"etcd", "etcd2"}, []string{}}, {[]string{"etcd"}, []string{}, []string{"etcd"}, []string{"etcd2"}}, {[]string{"etcd2*"}, []string{}, []string{"etcd2"}, []string{"etcd"}}, {[]string{}, []string{"etc*"}, []string{}, []string{"etcd", "etcd2"}}, {[]string{}, []string{"etcd"}, []string{"etcd2"}, []string{"etcd"}}, {[]string{"*"}, []string{"*"}, []string{"etcd", "etcd2"}, []string{}}, {[]string{}, []string{"*"}, []string{""}, []string{"etcd", "etcd2"}}, } func TestContainerNames(t *testing.T) { for _, tt := range gatherContainerNames { var acc testutil.Accumulator d := Docker{ client: nil, testing: true, ContainerInclude: tt.include, ContainerExclude: tt.exclude, } err := d.Gather(&acc) require.NoError(t, err) for _, metric := range acc.Metrics { if metric.Measurement == "docker_container_cpu" { if val, ok := metric.Tags["container_name"]; ok { var found bool = false for _, cname := range tt.expected { if val == cname { found = true break } } if !found { t.Errorf("Got unexpected container of %s. Test was -> Include: %s, Exclude: %s", val, tt.include, tt.exclude) } } } } for _, metric := range acc.Metrics { if metric.Measurement == "docker_container_cpu" { if val, ok := metric.Tags["container_name"]; ok { var found bool = false for _, cname := range tt.notexpected { if val == cname { found = true break } } if found { t.Errorf("Got unexpected container of %s. 
Test was -> Include: %s, Exclude: %s", val, tt.include, tt.exclude) } } } } } } func TestDockerGatherInfo(t *testing.T) { var acc testutil.Accumulator d := Docker{ client: nil, testing: true, TagEnvironment: []string{"ENVVAR1", "ENVVAR2", "ENVVAR3", "ENVVAR5", "ENVVAR6", "ENVVAR7", "ENVVAR8", "ENVVAR9"}, } err := acc.GatherError(d.Gather) require.NoError(t, err) acc.AssertContainsTaggedFields(t, "docker", map[string]interface{}{ "n_listener_events": int(0), "n_cpus": int(4), "n_used_file_descriptors": int(19), "n_containers": int(108), "n_containers_running": int(98), "n_containers_stopped": int(6), "n_containers_paused": int(3), "n_images": int(199), "n_goroutines": int(39), }, map[string]string{"engine_host": "absol"}, ) acc.AssertContainsTaggedFields(t, "docker_data", map[string]interface{}{ "used": int64(17300000000), "total": int64(107400000000), "available": int64(36530000000), }, map[string]string{ "unit": "bytes", "engine_host": "absol", }, ) acc.AssertContainsTaggedFields(t, "docker_container_cpu", map[string]interface{}{ "usage_total": uint64(1231652), "container_id": "b7dfbb9478a6ae55e237d4d74f8bbb753f0817192b5081334dc78476296e2173", }, map[string]string{ "container_name": "etcd2", "container_image": "quay.io:4443/coreos/etcd", "cpu": "cpu3", "container_version": "v2.2.2", "engine_host": "absol", "ENVVAR1": "loremipsum", "ENVVAR2": "dolorsitamet", "ENVVAR3": "=ubuntu:10.04", "ENVVAR7": "ENVVAR8=ENVVAR9", "label1": "test_value_1", "label2": "test_value_2", }, ) acc.AssertContainsTaggedFields(t, "docker_container_mem", map[string]interface{}{ "total_pgpgout": uint64(0), "usage_percent": float64(0), "rss": uint64(0), "total_writeback": uint64(0), "active_anon": uint64(0), "total_pgmafault": uint64(0), "total_rss": uint64(0), "total_unevictable": uint64(0), "active_file": uint64(0), "total_mapped_file": uint64(0), "pgpgin": uint64(0), "total_active_file": uint64(0), "total_active_anon": uint64(0), "total_cache": uint64(0), "inactive_anon": uint64(0), "pgmajfault": uint64(0), "total_inactive_anon": uint64(0), "total_rss_huge": uint64(0), "rss_huge": uint64(0), "hierarchical_memory_limit": uint64(0), "pgpgout": uint64(0), "unevictable": uint64(0), "total_inactive_file": uint64(0), "writeback": uint64(0), "total_pgfault": uint64(0), "total_pgpgin": uint64(0), "cache": uint64(0), "mapped_file": uint64(0), "inactive_file": uint64(0), "max_usage": uint64(0), "fail_count": uint64(0), "pgfault": uint64(0), "usage": uint64(0), "limit": uint64(18935443456), "container_id": "b7dfbb9478a6ae55e237d4d74f8bbb753f0817192b5081334dc78476296e2173", }, map[string]string{ "engine_host": "absol", "container_name": "etcd2", "container_image": "quay.io:4443/coreos/etcd", "container_version": "v2.2.2", "ENVVAR1": "loremipsum", "ENVVAR2": "dolorsitamet", "ENVVAR3": "=ubuntu:10.04", "ENVVAR7": "ENVVAR8=ENVVAR9", "label1": "test_value_1", "label2": "test_value_2", }, ) //fmt.Print(info) }<|fim▁end|>
label, tt.include, tt.exclude)
<|file_name|>2.py<|end_file_name|><|fim▁begin|>import timeit import pyximport; pyximport.install() from mod2 import cysum, cysum2 def pysum(start, step, count): ret = start for i in range(count):<|fim▁hole|> return ret print('Python', timeit.timeit('pysum(0, 1, 100)', 'from __main__ import pysum')) print('Cython', timeit.timeit('cysum(0, 1, 100)', 'from __main__ import cysum')) print('Cython with types', timeit.timeit('cysum2(0, 1, 100)', 'from __main__ import cysum2'))<|fim▁end|>
ret += step
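The benchmark above imports `cysum` and `cysum2` from a `mod2.pyx` this row never shows; the sketch below is a plausible reconstruction (an assumption), kept to plain `def` syntax so it is valid as both Python and Cython.
# mod2.pyx (assumed): cysum is pysum's loop, merely compiled by Cython.
# cysum2 would additionally declare C types, e.g.
#     def cysum2(int start, int step, int count):
#         cdef int ret = start
#         cdef int i
# removing Python-object overhead, which is where the large speedup comes from.
def cysum(start, step, count):
    ret = start
    for i in range(count):
        ret += step
    return ret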
<|file_name|>description.go<|end_file_name|><|fim▁begin|>package resources import ( "fmt" "strings" ) // Description holds information about the SKU description, with strings to be included/omitted. type Description struct { Contains []string Omits []string } func (d *Description) fillForComputeInstance(machineType, usageType string) error { anythingButN1 := []string{"N2", "N2D", "E2", "Compute", "Memory", "Sole Tenancy"} if usageType == "Preemptible" { d.Contains = append(d.Contains, "Preemptible") } else {<|fim▁hole|> } // Commitment N1 machines don't have "Commitment" specified. if strings.HasPrefix(usageType, "Commit") { d.Contains = append(d.Contains, "Commitment") if strings.Contains(machineType, "n1") { d.Omits = append(d.Omits, "N1") d.Omits = append(d.Omits, anythingButN1...) } } else { d.Omits = append(d.Omits, "Commitment") } // Custom E2 machines don't have separate SKUs. if strings.Contains(machineType, "custom") { if !strings.HasPrefix(machineType, "e2") { d.Contains = append(d.Contains, "Custom") } } else { d.Omits = append(d.Omits, "Custom") } // Custom N1 machines don't have any type specified, so all types must be excluded. if strings.HasPrefix(machineType, "custom") { d.Omits = append(d.Omits, "N1") d.Omits = append(d.Omits, anythingButN1...) } else { switch { case strings.HasPrefix(machineType, "c2-"): d.Contains = append(d.Contains, "Compute") case strings.HasPrefix(machineType, "m1-") || strings.HasPrefix(machineType, "m2-"): d.Contains = append(d.Contains, "Memory") d.Omits = append(d.Omits, "Upgrade") case strings.HasPrefix(machineType, "n1-mega") || strings.HasPrefix(machineType, "n1-ultra"): d.Contains = append(d.Contains, "Memory") d.Omits = append(d.Omits, "Upgrade") case strings.HasPrefix(machineType, "n1-") || strings.HasPrefix(machineType, "f1-") || strings.HasPrefix(machineType, "g1-"): if !strings.HasPrefix(usageType, "Commit") { d.Contains = append(d.Contains, "N1") } default: // All other machines have their type specified. i := strings.Index(machineType, "-") if i < 0 { return fmt.Errorf("wrong machine type format") } d.Contains = append(d.Contains, strings.ToUpper(machineType[:i])+" ") } } return nil } func (d *Description) fillForComputeDisk(diskType string, regional bool) { switch diskType { case "pd-standard": d.Contains = []string{"Storage PD Capacity"} case "pd-ssd": d.Contains = []string{"SSD backed PD Capacity"} default: } if regional { d.Contains = append(d.Contains, "Regional") } else { d.Omits = append(d.Omits, "Regional") } }<|fim▁end|>
d.Omits = append(d.Omits, "Preemptible")
<|file_name|>net.rs<|end_file_name|><|fim▁begin|>//! A chat server that broadcasts a message to all connections. //! //! This example is explicitly more verbose than it has to be. This is to //! illustrate more concepts. //! //! A chat server for telnet clients. After a telnet client connects, the first //! line should contain the client's name. After that, all lines sent by a //! client are broadcasted to all other connected clients. //! //! Because the client is telnet, lines are delimited by "\r\n". //! //! You can test this out by running: //! //! cargo run --example chat //! //! And then in another terminal run: //! //! telnet localhost 6142 //! //! You can run the `telnet` command in any number of additional windows. //! //! You can run the second command in multiple windows and then chat between the //! two, seeing the messages from the other client as they're received. For all //! connected clients they'll all join the same room and see everyone else's //! messages. #![deny(deprecated)] extern crate tokio; extern crate futures; extern crate bytes; extern crate libspp; use tokio::net::{TcpListener}; use tokio::prelude::*; use libspp::prelude::*; use std::io; fn new_spp_handle<'a, R, W>(reader: R, writer: W) -> SppHandle<'a, R, W> where R: AsyncRead, W: AsyncWrite { SppHandle { reader: Some(reader), writer: Some(writer), mapper: libspp::mapper::new(), } } #[derive(Debug)] pub struct SppHandle<'a, R, W> { reader: Option<R>, writer: Option<W>, mapper: SppMapper<'a> } impl<'a, R, W> Future for SppHandle<'a, R, W> where R: AsyncRead, W: AsyncWrite, { type Item = (R, W); type Error = io::Error; fn poll(&mut self) -> Poll<Self::Item, Self::Error> { loop { } } } #[test] pub fn main() { let address = "127.0.0.1:6142".parse().unwrap(); let listener = TcpListener::bind(&address).unwrap(); println!("server running on localhost:6142"); let server = listener.incoming().for_each(move |stream| { let address = stream.peer_addr().unwrap(); println!("New connection from {} ", address); let (reader, writer) = stream.split(); tokio::spawn( tokio::io::copy(reader, writer).map(|amt| { println!("wrote {:?} bytes", amt) }).map_err(|err| { eprintln!("IO error {:?}", err) }) ); Ok(()) }) .map_err(|err| { println!("accept error = {:?}", err);<|fim▁hole|> tokio::run(server); }<|fim▁end|>
});
<|file_name|>database_adapter.py<|end_file_name|><|fim▁begin|># =============================================================================== # Copyright 2011 Jake Ross # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # =============enthought library imports======================= import os from datetime import datetime, timedelta from threading import Lock import six from sqlalchemy import create_engine, distinct, MetaData from sqlalchemy.exc import ( SQLAlchemyError, InvalidRequestError, StatementError, DBAPIError, OperationalError, ) from sqlalchemy.orm import sessionmaker from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound from traits.api import ( Password, Bool, Str, on_trait_change, Any, Property, cached_property, Int, ) from pychron.database.core.base_orm import AlembicVersionTable from pychron.database.core.query import compile_query from pychron.loggable import Loggable from pychron.regex import IPREGEX def obscure_host(h): if IPREGEX.match(h): h = "x.x.x.{}".format(h.split(".")[-1]) return h def binfunc(ds, hours): ds = [dx.timestamp for dx in ds] p1 = ds[0] delta_seconds = hours * 3600 td = timedelta(seconds=delta_seconds * 0.25) for i, di in enumerate(ds): i = max(0, i - 1) dd = ds[i] if (di - dd).total_seconds() > delta_seconds: yield p1 - td, dd + td p1 = di yield p1 - td, di + td class SessionCTX(object): def __init__(self, parent, use_parent_session=True): self._use_parent_session = use_parent_session self._parent = parent self._session = None self._psession = None def __enter__(self): if self._use_parent_session: self._parent.create_session() return self._parent.session else: self._psession = self._parent.session self._session = self._parent.session_factory() self._parent.session = self._session return self._session def __exit__(self, exc_type, exc_val, exc_tb): if self._session: self._session.close() else: self._parent.close_session() if self._psession: self._parent.session = self._psession self._psession = None class MockQuery: def join(self, *args, **kw): return self def filter(self, *args, **kw): # type: (object, object) -> object return self def all(self, *args, **kw): return [] def order_by(self, *args, **kw): return self class MockSession: def query(self, *args, **kw): return MockQuery() # def __getattr__(self, item): # return class DatabaseAdapter(Loggable): """ The DatabaseAdapter is a base class for interacting with a SQLAlchemy database. Two main subclasses are used by pychron, IsotopeAdapter and MassSpecDatabaseAdapter. This class provides attributes for describing the database url, i.e host, user, password etc, and methods for connecting and opening database sessions. It also provides some helper functions used extensively by the subclasses, e.g. 
``_add_item``, ``_retrieve_items`` """ session = None sess_stack = 0 reraise = False connected = Bool(False) kind = Str prev_kind = Str username = Str host = Str password = Password timeout = Int session_factory = None application = Any test_func = "get_versions" version_func = "get_versions" autoflush = True autocommit = False commit_on_add = True # name used when writing to database # save_username = Str connection_parameters_changed = Bool url = Property(depends_on="connection_parameters_changed") datasource_url = Property(depends_on="connection_parameters_changed") path = Str echo = False verbose_retrieve_query = False verbose = True connection_error = Str _session_lock = None modified = False _trying_to_add = False _test_connection_enabled = True def __init__(self, *args, **kw): super(DatabaseAdapter, self).__init__(*args, **kw) self._session_lock = Lock() def create_all(self, metadata): """ Build a database schema with the current connection :param metadata: SQLAchemy MetaData object """ # if self.kind == 'sqlite': metadata.create_all(self.session.bind) # def session_ctx(self, sess=None, commit=True, rollback=True): # """ # Make a new session context. # # :return: ``SessionCTX`` # """ # with self._session_lock: # if sess is None: # sess = self.sess # return SessionCTX(sess, parent=self, commit=commit, rollback=rollback) _session_cnt = 0 def session_ctx(self, use_parent_session=True): with self._session_lock: return SessionCTX(self, use_parent_session) def create_session(self, force=False): if self.connect(test=False): if self.session_factory: if force: self.debug("force create new session {}".format(id(self))) if self.session: self.session.close() self.session = self.session_factory() self._session_cnt = 1 else: if not self.session: # self.debug('create new session {}'.format(id(self))) self.session = self.session_factory() self._session_cnt += 1 else: self.warning("no session factory") else: self.session = MockSession() def close_session(self): if self.session and not isinstance(self.session, MockSession): self.session.flush() self._session_cnt -= 1 if not self._session_cnt: self.debug("close session {}".format(id(self))) self.session.close() self.session = None @property def enabled(self): return self.kind in ["mysql", "sqlite", "postgresql", "mssql"] @property def save_username(self): from pychron.globals import globalv return globalv.username @on_trait_change("username,host,password,name,kind,path") def reset_connection(self): """ Trip the ``connection_parameters_changed`` flag. Next ``connect`` call with use the new values """ self.connection_parameters_changed = True self.session_factory = None self.session = None # @caller def connect( self, test=True, force=False, warn=True, version_warn=True, attribute_warn=False ): """ Connect to the database :param test: Test the connection by running ``test_func`` :param force: Test connection even if connection parameters haven't changed :param warn: Warn if the connection test fails :param version_warn: Warn if database/pychron versions don't match :return: True if connected else False :rtype: bool """ self.connection_error = "" if force: self.debug("forcing database connection") if self.connection_parameters_changed: self._test_connection_enabled = True force = True if not self.connected or force: # self.connected = True if self.kind == 'sqlite' else False self.connected = False pool_recycle = 600 if self.kind == "sqlite": self.connected = True test = False pool_recycle = -1 self.connection_error = ( 'Database "{}" kind not set. 
' 'Set in Preferences. current kind="{}"'.format(self.name, self.kind) ) if not self.enabled: from pychron.core.ui.gui import invoke_in_main_thread invoke_in_main_thread(self.warning_dialog, self.connection_error) else: url = self.url if url is not None: self.info( "{} connecting to database {}".format(id(self), self.public_url) ) engine = create_engine( url, echo=self.echo, pool_recycle=pool_recycle ) self.session_factory = sessionmaker( bind=engine, autoflush=self.autoflush, expire_on_commit=False, autocommit=self.autocommit, ) if test: if not self._test_connection_enabled: warn = False else: if self.test_func: self.connected = self._test_db_connection(version_warn) else: self.connected = True else: self.connected = True if self.connected: self.info("connected to db {}".format(self.public_url)) # self.initialize_database() else: self.connection_error = 'Not Connected to Database "{}".\nAccess Denied for user= {} \ host= {}\nurl= {}'.format( self.name, self.username, self.host, self.public_url ) if warn: from pychron.core.ui.gui import invoke_in_main_thread invoke_in_main_thread( self.warning_dialog, self.connection_error ) self.connection_parameters_changed = False return self.connected # def initialize_database(self): # pass def rollback(self): if self.session: self.session.rollback() def flush(self): """ flush the session """ if self.session: try: self.session.flush() except: self.session.rollback() def expire(self, i): if self.session: self.session.expire(i) def expire_all(self): if self.session: self.session.expire_all() def commit(self): """ commit the session """ if self.session: try: self.session.commit() except BaseException as e: self.warning("Commit exception: {}".format(e)) self.session.rollback() def delete(self, obj): if self.session: self.session.delete(obj) def post_commit(self): if self._trying_to_add: self.modified = True def add_item(self, *args, **kw): return self._add_item(*args, **kw) # def get_session(self): # """ # return the current session or make a new one # # :return: Session # """ # sess = self.sess # if sess is None: # self.debug('$$$$$$$$$$$$$$$$ session is None') # sess = self.session_factory() # # return sess def get_migrate_version(self, **kw): """ Query the AlembicVersionTable """ q = self.session.query(AlembicVersionTable) mv = q.one() return mv def get_versions(self, **kw): pass @property def public_datasource_url(self): if self.kind == "sqlite": url = "{}:{}".format( os.path.basename(os.path.dirname(self.path)), os.path.basename(self.path), ) else: url = "{}:{}".format(obscure_host(self.host), self.name) return url @cached_property def _get_datasource_url(self): if self.kind == "sqlite": url = "{}:{}".format( os.path.basename(os.path.dirname(self.path)), os.path.basename(self.path), ) else: url = "{}:{}".format(self.host, self.name) return url @property def public_url(self): kind = self.kind user = self.username host = self.host name = self.name if kind == "sqlite": pu = "{}:{}".format( os.path.basename(os.path.dirname(self.path)), os.path.basename(self.path), ) else: pu = "{}://{}@{}/{}".format(kind, user, host, name) return pu @cached_property def _get_url(self): kind = self.kind password = self.password user = self.username host = self.host name = self.name timeout = self.timeout if kind in ("mysql", "postgresql", "mssql"): if kind == "mysql": # add support for different mysql drivers driver = self._import_mysql_driver() if driver is None: return elif kind == "mssql": driver = self._import_mssql_driver() if driver is None: return else: driver = 
"pg8000" if password: user = "{}:{}".format(user, password) prefix = "{}+{}://{}@".format(kind, driver, user) if driver == "pyodbc": url = "{}{}".format(prefix, name) else: url = "{}{}/{}".format(prefix, host, name) if kind == "mysql" and self.timeout: url = "{}?connect_timeout={}".format(url, timeout) else: url = "sqlite:///{}".format(self.path) return url def _import_mssql_driver(self): driver = None try: import pyodbc driver = "pyodbc" except ImportError: try: import pymssql driver = "pymssql" except ImportError: pass self.info('using mssql driver="{}"'.format(driver)) return driver def _import_mysql_driver(self): try: """ pymysql https://github.com/petehunt/PyMySQL/ """ import pymysql driver = "pymysql" except ImportError: try: import _mysql driver = "mysqldb" except ImportError: self.warning_dialog( "A mysql driver was not found. Install PyMySQL or MySQL-python" ) return self.info('using mysql driver="{}"'.format(driver)) return driver def _test_db_connection(self, version_warn): self.connected = True self.create_session() try: self.info("testing database connection {}".format(self.test_func)) vers = getattr(self, self.test_func)(reraise=True) if version_warn: self._version_warn_hook() connected = True except OperationalError: self.warning("Operational connection failed to {}".format(self.public_url)) connected = False self._test_connection_enabled = False except Exception as e: self.debug_exception() self.warning( "connection failed to {} exception={}".format(self.public_url, e) ) connected = False finally: self.info("closing test session") self.close_session() return connected def _version_warn_hook(self): pass # def test_version(self): # ver = getattr(self, self.version_func)() # ver = ver.version_num # aver = version.__alembic__ # if ver != aver: # return 'Database is out of data. 
Pychron ver={}, Database ver={}'.format(aver, ver) def _add_item(self, obj): sess = self.session if sess: sess.add(obj) try: if self.autoflush: sess.flush() self.modified = True self._trying_to_add = True if not self.autocommit and self.commit_on_add: sess.commit() return obj except SQLAlchemyError as e: import traceback self.debug( "add_item exception {} {}".format(obj, traceback.format_exc()) ) sess.rollback() if self.reraise: raise else: self.critical("No session") def _add_unique(self, item, attr, name): nitem = getattr(self, "get_{}".format(attr))(name) if nitem is None: self.info("adding {}= {}".format(attr, name)) self._add_item(item) nitem = item return nitem def _get_date_range(self, q, asc, desc, hours=0): lan = q.order_by(asc).first() han = q.order_by(desc).first() lan = datetime.now() if not lan else lan.timestamp han = datetime.now() if not han else han.timestamp td = timedelta(hours=hours) return lan - td, han + td def _delete_item(self, value, name=None): if name is not None: func = getattr(self, "get_{}".format(name)) item = func(value) else: item = value if item: self.debug("deleting value={},name={},item={}".format(value, name, item)) self.session.delete(item) def _retrieve_items( self, table, joins=None, filters=None, limit=None, order=None, distinct_=False, query_hook=None, reraise=False, func="all", group_by=None, verbose_query=False, ): sess = self.session if sess is None or isinstance(sess, MockSession): self.debug("USING MOCKSESSION************** {}".format(sess)) return [] if distinct_: if isinstance(distinct_, bool): q = sess.query(distinct(table)) else: q = sess.query(distinct(distinct_)) elif isinstance(table, tuple): q = sess.query(*table) else: q = sess.query(table) if joins: try: for ji in joins: if ji != table: q = q.join(ji) except InvalidRequestError: if reraise: raise if filters is not None: for fi in filters: q = q.filter(fi) if order is not None: if not isinstance(order, tuple): order = (order,) q = q.order_by(*order) if group_by is not None: if not isinstance(order, tuple): group_by = (group_by,) q = q.group_by(*group_by) if limit is not None: q = q.limit(limit) if query_hook: q = query_hook(q) if verbose_query or self.verbose_retrieve_query: # print compile_query(q) self.debug(compile_query(q)) items = self._query(q, func, reraise) if items is None: items = [] return items def _retrieve_first(self, table, value=None, key="name", order_by=None): if value is not None: if not isinstance(value, (str, int, six.text_type, int, float)): return value q = self.session.query(table) if value is not None: q = q.filter(getattr(table, key) == value) try: if order_by is not None: q = q.order_by(order_by) return q.first() except SQLAlchemyError as e: print("execption first", e) return def _query_all(self, q, **kw): ret = self._query(q, "all", **kw) return ret or [] def _query_first(self, q, **kw): return self._query(q, "first", **kw) def _query_one(self, q, **kw): q = q.limit(1) return self._query(q, "one", **kw) def _query(self, q, func, reraise=False, verbose_query=False): if verbose_query: try: cq = compile_query(q) self.debug(cq) except BaseException: cq = "Query failed to compile" self.debug_exception() # print compile_query(q) f = getattr(q, func) try: return f() except NoResultFound: if verbose_query: self.info("no results found for query -- {}".format(cq)) except OperationalError as e: self.debug("_query operation exception") self.debug_exception() except SQLAlchemyError as e: if self.verbose: self.debug("_query exception {}".format(e)) try: self.rollback() 
self.reset_connection() self.connect() except BaseException: pass if reraise: raise e def _append_filters(self, f, kw): filters = kw.get("filters", []) if isinstance(f, (tuple, list)): filters.extend(f) else: filters.append(f) kw["filters"] = filters return kw def _append_joins(self, f, kw): joins = kw.get("joins", []) if isinstance(f, (tuple, list)): joins.extend(f) else: joins.append(f)<|fim▁hole|> def _retrieve_item( self, table, value, key="name", last=None, joins=None, filters=None, options=None, verbose=True, verbose_query=False, ): if not isinstance(value, (str, int, six.text_type, int, float, list, tuple)): return value if not isinstance(value, (list, tuple)): value = (value,) if not isinstance(key, (list, tuple)): key = (key,) def __retrieve(s): q = s.query(table) if joins: try: for ji in joins: if ji != table: q = q.join(ji) except InvalidRequestError: pass if filters is not None: for fi in filters: q = q.filter(fi) for k, v in zip(key, value): q = q.filter(getattr(table, k) == v) if last: q = q.order_by(last) if verbose_query or self.verbose_retrieve_query: self.debug(compile_query(q)) ntries = 3 import traceback for i in range(ntries): try: return q.one() except (DBAPIError, OperationalError, StatementError): self.debug(traceback.format_exc()) s.rollback() continue except MultipleResultsFound: if verbose: self.debug( "multiples row found for {} {} {}. Trying to get last row".format( table.__tablename__, key, value ) ) try: if hasattr(table, "id"): q = q.order_by(table.id.desc()) return q.limit(1).all()[-1] except (SQLAlchemyError, IndexError, AttributeError) as e: if verbose: self.debug( "no rows for {} {} {}".format( table.__tablename__, key, value ) ) break except NoResultFound: if verbose and self.verbose: self.debug( "no row found for {} {} {}".format( table.__tablename__, key, value ) ) break close = False if self.session is None: self.create_session() close = True ret = __retrieve(self.session) if close: self.close_session() return ret def _get_items( self, table, gtables, join_table=None, filter_str=None, limit=None, order=None, key=None, ): if isinstance(join_table, str): join_table = gtables[join_table] q = self._get_query(table, join_table=join_table, filter_str=filter_str) if order: for o in order if isinstance(order, list) else [order]: q = q.order_by(o) if limit: q = q.limit(limit) # reorder based on id if order: q = q.from_self() q = q.order_by(table.id) res = q.all() if key: return [getattr(ri, key) for ri in res] return res class PathDatabaseAdapter(DatabaseAdapter): path_table = None def add_path(self, rec, path, **kw): if self.path_table is None: raise NotImplementedError kw = self._get_path_keywords(path, kw) p = self.path_table(**kw) rec.path = p return p def _get_path_keywords(self, path, args): n = os.path.basename(path) r = os.path.dirname(path) args["root"] = r args["filename"] = n return args class SQLiteDatabaseAdapter(DatabaseAdapter): kind = "sqlite" def build_database(self): self.connect(test=False) if not os.path.isfile(self.path): meta = MetaData() self._build_database(self.session, meta) def _build_database(self, sess, meta): raise NotImplementedError # ============= EOF =============================================<|fim▁end|>
kw["joins"] = joins return kw
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'TrackedUser' db.create_table('user_analytics_trackeduser', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('cookie', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), ('user_agent', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), )) db.send_create_signal('user_analytics', ['TrackedUser']) def backwards(self, orm): # Deleting model 'TrackedUser' db.delete_table('user_analytics_trackeduser') <|fim▁hole|> 'Meta': {'object_name': 'TrackedUser'}, 'cookie': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user_agent': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) } } complete_apps = ['user_analytics']<|fim▁end|>
models = { 'user_analytics.trackeduser': {
<|file_name|>hog.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding: utf-8 # # License: BSD; see LICENSE for more details. from pygments.lexer import RegexLexer, include, bygroups import pygments.token as t class SnortLexer(RegexLexer): name = 'Snort' aliases = ['snort', 'hog'] filenames = ['*.rules'] tokens = { 'root': [ (r'#.*$', t.Comment), (r'(\$\w+)', t.Name.Variable), (r'\b(any|(\d{1,3}\.){3}\d{1,3}(/\d+)?)', t.Name.Variable), (r'^\s*(log|pass|alert|activate|dynamic|drop|reject|sdrop|' r'ruletype|var|portvar|ipvar)', t.Keyword.Type), (r'\b(metadata)(?:\s*:)', t.Keyword, 'metadata'), (r'\b(reference)(?:\s*:)', t.Keyword, 'reference'), (r'\b(msg|reference|gid|sid|rev|classtype|priority|metadata|' r'content|http_encode|uricontent|urilen|isdataat|pcre|pkt_data|' r'file_data|base64_decode|base64_data|byte_test|byte_jump|' r'byte_extract|ftp_bounce|pcre|asn1|cvs|dce_iface|dce_opnum|' r'dce_stub_data|sip_method|sip_stat_code|sip_header|sip_body|' r'gtp_type|gtp_info|gtp_version|ssl_version|ssl_state|nocase|' r'rawbytes|depth|offset|distance|within|http_client_body|' r'http_cookie|http_raw_cookie|http_header|http_raw_header|' r'http_method|http_uri|http_raw_uri|http_stat_code|' r'http_stat_msg|fast_pattern|fragoffset|fragbits|' r'ttl|tos|id|ipopts|dsize|flags|flow|flowbits|seq|ack|window|' r'itype|icode|icmp_id|icmp_seq|rpc|ip_proto|sameip|' r'stream_reassemble|stream_size|logto|session|resp|react|tag|' r'activates|activated_by|replace|detection_filter|treshold)' r'(?:\s*:)', t.Keyword), (r'\b(tcp|udp|icmp|ip)', t.Keyword.Constant), (r'\b(hex|dec|oct|string|type|output|any|engine|soid|service|' r'norm|raw|relative|bytes|big|little|align|invalid-entry|' r'enable|disable|client|server|both|either|printable|binary|' r'all|session|host|packets|seconds|bytes|src|dst|track|by_src|' r'by_dst|uri|header|cookie|utf8|double_encode|non_ascii|' r'uencode|bare_byte|ascii|iis_encode|bitstring_overflow|' r'double_overflow|oversize_length|absolute_offset|' r'relative_offset|rr|eol|nop|ts|sec|esec|lsrr|lsrre|' r'ssrr|satid|to_client|to_server|from_client|from_server|' r'established|not_established|stateless|no_stream|only_stream|' r'no_frag|only_frag|set|setx|unset|toggle|isset|isnotset|' r'noalert|limit|treshold|count|str_offset|str_depth|tagged)', t.Name.Attribute), (r'(<-|->|<>)', t.Operator), (ur'”', t.String, 'fancy-string'), (ur'“', t.String, 'fancy-string'), (r'"', t.String, 'dq-string'), (r'\'', t.String, 'sq-string'), (r'(\d+)', t.Number), (r';', t.Punctuation), (r'\\', t.String.Escape), (r'\s+', t.Whitespace), ], 'hex': [ (r'\|([a-fA-F0-9 ]+)\|', t.Number.Hex), ], 'dq-string': [ include('hex'), (r'([^"])', t.String), (r'"', t.String, '#pop') ], 'sq-string': [ include('hex'), (r'([^\'])', t.String), (r'\'', t.String, '#pop') ], 'fancy-string': [ include('hex'), (ur'([^”])', t.String), (ur'”', t.String, '#pop') ], 'metadata': [ (r'\s', t.Whitespace), (r'([\w_-]+)(\s+)([\w_-]+)', bygroups(t.Name.Variable, t.Whitespace, t.Name.Attribute)), (r';', t.Punctuation, '#pop'), ], 'reference': [ (r'(\w+)(,)(?:\s*)([^;]+)', bygroups(t.Name.Variable, t.Punctuation, t.Name.Attribute)), (r';', t.Punctuation, '#pop') ] } if __name__ == '__main__': from pygments import highlight from pygments.formatters import Terminal256Formatter from sys import argv<|fim▁hole|> for arg in argv[1:]: input = io.open(arg, 'r') code = input.read(-1) print("Highlighting " + arg) print(highlight(code, SnortLexer(encoding='chardet'), Terminal256Formatter(encoding='utf-8'))) else: code = """ alert tcp $HOME_NET any -> 
192.168.1.0/24 111 (content:"|00 01 86 a5|"; msg: "mountd access";) alert tcp any any -> any 21 (content:"site exec"; content:"%"; msg:"site exec buffer overflow attempt";) alert tcp !192.168.1.0/24 any -> 192.168.1.0/24 111 (content: "|00 01 86 a5|"; msg: "external mountd access";) """ print(highlight(code, SnortLexer(), Terminal256Formatter()))<|fim▁end|>
if len(argv) > 1: import io
<|file_name|>pub-restricted-error.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(pub_restricted)] struct Bar(pub(())); struct Foo { pub(crate) () foo: usize, //~ ERROR expected identifier } <|fim▁hole|>fn main() {}<|fim▁end|>
<|file_name|>tax_summary_edit.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python """<|fim▁hole|>edits mothur taxonomy summary file transfers last name that is not "unclassified" or "uncultured" to "unclassified" or "uncultured" assignment make sure that the file has default sorting (by rankID) Copyright: tax_summary_edit edits mothur taxonomy summary file Copyright (C) 2016 William Brazelton This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import sys infilename = sys.argv[1] outfilename = infilename + '.renamed.txt' outfile = open(outfilename,'a') infile = open(infilename) for line in infile: if "unclassified" in line: columns = line.split('\t') tax = columns[2] newtax = tax + ' ' + lasttax outfile.write(columns[0]) outfile.write('\t') outfile.write(columns[1]) outfile.write('\t') outfile.write(newtax) for tab in columns[3:]: outfile.write('\t') outfile.write(tab) elif "uncultured" in line: columns = line.split('\t') tax = columns[2] newtax = tax + ' ' + lasttax outfile.write(columns[0]) outfile.write('\t') outfile.write(columns[1]) outfile.write('\t') outfile.write(newtax) for tab in columns[3:]: outfile.write('\t') outfile.write(tab) else: outfile.write(line) columns = line.split('\t') lasttax = columns[2] infile.close() outfile.close()<|fim▁end|>
<|file_name|>ParseException.java<|end_file_name|><|fim▁begin|>/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */ package org.cfeclipse.cfml.parser.cfscript; /** * This exception is thrown when parse errors are encountered. * You can explicitly create objects of this exception type by <|fim▁hole|> * calling the method generateParseException in the generated * parser. * * You can modify this class to customize your error reporting * mechanisms so long as you retain the public fields. */ public class ParseException extends Exception { /** * This constructor is used by the method "generateParseException" * in the generated parser. Calling this constructor generates * a new object of this type with the fields "currentToken", * "expectedTokenSequences", and "tokenImage" set. The boolean * flag "specialConstructor" is also set to true to indicate that * this constructor was used to create this object. * This constructor calls its super class with the empty string * to force the "toString" method of parent class "Throwable" to * print the error message in the form: * ParseException: <result of getMessage> */ public ParseException(Token currentTokenVal, int[][] expectedTokenSequencesVal, String[] tokenImageVal ) { super(""); specialConstructor = true; currentToken = currentTokenVal; expectedTokenSequences = expectedTokenSequencesVal; tokenImage = tokenImageVal; } /** * The following constructors are for use by you for whatever * purpose you can think of. Constructing the exception in this * manner makes the exception behave in the normal way - i.e., as * documented in the class "Throwable". The fields "errorToken", * "expectedTokenSequences", and "tokenImage" do not contain * relevant information. The JavaCC generated code does not use * these constructors. */ public ParseException() { super(); specialConstructor = false; } public ParseException(String message) { super(message); specialConstructor = false; } /** * This variable determines which constructor was used to create * this object and thereby affects the semantics of the * "getMessage" method (see below). */ protected boolean specialConstructor; /** * This is the last token that has been consumed successfully. If * this object has been created due to a parse error, the token * followng this token will (therefore) be the first error token. */ public Token currentToken; /** * Each entry in this array is an array of integers. Each array * of integers represents a sequence of tokens (by their ordinal * values) that is expected at this point of the parse. */ public int[][] expectedTokenSequences; /** * This is a reference to the "tokenImage" array of the generated * parser within which the parse error occurred. This array is * defined in the generated ...Constants interface. */ public String[] tokenImage; /** * This method has the standard behavior when this object has been * created using the standard constructors. Otherwise, it uses * "currentToken" and "expectedTokenSequences" to generate a parse * error message and returns it. If this object has been created * due to a parse error, and you do not catch it (it gets thrown * from the parser), then this method is called during the printing * of the final stack trace, and hence the correct error message * gets displayed. 
*/ public String getMessage() { if (!specialConstructor) { return super.getMessage(); } String expected = ""; int maxSize = 0; for (int i = 0; i < expectedTokenSequences.length; i++) { if (maxSize < expectedTokenSequences[i].length) { maxSize = expectedTokenSequences[i].length; } for (int j = 0; j < expectedTokenSequences[i].length; j++) { expected += tokenImage[expectedTokenSequences[i][j]] + " "; } if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) { expected += "..."; } expected += eol + " "; } String retval = "Encountered \""; Token tok = currentToken.next; for (int i = 0; i < maxSize; i++) { if (i != 0) retval += " "; if (tok.kind == 0) { retval += tokenImage[0]; break; } retval += add_escapes(tok.image); tok = tok.next; } retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn; retval += "." + eol; if (expectedTokenSequences.length == 1) { retval += "Was expecting:" + eol + " "; } else { retval += "Was expecting one of:" + eol + " "; } retval += expected; return retval; } /** * The end of line string for this machine. */ protected String eol = System.getProperty("line.separator", "\n"); /** * Used to convert raw characters to their escaped version * when these raw version cannot be used as part of an ASCII * string literal. */ protected String add_escapes(String str) { StringBuffer retval = new StringBuffer(); char ch; for (int i = 0; i < str.length(); i++) { switch (str.charAt(i)) { case 0 : continue; case '\b': retval.append("\\b"); continue; case '\t': retval.append("\\t"); continue; case '\n': retval.append("\\n"); continue; case '\f': retval.append("\\f"); continue; case '\r': retval.append("\\r"); continue; case '\"': retval.append("\\\""); continue; case '\'': retval.append("\\\'"); continue; case '\\': retval.append("\\\\"); continue; default: if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { String s = "0000" + Integer.toString(ch, 16); retval.append("\\u" + s.substring(s.length() - 4, s.length())); } else { retval.append(ch); } continue; } } return retval.toString(); } }<|fim▁end|>
<|file_name|>MinusMD.cpp<|end_file_name|><|fim▁begin|>// Mantid Repository : https://github.com/mantidproject/mantid // // Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI, // NScD Oak Ridge National Laboratory, European Spallation Source // & Institut Laue - Langevin // SPDX - License - Identifier: GPL - 3.0 + #include "MantidMDAlgorithms/MinusMD.h" #include "MantidDataObjects/MDBox.h" #include "MantidDataObjects/MDBoxIterator.h" #include "MantidDataObjects/MDEventFactory.h" #include "MantidDataObjects/MDEventWorkspace.h" #include "MantidKernel/System.h" using namespace Mantid::Kernel; using namespace Mantid::API; using namespace Mantid::DataObjects; namespace Mantid { namespace MDAlgorithms { // Register the algorithm into the AlgorithmFactory DECLARE_ALGORITHM(MinusMD) //---------------------------------------------------------------------------------------------- /// Algorithm's name for identification. @see Algorithm::name const std::string MinusMD::name() const { return "MinusMD"; } /// Algorithm's version for identification. @see Algorithm::version int MinusMD::version() const { return 1; } //---------------------------------------------------------------------------------------------- //---------------------------------------------------------------------------------------------- /// Is the operation commutative? bool MinusMD::commutative() const { return false; } //---------------------------------------------------------------------------------------------- /// Check the inputs and throw if the algorithm cannot be run void MinusMD::checkInputs() { if (m_lhs_event || m_rhs_event) { if (m_lhs_histo || m_rhs_histo) throw std::runtime_error("Cannot subtract a MDHistoWorkspace and a " "MDEventWorkspace (only MDEventWorkspace - " "MDEventWorkspace is allowed)."); if (m_lhs_scalar || m_rhs_scalar) throw std::runtime_error("Cannot subtract a MDEventWorkspace and a " "scalar (only MDEventWorkspace - " "MDEventWorkspace is allowed)."); } } //---------------------------------------------------------------------------------------------- /** Perform the subtraction. 
* * Will do m_out_event -= m_operand_event * * @param ws :: MDEventWorkspace being added to */ template <typename MDE, size_t nd> void MinusMD::doMinus(typename MDEventWorkspace<MDE, nd>::sptr ws1) { typename MDEventWorkspace<MDE, nd>::sptr ws2 = boost::dynamic_pointer_cast<MDEventWorkspace<MDE, nd>>(m_operand_event); if (!ws1 || !ws2) throw std::runtime_error("Incompatible workspace types passed to MinusMD."); MDBoxBase<MDE, nd> *box1 = ws1->getBox(); MDBoxBase<MDE, nd> *box2 = ws2->getBox(); Progress prog(this, 0.0, 0.4, box2->getBoxController()->getTotalNumMDBoxes()); // How many events you started with size_t initial_numEvents = ws1->getNPoints(); // Make a leaf-only iterator through all boxes with events in the RHS // workspace<|fim▁hole|> if (box) { // Copy the events from WS2 and add them into WS1 const std::vector<MDE> &events = box->getConstEvents(); // Perform a copy while flipping the signal std::vector<MDE> eventsCopy; eventsCopy.reserve(events.size()); for (auto it = events.begin(); it != events.end(); it++) { MDE eventCopy(*it); eventCopy.setSignal(-eventCopy.getSignal()); eventsCopy.push_back(eventCopy); } // Add events, with bounds checking box1->addEvents(eventsCopy); box->releaseEvents(); } prog.report("Substracting Events"); } while (it2.next()); this->progress(0.41, "Splitting Boxes"); // This is freed in the destructor of the ThreadPool class, // it should not be a memory leak auto prog2 = new Progress(this, 0.4, 0.9, 100); ThreadScheduler *ts = new ThreadSchedulerFIFO(); ThreadPool tp(ts, 0, prog2); ws1->splitAllIfNeeded(ts); prog2->resetNumSteps(ts->size(), 0.4, 0.6); tp.joinAll(); this->progress(0.95, "Refreshing cache"); ws1->refreshCache(); // Set a marker that the file-back-end needs updating if the # of events // changed. if (ws1->getNPoints() != initial_numEvents) ws1->setFileNeedsUpdating(true); } //---------------------------------------------------------------------------------------------- /// Run the algorithm with an MDEventWorkspace as output void MinusMD::execEvent() { // Now we add m_operand_event into m_out_event. CALL_MDEVENT_FUNCTION(this->doMinus, m_out_event); // Clear masking (box flags) from the output workspace m_out_event->clearMDMasking(); // Set to the output setProperty("OutputWorkspace", m_out_event); } //---------------------------------------------------------------------------------------------- /// Run the algorithm with a MDHisotWorkspace as output and operand void MinusMD::execHistoHisto( Mantid::DataObjects::MDHistoWorkspace_sptr out, Mantid::DataObjects::MDHistoWorkspace_const_sptr operand) { out->subtract(*operand); } //---------------------------------------------------------------------------------------------- /// Run the algorithm with a MDHisotWorkspace as output, scalar and operand void MinusMD::execHistoScalar( Mantid::DataObjects::MDHistoWorkspace_sptr out, Mantid::DataObjects::WorkspaceSingleValue_const_sptr scalar) { out->subtract(scalar->y(0)[0], scalar->e(0)[0]); } } // namespace MDAlgorithms } // namespace Mantid<|fim▁end|>
  MDBoxIterator<MDE, nd> it2(box2, 1000, true);
  do {
    MDBox<MDE, nd> *box = dynamic_cast<MDBox<MDE, nd> *>(it2.getBox());
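Note on the pattern in the row above: MinusMD implements MD event subtraction as signed addition — every event copied from the right-hand workspace has its signal negated before being merged into the left-hand box structure, so the existing addEvents/split/refreshCache machinery is reused unchanged. A minimal, self-contained Python sketch of the same idea (illustrative only; Event and subtract_events are hypothetical stand-ins, not Mantid API):

from dataclasses import dataclass

@dataclass
class Event:
    center: tuple     # MD event coordinates
    signal: float     # signal (weight)
    error_sq: float   # squared error, stays positive under subtraction

def subtract_events(lhs, rhs):
    # Mirror MinusMD::doMinus: copy rhs events with flipped signal into lhs.
    for ev in rhs:
        lhs.append(Event(ev.center, -ev.signal, ev.error_sq))
    return lhs

ws1 = [Event((0.0,), 2.0, 0.5)]
ws2 = [Event((0.0,), 1.5, 0.3)]
subtract_events(ws1, ws2)
print(sum(ev.signal for ev in ws1))  # -> 0.5, the net signal after subtraction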
<|file_name|>artistphoto.py<|end_file_name|><|fim▁begin|>#################################################################
# This file is part of glyr
# + a command-line tool and library to download various sorts of music-related metadata.
# + Copyright (C) [2011-2012] [Christopher Pahl]
# + Hosted at: https://github.com/sahib/glyr
#
# glyr is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# glyr is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with glyr. If not, see <http://www.gnu.org/licenses/>.
#################################################################
#!/usr/bin/env python
# encoding: utf-8
from tests.__common__ import *

not_found_options = {
    'get_type': 'artistphoto',
    'artist': 'HorseTheBand',
    'album': 'Given, but not used.',
    'title': 'Accidentally given'
}

TESTCASES = [{
    # {{{
    'name': 'bbcmusic',
    'data': [{
        'options': {
            'get_type': 'artistphoto',
            'artist': 'The Rolling Stones'
        },
        'expect': len_greater_0
    }, {
        'options': not_found_options,
        'expect': len_equal_0
    }],
}, {
    # }}}
    # {{{
    'name': 'discogs',
    'data': [{
        'options': {
            'get_type': 'artistphoto',
            'artist': 'Nirvana'
        },
        'expect': len_greater_0
    }, {
        'options': not_found_options,
        'expect': len_equal_0
    }],
}, {
    # }}}
    # {{{
    'name': 'flickr',
    'data': [{
        'options': {
            'get_type': 'artistphoto',
            'artist': 'Die Ärzte'
        },
        'expect': len_greater_0
    }, {
        'options': not_found_options,
        'expect': len_equal_0
    }],
}, {
    # }}}
    # {{{
    'name': 'google',
    'data': [{
        'options': {
            'get_type': 'artistphoto',
            'artist': 'DeVildRiVeR'
        },
        'expect': len_greater_0
    }, {
        'options': not_found_options,
        'expect': len_equal_0
    }],
}, {
    # }}}
    # {{{
    'name': 'lastfm',
<|fim▁hole|>
        },
        'expect': len_greater_0
    }, {
        'options': not_found_options,
        'expect': len_equal_0
    }],
}, {
    # }}}
    # {{{
    'name': 'singerpictures',
    'data': [{
        'options': {
            'get_type': 'artistphoto',
            'artist': 'Equilibrium'
        },
        'expect': len_greater_0
    }, {
        'options': not_found_options,
        'expect': len_equal_0
    }],
}, {
    # }}}
    # {{{
    'name': 'rhapsody',
    'data': [{
        'options': {
            'get_type': 'artistphoto',
            'artist': 'In Flames'
        },
        'expect': len_greater_0
    }, {
        'options': not_found_options,
        'expect': len_equal_0
    }],
}
]<|fim▁end|>
    'data': [{
        'options': {
            'get_type': 'artistphoto',
            'artist': 'Alestorm'
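Each TESTCASES entry in the row above pairs provider query options with an expectation callable (len_greater_0 and len_equal_0 come from tests.__common__, which is not shown in these rows). A hypothetical driver loop — the real one lives in tests.__common__, so take this as a sketch of the contract rather than glyr's actual harness:

def run_testcases(testcases, fetch):
    # fetch(options) -> list of fetched items; supplied by the harness.
    for case in testcases:
        for trial in case['data']:
            results = fetch(trial['options'])
            assert trial['expect'](results), (
                "provider %r failed for options %r"
                % (case['name'], trial['options']))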
<|file_name|>hexcodes.json.d.ts<|end_file_name|><|fim▁begin|>import { HexcodesDataset } from 'emojibase';

<|fim▁hole|>declare const data: HexcodesDataset;
export default data;<|fim▁end|>
<|file_name|>LinkCurrentWindowExample.py<|end_file_name|><|fim▁begin|>from muntjac.api import VerticalLayout, Link
from muntjac.terminal.theme_resource import ThemeResource
from muntjac.terminal.external_resource import ExternalResource


class LinkCurrentWindowExample(VerticalLayout):

    _CAPTION = 'Open Google'
    _TOOLTIP = 'http://www.google.com'
    _ICON = ThemeResource('../sampler/icons/icon_world.gif')

<|fim▁hole|>
    def __init__(self):
        super(LinkCurrentWindowExample, self).__init__()

        self.setSpacing(True)

        # Link w/ text and tooltip
        l = Link(self._CAPTION, ExternalResource('http://www.google.com'))
        l.setDescription(self._TOOLTIP)
        self.addComponent(l)

        # Link w/ text, icon and tooltip
        l = Link(self._CAPTION, ExternalResource('http://www.google.com'))
        l.setDescription(self._TOOLTIP)
        l.setIcon(self._ICON)
        self.addComponent(l)

        # Link w/ icon and tooltip
        l = Link()
        l.setResource(ExternalResource('http://www.google.com'))
        l.setDescription(self._TOOLTIP)
        l.setIcon(self._ICON)
        self.addComponent(l)<|fim▁end|>
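The row above only builds the layout; to be rendered it has to be attached to an application's main window. A rough sketch of that wiring, assuming Muntjac's Vaadin-style Application/Window API (hypothetical glue code, not part of the sample itself):

from muntjac.api import Application, Window

class DemoApp(Application):
    def init(self):
        # Mount the example layout as the content of the main window.
        main = Window('Link demo')
        main.addComponent(LinkCurrentWindowExample())
        self.setMainWindow(main)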
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#![deny(unsafe_code)]
<|fim▁hole|>
extern crate log;
extern crate msg;
extern crate script_traits;
extern crate servo_config;
extern crate webrender_api;
extern crate webvr_traits;

mod webvr_thread;

pub use webvr_thread::{WebVRThread, WebVRCompositorHandler};<|fim▁end|>
extern crate ipc_channel;
#[macro_use]