prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>PRESUBMIT.py<|end_file_name|><|fim▁begin|># Copyright 2017 the V8 project authors. All rights reserved.<|fim▁hole|># found in the LICENSE file. def CheckChangeOnCommit(input_api, output_api): tests = input_api.canned_checks.GetUnitTestsInDirectory( input_api, output_api, '.', files_to_check=['test_scripts.py$']) return input_api.RunTests(tests)<|fim▁end|>
# Use of this source code is governed by a BSD-style license that can be
<|file_name|>fn-coerce-field.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. struct r<'a> { field: ||: 'a,<|fim▁hole|>pub fn main() { fn f() {} let _i: r = r {field: f}; }<|fim▁end|>
}
<|file_name|>RangeFilter.js<|end_file_name|><|fim▁begin|>define(function() { "use strict"; function RangeFilter(columnKey, isMinEnabled, minValue, isMaxEnabled, maxValue) { InputValidator.validateNotNull("columnKey", columnKey); InputValidator.validateNotNull("isMinEnabled", isMinEnabled); InputValidator.validateNotNull("minValue", minValue); InputValidator.validateNotNull("isMaxEnabled", isMaxEnabled); InputValidator.validateNotNull("maxValue", maxValue); this.columnKey = function() { return columnKey; }; this.isMinEnabled = function() { return isMinEnabled; }; this.minValue = function() { return minValue; }; this.isMaxEnabled = function() { return isMaxEnabled; }; this.maxValue = function() { return maxValue; }; this.passes = function(data) { InputValidator.validateNotNull("data", data); var value = data[columnKey]; return (!isMinEnabled || minValue <= value) && (!isMaxEnabled || value <= maxValue); }; this.toObject = function() { return ( { type: "RangeFilter", columnKey: columnKey, isMinEnabled: isMinEnabled,<|fim▁hole|> isMaxEnabled: isMaxEnabled, maxValue: maxValue, }); }; this.toString = function() { return "RangeFilter (" + isMinEnabled + " " + minValue + "\u2264" + columnKey + "\u2264" + isMaxEnabled + " " + maxValue + ")"; }; } RangeFilter.fromObject = function(object) { InputValidator.validateNotNull("object", object); var columnKey = object.columnKey; var isMinEnabled = object.isMinEnabled; var minValue = object.minValue; var isMaxEnabled = object.isMaxEnabled; var maxValue = object.maxValue; return new RangeFilter(columnKey, isMinEnabled, minValue, isMaxEnabled, maxValue); }; return RangeFilter; });<|fim▁end|>
minValue: minValue,
<|file_name|>stack.py<|end_file_name|><|fim▁begin|>import copy import mufsim.utils as util import mufsim.gamedb as db import mufsim.stackitems as si from mufsim.errors import MufRuntimeError from mufsim.insts.base import Instruction, instr class InstPushItem(Instruction): value = 0 def __init__(self, line, val): self.value = val super(InstPushItem, self).__init__(line) def execute(self, fr): fr.data_push(self.value) def __str__(self): return si.item_repr(self.value) class InstGlobalVar(Instruction): varnum = 0 varname = 0 def __init__(self, line, vnum, vname): self.varnum = vnum self.varname = vname super(InstGlobalVar, self).__init__(line) def execute(self, fr): fr.data_push(si.GlobalVar(self.varnum)) def __str__(self): return "LV%d: %s" % (self.varnum, self.varname) class InstFuncVar(Instruction): varnum = 0 varname = 0 def __init__(self, line, vnum, vname): self.varnum = vnum self.varname = vname super(InstFuncVar, self).__init__(line) def execute(self, fr): fr.data_push(si.FuncVar(self.varnum)) def __str__(self): return "SV%d: %s" % (self.varnum, self.varname) @instr("secure_sysvars") class InstSecureSysvars(Instruction): def execute(self, fr): fr.globalvar_set(0, fr.user) fr.globalvar_set(1, si.DBRef(db.getobj(fr.user).location)) fr.globalvar_set(2, fr.trigger) fr.globalvar_set(3, fr.command) @instr("!") class InstBang(Instruction): def execute(self, fr): fr.check_underflow(2) v = fr.data_pop(si.GlobalVar, si.FuncVar) val = fr.data_pop() if isinstance(v, si.GlobalVar): fr.globalvar_set(v.value, val) elif isinstance(v, si.FuncVar): fr.funcvar_set(v.value, val) def __str__(self): return "!" 
@instr("@") class InstAt(Instruction): def execute(self, fr): v = fr.data_pop(si.GlobalVar, si.FuncVar) if isinstance(v, si.GlobalVar): val = fr.globalvar_get(v.value) fr.data_push(val) elif isinstance(v, si.FuncVar): val = fr.funcvar_get(v.value) fr.data_push(val) def __str__(self): return "@" @instr("dup") class InstDup(Instruction): def execute(self, fr): a = fr.data_pop() fr.data_push(a) fr.data_push(a) @instr("shallow_copy") class InstShallowCopy(Instruction): def execute(self, fr): a = fr.data_pop() fr.data_push(a) fr.data_push(copy.copy(a)) @instr("deep_copy") class InstDeepCopy(Instruction): def execute(self, fr): a = fr.data_pop() fr.data_push(a) fr.data_push(copy.deepcopy(a)) @instr("?dup") class InstQDup(Instruction): def execute(self, fr): a = fr.data_pop() if isinstance(a, si.DBRef): if a.value != -1: fr.data_push(a) elif a: fr.data_push(a) fr.data_push(a) @instr("dupn") class InstDupN(Instruction): def execute(self, fr): n = fr.data_pop(int) fr.check_underflow(n) for i in range(n): fr.data_push(fr.data_pick(n)) @instr("ldup") class InstLDup(Instruction): def execute(self, fr): n = fr.data_pick(1) if not isinstance(n, int): raise MufRuntimeError("Expected integer argument.") n += 1 fr.check_underflow(n) for i in range(n): fr.data_push(fr.data_pick(n)) @instr("pop") class InstPop(Instruction): def execute(self, fr): fr.data_pop() @instr("popn") class InstPopN(Instruction): def execute(self, fr): n = fr.data_pop(int) fr.check_underflow(n) for i in range(n): fr.data_pop() @instr("swap") class InstSwap(Instruction): def execute(self, fr): fr.check_underflow(2) b = fr.data_pop() a = fr.data_pop() fr.data_push(b) fr.data_push(a) @instr("rot") class InstRot(Instruction): def execute(self, fr): fr.check_underflow(3) a = fr.data_pull(3) fr.data_push(a) @instr("-rot") class InstNegRot(Instruction): def execute(self, fr): fr.check_underflow(3) c = fr.data_pop() b = fr.data_pop() a = fr.data_pop() fr.data_push(c) fr.data_push(a) fr.data_push(b) @instr("rotate") 
class InstRotate(Instruction): def execute(self, fr): num = fr.data_pop(int) fr.check_underflow(num) if not num: return if num < 0: a = fr.data_pop() fr.data_insert((-num) - 1, a) elif num > 0: a = fr.data_pull(num) fr.data_push(a) @instr("pick") class InstPick(Instruction): def execute(self, fr): num = fr.data_pop(int) fr.check_underflow(num) if not num: return if num < 0: raise MufRuntimeError("Expected positive integer.") else: a = fr.data_pick(num) fr.data_push(a) @instr("over") class InstOver(Instruction): def execute(self, fr): fr.check_underflow(2) a = fr.data_pick(2) fr.data_push(a) @instr("put") class InstPut(Instruction): def execute(self, fr): fr.check_underflow(2) num = fr.data_pop(int) val = fr.data_pop() fr.check_underflow(num) if not num: return if num < 0: raise MufRuntimeError("Value out of range") else: fr.data_put(num, val) @instr("nip") class InstNip(Instruction): def execute(self, fr): fr.check_underflow(3) b = fr.data_pop() a = fr.data_pop() fr.data_push(b) @instr("tuck") class InstTuck(Instruction): def execute(self, fr): fr.check_underflow(3) b = fr.data_pop() a = fr.data_pop() fr.data_push(b) fr.data_push(a) fr.data_push(b) @instr("reverse") class InstReverse(Instruction): def execute(self, fr): num = fr.data_pop(int) fr.check_underflow(num) if not num: return arr = [fr.data_pop() for i in range(num)] for val in arr: fr.data_push(val) <|fim▁hole|> num = fr.data_pop(int) fr.check_underflow(num) if not num: return arr = [fr.data_pop() for i in range(num)] for val in arr: fr.data_push(val) fr.data_push(num) @instr("{") class InstMark(Instruction): def execute(self, fr): fr.data_push(si.Mark()) @instr("}") class InstMarkCount(Instruction): def execute(self, fr): for i in range(fr.data_depth()): a = fr.data_pick(i + 1) if isinstance(a, si.Mark): fr.data_pull(i + 1) fr.data_push(i) return raise MufRuntimeError("StackUnderflow") @instr("depth") class InstDepth(Instruction): def execute(self, fr): fr.data_push(fr.data_depth()) @instr("fulldepth") 
class InstFullDepth(Instruction): def execute(self, fr): fr.data_push(fr.data_full_depth()) @instr("variable") class InstVariable(Instruction): def execute(self, fr): vnum = fr.data_pop(int) fr.data_push(si.GlobalVar(vnum)) @instr("localvar") class InstLocalVar(Instruction): def execute(self, fr): vnum = fr.data_pop(int) fr.data_push(si.GlobalVar(vnum)) @instr("caller") class InstCaller(Instruction): def execute(self, fr): fr.data_push(fr.caller_get()) @instr("prog") class InstProg(Instruction): def execute(self, fr): fr.data_push(fr.program) @instr("trig") class InstTrig(Instruction): def execute(self, fr): fr.data_push(fr.trigger) @instr("cmd") class InstCmd(Instruction): def execute(self, fr): fr.data_push(fr.command) @instr("checkargs") class InstCheckArgs(Instruction): itemtypes = { 'a': ([si.Address], "address"), 'd': ([si.DBRef], "dbref"), 'D': ([si.DBRef], "valid object dbref"), 'e': ([si.DBRef], "exit dbref"), 'E': ([si.DBRef], "valid exit dbref"), 'f': ([si.DBRef], "program dbref"), 'F': ([si.DBRef], "valid program dbref"), 'i': ([int], "integer"), 'l': ([si.Lock], "lock"), 'p': ([si.DBRef], "player dbref"), 'P': ([si.DBRef], "valid player dbref"), 'r': ([si.DBRef], "room dbref"), 'R': ([si.DBRef], "valid room dbref"), 's': ([str], "string"), 'S': ([str], "non-null string"), 't': ([si.DBRef], "thing dbref"), 'T': ([si.DBRef], "valid thing dbref"), 'v': ([si.GlobalVar, si.FuncVar], "variable"), '?': ([], "any"), } objtypes = { 'D': "", 'P': "player", 'R': "room", 'T': "thing", 'E': "exit", 'F': "program", } def checkargs_part(self, fr, fmt, depth=1): count = "" pos = len(fmt) - 1 while pos >= 0: ch = fmt[pos] pos -= 1 if ch == " ": continue elif util.is_int(ch): count = ch + count continue elif ch == "}": newpos = pos cnt = 1 if not count else int(count) for i in range(cnt): val = fr.data_pick(depth) depth += 1 fr.check_type(val, [int]) for j in range(val): newpos, depth = self.checkargs_part( fr, fmt[:pos + 1], depth) pos = newpos count = "" elif ch == 
"{": return (pos, depth) elif ch in self.itemtypes: cnt = 1 if not count else int(count) count = "" for i in range(cnt): val = fr.data_pick(depth) depth += 1 types, label = self.itemtypes[ch] fr.check_type(val, types) if ch == "S" and val == "": raise MufRuntimeError( "Expected %s at depth %d" % (label, depth)) if si.DBRef in types: typ = self.objtypes[ch.upper()] if ( not db.validobj(val) and ch.isupper() ) or ( db.validobj(val) and typ and db.getobj(val).objtype != typ ): raise MufRuntimeError( "Expected %s at depth %d" % (label, depth)) def execute(self, fr): argexp = fr.data_pop(str) self.checkargs_part(fr, argexp) # vim: expandtab tabstop=4 shiftwidth=4 softtabstop=4 nowrap<|fim▁end|>
@instr("lreverse") class InstLReverse(Instruction): def execute(self, fr):
<|file_name|>fix_tb_basedon.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Finds revisions from the Thunderbird migration that don't have based_on set correctly, and are still relavent, and fixes that. Run this script like `./manage.py runscript fix_tb_basedon`. """ import sys from traceback import print_exc <|fim▁hole|>from kitsune.wiki.models import Document, Revision def run(): try: run_() except Exception: print_exc() raise class Progress(): def __init__(self, total): self.current = 0 self.total = total def tick(self, incr=1): self.current += incr self.draw() def draw(self): self._wr('{0.current} / {0.total}\r'.format(self)) def _wr(self, s): sys.stdout.write(s) sys.stdout.flush() def run_(): to_process = list(Document.objects.filter( ~Q(parent=None), current_revision__based_on=None, products__slug='thunderbird')) if len(to_process) == 0: print 'Nothing to do.' prog = Progress(len(to_process)) for doc in to_process: prog.tick() oldest_parent_rev = (Revision.objects.filter(document=doc.parent) .order_by('id')[0]) # It has localizations, clearly it should be localizable. if not doc.parent.is_localizable: doc.parent.is_localizable = True doc.parent.save() doc.current_revision.based_on = oldest_parent_rev doc.current_revision.save()<|fim▁end|>
from django.db.models import Q
<|file_name|>wrap-actions.js<|end_file_name|><|fim▁begin|>import getCompletions from "./get-completions" <|fim▁hole|>export const addAutosuggestionCompleters = (ori, system) => (context) => { return ori(context).concat([{ getCompletions(...args) { // Add `context`, then `system` as the last args return getCompletions(...args, context, system) } }]) }<|fim▁end|>
// Add an autosuggest completer
<|file_name|>main.loading.js<|end_file_name|><|fim▁begin|>function paddAppendClear() { jQuery('.append-clear').append('<div class="clear"></div>'); } function paddWrapInner1() { jQuery('.wrap-inner-1').wrapInner('<div class="inner"></div>'); } function paddWrapInner3() { jQuery('.wrap-inner-3').wrapInner('<div class="m"></div>'); jQuery('.wrap-inner-3').prepend('<div class="t"></div>'); jQuery('.wrap-inner-3').append('<div class="b"></div>'); } function paddToggle(classname,value) { jQuery(classname).focus(function() { if (value == jQuery(classname).val()) { jQuery(this).val(''); } }); jQuery(classname).blur(function() { if ('' == jQuery(classname).val()) { jQuery(this).val(value); }<|fim▁hole|>jQuery(document).ready(function() { jQuery.noConflict(); jQuery('div#menubar div > ul').superfish({ hoverClass: 'hover', speed: 500, animation: { opacity: 'show', height: 'show' } }); paddAppendClear(); paddWrapInner1(); paddWrapInner3(); jQuery('p.older-articles').titleBoxShadow('#ebebeb'); jQuery('.hentry-large .title').titleBoxShadow('#ebebeb'); jQuery('.hentry-large .thumbnail img').imageBoxShadow('#ebebeb'); jQuery('input#s').val('Search this site'); paddToggle('input#s','Search this site'); jQuery('div.search form').click(function () { jQuery('input#s').focus(); }); });<|fim▁end|>
}); }
<|file_name|>dev.go<|end_file_name|><|fim▁begin|>package build import ( "bytes" "context" "errors" "io" "strings" "time" "github.com/bblfsh/sdk/v3/driver" "github.com/bblfsh/sdk/v3/internal/docker" "github.com/bblfsh/sdk/v3/protocol" "google.golang.org/grpc" protocol1 "gopkg.in/bblfsh/sdk.v1/protocol" ) const ( cliPort = "9432" dockerSchema = "docker-daemon:" ) type ServerInstance struct { cli *docker.Client user *grpc.ClientConn bblfshd *docker.Container } <|fim▁hole|> id += ":latest" } cmd := []string{"bblfshctl", "driver", "install", lang, dockerSchema + id} printCommand("docker", append([]string{"exec", id}, cmd...)...) e, err := d.cli.CreateExec(docker.CreateExecOptions{ Context: ctx, Container: d.bblfshd.ID, AttachStdout: true, AttachStderr: true, Cmd: cmd, }) if err != nil { return err } buf := bytes.NewBuffer(nil) err = d.cli.StartExec(e.ID, docker.StartExecOptions{ Context: ctx, OutputStream: buf, ErrorStream: buf, }) if err != nil { return err } else if str := buf.String(); strings.Contains(strings.ToLower(str), "error") { return errors.New(strings.TrimSpace(str)) } return nil } func (d *ServerInstance) ClientV1(ctx context.Context) (protocol1.ProtocolServiceClient, error) { if d.user == nil { addr := d.bblfshd.NetworkSettings.IPAddress conn, err := grpc.DialContext(ctx, addr+":"+cliPort, grpc.WithInsecure(), grpc.WithBlock()) if err != nil { return nil, err } d.user = conn } return protocol1.NewProtocolServiceClient(d.user), nil } func (d *ServerInstance) ClientV2(ctx context.Context) (driver.Driver, error) { if d.user == nil { addr := d.bblfshd.NetworkSettings.IPAddress conn, err := grpc.DialContext(ctx, addr+":"+cliPort, grpc.WithInsecure(), grpc.WithBlock()) if err != nil { return nil, err } d.user = conn } return protocol.AsDriver(d.user), nil } func (s *ServerInstance) DumpLogs(w io.Writer) error { return getLogs(s.cli, s.bblfshd.ID, w) } func (d *ServerInstance) Close() error { if d.user != nil { _ = d.user.Close() } return 
d.cli.RemoveContainer(docker.RemoveContainerOptions{ ID: d.bblfshd.ID, Force: true, }) } // RunWithDriver starts a bblfshd server and installs a specified driver to it. func RunWithDriver(bblfshdVers, lang, id string) (*ServerInstance, error) { cli, err := docker.Dial() if err != nil { return nil, err } const ( bblfshd = "bblfsh/bblfshd" // needed to install driver from Docker instance sock = docker.Socket + ":" + docker.Socket ) image := bblfshd if bblfshdVers != "" { image += ":" + bblfshdVers } printCommand("docker", "run", "--rm", "--privileged", "-v", sock, image) c, err := docker.Run(cli, docker.CreateContainerOptions{ Config: &docker.Config{ Image: image, }, HostConfig: &docker.HostConfig{ AutoRemove: true, Privileged: true, Binds: []string{sock}, }, }) if err != nil { return nil, err } s := &ServerInstance{cli: cli, bblfshd: c} ctx, cancel := context.WithTimeout(context.Background(), time.Minute*3) defer cancel() if err := s.installFromDocker(ctx, lang, id); err != nil { s.Close() return nil, err } return s, nil } func getLogs(cli *docker.Client, id string, w io.Writer) error { return cli.AttachToContainer(docker.AttachToContainerOptions{ Container: id, OutputStream: w, ErrorStream: w, Logs: true, Stdout: true, Stderr: true, }) }<|fim▁end|>
func (d *ServerInstance) installFromDocker(ctx context.Context, lang, id string) error { if !strings.Contains(id, ":") {
<|file_name|>Reducer.js<|end_file_name|><|fim▁begin|>import InputValidator from "../../common/js/InputValidator.js"; import ObjectUtilities from "../../common/js/ObjectUtilities.js"; import Action from "./Action.js"; import DefaultFilters from "./DefaultFilters.js"; import InitialState from "./InitialState.js"; var Reducer = {}; Reducer.root = function(state, action) { LOGGER.debug("root() type = " + action.type); if (typeof state === 'undefined') { return new InitialState(); } var newFilters, newFilteredTableRow; switch (action.type) { case Action.REMOVE_FILTERS: newFilteredTableRow = []; newFilteredTableRow = newFilteredTableRow.concat(state.tableRows); return Object.assign( {}, state, { filteredTableRows: newFilteredTableRow, }); case Action.SET_DEFAULT_FILTERS: newFilters = DefaultFilters.create(); return Object.assign( {}, state, { filters: newFilters, });<|fim▁hole|> case Action.SET_FILTERS: LOGGER.debug("Reducer filters = "); Object.getOwnPropertyNames(action.filters).forEach(function(propertyName) { LOGGER.debug(propertyName + ": " + action.filters[propertyName]); }); newFilters = Object.assign( {}, state.filters); newFilters = ObjectUtilities.merge(newFilters, action.filters); newFilteredTableRow = Reducer.filterTableRow(state.tableRows, newFilters); Reducer.saveToLocalStorage(newFilters); return Object.assign( {}, state, { filters: newFilters, filteredTableRows: newFilteredTableRow, }); case Action.TOGGLE_FILTER_SHOWN: return Object.assign( {}, state, { isFilterShown: !state.isFilterShown, }); default: LOGGER.warn("Reducer.root: Unhandled action type: " + action.type); return state; } }; Reducer.filterTableRow = function(tableRows, filters) { InputValidator.validateNotNull("tableRows", tableRows); InputValidator.validateNotNull("filters", filters); var answer = []; tableRows.forEach(function(data) { if (Reducer.passes(data, filters)) { answer.push(data); } }); return answer; }; Reducer.passes = function(data, filters) { 
InputValidator.validateNotNull("data", data); InputValidator.validateNotNull("filters", filters); var answer = true; var propertyNames = Object.getOwnPropertyNames(filters); for (var i = 0; i < propertyNames.length; i++) { var propertyName = propertyNames[i]; var filter = filters[propertyName]; if (!filter.passes(data)) { answer = false; break; } } return answer; }; Reducer.saveToLocalStorage = function(filters) { InputValidator.validateNotNull("filters", filters); var filterObjects = []; Object.getOwnPropertyNames(filters).forEach(function(columnKey) { var filter = filters[columnKey]; filterObjects.push(filter.toObject()); }); localStorage.filters = JSON.stringify(filterObjects); }; export default Reducer;<|fim▁end|>
<|file_name|>minimizebutton.cpp<|end_file_name|><|fim▁begin|>// // This source file is part of appleseed. // Visit http://appleseedhq.net/ for additional information and resources. // // This software is released under the MIT license. // // Copyright (c) 2014-2015 Marius Avram, The appleseedhq Organization // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // Interface header. #include "minimizebutton.h" // Qt headers. #include <QAction> #include <QDockWidget> #include <QMouseEvent> #include <QStyle> namespace appleseed { namespace studio { // // MinimizeButton class implementation. 
// MinimizeButton::MinimizeButton(QDockWidget* dock_widget, QWidget* parent) : QPushButton(dock_widget->windowTitle(), parent) , m_dock_widget(dock_widget) , m_on(true) , m_minimized(false) { setObjectName("toggle_button_on"); connect( m_dock_widget->toggleViewAction(), SIGNAL(toggled(bool)), SLOT(slot_minimize())); } bool MinimizeButton::is_on() const { return m_on; } void MinimizeButton::set_fullscreen(const bool on) { if (on) { // Setting fullscreen on. m_minimized = m_on; if (!m_on)<|fim▁hole|> else { // Deactivating fullscreen. Keep state before fullscreen. if (!m_minimized) m_dock_widget->toggleViewAction()->activate(QAction::Trigger); } } void MinimizeButton::mousePressEvent(QMouseEvent* event) { if (event->buttons() & Qt::LeftButton) m_dock_widget->toggleViewAction()->activate(QAction::Trigger); } void MinimizeButton::slot_minimize() { m_on = !m_on; setObjectName(m_on ? "toggle_button_on" : "toggle_button_off"); // Force stylesheet reloading for this widget. style()->unpolish(this); style()->polish(this); } } // namespace studio } // namespace appleseed<|fim▁end|>
m_dock_widget->toggleViewAction()->activate(QAction::Trigger); }
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! A Rust library for allocation-limited computation of the Discrete Cosine Transform. //! //! 1D DCTs are allocation-free but 2D requires allocation. //! //! Features: //! //! * `simd`: use SIMD types to speed computation (2D DCT only) //! * `cos-approx`: use a Taylor series approximation of cosine instead of the stdlib //! implementation (which is usually much slower but also higher precision) use std::f64::consts::{PI, SQRT_2}; use std::ops::Range; /// An allocation-free one-dimensional Discrete Cosine Transform. /// /// Each iteration produces the next DCT value in the sequence. #[derive(Clone, Debug)] pub struct DCT1D<'a> { data: &'a [f64], curr: Range<usize>, } impl<'a> DCT1D<'a> { /// Create a new DCT 1D adaptor from a 1D vector of data. pub fn new(data: &[f64]) -> DCT1D { let curr = 0 .. data.len(); DCT1D { data: data, curr: curr, } } // Converted from the C implementation here: // http://unix4lyfe.org/dct/listing2.c // Source page: // http://unix4lyfe.org/dct/ (Accessed 8/10/2014) fn next_dct_val(&mut self) -> Option<f64> { self.curr.next().map(|u| { let mut z = 0.0; let data_len = self.data.len(); for (x_idx, &x) in self.data.iter().enumerate() { z += x * cos( PI * u as f64 * (2 * x_idx + 1) as f64 / (2 * data_len) as f64 ); } if u == 0 { z *= 1.0 / SQRT_2; } z / 2.0 }) } } impl<'a> Iterator for DCT1D<'a> { type Item = f64; fn next(&mut self) -> Option<f64> { self.next_dct_val() } } /// An implementation of cosine that switches to a Taylor-series approximation when throughput is /// preferred over precision. #[inline(always)] pub fn cos(x: f64) -> f64 { // This branch should be optimized out. if cfg!(feature = "cos-approx") { // Normalize to [0, pi] or else the Taylor series spits out very wrong results. let x = (x.abs() + PI) % (2.0 * PI) - PI; // Approximate the cosine of `val` using a 4-term Taylor series. // Can be expanded for higher precision. 
let x2 = x.powi(2); let x4 = x.powi(4); let x6 = x.powi(6); let x8 = x.powi(8); 1.0 - (x2 / 2.0) + (x4 / 24.0) - (x6 / 720.0) + (x8 / 40320.0) } else { x.cos() } } /// Perform a 2D DCT on a 1D-packed vector with a given rowstride. /// /// E.g. a vector of length 9 with a rowstride of 3 will be processed as a 3x3 matrix. /// /// Returns a vector of the same size packed in the same way. pub fn dct_2d(packed_2d: &[f64], rowstride: usize) -> Vec<f64> { assert_eq!(packed_2d.len() % rowstride, 0); let mut row_dct: Vec<f64> = packed_2d .chunks(rowstride) .flat_map(DCT1D::new) .collect(); swap_rows_columns(&mut row_dct, rowstride); let mut column_dct: Vec<f64> = packed_2d .chunks(rowstride) .flat_map(DCT1D::new) .collect(); swap_rows_columns(&mut column_dct, rowstride); column_dct } fn swap_rows_columns(data: &mut [f64], rowstride: usize) { let height = data.len() / rowstride; for y in 0 .. height { for x in 0 .. rowstride { data.swap(y * rowstride + x, x * rowstride + y); } } } #[cfg_attr(all(test, feature = "cos-approx"), test)] #[cfg_attr(not(all(test, feature = "cos-approx")), allow(dead_code))] fn test_cos_approx() { const ERROR: f64 = 0.05; fn test_cos_approx(x: f64) { let approx = cos(x); let cos = x.cos(); assert!( approx.abs_sub(x.cos()) <= ERROR, "Approximation cos({x}) = {approx} was outside a tolerance of {error}; control value: {cos}", x = x, approx = approx, error = ERROR, cos = cos, ); } let test_values = [PI, PI / 2.0, PI / 4.0, 1.0, -1.0, 2.0 * PI, 3.0 * PI, 4.0 / 3.0 * PI]; for &x in &test_values {<|fim▁hole|>} /* #[cfg(feature = "simd")] mod dct_simd { use simdty::f64x2; use std::f64::consts::{PI, SQRT_2}; macro_rules! 
valx2 ( ($val:expr) => ( ::simdty::f64x2($val, $val) ) ); const PI: f64x2 = valx2!(PI); const ONE_DIV_SQRT_2: f64x2 = valx2!(1 / SQRT_2); const SQRT_2: f64x2 = valx2!(SQRT_2); pub dct_rows(vals: &[Vec<f64>]) -> Vec<Vec<f64>> { let mut out = Vec::with_capacity(vals.len()); for pair in vals.iter().chunks(2) { if pair.len() == 2 { let vals = pair[0].iter().cloned().zip(pair[1].iter().cloned()) .map(f64x2) .collect(); dct_1dx2(vals); } } fn dct_1dx2(vec: Vec<f64x2>) -> Vec<f64x2> { let mut out = Vec::with_capacity(vec.len()); for u in 0 .. vec.len() { let mut z = valx2!(0.0); for x in 0 .. vec.len() { z += vec[x] * cos_approx( PI * valx2!( u as f64 * (2 * x + 1) as f64 / (2 * vec.len()) as f64 ) ); } if u == 0 { z *= ONE_DIV_SQRT_2; } out.insert(u, z / valx2!(2.0)); } out } fn cos_approx(x2: f64x2) -> f64x2 { #[inline(always)] fn powi(val: f64x2, pow: i32) -> f64x2 { unsafe { llvmint::powi_v2f64(val, pow) } } let x2 = powi(val, 2); let x4 = powi(val, 4); let x6 = powi(val, 6); let x8 = powi(val, 8); valx2!(1.0) - (x2 / valx2!(2.0)) + (x4 / valx2!(24.0)) - (x6 / valx2!(720.0)) + (x8 / valx2!(40320.0)) } } */<|fim▁end|>
test_cos_approx(x); test_cos_approx(-x); }
<|file_name|>MemoryOffsetBackingStore.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.connect.storage; import org.apache.kafka.connect.errors.ConnectException; import org.apache.kafka.connect.runtime.WorkerConfig; import org.apache.kafka.connect.util.Callback; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; /** * Implementation of OffsetBackingStore that doesn't actually persist any data. To ensure this * behaves similarly to a real backing store, operations are executed asynchronously on a * background thread. 
*/ public class MemoryOffsetBackingStore implements OffsetBackingStore { private static final Logger log = LoggerFactory.getLogger(MemoryOffsetBackingStore.class); protected Map<ByteBuffer, ByteBuffer> data = new HashMap<>(); protected ExecutorService executor; public MemoryOffsetBackingStore() { } @Override public void configure(WorkerConfig config) { } @Override public void start() { executor = Executors.newSingleThreadExecutor(); } @Override public void stop() { if (executor != null) { executor.shutdown(); // Best effort wait for any get() and set() tasks (and caller's callbacks) to complete. try { executor.awaitTermination(30, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } if (!executor.shutdownNow().isEmpty()) { throw new ConnectException("Failed to stop MemoryOffsetBackingStore. Exiting without cleanly " + "shutting down pending tasks and/or callbacks."); } executor = null; } } @Override public Future<Map<ByteBuffer, ByteBuffer>> get( final Collection<ByteBuffer> keys, final Callback<Map<ByteBuffer, ByteBuffer>> callback) { return executor.submit(new Callable<Map<ByteBuffer, ByteBuffer>>() { @Override public Map<ByteBuffer, ByteBuffer> call() throws Exception { Map<ByteBuffer, ByteBuffer> result = new HashMap<>(); for (ByteBuffer key : keys) { result.put(key, data.get(key)); } if (callback != null) callback.onCompletion(null, result); return result; } }); } @Override public Future<Void> set(final Map<ByteBuffer, ByteBuffer> values, final Callback<Void> callback) { return executor.submit(new Callable<Void>() { @Override public Void call() throws Exception { for (Map.Entry<ByteBuffer, ByteBuffer> entry : values.entrySet()) { data.put(entry.getKey(), entry.getValue()); } save(); if (callback != null) callback.onCompletion(null, null); return null; } }); }<|fim▁hole|> // Hook to allow subclasses to persist data protected void save() { } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""" scraping the utility functions for the actual web scraping """ import ssl import datetime import requests import re # this is the endpoint that my new version of this program will # abuse with possible store ids. this is a much more reliable "darts at the wall" # technique than the previous location-based one QUERY_URL = "https://www.wawa.com/Handlers/LocationByStoreNumber.ashx" # from testing, I have confirmed certain "series" of store IDs # 0000 series are all old stores in PA, NJ, MD, DE, and VA # 5000 series are all stores in FL # 8000 series are all new stores in PA, NJ, MD, DE, and VA POSSIBLE_STORE_NUMS = list(range(5000, 6000)) POSSIBLE_STORE_NUMS.extend(list(range(0, 1000))) POSSIBLE_STORE_NUMS.extend(list(range(8000, 9000))) # currently only tracking these gas types to keep a consistent csv schema. # other types are not consistent across all wawas GAS_TYPES = ["diesel", "plus", "unleaded", "premium"] def parse_gas_prices(in_location): """ Breaks open the json for the gas prices :param in_location: The Wawa location we are looking at (dict) :return: The gas price info (dict) """ out_data = {} try: fuel_data = in_location["fuelTypes"] for ft in fuel_data: lowered = ft["description"].lower() if lowered in GAS_TYPES: out_data[lowered + "_price"] = ft["price"] # no gas sold at this Wawa except KeyError: for gt in GAS_TYPES: out_data[gt + "_price"] = "" return out_data<|fim▁hole|> Basic function that converts a camel-cased word to use underscores :param in_string: The camel-cased string (str) :return: The underscore'd string (str) """ s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', in_string) return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() def parse_amenities(in_location): """ Breaks open the json for the amenities offered at the Wawa location :param in_location: The Wawa location (dict) :return: The amenity info (dict) """ out_data = {} for amenity, value in in_location["amenities"].items(): 
out_data["has_" + camel_to_underscore(amenity).lower()] = value return out_data def get_addresses(in_location): """ Parses info for the Wawa address and coordinates :param in_location: The Wawa location (dict) :return: The address and coordincate info (dict) """ friendly = in_location["addresses"][0] physical = in_location["addresses"][1] out_friendly = { "address": friendly["address"], "city": friendly["city"], "state": friendly["state"], "zip": friendly["zip"] } out_physical = { "longitude": physical["loc"][1], "latitude": physical["loc"][0], } return {"address": out_friendly, "coordinates": out_physical} def get_wawa_data(limit=None): """ Hits the store number url endpoint to pull down Wawa locations and parse each one's information. We don't know the store numbers as there is not list of store numbers. Through testing I was able to narrow down "series" of store numbers, so we iterate through ranges of possible store numbers, skipping any 404 errors (invalid store id responses returned by url calls). 
:param limit: A cap on the number of Wawa results returned (int) (optional) :return: Parsed Wawa information (list<dict>) """ ssl._create_default_https_context = ssl._create_unverified_context output = [] for i in POSSIBLE_STORE_NUMS: response = requests.get(QUERY_URL, params={"storeNumber": i}) if response.status_code != 404: location = response.json() geographic_data = get_addresses(location) address = geographic_data["address"] coordinates = geographic_data["coordinates"] gas_prices = parse_gas_prices(location) amenities = parse_amenities(location) this_location_output = { "has_menu": location["hasMenu"], "last_updated": datetime.datetime.strptime(location["lastUpdated"], "%m/%d/%Y %I:%M %p"), "location_id": location["locationID"], "open_24_hours": location["open24Hours"], "regional_director": location["regionalDirector"], "store_close": location["storeClose"], "store_name": location["storeName"], "store_number": location["storeNumber"], "store_open": location["storeOpen"], "telephone": location["telephone"] } this_location_output = {**this_location_output, **address} this_location_output = {**this_location_output, **coordinates} this_location_output = {**this_location_output, **gas_prices} this_location_output = {**this_location_output, **amenities} output.append(this_location_output) if limit and len(output) == limit: break return output<|fim▁end|>
def camel_to_underscore(in_string): """
<|file_name|>exactlyone.go<|end_file_name|><|fim▁begin|><|fim▁hole|>// Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package constraint import ( "fmt" "github.com/hashicorp/go-multierror" ) // ExactlyOne item in a collection must match all check Constraints type ExactlyOne struct { Constraints []Check } var _ Range = &ExactlyOne{} // ValidateItems implements Range func (e *ExactlyOne) ValidateItems(arr []interface{}, p Params) error { var matches int var err error mainloop: for _, a := range arr { for _, c := range e.Constraints { er := c.ValidateItem(a, p) if er != nil { err = multierror.Append(err, er) continue mainloop } } matches++ } switch matches { case 0: err = multierror.Append(err, fmt.Errorf("no item matched constraints: %v", arr)) return multierror.Flatten(err) case 1: return nil default: return fmt.Errorf("multiple items(%d) matched constraints: %v", matches, arr) } }<|fim▁end|>
// Copyright 2019 Istio Authors //
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #![feature(globs, macro_rules, phase, thread_local, link_args)] #![allow(experimental, non_camel_case_types)] #![deny(unused_imports, unused_variables, unused_mut)] #![feature(phase)] #[phase(plugin, link)] extern crate log; #[phase(plugin)] extern crate "plugins" as servo_plugins; extern crate servo; extern crate compositing; extern crate azure; extern crate geom; extern crate gfx; extern crate gleam; extern crate glfw; extern crate glfw_app; extern crate js; extern crate layers; extern crate png; extern crate script; extern crate "net" as servo_net; extern crate "msg" as servo_msg; extern crate "util" as servo_util; extern crate style; extern crate stb_image; extern crate rustrt; extern crate libc; extern crate "url" as std_url; <|fim▁hole|>#[cfg(target_os="macos")] extern crate core_graphics; #[cfg(target_os="macos")] extern crate core_text; // Must come first. pub mod macros; pub mod browser; pub mod browser_host; pub mod command_line; pub mod cookie; pub mod core; pub mod drag_data; pub mod eutil; pub mod frame; pub mod interfaces; pub mod print_settings; pub mod process_message; pub mod render_handler; pub mod request; pub mod request_context; pub mod response; pub mod stream; pub mod string; pub mod string_list; pub mod string_map; pub mod string_multimap; pub mod stubs; pub mod switches; pub mod task; pub mod types; pub mod urlrequest; pub mod v8; pub mod values; pub mod window; pub mod wrappers; pub mod xml_reader; pub mod zip_reader;<|fim▁end|>
#[cfg(target_os="macos")] extern crate cgl; #[cfg(target_os="macos")] extern crate cocoa;
<|file_name|>text.py<|end_file_name|><|fim▁begin|># # Copyright 2010 Free Software Foundation, Inc. # # This file was generated by gr_modtool, a tool from the GNU Radio framework # This file is a part of gr-paint # # SPDX-License-Identifier: GPL-3.0-or-later # # """ Utilities for extracting text from generated classes. """ def is_string(txt): if isinstance(txt, str): return True try: if isinstance(txt, str): return True<|fim▁hole|> return False def description(obj): if obj is None: return None return description_bit(obj).strip() def description_bit(obj): if hasattr(obj, 'content'): contents = [description_bit(item) for item in obj.content] result = ''.join(contents) elif hasattr(obj, 'content_'): contents = [description_bit(item) for item in obj.content_] result = ''.join(contents) elif hasattr(obj, 'value'): result = description_bit(obj.value) elif is_string(obj): return obj else: raise Exception('Expecting a string or something with content, content_ or value attribute') # If this bit is a paragraph then add one some line breaks. if hasattr(obj, 'name') and obj.name == 'para': result += "\n\n" return result<|fim▁end|>
except NameError: pass
<|file_name|>configuration.go<|end_file_name|><|fim▁begin|>package astilog<|fim▁hole|> import "flag" // Flags var ( AppName = flag.String("logger-app-name", "", "the logger's app name") Filename = flag.String("logger-filename", "", "the logger's filename") Verbose = flag.Bool("logger-verbose", false, "if true, then log level is debug") ) // Formats const ( FormatJSON = "json" FormatText = "text" ) // Outs const ( OutFile = "file" OutStdOut = "stdout" OutSyslog = "syslog" ) // Configuration represents the configuration of the logger type Configuration struct { AppName string `toml:"app_name"` DisableColors bool `toml:"disable_colors"` DisableTimestamp bool `toml:"disable_timestamp"` Filename string `toml:"filename"` FullTimestamp bool `toml:"full_timestamp"` Format string `toml:"format"` MessageKey string `toml:"message_key"` Out string `toml:"out"` TimestampFormat string `toml:"timestamp_format"` Verbose bool `toml:"verbose"` } // SetHandyFlags sets handy flags func SetHandyFlags() { Verbose = flag.Bool("v", false, "if true, then log level is debug") } // FlagConfig generates a Configuration based on flags func FlagConfig() Configuration { return Configuration{ AppName: *AppName, Filename: *Filename, Verbose: *Verbose, } }<|fim▁end|>
<|file_name|>module.js<|end_file_name|><|fim▁begin|>"use strict"; var Extension = require("../runtime/extension"); /** * @constructor Trait * @memberof module:impulse * * @summary Traits allow classes to be extended without modification, and support isTypeOf() when used as parameters. **/ function Trait(parent, funcs, required) { this._parent = parent || null; this._types = new Set(); this._methods = funcs; this.required = required; } Trait.isTypeOf = function(that) { return that instanceof this; } Trait.prototype.add = function (type) { this._types = this._types.add(type); return this; } Trait.prototype.isTypeOf = function (value) { for (var scope = this; scope !== null; scope = scope._parent) { for (var type of scope._types) { if (type.isTypeOf(value)) { return true; } } } return false; } Trait.prototype.bind = function() { return this._methods.apply(null, arguments); } Trait.addtrait = function(type, parent) { var trait = parent ? clone(parent) : new Trait(); return trait.add(type); } function clone(object) { if (object == null || typeof object != "object") { return object; } var copy = new object.constructor(); for (var property in object) { if (object.hasOwnProperty(property)) { copy[property] = object[property]; } } return copy; } // // Exports // module.exports = Trait; /* Value : Type 0 : 1 Bottom == Void 1 : 1 Unit () == () 1 : 1 Scalar 1 == Number 1 : N Union or Intersection 1 == (Number | String), (Number & String) N : 1 Array [1, 2, 3] == [Number] N : N Record (1, "foo") == (Number, String) 1 : 0 Untyped Scalar N : 1 Top 1, "foo" == Object <|fim▁hole|> eval("var symbol = Symbol.symbol;"); import foo, bar from library; import * from library.module; */<|fim▁end|>
import { symbol } from "core-js/es6/symbol"; import * from core-js.fn.object.assign;
<|file_name|>test_dfs.py<|end_file_name|><|fim▁begin|>"""Tests for the DFS module""" import unittest from dfs import dfsTraverse class test_dfsTraverse(unittest.TestCase): """Test the correct order in traversing a graph""" def setUp(self): """Create a graph and a tuple with the correct traverse""" self.correctResTup = ('a', 'b', 'e', 'g', 'f', 'c', 'h', 'd') self.graphDict = {'a': ('b', 'g', 'd'), 'b': ('e', 'a', 'f'), 'd': ('a', 'f'), 'e': ('b', 'g'), 'g': ('e', 'a'), 'f': ('b', 'd', 'c'), 'c': ('f', 'h'), 'h': ('c')} def test_traverse(self): """Test the traverse function""" result = dfsTraverse(self.graphDict, 'a') self.assertEqual(result, self.correctResTup) if __name__ == '__main__':<|fim▁hole|> unittest.main()<|fim▁end|>
<|file_name|>dictionaries.go<|end_file_name|><|fim▁begin|>package hashcat3<|fim▁hole|> Name string Path string } type Dictionaries []Dictionary func (d Dictionaries) Len() int { return len(d) } func (d Dictionaries) Swap(i, j int) { d[i], d[j] = d[j], d[i] } func (d Dictionaries) Less(i, j int) bool { return d[i].Name < d[j].Name }<|fim▁end|>
type Dictionary struct {
<|file_name|>Direct.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|>
oid sha256:be847f24aac166b803f1ff5ccc7e4d7bc3fb5d960543e35f779068a754294c94 size 1312
<|file_name|>D1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # Problem: Endless Knight # Language: Python # Author: KirarinSnow # Usage: python thisfile.py <input.in >output.out # Comments: OK for large, but may time out on small. from itertools import * MOD = 10007 # Precompute factorial table mod MOD fact = [1] * MOD for i in xrange(1, MOD): fact[i] = (fact[i-1] * i) # n choose k -- using Lucas's theorem def choose(n, k): if k > n: return 0 elif n < MOD: return (fact[n]/fact[n-k]/fact[k])%MOD else: prod = 1 while n > 0: prod *= choose(n%MOD, k%MOD) prod %= MOD n /= MOD k /= MOD return prod def compute(): h, w, r = map(int, raw_input().split()) rocks = [map(int, raw_input().split()) for i in range(r)] if (h+w-2)%3 != 0: return 0 # normalize rock coordinates h, w = h-1-(h+w-2)/3, w-1-(h+w-2)/3 for i in range(r): row, col = rocks[i] if (row+col-2)%3 != 0: rocks[i] = None else: rocks[i] = [row-1-(row+col-2)/3, col-1-(row+col-2)/3] if rocks[i][0] < 0 or rocks[i][0] > h: rocks[i] = None elif rocks[i][1] < 0 or rocks[i][1] > w: rocks[i] = None total = 0 for num in range(r+1): for perm in permutations(range(r), num): # verify increasing property of permutation inc = True for i in range(num): if rocks[perm[i]] == None: inc = False break if i > 0: if rocks[perm[i]][0] < rocks[perm[i-1]][0]: inc = False break if rocks[perm[i]][1] < rocks[perm[i-1]][1]: inc = False<|fim▁hole|> # number of paths going through all points prod = 1 for j in range(1, len(points)): dh = points[j][0] - points[j-1][0] dw = points[j][1] - points[j-1][1] prod *= choose(dh+dw, dw) prod %= MOD # inclusion-exclusion total += (-1)**num * prod total %= MOD return total for i in range(input()): print "Case #%d: %d" % (i+1, compute())<|fim▁end|>
break if inc: points = [[0,0]] + [rocks[j] for j in perm] + [[h,w]]
<|file_name|>SystemConfiguration.js<|end_file_name|><|fim▁begin|>/*global Ext, i18n*/ //<debug> console.log(new Date().toLocaleTimeString() + ": Log: Load: WPAKD.view.desktop.icons.SystemConfiguration"); //</debug> Ext.define("WPAKD.view.desktop.icons.SystemConfiguration", { extend: "Ext.Container", alias: "widget.desktopiconssystemconfiguration", draggable: true, floating: true, layout: {type: "vbox", align: "center"}, width: 80, defaults: {frame: true}, items: [{ xtype: "container", height: 10 }, { xtype: "container" , html: "<center><i class=\"fa fa-cog\"></i></center>" , style: "font-size: 3em;" , height: 40<|fim▁hole|> html: i18n.gettext("System <br />Configuration") }], initComponent: function() { var me = this; Ext.applyIf(me, { listeners: { el: { dblclick: function() {me.fireEvent("WPAKD.controller.desktop.icons.Icons.iconDbClick", this, "WEB_CFG_SYSTEM");} , click: function() {me.fireEvent("WPAKD.controller.desktop.icons.Icons.iconClick", this, "WEB_CFG_SYSTEM");} } } }); me.callParent(arguments); } });<|fim▁end|>
, width: 40 }, { xtype: "container", style: {"text-align": "center", color: "#FFFFFF"},
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export * from './item-repository'; export * from './user-repository';<|fim▁end|>
<|file_name|>testlistfmt_ka_GE.js<|end_file_name|><|fim▁begin|>/* * testlistfmt_ka_GE.js - test the list formatter object * * Copyright © 2020, JEDLSoft * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSe-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ if (typeof(ListFmt) === "undefined") { var ListFmt = require("../../lib/ListFmt.js"); } if (typeof(ilib) === "undefined") { var ilib = require("../../lib/ilib.js"); } module.exports.testlistfmt_ka_GE = { setUp: function(callback) { ilib.clearCache(); callback(); }, testListFmtkaGENumberFormatOne: function(test) { var fmt = new ListFmt({ locale: "ka-GE" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი"]), "ერთი"); test.done(); }, testListFmtkaGENumberFormatTwo: function(test) { var fmt = new ListFmt({ locale: "ka-GE" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი"]), "ერთი და ორი"); test.done(); }, testListFmtkaGENumberFormatThree: function(test) { var fmt = new ListFmt({ locale: "ka-GE" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი"]), "ერთი, ორი და სამი"); test.done(); }, testListFmtkaGENumberFormatFour: function(test) { var fmt = new ListFmt({ locale: "ka-GE" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი", "ოთხი"]), "ერთი, ორი, სამი და ოთხი"); test.done(); }, testListFmtkaGENumberFormatFive: function(test) { var fmt = new ListFmt({ locale: "ka-GE" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი", "ოთხი", 
"ხუთი"]), "ერთი, ორი, სამი, ოთხი და ხუთი"); test.done(); }, testListFmtUnitStylekaGENumberFormatOneShort: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი"]), "ერთი"); test.done(); }, testListFmtUnitStylekaGENumberFormatTwoShort: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი"]), "ერთი, ორი"); test.done(); }, testListFmtUnitStylekaGENumberFormatThreeShort: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი"]), "ერთი, ორი, სამი"); test.done(); }, testListFmtUnitStylekaGENumberFormatFourShort: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი", "ოთხი"]), "ერთი, ორი, სამი, ოთხი"); test.done(); }, testListFmtUnitStylekaGENumberFormatFiveShort: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი", "ოთხი", "ხუთი"]), "ერთი, ორი, სამი, ოთხი, ხუთი"); test.done(); }, testListFmtUnitStylekaGENumberFormatOneFull: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit", length: "full" });<|fim▁hole|> test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი"]), "ერთი"); test.done(); }, testListFmtUnitStylekaGENumberFormatTwoFull: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit", length: "full" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი"]), "ერთი, ორი"); test.done(); }, testListFmtUnitStylekaGENumberFormatThreeFull: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit", length: "full" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", 
"სამი"]), "ერთი, ორი, სამი"); test.done(); }, testListFmtUnitStylekaGENumberFormatFourFull: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit", length: "full" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი", "ოთხი"]), "ერთი, ორი, სამი, ოთხი"); test.done(); }, testListFmtUnitStylekaGENumberFormatFiveFull: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "unit", length: "full" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი", "ოთხი", "ხუთი"]), "ერთი, ორი, სამი, ოთხი, ხუთი"); test.done(); }, testListFmtORStylekaGENumberFormatOne: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "disjunction" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი"]), "ერთი"); test.done(); }, testListFmtORStylekaGENumberFormatTwo: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "disjunction" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი"]), "ერთი ან ორი"); test.done(); }, testListFmtORStylekaGENumberFormatThree: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "disjunction" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი"]), "ერთი, ორი ან სამი"); test.done(); }, testListFmtORStylekaGENumberFormatFour: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "disjunction" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი", "ოთხი"]), "ერთი, ორი, სამი ან ოთხი"); test.done(); }, testListFmtORStylekaGENumberFormatFiveFull: function(test) { var fmt = new ListFmt({ locale: "ka-GE", style: "disjunction" }); test.expect(2); test.ok(fmt !== null); test.equal(fmt.format(["ერთი", "ორი", "სამი", "ოთხი", "ხუთი"]), "ერთი, ორი, სამი, ოთხი ან ხუთი"); test.done(); } };<|fim▁end|>
<|file_name|>mainPlugin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ /*************************************************************************** GeepsSpStats A QGIS plugin Spatial Statistics by PySAL ------------------- begin : 2014-07-01 git sha : $Format:%H$ copyright : (C) 2014 by GEEPS / Gaia3D email : [email protected] ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ """ from PyQt4.QtGui import * from PyQt4.QtCore import * import os.path from Utility import * from Widget_MoransI import Widget_MoransI from Widget_GetisOrdsG import Widget_GetisOrdsG from Widget_NearestNeighbor import Widget_NearestNeighbor from Widget_KFunction import Widget_KFunction from Widget_KnoxStatistic import Widget_KnoxStatistic from Widget_SpatialScan import Widget_SpatialScan class WidgetContainer(object): def __init__(self, iface, classTemplet, dockType=Qt.RightDockWidgetArea): self.__iface = iface self.__dockwidget = None self.__oloWidget = None self.__classTemplet = classTemplet self.__title = classTemplet.title self.__objectName = classTemplet.objectName self.__dockType = dockType # Private def __setDocWidget(self): self.__dockwidget = QDockWidget(self.__title, self.__iface.mainWindow() ) self.__dockwidget.setObjectName(self.__objectName) self.__oloWidget = self.__classTemplet(self.__iface, self.__dockwidget) self.__dockwidget.setWidget(self.__oloWidget) self.__oloWidget.updateGuiLayerList() def __initGui(self): self.__setDocWidget() self.__iface.addDockWidget(self.__dockType, self.__dockwidget) def __unload(self): 
self.__dockwidget.close() self.__iface.removeDockWidget( self.__dockwidget ) # 이벤트 헨들러가 자동제거 되지 않아 강제로 제거 self.__oloWidget.disconnectGlobalSignal() del self.__oloWidget self.__dockwidget = None # Public def setVisible(self, visible): if visible: if self.__dockwidget is None: self.__initGui() else: if not self.__dockwidget is None:<|fim▁hole|> # TODO: reflash def repaint(self): if self.__dockwidget: self.__dockwidget.update() self.__dockwidget.repaint() ### QGIS Plugin Implementation. class GeepsSpStats: crrWidget = None def __init__(self, iface): """Constructor. :param iface: An interface instance that will be passed to this class which provides the hook by which you can manipulate the QGIS application at run time. :type iface: QgsInterface """ # Save reference to the QGIS interface self.iface = iface # reference to map canvas self.canvas = self.iface.mapCanvas() # initialize plugin directory self.plugin_dir = os.path.dirname(__file__) # initialize locale locale = QSettings().value('locale/userLocale')[0:2] locale_path = os.path.join( self.plugin_dir, 'i18n', 'GeepsSpStats_{}.qm'.format(locale)) # 한국어는 GeepsSpStats_ko.qm 파일이 필요 if os.path.exists(locale_path): self.translator = QTranslator() self.translator.load(locale_path) if qVersion() > '4.3.3': QCoreApplication.installTranslator(self.translator) # Overview #self.crrWidget = WidgetContainer(iface, Widget_MoransI) # noinspection PyMethodMayBeStatic def tr(self, message): """Get the translation for a string using Qt translation API. We implement this ourselves since we do not inherit QObject. :param message: String for translation. :type message: str, QString :returns: Translated version of message. 
:rtype: QString """ # noinspection PyTypeChecker,PyArgumentList,PyCallByClass return QCoreApplication.translate('GeepsSpStats', message) def initGui(self): """Create the menu entries and toolbar icons inside the QGIS GUI.""" # Qt에서는 Action이 메뉴의 최종 아이템이라 생각하면 됨 actions = self.iface.mainWindow().menuBar().actions() self.mainMenu = QMenu(self.iface.mainWindow()) self.mainMenu.setTitle(self.tr(u'Spatial Statics')) # 이미 메뉴가 있다면 그냥 있는 것 이용 for action in actions: if action.text() == self.tr(u'Spatial Statics'): self.mainMenu = action.menu() break ### MENU1 : spatial clusters detection icon = QIcon(os.path.dirname(__file__) + "/images/publish-to-geonode.png") self.menu1 = self.mainMenu.addMenu(icon, self.tr(u'Spatial Autocorrelation')) self.mainMenu.addMenu(self.menu1) # Moran's I Statistic Menu self.moransI_Action = QAction(self.tr("Moran's I Statistic"), self.iface.mainWindow()) self.menu1.addAction(self.moransI_Action) self.moransI_Action.triggered.connect(self.showWidgetMoransI) ### MENU2 : Spatial Clustering icon = QIcon(os.path.dirname(__file__) + "/images/tree.png") self.menu2 = self.mainMenu.addMenu(icon, self.tr(u'Spatial Clustering')) self.mainMenu.addMenu(self.menu2) # Getis-Ord's G Statistic Menu self.getisOrdsG_Action = QAction(self.tr("Getis-Ord's G Statistic"), self.iface.mainWindow()) self.menu2.addAction(self.getisOrdsG_Action) self.getisOrdsG_Action.triggered.connect(self.showWidgetGetisOrdsG) # Nearest neighbor statistic Menu self.nearestNeighborStatistic_Action = QAction( self.tr(u"Nearest Neighbor Statistic"), self.menu2) self.menu2.addAction(self.nearestNeighborStatistic_Action) self.nearestNeighborStatistic_Action.triggered.connect(self.showWidgetNearestNeighbor) # K-function Menu self.Kfunction_Action = QAction(self.tr(u"K-function"), self.menu2) self.menu2.addAction(self.Kfunction_Action) self.Kfunction_Action.triggered.connect(self.showWidgetKFunction) ### MENU3 : Spatiotemporal Clustering icon = QIcon(os.path.dirname(__file__) + 
"/images/workspace.png") self.menu3 = self.mainMenu.addMenu(icon, self.tr(u'Spatiotemporal Clustering')) self.mainMenu.addMenu(self.menu3) # Knox statistic Menu self.knoxStatistic_Action = QAction(self.tr(u"Knox Statistic"), self.menu3) self.menu3.addAction(self.knoxStatistic_Action) self.knoxStatistic_Action.triggered.connect(self.showWidgetKnoxStatistic) ### MENU4 : spatial clusters detection icon = QIcon(os.path.dirname(__file__) + "/images/view.png") self.menu4 = self.mainMenu.addMenu(icon, self.tr(u'Spatial Clusters Detection')) self.mainMenu.addMenu(self.menu4) # Knox statistic Menu self.spatialScanStatistic_Action = QAction(self.tr(u"Spatial Scan Statistic"), self.menu4) self.menu4.addAction(self.spatialScanStatistic_Action) self.spatialScanStatistic_Action.triggered.connect(self.showWidgetSpatialScan) # ### HELP # icon = QIcon(os.path.dirname(__file__) + "/images/help.png") # self.help_Action = QAction(icon, self.tr(u"About GEEPS Spatial Stats"), self.menu1) # self.mainMenu.addAction(self.help_Action) # self.help_Action.triggered.connect(self.run) ### Main Menu 등록 menuBar = self.iface.mainWindow().menuBar() menuBar.insertMenu(self.iface.firstRightStandardMenu().menuAction(), self.mainMenu) def unload(self): """Removes the plugin menu item and icon from QGIS GUI.""" self.mainMenu.deleteLater() if not self.crrWidget is None: self.crrWidget.setVisible( False ) del self.crrWidget self.crrWidget = None def getLayerList(self): retLayerList = [] for layer in self.canvas.layers(): retLayerList.append(layer.name()) return retLayerList def showWidgetMoransI(self): if not self.crrWidget is None: self.crrWidget.setVisible(False) del self.crrWidget self.crrWidget = None self.crrWidget = WidgetContainer(self.iface, Widget_MoransI) self.crrWidget.setVisible(True) # TODO: UI reflash self.crrWidget.repaint() pass def showWidgetGetisOrdsG(self): if not self.crrWidget is None: self.crrWidget.setVisible(False) del self.crrWidget self.crrWidget = None self.crrWidget = 
WidgetContainer(self.iface, Widget_GetisOrdsG) self.crrWidget.setVisible(True) pass def showWidgetNearestNeighbor(self): if not self.crrWidget is None: self.crrWidget.setVisible(False) del self.crrWidget self.crrWidget = None self.crrWidget = WidgetContainer(self.iface, Widget_NearestNeighbor) self.crrWidget.setVisible(True) pass def showWidgetKFunction(self): if not self.crrWidget is None: self.crrWidget.setVisible(False) del self.crrWidget self.crrWidget = None self.crrWidget = WidgetContainer(self.iface, Widget_KFunction) self.crrWidget.setVisible(True) pass def showWidgetKnoxStatistic(self): if not self.crrWidget is None: self.crrWidget.setVisible(False) del self.crrWidget self.crrWidget = None self.crrWidget = WidgetContainer(self.iface, Widget_KnoxStatistic) self.crrWidget.setVisible(True) pass def showWidgetSpatialScan(self): if not self.crrWidget is None: self.crrWidget.setVisible(False) del self.crrWidget self.crrWidget = None self.crrWidget = WidgetContainer(self.iface, Widget_SpatialScan) self.crrWidget.setVisible(True) pass def run(self): alert("Under Construction!!!")<|fim▁end|>
self.__unload()
<|file_name|>default.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # this file is released under public domain and you can use without limitations ######################################################################### ## This is a sample controller ## - index is the default action of any application ## - user is required for authentication and authorization ## - download is for downloading files uploaded in the db (does streaming) ## - call exposes all registered services (none by default) ######################################################################### KEY = 'miclaveparafirmarurls'; def index(): """ Pantalla de bienvenida estatica, sólo muestra un enlace hacia el manejador """ return dict() @auth.requires_login() def manager(): """ Permite crear eventos nuevos, ver/copiar los creados por otros y editar los que se fueron creados por el usuario actual """ form = FORM(T('New Event'), INPUT(_name='name', requires=IS_NOT_EMPTY()), INPUT(_type='submit')) if form.accepts(request, session): # Generar el nuevo evento vacío name = form.vars.name owner_event = auth.user_id event_data = base_json_event_data.replace('"name":null', '"name":"'+name+'"') shared_data = base_json_shared_data id_event = db.events.insert(name=name, owner_event=owner_event, json_event_data=event_data, json_shared_data=shared_data) if id_event: redirect(URL('default', 'event', args=[db.events[id_event].slug])) else: response.flash = T('The new event can\'t be created') elif form.errors: response.flash = T('The form has errors') if request.vars.id_event: if request.vars.operation == 'copy': if not URL.verify(request, hmac_key=KEY): # verifica que la accion sea legitima raise HTTP(403) shared_data = base_json_shared_data row = db.events[request.vars.id_event] event_data = row.json_event_data name = row.name + T('(copy)') if db.events.insert(name=name, owner_event=auth.user_id, json_event_data=event_data, json_shared_data=shared_data): response.flash = T('Event copied') else: 
response.flash = T('It can\'t be copied') elif request.vars.operation == 'delete': if not URL.verify(request, hmac_key=KEY): # verifica que la accion sea legitima raise HTTP(403) if db.events[request.vars.id_event] \ and db.events[request.vars.id_event].owner_event == auth.user_id: del db.events[request.vars.id_event] response.flash = T('Event deleted') else: response.flash = T('You do not have permission to do that') events = db(db.events).select(db.events.ALL) return dict(events=events,form=form,key=KEY) @auth.requires_login() def event(): """ Es la página principal de la aplicación, tiene dos modos que dependen de los privilegios del usuario, si el usuario creo este evento se abre en modo edición de lo contrario en modo lectura, a excepción de los datos compartidos (actualmente sólo de asistencia). El parametro será el nombre del evento como slug, para abrir directamente la página. Es requerido un usuario logueado. """ if not request.args[0]: redirect(URL('default', 'manager')) event = db(db.events.slug == request.args[0]).select(db.events.ALL).first() if not event: event = db.events[request.args[0]] if event: redirect(URL('default', 'event', args=[event.slug])) else: raise HTTP(404, T('Event not found')) is_owner = event.owner_event == auth.user_id return dict(event=event, is_owner=is_owner) def print_event(): """ Genera una vista apta para impresión con los datos de la ventana padre (event). """ return dict() <|fim▁hole|> Es un webservice de JSON para subir y recoger datos sobre el evento. El único que puede subir datos es el creador del evento, los usuarios registrados son los que pueden leer los datos. El parametro id_event devuelve los datos JSON de ese evento, si se usa el metodo post/get data se pueden subir datos que sobreescribiran los actuales (sólo el creador puede hacer esto), devuelve true si los datos fueron actualizados con éxito, false si hubo error. 
""" if request.vars.id_event: if request.vars.data: if db.events[request.vars.id_event].owner_event == auth.user_id: # Actualizar los valores db.events[request.vars.id_event] = dict(json_event_data=request.vars.data) return 'true' else: raise HTTP(500, 'false') else: # Devolver json return db.events[request.vars.id_event].json_event_data else: raise HTTP(400, 'false') @auth.requires_login() def shared_event_data(): """ Es un webservice de JSON, en este caso cualquiera registrado puede modificar los datos del evento. El primer parametro indica sobre que evento se quiere trabajar, los parametros get/post son variable, id_object y value. Variable indica a que objeto json hijo se va a aplicar el cambio, el id a que hijo de la variable, finalmente el valor indica el nuevo valor, de no existir se genera y si existe es reemplazado. Si ningún parametro es dado devuelve todo el objeto JSON shared_data. Es necesario estar logueado. """ if request.vars.id_event: if request.vars.variable \ and request.vars.id_object and request.vars.value: json_shared_data = db.events[request.vars.id_event].json_shared_data import json python_shared_data = json.loads(json_shared_data) python_shared_data[request.vars.variable][request.vars.id_object] = request.vars.value json_shared_data = json.dumps(python_shared_data) # Actualizar el json en la base de datos db.events[request.vars.id_event] = dict(json_shared_data=json_shared_data) return 'true' else: return db.events[request.vars.id_event].json_shared_data else: raise (400, 'false') @auth.requires_login() def rename_event(): """ Renombrar el evento """ if request.vars.id_event and request.vars.new_name: if db.events[request.vars.id_event].owner_event == auth.user_id: db.events[request.vars.id_event] = dict(name=request.vars.new_name) else: raise (500, 'false') else: raise (400, 'false') def user(): """ exposes: http://..../[app]/default/user/login http://..../[app]/default/user/logout http://..../[app]/default/user/register 
http://..../[app]/default/user/profile http://..../[app]/default/user/retrieve_password http://..../[app]/default/user/change_password http://..../[app]/default/user/manage_users (requires membership in use @auth.requires_login() @auth.requires_membership('group name') @auth.requires_permission('read','table name',record_id) to decorate functions that need access control """ return dict(form=auth()) @cache.action() def download(): """ allows downloading of uploaded files http://..../[app]/default/download/[filename] """ return response.download(request, db)<|fim▁end|>
@auth.requires_login() def event_data(): """
<|file_name|>wrong_transmute.rs<|end_file_name|><|fim▁begin|>use super::WRONG_TRANSMUTE; use clippy_utils::diagnostics::span_lint; use rustc_hir::Expr; use rustc_lint::LateContext; use rustc_middle::ty::{self, Ty}; /// Checks for `wrong_transmute` lint. /// Returns `true` if it's triggered, otherwise returns `false`.<|fim▁hole|> (ty::Float(_) | ty::Char, ty::Ref(..) | ty::RawPtr(_)) => { span_lint( cx, WRONG_TRANSMUTE, e.span, &format!("transmute from a `{}` to a pointer", from_ty), ); true }, _ => false, } }<|fim▁end|>
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) -> bool { match (&from_ty.kind(), &to_ty.kind()) {
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|>import functools class memoize(object): def __init__ (self, func): self.func = func def __call__ (self, *args, **kwargs): if (args, str(kwargs)) in self.__dict__: value = self.__dict__[args, str(kwargs)] else: value = self.func(*args, **kwargs) self.__dict__[args, str(kwargs)] = value return value def __repr__(self): """ Return the function's docstring. """ return self.func.__doc__ or '' def __get__(self, obj, objtype): """ Support instance methods. """ return functools.partial(self.__call__, obj) class cached_property(object): """Property descriptor that caches the return value of the get function. *Examples* .. code-block:: python @cached_property def connection(self): return Connection() @connection.setter # Prepares stored value def connection(self, value): if value is None: raise TypeError("Connection must be a connection") return value @connection.deleter def connection(self, value): # Additional action to do at del(self.attr) if value is not None: print("Connection %r deleted" % (value, )) """ def __init__(self, fget=None, fset=None, fdel=None, doc=None): self.__get = fget self.__set = fset self.__del = fdel self.__doc__ = doc or fget.__doc__ self.__name__ = fget.__name__ self.__module__ = fget.__module__ def __get__(self, obj, type=None): if obj is None: return self try: return obj.__dict__[self.__name__] except KeyError: value = obj.__dict__[self.__name__] = self.__get(obj)<|fim▁hole|> return self if self.__set is not None: value = self.__set(obj, value) obj.__dict__[self.__name__] = value def __delete__(self, obj): if obj is None: return self try: value = obj.__dict__.pop(self.__name__) except KeyError: pass else: if self.__del is not None: self.__del(obj, value) def setter(self, fset): return self.__class__(self.__get, fset, self.__del) def deleter(self, fdel): return self.__class__(self.__get, self.__set, fdel)<|fim▁end|>
return value def __set__(self, obj, value): if obj is None:
<|file_name|>chart_b.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from common.db_sum import _metric_meta_db '''get the data from table by name''' def get_data_by_name(name, status=[1], other=0): result = [] where = '' if status: status = ",".join([str(x) for x in status]) where += ' and status in ({}) '.format(status) if other: where += ' and id not in ({}) '.format(other) sql = """ select * from t_chart_reports where name="{}" {}; """.format(name, where) try: result = _metric_meta_db.query(sql) if result: result = result[0] except Exception, e: from traceback import print_exc print_exc() return result '''get chart from table by ids''' def get_data_by_ids(sids): result = [] sids = [str(x) for x in sids] sql = """ select * from t_chart_reports where id in ({}); """.format(",".join(sids)) try: result = _metric_meta_db.query(sql) except Exception, e: from traceback import print_exc print_exc() return result '''get the data from table by id''' def get_data_by_id(sid): result = [] sql = """ select * from t_chart_reports where id={} and status=1; """.format(int(sid)) try: result = _metric_meta_db.query(sql) if result: result = result[0] except Exception, e: from traceback import print_exc print_exc() return result '''save data to chart table''' def save(form): hid = _metric_meta_db.insert('t_chart_reports', **form) return hid '''update chart table's data by id ''' def update(form): _metric_meta_db.update('t_chart_reports', where="id={}".format(form['id']), **form) return form['id'] '''get highchart_edit json''' def get_chart(chart, data): result = {} if chart and data: if chart.get('series', False): first = data[0] data = get_column_combine(data) lens = len(first) series = chart['series'] tmp_series = [] if series: now_key = -1 for key, item in enumerate(series): if key < lens - 1: now_key = key item['name'] = first[key + 1] item['data'] = data[key] tmp_series.append(item) else: break template_series = series[-1] for key, item in enumerate(first): if key == 0: 
continue elif now_key < key - 1: tmp = dict(template_series) tmp['name'] = item tmp['data'] = data[key - 1] tmp['_colorIndex'] = key - 1<|fim▁hole|> else: tmp_series = series chart['series'] = tmp_series result = chart return result '''parse new data to highchart_edit json data''' def get_column_combine(data): result = [] if data: lens = len(data[0]) if lens > 0: result = [[] for i in xrange(lens)] for key, item in enumerate(data): if key > 0: for k, it in enumerate(item): if k > 0: if it == '': result[k - 1].append([item[0], None]) else: if type(it) == str or type(it) == unicode: try: if r"." in it: if r"," in it: tmp = it.replace(",", "") it = float(tmp) else: it = float(it) elif r"," in it: tmp = it.replace(",", "") it = int(tmp) else: it = int(it) except Exception, e: from traceback import print_exc print_exc() result[k - 1].append([item[0], it]) return result '''get the chart list''' def get_chart_list(sid="", name="", fields=[], iscount=False, current=1, rowCount=20): where = [] limit = '' if sid: if type(sid) != list: sid = [sid] where.append("""and id in ({})""".format(",".join(map(str, sid)))) if name: where.append("""and name like "%{}%" """.format(name)) if rowCount: stc = (int(current) - 1) * int(rowCount) if not stc: stc = 0 limit = "limit {},{}".format(int(current) - 1, rowCount) content = "*" orders = "order by id desc" if iscount: limit = "" content = "count(*) as c" orders = "" elif fields: content = ",".join(fields) sql = """ select {} from t_chart_reports where status=1 {} {} {}; """.format(content, " ".join(where), orders, limit) result = _metric_meta_db.query(sql) if iscount: if result: return result[0]['c'] else: return 0 else: if result: return result else: return []<|fim▁end|>
tmp['_symbolIndex'] = key - 1 tmp_series.append(tmp)
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import { HomePage } from '../pages/home/home'; import { ResultPage } from '../pages/result/result'; import { ChuckNorrisJokesService } from '../services/chuck-noris-jokes-service'; const cloudSettings: CloudSettings = { 'core': { 'app_id': '128221e6' }, 'push': { 'sender_id': '484083244506', 'pluginConfig': { 'ios': { 'badge': true, 'sound': true }, 'android': { 'iconColor': '#343434', 'sound': true, 'vibrate': true } } } }; @NgModule({ declarations: [ MyApp, HomePage, ResultPage ], imports: [ IonicModule.forRoot(MyApp), CloudModule.forRoot(cloudSettings) ], bootstrap: [IonicApp], entryComponents: [ MyApp, HomePage, ResultPage ], providers: [ChuckNorrisJokesService] }) export class AppModule { }<|fim▁end|>
import { NgModule } from '@angular/core'; import { IonicApp, IonicModule } from 'ionic-angular'; import { CloudSettings, CloudModule } from '@ionic/cloud-angular'; import { MyApp } from './app.component';
<|file_name|>18_summon.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python-i # draws SUMMON logo # import math import summon from summon.core import *<|fim▁hole|>from summon import shapes, colors def interleave(a, b): c = [] for i in xrange(0, len(a), 2): c.extend(a[i:i+2] + b[i:i+2]) return c def curve(x, y, start, end, radius, width): p = shapes.arc_path(x, y, start, end, radius, 30) p2 = shapes.arc_path(x, y, start, end, radius-width, 30) return triangle_strip(*interleave(p, p2)) def draw_u(top, bottom, w, t): return group(shapes.box(-w,top, -w+t, bottom+w), shapes.box(w,top, w-t, bottom+w), curve(0, bottom+w, -math.pi, 0.0, w, t)) def draw_m(top, bottom, w, t): return group( translate(0, -2*w+t, rotate(180, draw_u(top, bottom, w, t))), translate(2*w-t, -2*w+t, rotate(180, draw_u(top, bottom, w, t)))) def draw_summon(): t = 150 # thickness w = 200 # width s = 50 # spacing top = w bottom = -3*w+t return translate(-7*w+t-2.5*s, -(top + bottom) / 2.0, # S curve(0, 0, 0, 1.5*math.pi, w, t), curve(0, -2*w+t, -math.pi, .5*math.pi, w, t), # U translate(2*w+s, 0, draw_u(top, bottom, w, t)), # M translate(4*w+2*s, 0, draw_m(top, bottom, w, t)), # M translate(8*w-t+3*s, 0, draw_m(top, bottom, w, t)), # 0 translate(12*w-2*t+4*s, 0, curve(0, 0, 0.0, math.pi, w, t), shapes.box(-w,top-w, -w+t, bottom+w), shapes.box(w,top-w, w-t, bottom+w), curve(0, bottom+w, -math.pi, 0.0, w, t)), # N translate(14*w-2*t+5*s, 0, translate(0, -2*w+t, rotate(180, draw_u(top, bottom, w, t)))) ) def blur(x, col): return group( # color fade quads(col, -2000, 0, 2000, 0, color(0, 0, 0, 0), 2000, 300, -2000, 300), # white fades quads(color(1, 1, 1, 1), -2000, 0, -2000, 600, color(1, 1, 1, 0), -x, 600, -x, 0), quads(color(1, 1, 1, 1), 2000, 0, 2000, 600, color(1, 1, 1, 0), x, 600, x, 0)) def draw_summon_logo(): return group( blur(1200, color(0, .2, .5, .8)), rotate(180, blur(0, color(0, 0, .5, .5))), color(0, 0, 0), draw_summon(), color(0, 0, 0), text_clip("visualization prototyping and scripting", 
-1600, -450, 1600, -900, 0, 20, "top", "center")) # draw logo win = summon.Window("18_summon", size=(800,400)) win.set_bgcolor(1, 1, 1) win.add_group(draw_summon_logo()) win.home()<|fim▁end|>
<|file_name|>booted.go<|end_file_name|><|fim▁begin|>// -*- Mode: Go; indent-tabs-mode: t -*- /* * Copyright (C) 2014-2015 Canonical Ltd * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 3 as * published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package snapstate import ( "fmt" "github.com/snapcore/snapd/boot" "github.com/snapcore/snapd/logger" "github.com/snapcore/snapd/overlord/state" "github.com/snapcore/snapd/release" "github.com/snapcore/snapd/snap" ) <|fim▁hole|>// still has the "active" version set to "v2" which is // misleading. This code will check what kernel/os booted and set // those versions active.To do this it creates a Change and kicks // start it directly. 
func UpdateBootRevisions(st *state.State) error { const errorPrefix = "cannot update revisions after boot changes: " if release.OnClassic { return nil } // nothing to check if there's no kernel ok, err := HasSnapOfType(st, snap.TypeKernel) if err != nil { return fmt.Errorf(errorPrefix+"%s", err) } if !ok { return nil } kernel, err := boot.GetCurrentBoot(snap.TypeKernel) if err != nil { return fmt.Errorf(errorPrefix+"%s", err) } base, err := boot.GetCurrentBoot(snap.TypeBase) if err != nil { return fmt.Errorf(errorPrefix+"%s", err) } var tsAll []*state.TaskSet for _, actual := range []*boot.NameAndRevision{kernel, base} { info, err := CurrentInfo(st, actual.Name) if err != nil { logger.Noticef("cannot get info for %q: %s", actual.Name, err) continue } if actual.Revision != info.SideInfo.Revision { // FIXME: check that there is no task // for this already in progress ts, err := RevertToRevision(st, actual.Name, actual.Revision, Flags{}) if err != nil { return err } tsAll = append(tsAll, ts) } } if len(tsAll) == 0 { return nil } msg := fmt.Sprintf("Update kernel and core snap revisions") chg := st.NewChange("update-revisions", msg) for _, ts := range tsAll { chg.AddAll(ts) } st.EnsureBefore(0) return nil }<|fim▁end|>
// UpdateBootRevisions synchronizes the active kernel and OS snap versions // with the versions that actually booted. This is needed because a // system may install "os=v2" but that fails to boot. The bootloader // fallback logic will revert to "os=v1" but on the filesystem snappy
<|file_name|>bitcoin_hu.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="hu" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About CandyCoin</source> <translation>A CandyCoinról</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;CandyCoin&lt;/b&gt; version</source> <translation>&lt;b&gt;CandyCoin&lt;/b&gt; verzió</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Ez egy kísérleti program. MIT/X11 szoftverlicenc alatt kiadva, lásd a mellékelt fájlt COPYING vagy http://www.opensource.org/licenses/mit-license.php. 
Ez a termék az OpenSSL Project által lett kifejlesztve az OpenSSL Toolkit (http://www.openssl.org/) és kriptográfiai szoftvertben való felhasználásra, írta Eric Young ([email protected]) és UPnP szoftver, írta Thomas Bernard.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The CandyCoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Címjegyzék</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Dupla-kattintás a cím vagy a címke szerkesztéséhez</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Új cím létrehozása</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>A kiválasztott cím másolása a vágólapra</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Új cím</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your CandyCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Ezekkel a CandyCoin-címekkel fogadhatod kifizetéseket. 
Érdemes lehet minden egyes kifizető számára külön címet létrehozni, hogy könnyebben nyomon követhesd, kitől kaptál már pénzt.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Cím másolása</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>&amp;QR kód mutatása</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a CandyCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Jelenlegi nézet exportálása fájlba</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified CandyCoin address</source> <translation>Üzenet ellenőrzése, hogy valóban a megjelölt CandyCoin címekkel van-e aláírva.</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>Üzenet ellenőrzése</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Törlés</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your CandyCoin addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Címke &amp;másolása</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>Sz&amp;erkesztés</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Címjegyzék adatainak exportálása</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Vesszővel elválasztott fájl (*. csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Hiba exportálás közben</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>%1 nevű fájl nem írható.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Címke</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Cím</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(nincs címke)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Kulcsszó párbeszédablak</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Add meg a jelszót</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Új jelszó</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Új jelszó 
újra</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Írd be az új jelszót a tárcához.&lt;br/&gt;Használj legalább 10&lt;br/&gt;véletlenszerű karaktert&lt;/b&gt; vagy &lt;b&gt;legalább nyolc szót&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Tárca kódolása</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>A tárcád megnyitásához a műveletnek szüksége van a tárcád jelszavára.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Tárca megnyitása</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>A tárcád dekódolásához a műveletnek szüksége van a tárcád jelszavára.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Tárca dekódolása</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Jelszó megváltoztatása</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Írd be a tárca régi és új jelszavát.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Biztosan kódolni akarod a tárcát?</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR LITECOINS&lt;/b&gt;!</source> <translation>Figyelem: Ha kódolod a tárcát, és elveszíted a jelszavad, akkor &lt;b&gt;AZ ÖSSZES 
LITECOINODAT IS EL FOGOD VESZÍTENI!&lt;/b&gt;</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Biztosan kódolni akarod a tárcát?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>FONTOS: A pénztárca-fájl korábbi mentéseit ezzel az új, titkosított pénztárca-fájllal kell helyettesíteni. Biztonsági okokból a pénztárca-fájl korábbi titkosítás nélküli mentései haszontalanná válnak amint elkezdi használni az új, titkosított pénztárcát.</translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation type="unfinished"/> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Tárca kódolva</translation> </message> <message> <location line="-56"/> <source>CandyCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your CandyCoins from being stolen by malware infecting your computer.</source> <translation>CandyCoin will close now to finish the encryption process. Ne feledd, hogy a tárca titkosítása sem nyújt teljes védelmet az adathalász programok fertőzésével szemben.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Tárca kódolása sikertelen.</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Tárca kódolása belső hiba miatt sikertelen. 
A tárcád nem lett kódolva.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>A megadott jelszavak nem egyeznek.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Tárca megnyitása sikertelen</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Hibás jelszó.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Dekódolás sikertelen.</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Jelszó megváltoztatva.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Üzenet aláírása...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Szinkronizálás a hálózattal...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Áttekintés</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Tárca általános áttekintése</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Tranzakciók</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Tranzakciótörténet megtekintése</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Tárolt címek és címkék listájának szerkesztése</translation> </message> <message> <location line="-14"/> <source>Show the list of 
addresses for receiving payments</source> <translation>Kiizetést fogadó címek listája</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>&amp;Kilépés</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Kilépés</translation> </message> <message> <location line="+4"/> <source>Show information about CandyCoin</source> <translation>Információk a CandyCoinról</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>A &amp;Qt-ról</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Információk a Qt ról</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Opciók...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>Tárca &amp;kódolása...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Bisztonsági másolat készítése a Tárcáról</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>Jelszó &amp;megváltoztatása...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>A blokkok importálása lemezről...</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>A blokkok lemezen történő ujraindexelése...</translation> </message> <message> <location line="-347"/> <source>Send coins to a CandyCoin address</source> <translation>Érmék küldése megadott címre</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for CandyCoin</source> <translation>CandyCoin konfigurációs opciók</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> 
<translation>Biztonsági másolat készítése a Tárcáról egy másik helyre</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Tárcakódoló jelszó megváltoztatása</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Debug ablak</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Hibakereső és diagnosztikai konzol megnyitása</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>Üzenet &amp;valódiságának ellenőrzése</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>CandyCoin</source> <translation>CandyCoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Tárca</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About CandyCoin</source> <translation>&amp;A CandyCoinról</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>A pénztárcájához tartozó privát kulcsok titkosítása</translation> </message> <message> <location line="+7"/> <source>Sign messages with your CandyCoin addresses to prove you own them</source> <translation>Üzenet aláírása a CandyCoin címmel, amivel bizonyítja, 
hogy a cím az ön tulajdona.</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified CandyCoin addresses</source> <translation>Annak ellenőrzése, hogy az üzenetek valóban a megjelölt CandyCoin címekkel vannak-e alaírva</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Fájl</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Beállítások</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Súgó</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Fül eszköztár</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[teszthálózat]</translation> </message> <message> <location line="+47"/> <source>CandyCoin client</source> <translation>CandyCoin kliens</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to CandyCoin network</source> <translation><numerusform>%n aktív kapcsolat a CandyCoin-hálózattal</numerusform><numerusform>%n aktív kapcsolat a CandyCoin-hálózattal</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>A tranzakció-történet %1 blokkja feldolgozva.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n 
day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. 
Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Naprakész</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Frissítés...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Tranzakciós díj jóváhagyása</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Tranzakció elküldve.</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Beérkező tranzakció</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Dátum: %1 Összeg: %2 Típus: %3 Cím: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid CandyCoin address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Tárca &lt;b&gt;kódolva&lt;/b&gt; és jelenleg &lt;b&gt;nyitva&lt;/b&gt;.</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Tárca &lt;b&gt;kódolva&lt;/b&gt; és jelenleg &lt;b&gt;zárva&lt;/b&gt;.</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. 
CandyCoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Cím szerkesztése</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>Cím&amp;ke</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>A címhez tartozó címke</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Cím</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Az ehhez a címjegyzék-bejegyzéshez tartozó cím. 
Ez csak a küldő címeknél módosítható.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Új fogadó cím</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Új küldő cím</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Fogadó cím szerkesztése</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Küldő cím szerkesztése</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>A megadott &quot;%1&quot; cím már szerepel a címjegyzékben.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid CandyCoin address.</source> <translation>A megadott &quot;%1&quot; cím nem egy érvényes CandyCoin-cím.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Tárca feloldása sikertelen</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Új kulcs generálása sikertelen</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>CandyCoin-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation>verzió</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Használat:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>parancssoros opciók</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>UI opciók</translation> </message> <message> 
<location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Indítás lekicsinyítve </translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opciók</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Fő</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Tranzakciós &amp;díj fizetése</translation> </message> <message> <location line="+31"/> <source>Automatically start CandyCoin after logging in to the system.</source> <translation>Induljon el a CandyCoin a számítógép bekapcsolásakor</translation> </message> <message> <location line="+3"/> <source>&amp;Start CandyCoin on system login</source> <translation>&amp;Induljon el a számítógép bekapcsolásakor</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Automatically open the CandyCoin client port on the router. 
This only works when your router supports UPnP and it is enabled.</source> <translation>A CandyCoin-kliens portjának automatikus megnyitása a routeren. Ez csak akkor működik, ha a routered támogatja az UPnP-t és az engedélyezve is van rajta.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>&amp;UPnP port-feltérképezés</translation> </message> <message> <location line="+7"/> <source>Connect to the CandyCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>SOCKS proxyn keresztüli csatlakozás a CandyCoin hálózatához (pl. Tor-on keresztüli csatlakozás esetén)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Csatlakozás SOCKS proxyn keresztül:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Proxy IP címe (pl.: 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Proxy portja (pl.: 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Kicsinyítés után csak eszköztár-ikont mutass</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Kicsinyítés a tálcára az eszköztár helyett</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Az alkalmazásból való kilépés helyett az eszköztárba kicsinyíti az alkalmazást az ablak bezárásakor. Ez esetben az alkalmazás csak a Kilépés menüponttal zárható be.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>K&amp;icsinyítés záráskor</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Megjelenítés</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting CandyCoin.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Mértékegység:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Válaszd ki az interfészen és érmék küldésekor megjelenítendő alapértelmezett alegységet.</translation> </message> <message> <location line="+9"/> <source>Whether to show CandyCoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Címek megjelenítése a tranzakciólistában</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>Megszakítás</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>Alkalmazás</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>alapértelmezett</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Figyelem</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting CandyCoin.</source> <translation>Ez a beállítás a CandyCoin 
újraindítása után lép érvénybe.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation type="unfinished"/> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Űrlap</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the CandyCoin network after a connection is established, but this process has not completed yet.</source> <translation>A kijelzett információ lehet, hogy elavult. A pénztárcája automatikusan szinkronizálja magát a CandyCoin hálózattal miután a kapcsolat létrejön, de ez a folyamat még nem fejeződött be.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Egyenleg:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Megerősítetlen:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Tárca</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Legutóbbi tranzakciók&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Aktuális egyenleged</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Még megerősítésre váró, a jelenlegi egyenlegbe be nem számított tranzakciók</translation> </message> <message> 
<location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>Nincs szinkronban.</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start candycoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>QR kód párbeszédablak</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Fizetés kérése</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Összeg:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Címke:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Üzenet:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>Mentés má&amp;sként</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Hiba lépett fel az URI QR kóddá alakításakor</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>A megadott összeg nem érvényes. 
Kérem ellenőrizze.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>A keletkezett URI túl hosszú, próbálja meg csökkenteni a cimkeszöveg / üzenet méretét.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>QR kód mentése</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG Képfájlok (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Kliens néve</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>Nem elérhető</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Kliens verzió</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Információ</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Bekapcsolás ideje</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Hálózat</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Kapcsolatok száma</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>Teszthálózaton</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Blokklánc</translation> </message> 
<message> <location line="+7"/> <source>Current number of blocks</source> <translation>Aktuális blokkok száma</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Becsült összes blokk</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Utolsó blokk ideje</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Megnyitás</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the CandyCoin-Qt help message to get a list with possible CandyCoin command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Konzol</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Fordítás dátuma</translation> </message> <message> <location line="-104"/> <source>CandyCoin - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>CandyCoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Open the CandyCoin debug log file from the current data directory. 
This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Konzol törlése</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the CandyCoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Navigálhat a fel és le nyilakkal, és &lt;b&gt;Ctrl-L&lt;/b&gt; -vel törölheti a képernyőt.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Érmék küldése</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Küldés több címzettnek egyszerre</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>&amp;Címzett hozzáadása</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Az összes tranzakciós mező eltávolítása</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Mindent &amp;töröl</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Egyenleg:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> 
<location line="+31"/> <source>Confirm the send action</source> <translation>Küldés megerősítése</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Küldés</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; %2-re (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Küldés megerősítése</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Valóban el akarsz küldeni %1-t?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> és</translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>A címzett címe érvénytelen, kérlek, ellenőrizd.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>A fizetendő összegnek nagyobbnak kell lennie 0-nál.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Nincs ennyi candycoin az egyenlegeden.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>A küldeni kívánt összeg és a %1 tranzakciós díj együtt meghaladja az egyenlegeden rendelkezésedre álló összeget.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Többször szerepel ugyanaz a cím. 
Egy küldési műveletben egy címre csak egyszer lehet küldeni.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Hiba: a tranzakciót elutasították. Ezt az okozhatja, ha már elköltöttél valamennyi érmét a tárcádból például ha a wallet.dat-od egy másolatát használtad, és így az elköltés csak abban lett jelölve, de itt nem.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Űrlap</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Összeg:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Címzett:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Milyen címkével kerüljön be ez a cím a címtáradba? 
</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>Címke:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Válassz egy címet a címjegyzékből</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Cím beillesztése a vágólapról</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Címzett eltávolítása</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a CandyCoin address (e.g. FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX)</source> <translation>Adj meg egy CandyCoin-címet (pl.: FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX )</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>Üzenet aláírása...</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Aláírhat a címeivel üzeneteket, amivel bizonyíthatja, hogy a címek az önéi. Vigyázzon, hogy ne írjon alá semmi félreérthetőt, mivel a phising támadásokkal megpróbálhatják becsapni, hogy az azonosságát átírja másokra. 
Csak olyan részletes állításokat írjon alá, amivel egyetért.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX)</source> <translation>Adj meg egy CandyCoin-címet (pl.: FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX )</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Válassz egy címet a címjegyzékből</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Cím beillesztése a vágólapról</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Ide írja az aláírandó üzenetet</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>A jelenleg kiválasztott aláírás másolása a rendszer-vágólapra</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this CandyCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Mindent &amp;töröl</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>Üzenet ellenőrzése</translation> 
</message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Írja be az aláírás címét, az üzenetet (ügyelve arra, hogy az új-sor, szóköz, tab, stb. karaktereket is pontosan) és az aláírást az üzenet ellenőrzéséhez. Ügyeljen arra, ne gondoljon többet az aláírásról, mint amennyi az aláírt szövegben ténylegesen áll, hogy elkerülje a köztes-ember (man-in-the-middle) támadást.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX)</source> <translation>Adj meg egy CandyCoin-címet (pl.: FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX )</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified CandyCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a CandyCoin address (e.g. 
FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX)</source> <translation>Adj meg egy CandyCoin-címet (pl.: FNHGjAavM3RPqFZ4h3YEqT4uoNxENoY7MX )</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter CandyCoin signature</source> <translation>Adja meg a CandyCoin aláírást</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>A megadott cím nem érvényes.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Ellenőrizze a címet és próbálja meg újra.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message 
verification failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation type="unfinished"/> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The CandyCoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[teszthálózat]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Megnyitva %1-ig</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/megerősítetlen</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 megerősítés</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Állapot</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Dátum</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Legenerálva</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Feladó</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Címzett</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own 
address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation>címke</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Jóváírás</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>elutasítva</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Terhelés</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Tranzakciós díj</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Nettó összeg</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Üzenet</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Megjegyzés</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>A frissen generált érméket csak 120 blokkal később tudod elkölteni. Ez a blokk nyomban szétküldésre került a hálózatba, amint legeneráltad, hogy hozzáadhassák a blokklánchoz. 
Ha nem kerül be a láncba, úgy az állapota &quot;elutasítva&quot;-ra módosul, és nem költheted el az érméket. Ez akkor következhet be időnként, ha egy másik csomópont mindössze néhány másodperc különbséggel generált le egy blokkot a tiédhez képest.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Tranzakció</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Összeg</translation> </message> <message> <location line="+1"/> <source>true</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>false</source> <translation type="unfinished"/> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, még nem sikerült elküldeni.</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>ismeretlen</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Tranzakció részletei</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Ez a mező a tranzakció részleteit mutatja</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Dátum</translation> </message> <message> <location 
line="+0"/> <source>Type</source> <translation>Típus</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Cím</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Összeg</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>%1-ig megnyitva</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Offline (%1 megerősítés)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Megerősítetlen (%1 %2 megerősítésből)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Megerősítve (%1 megerősítés)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Ezt a blokkot egyetlen másik csomópont sem kapta meg, így valószínűleg nem lesz elfogadva!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Legenerálva, de még el nem fogadva.</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Erre a címre</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Erről az</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Erre a 
címre</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Magadnak kifizetve</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Kibányászva</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(nincs)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Tranzakció állapota. Húzd ide a kurzort, hogy lásd a megerősítések számát.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Tranzakció fogadásának dátuma és időpontja.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tranzakció típusa.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>A tranzakció címzettjének címe.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Az egyenleghez jóváírt vagy ráterhelt összeg.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Mind</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Mai</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Ezen a héten</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Ebben a hónapban</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Múlt hónapban</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Ebben az évben</translation> </message> <message> 
<location line="+1"/> <source>Range...</source> <translation>Tartomány ...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Erre a címre</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Erre a címre</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Magadnak</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Kibányászva</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Más</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Írd be a keresendő címet vagy címkét</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Minimális összeg</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Cím másolása</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Címke másolása</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Összeg másolása</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Címke szerkesztése</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Tranzakciós részletek megjelenítése</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Tranzakció adatainak exportálása</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Vesszővel elválasztott fájl (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> 
<translation>Megerősítve</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Dátum</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Típus</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Címke</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Cím</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Összeg</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>Azonosító</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Hiba lépett fel exportálás közben</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>%1 fájlba való kiírás sikertelen.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Tartomány:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>meddig</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Érmék küldése</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Jelenlegi nézet exportálása fájlba</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Biztonsági másolat készítése a Tárcáról</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Tárca fájl (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup 
Failed</source> <translation>Biztonsági másolat készítése sikertelen</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Hiba lépett fel a Tárca másik helyre való mentése közben</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>CandyCoin version</source> <translation>CandyCoin verzió</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Használat:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or candycoind</source> <translation>Parancs küldése a -serverhez vagy a candycoindhez </translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Parancsok kilistázása </translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Segítség egy parancsról </translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Opciók </translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: candycoin.conf)</source> <translation>Konfigurációs fájl (alapértelmezett: candycoin.conf) </translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: candycoind.pid)</source> <translation>pid-fájl (alapértelmezett: candycoind.pid) </translation> </message> <message> <location line="-54"/> <source>Specify data directory</source> <translation>Adatkönyvtár </translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Az adatbázis gyorsítótár mérete megabájtban
(alapértelmezés: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 7912 or testnet: 17912)</source> <translation>Csatlakozásokhoz figyelendő &lt;port&gt; (alapértelmezett: 7912 or testnet: 17912)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Maximálisan &lt;n&gt; számú kapcsolat fenntartása a peerekkel (alapértelmezés: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Kapcsolódás egy csomóponthoz a peerek címeinek megszerzése miatt, majd szétkapcsolás</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Adja meg az Ön saját nyilvános címét</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Helytelenül viselkedő peerek leválasztási határértéke (alapértelmezés: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Helytelenül viselkedő peerek kizárási ideje másodpercben (alapértelmezés: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 7913 or testnet: 17913)</source> <translation>JSON-RPC csatlakozásokhoz figyelendő &lt;port&gt; (alapértelmezett: 7913 or testnet: 17913)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Parancssoros és JSON-RPC parancsok elfogadása </translation> </message> 
<message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Háttérben futtatás daemonként és parancsok elfogadása </translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Teszthálózat használata </translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=candycoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;CandyCoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. CandyCoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Figyelem: a -paytxfee nagyon magas. Ennyi tranzakciós díjat fogsz fizetni, ha elküldöd a tranzakciót.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong CandyCoin will not work properly.</source> <translation>Figyelem: Ellenőrizd, hogy helyesen van-e beállítva a gépeden a dátum és az idő. 
A CandyCoin nem fog megfelelően működni, ha rosszul van beállítva az órád.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Csatlakozás csak a megadott csomóponthoz</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> 
<location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> 
<message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Érvénytelen -tor cím: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> 
<translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Csak blokklánccal egyező beépített ellenőrző pontok elfogadása (alapértelmezés: 1)</translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Időbélyeges hibakeresési kimenet hozzáadása az elejéhez</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the CandyCoin Wiki for SSL setup instructions)</source> <translation>SSL-opciók: (lásd a CandyCoin Wiki SSL-beállítási instrukcióit)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>trace/debug információ küldése a konzolra a debog.log fájl helyett</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>trace/debug információ küldése a debuggerre</translation> </message> 
<message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Csatlakozás időkerete milliszekundumban (alapértelmezett: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>UPnP-használat engedélyezése a figyelő port feltérképezésénél (default: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>UPnP-használat engedélyezése a figyelő port feltérképezésénél (default: 1 when listening)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Felhasználói név JSON-RPC csatlakozásokhoz 
</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Jelszó JSON-RPC csatlakozásokhoz </translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>JSON-RPC csatlakozások engedélyezése meghatározott IP-címről </translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Parancsok küldése &lt;ip&gt; címen működő csomóponthoz (alapértelmezett: 127.0.0.1) </translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Parancs, amit akkor hajt végre, amikor a legjobb blokk megváltozik (%s a cmd-ban lecserélődik a blokk hash-re)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>A Tárca frissítése a legfrissebb formátumra</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Kulcskarika mérete &lt;n&gt; (alapértelmezett: 100) </translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Blokklánc újraszkennelése hiányzó tárca-tranzakciók után </translation> </message> 
<message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>OpenSSL (https) használata JSON-RPC csatalkozásokhoz </translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Szervertanúsítvány-fájl (alapértelmezett: server.cert) </translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Szerver titkos kulcsa (alapértelmezett: server.pem) </translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Elfogadható rejtjelkulcsok (alapértelmezett: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH ) </translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Ez a súgó-üzenet </translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>A %s nem elérhető ezen a gépen (bind returned error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Csatlakozás SOCKS proxyn keresztül</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>DNS-kikeresés engedélyezése az addnode-nál és a connect-nél</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Címek betöltése...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Hiba a wallet.dat betöltése közben: meghibásodott tárca</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of CandyCoin</source> <translation>Hiba a wallet.dat betöltése 
közben: ehhez a tárcához újabb verziójú CandyCoin-kliens szükséges</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart CandyCoin to complete</source> <translation>A Tárca újraírása szükséges: Indítsa újra a teljesen a CandyCoin-t</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Hiba az wallet.dat betöltése közben</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Érvénytelen -proxy cím: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Ismeretlen hálózat lett megadva -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Ismeretlen -socks proxy kérése: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Étvénytelen -paytxfee=&lt;összeg&gt; összeg: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Étvénytelen összeg</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Nincs elég candycoinod.</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Blokkindex betöltése...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> 
<translation>Elérendő csomópont megadása and attempt to keep the connection open</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. CandyCoin is probably already running.</source> <translation>A %s nem elérhető ezen a gépen. A CandyCoin valószínűleg fut már.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>kB-onként felajánlandó díj az általad küldött tranzakciókhoz</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Tárca betöltése...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>Nem sikerült a Tárca visszaállítása a korábbi verzióra</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Nem sikerült az alapértelmezett címet írni.</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Újraszkennelés...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Betöltés befejezve.</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>Használd a %s opciót</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>Hiba</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Be kell állítani rpcpassword=&lt;password&gt; a konfigurációs fájlban %s Ha a fájl nem létezik, hozd létre &apos;csak a felhasználó által olvasható&apos; fájl engedéllyel</translation> </message> </context> </TS><|fim▁end|>
<location line="-1"/> <source>Specify data directory</source> <translation>Adatkönyvtár
<|file_name|>codegen_common.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python ########################################################################## # # MTraceCheck # Copyright 2017 The Regents of the University of Michigan # Doowon Lee and Valeria Bertacco # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ########################################################################## # # This file should be called from codegen.py # #################################################################### # Data section #################################################################### def generate_data_section(dataName, memLocs, strideType): assert(memLocs <= 0x10000) #dataArray = [] #for i in range(memLocs): # data = [i & 0xFF, (i >> 8) & 0xFF, 0xFF, 0xFF] # dataArray += data ## Data contents will be initialized in test manager, so just create a placeholder if (strideType == 0): dataArray = [0xFF for i in range(memLocs * 4 * 1)] elif (strideType == 1): dataArray = [0xFF for i in range(memLocs * 4 * 4)] elif (strideType == 2): dataArray = [0xFF for i in range(memLocs * 4 * 16)] else: assert(False) dataFP = open(dataName, "w") dataFP.write(bytearray(dataArray)) dataFP.close() #################################################################### # BSS section (section to be written by test threads) #################################################################### def generate_bss_section(bssName, bssSize): #bssArray = [] #for i in range(bssSize): # bssArray += [0x00] #bssFP = 
open(bssName, "w") #bssFP.write(bytearray(bssArray)) #bssFP.close() # Faster code bssFP = open(bssName, "wb") bssFP.seek(bssSize-1) bssFP.write("\0") bssFP.close() #################################################################### # Test manager CPP file #################################################################### def generate_test_manager(cppName, headerName, threadList, bssBase, bssSizePerThread, signatureSize, regBitWidth, numExecutions, strideType): # See an example of cpp file at exp/160815_test_manager/test_manager.cpp # (This example is possibly outdated) wordTypeString = "uint%d_t" % regBitWidth cppString = "" cppString += "#include <stdio.h>\n" cppString += "#include <stdlib.h>\n" cppString += "#include <stdint.h>\n"<|fim▁hole|> cppString += "#include \"%s\"\n" % headerName for thread in threadList: cppString += "extern \"C\" void* thread%d_routine(void*);\n" % thread cppString += "volatile int thread_spawn_lock = 0;\n" cppString += "#ifdef EXEC_SYNC\n" cppString += "volatile int thread_exec_barrier0 = 0;\n" cppString += "volatile int thread_exec_barrier1 = 0;\n" cppString += "volatile int thread_exec_barrier_ptr = 0;\n" cppString += "#endif\n" cppString += "int main()\n" cppString += "{\n" cppString += " int pthread_return;\n" cppString += " int numThreads = %d;\n" % len(threadList) cppString += " // Test BSS section initialization\n" cppString += " %s *bss_address = (%s *) TEST_BSS_SECTION;\n" % (wordTypeString, wordTypeString) cppString += " for (int i = 0; i < numThreads * TEST_BSS_SIZE_PER_THREAD; i += sizeof(%s)) {\n" % (wordTypeString) cppString += " *(bss_address++) = 0;\n" cppString += " }\n" cppString += " // Test data section initialization\n" cppString += " uint32_t *data_address= (uint32_t *) TEST_DATA_SECTION;\n" cppString += " for (int i = 0; i < NUM_SHARED_DATA; i++) {\n" cppString += " *data_address = (uint32_t) (0xFFFF0000 | i);\n" if (strideType == 0): cppString += " data_address++; // strideType = 0\n" elif (strideType == 1): 
cppString += " data_address+=4; // strideType = 1\n" elif (strideType == 2): cppString += " data_address+=16; // strideType = 2\n" else: assert(False) cppString += " }\n" cppString += " pthread_t* threads = (pthread_t *) malloc(sizeof(pthread_t) * numThreads);\n" for threadIndex in range(len(threadList)): cppString += " pthread_return = pthread_create(&threads[%d], NULL, thread%d_routine, NULL);\n" % (threadIndex, threadList[threadIndex]) cppString += " for (int t = 0; t < numThreads; t++)\n" cppString += " pthread_return = pthread_join(threads[t], NULL);\n" cppString += " std::map<std::vector<%s>, int> signatureMap;\n" % (wordTypeString) cppString += " std::vector<%s> resultVector;\n" % (wordTypeString) cppString += " %s *signature = (%s *) TEST_BSS_SECTION;\n" % (wordTypeString, wordTypeString) cppString += " for (int i = 0; i < EXECUTION_COUNT; i++) {\n" cppString += " resultVector.clear();\n" #cppString += "#ifndef NO_PRINT\n" cppString += "#if 0\n" cppString += " printf(\"%8d:\", i);\n" cppString += "#endif\n" cppString += " for (int t = 0; t < numThreads; t++) {\n" cppString += " for (int w = 0; w < SIGNATURE_SIZE_IN_WORD; w++) {\n" # NOTE: SIGNATURE WORD REORDERING #cppString += " for (int w = SIGNATURE_SIZE_IN_WORD - 1; w >= 0; w--) {\n" #cppString += " for (int t = 0; t < numThreads; t++) {\n" cppString += " %s address = (%s) signature + t * TEST_BSS_SIZE_PER_THREAD + w * sizeof(%s);\n" % (wordTypeString, wordTypeString, wordTypeString) cppString += " %s result = (%s)*(%s*)address;\n" % (wordTypeString, wordTypeString, wordTypeString) cppString += " resultVector.push_back(result);\n" #cppString += "#ifndef NO_PRINT\n" cppString += "#if 0\n" cppString += " printf(\" 0x%%0%dlx\", result);\n" % (regBitWidth / 8 * 2) #cppString += " printf(\" 0x%%lx 0x%%0%dlx\", address, result);\n" % signatureSize cppString += "#endif\n" cppString += " }\n" cppString += " }\n" cppString += " if (signatureMap.find(resultVector) == signatureMap.end())\n" cppString += " 
signatureMap[resultVector] = 1;\n" cppString += " else\n" cppString += " signatureMap[resultVector]++;\n" #cppString += "#ifndef NO_PRINT\n" cppString += "#if 0\n" cppString += " printf(\"\\n\");\n" cppString += "#endif\n" cppString += " signature += SIGNATURE_SIZE_IN_WORD;\n" cppString += " }\n" cppString += "#ifndef NO_PRINT\n" cppString += " for (std::map<std::vector<%s>, int>::iterator it = signatureMap.begin(); it != signatureMap.end(); it++) {\n" % (wordTypeString) cppString += " for (int i = 0; i < (it->first).size(); i++)\n" cppString += " printf(\" 0x%%0%dlx\", (it->first)[i]);\n" % (regBitWidth / 8 * 2) cppString += " printf(\": %d\\n\", it->second);\n" cppString += " }\n" cppString += "#endif\n" cppString += " printf(\"Number of unique results %lu out of %d\\n\", signatureMap.size(), EXECUTION_COUNT);\n" cppString += " fflush(stdout);\n" cppString += " return 0;\n" cppString += "}\n" cppFP = open(cppName, "w") cppFP.write(cppString) cppFP.close() def manager_common(headerName, dataName, dataBase, memLocs, bssName, bssBase, bssSizePerThread, cppName, threadList, signatureSize, regBitWidth, numExecutions, platform, strideType, verbosity): if (platform == "linuxpthread"): # Data section and BSS section generate_data_section(dataName, memLocs, strideType) if (verbosity > 0): print("Data binary file %s generated (base 0x%X, size %d)" % (dataName, dataBase, memLocs * 4)) bssSize = bssSizePerThread * len(threadList) generate_bss_section(bssName, bssSize) if (verbosity > 0): print("BSS binary file %s generated (base 0x%X, size %d)" % (bssName, bssBase, bssSize)) generate_test_manager(cppName, headerName, threadList, bssBase, bssSizePerThread, signatureSize, regBitWidth, numExecutions, strideType) if (verbosity > 0): print("Test manager %s generated" % (cppName)) #################################################################### # Compute signature size (maximum signature size across all threads) 
#################################################################### def compute_max_signature_size(intermediate, regBitWidth): maxSignatureFlushCount = 0 perthreadSignatureSizes = dict() for thread in intermediate: pathCount = 0 signatureFlushCount = 0 for intermediateCode in intermediate[thread]: if (intermediateCode["type"] == "profile"): # reg, targets if ((pathCount * len(intermediateCode["targets"])) > ((1 << regBitWidth) - 1)): pathCount = 0 signatureFlushCount += 1 if (pathCount == 0): pathCount = len(intermediateCode["targets"]) else: pathCount = pathCount * len(intermediateCode["targets"]) perthreadSignatureSizes[thread] = (signatureFlushCount + 1) * regBitWidth / 8 if (signatureFlushCount > maxSignatureFlushCount): maxSignatureFlushCount = signatureFlushCount # Number of bytes for each signature temp = (maxSignatureFlushCount + 1) * regBitWidth / 8 # Log2 ceiling function power2Boundary = 1 while (power2Boundary < temp): power2Boundary <<= 1 return [max(power2Boundary, regBitWidth / 8), perthreadSignatureSizes]<|fim▁end|>
cppString += "#include <pthread.h>\n" cppString += "#include <map>\n" cppString += "#include <vector>\n"
<|file_name|>sell.py<|end_file_name|><|fim▁begin|>""" 일반매도 """ import base64 import simplejson as json import hashlib import hmac import httplib2 import time ACCESS_KEY = '' SECRET_KEY = '' currency = 'btc-krw' def get_encoded_payload(payload): dumped_json = json.dumps(payload) encoded_json = base64.b64encode(dumped_json) return encoded_json <|fim▁hole|>def get_signature(encoded_payload, secret_key): signature = hmac.new(str(secret_key), str(encoded_payload), hashlib.sha512); return signature.hexdigest() def get_response(url, payload): encoded_payload = get_encoded_payload(payload) headers = { 'content-type': 'application/json', 'X-COINRAIL-PAYLOAD': encoded_payload, 'X-COINRAIL-SIGNATURE': get_signature(encoded_payload, SECRET_KEY) } http = httplib2.Http() response, content = http.request(url, 'POST', headers=headers, body=encoded_payload) return content def limit_sell(): url = 'https://api.coinrail.co.kr/order/limit/sell' payload = { "access_key": ACCESS_KEY, "currency": currency, "price" : 4900000, "qty" : 0.1, "timestamp" : int(round(time.time() * 1000)) } response = get_response(url, payload) print response content = json.loads(response) return content if __name__ == "__main__": print limit_sell()<|fim▁end|>
<|file_name|>StoreAppleReceiptParser.hpp<|end_file_name|><|fim▁begin|>// // StoreAppleReceiptParser.hpp // Pods // // Created by eps on 7/3/20. //<|fim▁hole|> #include <string> #include "ee/store/StoreFwd.hpp" namespace ee { namespace store { class AppleReceiptParser { public: AppleReceiptParser(); std::shared_ptr<AppleReceipt> parse(const std::string& receiptData); private: IMessageBridge& bridge_; }; } // namespace store } // namespace ee #endif /* EE_X_STORE_APPLE_RECEIPT_PARSER_HPP */<|fim▁end|>
#ifndef EE_X_STORE_APPLE_RECEIPT_PARSER_HPP #define EE_X_STORE_APPLE_RECEIPT_PARSER_HPP
<|file_name|>misc7.C<|end_file_name|><|fim▁begin|><|fim▁hole|> int main() { int i = 0; // Make sure build_unary_op correctly computes this. int *pi = &(++i); *pi = 4; if (i != 4) { printf ("FAIL\n"); return 1; } else printf ("PASS\n"); }<|fim▁end|>
// { dg-do run } // GROUPS passed miscellaneous extern "C" int printf (const char *, ...);
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! API for tracing applications and libraries. //! //! The `trace` module includes types for tracking the progression of a single //! request while it is handled by services that make up an application. A trace //! is a tree of [`Span`]s which are objects that represent the work being done //! by individual services or components involved in a request as it flows //! through a system. This module implements the OpenTelemetry [trace //! specification]. //! //! [trace specification]: https://github.com/open-telemetry/opentelemetry-specification/blob/v1.3.0/specification/trace/api.md //! //! ## Getting Started //! //! ``` //! use opentelemetry_api::{global, trace::{Span, Tracer, TracerProvider}}; //! //! fn my_library_function() { //! // Use the global tracer provider to get access to the user-specified //! // tracer configuration //! let tracer_provider = global::tracer_provider(); //! //! // Get a tracer for this library //! let tracer = tracer_provider.versioned_tracer( //! "my_name", //! Some(env!("CARGO_PKG_VERSION")), //! None //! ); //! //! // Create spans //! let mut span = tracer.start("doing_work"); //! //! // Do work... //! //! // End the span //! span.end(); //! } //! ``` //! //! ## Overview //! //! The tracing API consists of a three main traits: //! //! * [`TracerProvider`]s are the entry point of the API. They provide access to //! `Tracer`s. //! * [`Tracer`]s are types responsible for creating `Span`s. //! * [`Span`]s provide the API to trace an operation. //! //! ## Working with Async Runtimes //! //! Exporting spans often involves sending data over a network or performing //! other I/O tasks. OpenTelemetry allows you to schedule these tasks using //! whichever runtime you area already using such as [Tokio] or [async-std]. //! When using an async runtime it's best to use the batch span processor //! where the spans will be sent in batches as opposed to being sent once ended, //! 
which often ends up being more efficient. //! //! [Tokio]: https://tokio.rs //! [async-std]: https://async.rs //! //! ## Managing Active Spans //! //! Spans can be marked as "active" for a given [`Context`], and all newly //! created spans will automatically be children of the currently active span. //! //! The active span for a given thread can be managed via [`get_active_span`] //! and [`mark_span_as_active`]. //! //! [`Context`]: crate::Context //! //! ``` //! use opentelemetry_api::{global, trace::{self, Span, StatusCode, Tracer, TracerProvider}}; //! //! fn may_error(rand: f32) { //! if rand < 0.5 { //! // Get the currently active span to record additional attributes, //! // status, etc. //! trace::get_active_span(|span| { //! span.set_status(StatusCode::Error, "value too small"); //! }); //! }<|fim▁hole|>//! //! // Get a tracer //! let tracer = global::tracer("my_tracer"); //! //! // Create a span //! let span = tracer.start("parent_span"); //! //! // Mark the span as active //! let active = trace::mark_span_as_active(span); //! //! // Any span created here will be a child of `parent_span`... //! //! // Drop the guard and the span will no longer be active //! drop(active) //! ``` //! //! Additionally [`Tracer::in_span`] can be used as shorthand to simplify //! managing the parent context. //! //! ``` //! use opentelemetry_api::{global, trace::Tracer}; //! //! // Get a tracer //! let tracer = global::tracer("my_tracer"); //! //! // Use `in_span` to create a new span and mark it as the parent, dropping it //! // at the end of the block. //! tracer.in_span("parent_span", |cx| { //! // spans created here will be children of `parent_span` //! }); //! ``` //! //! #### Async active spans //! //! Async spans can be propagated with [`TraceContextExt`] and [`FutureExt`]. //! //! ``` //! use opentelemetry_api::{Context, global, trace::{FutureExt, TraceContextExt, Tracer}}; //! //! async fn some_work() { } //! //! // Get a tracer //! 
let tracer = global::tracer("my_tracer"); //! //! // Start a span //! let span = tracer.start("my_span"); //! //! // Perform some async work with this span as the currently active parent. //! some_work().with_context(Context::current_with_span(span)); //! ``` use futures_channel::{mpsc::TrySendError, oneshot::Canceled}; use std::borrow::Cow; use std::time; use thiserror::Error; mod context; pub mod noop; mod span; mod span_context; mod tracer; mod tracer_provider; pub use self::{ context::{get_active_span, mark_span_as_active, FutureExt, SpanRef, TraceContextExt}, span::{Span, SpanKind, StatusCode}, span_context::{SpanContext, SpanId, TraceFlags, TraceId, TraceState, TraceStateError}, tracer::{SamplingDecision, SamplingResult, SpanBuilder, Tracer}, tracer_provider::TracerProvider, }; use crate::{ExportError, KeyValue}; /// Describe the result of operations in tracing API. pub type TraceResult<T> = Result<T, TraceError>; /// Errors returned by the trace API. #[derive(Error, Debug)] #[non_exhaustive] pub enum TraceError { /// Export failed with the error returned by the exporter #[error("Exporter {} encountered the following error(s): {0}", .0.exporter_name())] ExportFailed(Box<dyn ExportError>), /// Export failed to finish after certain period and processor stopped the export. 
#[error("Exporting timed out after {} seconds", .0.as_secs())] ExportTimedOut(time::Duration), /// Other errors propagated from trace SDK that weren't covered above #[error(transparent)] Other(#[from] Box<dyn std::error::Error + Send + Sync + 'static>), } impl<T> From<T> for TraceError where T: ExportError, { fn from(err: T) -> Self { TraceError::ExportFailed(Box::new(err)) } } impl<T> From<TrySendError<T>> for TraceError { fn from(err: TrySendError<T>) -> Self { TraceError::Other(Box::new(err.into_send_error())) } } impl From<Canceled> for TraceError { fn from(err: Canceled) -> Self { TraceError::Other(Box::new(err)) } } impl From<String> for TraceError { fn from(err_msg: String) -> Self { TraceError::Other(Box::new(Custom(err_msg))) } } impl From<&'static str> for TraceError { fn from(err_msg: &'static str) -> Self { TraceError::Other(Box::new(Custom(err_msg.into()))) } } /// Wrap type for string #[derive(Error, Debug)] #[error("{0}")] struct Custom(String); /// A `Span` has the ability to add events. Events have a time associated /// with the moment when they are added to the `Span`. #[derive(Clone, Debug, PartialEq)] pub struct Event { /// Event name pub name: Cow<'static, str>, /// Event timestamp pub timestamp: time::SystemTime, /// Event attributes pub attributes: Vec<KeyValue>, /// Number of dropped attributes pub dropped_attributes_count: u32, } impl Event { /// Create new `Event` pub fn new<T: Into<Cow<'static, str>>>( name: T, timestamp: time::SystemTime, attributes: Vec<KeyValue>, dropped_attributes_count: u32, ) -> Self { Event { name: name.into(), timestamp, attributes, dropped_attributes_count, } } /// Create new `Event` with a given name. pub fn with_name<T: Into<Cow<'static, str>>>(name: T) -> Self { Event { name: name.into(), timestamp: crate::time::now(), attributes: Vec::new(), dropped_attributes_count: 0, } } } /// During the `Span` creation user MUST have the ability to record links to other `Span`s. 
Linked /// `Span`s can be from the same or a different trace. #[derive(Clone, Debug, PartialEq)] pub struct Link { span_context: SpanContext, /// Attributes describing this link pub attributes: Vec<KeyValue>, /// The number of attributes that were above the limit, and thus dropped. pub dropped_attributes_count: u32, } impl Link { /// Create a new link pub fn new(span_context: SpanContext, attributes: Vec<KeyValue>) -> Self { Link { span_context, attributes, dropped_attributes_count: 0, } } /// The span context of the linked span pub fn span_context(&self) -> &SpanContext { &self.span_context } /// Attributes of the span link pub fn attributes(&self) -> &Vec<KeyValue> { &self.attributes } /// Dropped attributes count pub fn dropped_attributes_count(&self) -> u32 { self.dropped_attributes_count } }<|fim▁end|>
//! }
<|file_name|>train.py<|end_file_name|><|fim▁begin|>import argparse import copy import numpy as np import chainer from chainer.datasets import ConcatenatedDataset from chainer.datasets import TransformDataset from chainer.optimizer_hooks import WeightDecay from chainer import serializers from chainer import training from chainer.training import extensions from chainer.training import triggers from chainercv.datasets import voc_bbox_label_names from chainercv.datasets import VOCBboxDataset from chainercv.extensions import DetectionVOCEvaluator from chainercv.links.model.ssd import GradientScaling from chainercv.links.model.ssd import multibox_loss from chainercv.links import SSD300 from chainercv.links import SSD512 from chainercv import transforms from chainercv.links.model.ssd import random_crop_with_bbox_constraints from chainercv.links.model.ssd import random_distort from chainercv.links.model.ssd import resize_with_random_interpolation # https://docs.chainer.org/en/stable/tips.html#my-training-process-gets-stuck-when-using-multiprocessiterator import cv2 cv2.setNumThreads(0) class MultiboxTrainChain(chainer.Chain): def __init__(self, model, alpha=1, k=3): super(MultiboxTrainChain, self).__init__() with self.init_scope(): self.model = model self.alpha = alpha self.k = k def forward(self, imgs, gt_mb_locs, gt_mb_labels): mb_locs, mb_confs = self.model(imgs) loc_loss, conf_loss = multibox_loss( mb_locs, mb_confs, gt_mb_locs, gt_mb_labels, self.k) loss = loc_loss * self.alpha + conf_loss chainer.reporter.report( {'loss': loss, 'loss/loc': loc_loss, 'loss/conf': conf_loss}, self) return loss class Transform(object): def __init__(self, coder, size, mean): # to send cpu, make a copy<|fim▁hole|> self.size = size self.mean = mean def __call__(self, in_data): # There are five data augmentation steps # 1. Color augmentation # 2. Random expansion # 3. Random cropping # 4. Resizing with random interpolation # 5. Random horizontal flipping img, bbox, label = in_data # 1. 
Color augmentation img = random_distort(img) # 2. Random expansion if np.random.randint(2): img, param = transforms.random_expand( img, fill=self.mean, return_param=True) bbox = transforms.translate_bbox( bbox, y_offset=param['y_offset'], x_offset=param['x_offset']) # 3. Random cropping img, param = random_crop_with_bbox_constraints( img, bbox, return_param=True) bbox, param = transforms.crop_bbox( bbox, y_slice=param['y_slice'], x_slice=param['x_slice'], allow_outside_center=False, return_param=True) label = label[param['index']] # 4. Resizing with random interpolatation _, H, W = img.shape img = resize_with_random_interpolation(img, (self.size, self.size)) bbox = transforms.resize_bbox(bbox, (H, W), (self.size, self.size)) # 5. Random horizontal flipping img, params = transforms.random_flip( img, x_random=True, return_param=True) bbox = transforms.flip_bbox( bbox, (self.size, self.size), x_flip=params['x_flip']) # Preparation for SSD network img -= self.mean mb_loc, mb_label = self.coder.encode(bbox, label) return img, mb_loc, mb_label def main(): parser = argparse.ArgumentParser() parser.add_argument( '--model', choices=('ssd300', 'ssd512'), default='ssd300') parser.add_argument('--batchsize', type=int, default=32) parser.add_argument('--iteration', type=int, default=120000) parser.add_argument('--step', type=int, nargs='*', default=[80000, 100000]) parser.add_argument('--gpu', type=int, default=-1) parser.add_argument('--out', default='result') parser.add_argument('--resume') args = parser.parse_args() if args.model == 'ssd300': model = SSD300( n_fg_class=len(voc_bbox_label_names), pretrained_model='imagenet') elif args.model == 'ssd512': model = SSD512( n_fg_class=len(voc_bbox_label_names), pretrained_model='imagenet') model.use_preset('evaluate') train_chain = MultiboxTrainChain(model) if args.gpu >= 0: chainer.cuda.get_device_from_id(args.gpu).use() model.to_gpu() train = TransformDataset( ConcatenatedDataset( VOCBboxDataset(year='2007', split='trainval'), 
VOCBboxDataset(year='2012', split='trainval') ), Transform(model.coder, model.insize, model.mean)) train_iter = chainer.iterators.MultiprocessIterator(train, args.batchsize) test = VOCBboxDataset( year='2007', split='test', use_difficult=True, return_difficult=True) test_iter = chainer.iterators.SerialIterator( test, args.batchsize, repeat=False, shuffle=False) # initial lr is set to 1e-3 by ExponentialShift optimizer = chainer.optimizers.MomentumSGD() optimizer.setup(train_chain) for param in train_chain.params(): if param.name == 'b': param.update_rule.add_hook(GradientScaling(2)) else: param.update_rule.add_hook(WeightDecay(0.0005)) updater = training.updaters.StandardUpdater( train_iter, optimizer, device=args.gpu) trainer = training.Trainer( updater, (args.iteration, 'iteration'), args.out) trainer.extend( extensions.ExponentialShift('lr', 0.1, init=1e-3), trigger=triggers.ManualScheduleTrigger(args.step, 'iteration')) trainer.extend( DetectionVOCEvaluator( test_iter, model, use_07_metric=True, label_names=voc_bbox_label_names), trigger=triggers.ManualScheduleTrigger( args.step + [args.iteration], 'iteration')) log_interval = 10, 'iteration' trainer.extend(extensions.LogReport(trigger=log_interval)) trainer.extend(extensions.observe_lr(), trigger=log_interval) trainer.extend(extensions.PrintReport( ['epoch', 'iteration', 'lr', 'main/loss', 'main/loss/loc', 'main/loss/conf', 'validation/main/map']), trigger=log_interval) trainer.extend(extensions.ProgressBar(update_interval=10)) trainer.extend( extensions.snapshot(), trigger=triggers.ManualScheduleTrigger( args.step + [args.iteration], 'iteration')) trainer.extend( extensions.snapshot_object(model, 'model_iter_{.updater.iteration}'), trigger=(args.iteration, 'iteration')) if args.resume: serializers.load_npz(args.resume, trainer) trainer.run() if __name__ == '__main__': main()<|fim▁end|>
self.coder = copy.copy(coder) self.coder.to_cpu()
<|file_name|>test_inlinequeryresultgame.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # A library that provides a Python interface to the Telegram Bot API # Copyright (C) 2015-2017 # Leandro Toledo de Souza <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see [http://www.gnu.org/licenses/]. import json import pytest from telegram import (InlineKeyboardButton, InlineQueryResultGame, InlineQueryResultVoice, InlineKeyboardMarkup) @pytest.fixture(scope='class') def inline_query_result_game(): return InlineQueryResultGame(TestInlineQueryResultGame.id, TestInlineQueryResultGame.game_short_name, <|fim▁hole|> class TestInlineQueryResultGame: id = 'id' type = 'game' game_short_name = 'game short name' reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton('reply_markup')]]) def test_expected_values(self, inline_query_result_game): assert inline_query_result_game.type == self.type assert inline_query_result_game.id == self.id assert inline_query_result_game.game_short_name == self.game_short_name assert inline_query_result_game.reply_markup.to_dict() == \ self.reply_markup.to_dict() def test_to_json(self, inline_query_result_game): json.loads(inline_query_result_game.to_json()) def test_to_dict(self, inline_query_result_game): inline_query_result_game_dict = inline_query_result_game.to_dict() assert isinstance(inline_query_result_game_dict, dict) assert inline_query_result_game_dict['type'] == 
inline_query_result_game.type assert inline_query_result_game_dict['id'] == inline_query_result_game.id assert inline_query_result_game_dict['game_short_name'] == \ inline_query_result_game.game_short_name assert inline_query_result_game_dict['reply_markup'] == \ inline_query_result_game.reply_markup.to_dict() def test_equality(self): a = InlineQueryResultGame(self.id, self.game_short_name) b = InlineQueryResultGame(self.id, self.game_short_name) c = InlineQueryResultGame(self.id, "") d = InlineQueryResultGame("", self.game_short_name) e = InlineQueryResultVoice(self.id, "", "") assert a == b assert hash(a) == hash(b) assert a is not b assert a == c assert hash(a) == hash(c) assert a != d assert hash(a) != hash(d) assert a != e assert hash(a) != hash(e)<|fim▁end|>
reply_markup=TestInlineQueryResultGame.reply_markup)
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// ams - Advanced Memory Scanner // Copyright (C) 2018 th0rex // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. //! Lock free Queue implementation. use std::cell::UnsafeCell; use std::sync::atomic::{AtomicUsize, Ordering}; pub struct Queue<T> { size: usize, count: AtomicUsize, write_index: AtomicUsize, read_index: AtomicUsize, max_read_index: AtomicUsize, data: UnsafeCell<Box<[T]>>, } impl<T: Clone + Default> Queue<T> { pub fn new(size: usize) -> Queue<T> { Queue { size, count: AtomicUsize::new(0), write_index: AtomicUsize::new(0), read_index: AtomicUsize::new(0), max_read_index: AtomicUsize::new(0), data: UnsafeCell::new(vec![T::default(); size].into_boxed_slice()), } } pub fn pop(&self) -> Option<T> { let mut ret; let mut current_read_index; let mut current_max_read_index; while { current_read_index = self.read_index.load(Ordering::SeqCst); current_max_read_index = self.max_read_index.load(Ordering::SeqCst); if self.count_to_index(current_read_index) == self.count_to_index(current_max_read_index) { return None; } ret = unsafe { &*self.data.get() }[self.count_to_index(current_read_index)].clone(); if self.read_index.compare_and_swap( current_read_index, current_read_index + 1, Ordering::SeqCst, ) == current_read_index { self.count.fetch_sub(1, Ordering::SeqCst); return Some(ret); } true } {} unreachable!(); } pub fn push(&self, 
val: T) -> bool { let mut current_read_index; let mut current_write_index; while { current_read_index = self.read_index.load(Ordering::SeqCst); current_write_index = self.write_index.load(Ordering::SeqCst); if self.count_to_index(current_write_index + 1) == self.count_to_index(current_read_index) { return false; } self.write_index.compare_and_swap( current_write_index, current_write_index + 1, Ordering::SeqCst, ) != current_write_index } {} unsafe { let slice = &mut **self.data.get(); slice[self.count_to_index(current_write_index)] = val; } while self.max_read_index.compare_and_swap( current_write_index, current_write_index + 1, Ordering::SeqCst, ) != current_write_index {} self.count.fetch_add(1, Ordering::SeqCst); true } pub fn size(&self) -> usize { self.size } fn count_to_index(&self, to: usize) -> usize {<|fim▁hole|> to % self.size } } unsafe impl<T> Sync for Queue<T> {}<|fim▁end|>
<|file_name|>analyze_test_sources.go<|end_file_name|><|fim▁begin|>/* Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). * Licensed under the Apache License, Version 2.0 (see LICENSE). * * Parts adapted from Go SDK and Bazel rules_go, both under BSD-compatible licenses. */ package main import ( "encoding/json" "errors" "fmt" "go/ast"<|fim▁hole|> "go/parser" "go/token" "os" "strconv" "strings" "unicode" "unicode/utf8" ) // // Parse Go sources and extract various metadata about the tests contained therein. // Based in part on the `go` tool (https://github.com/golang/go/blob/master/src/cmd/go/internal/load/test.go) // (under BSD-compatible license). // // As explained by the Bazel rules_go source: // // A Go test comprises three packages: // // 1. An internal test package, compiled from the sources of the library being // tested and any _test.go files with the same package name. // 2. An external test package, compiled from _test.go files with a package // name ending with "_test". // 3. A generated main package that imports both packages and initializes the // test framework with a list of tests, benchmarks, examples, and fuzz // targets read from source files. // // https://github.com/bazelbuild/rules_go/blob/master/go/tools/builders/generate_test_main.go // type TestFunc struct { Package string `json:"package"` Name string `json:"name"` } type Example struct { Package string `json:"package"` Name string `json:"name"` Output string `json:"output"` Unordered bool `json:"unordered"` } // TestSourcesMetadata contains metadata about tests/benchmarks extracted from the parsed sources. // TODO: "Examples" and "fuzz targets" (Go 1.18+). type TestSourcesMetadata struct { // Names of all functions in the test sources that heuristically look like test functions. Tests []*TestFunc `json:"tests,omitempty"` // Names of all functions in the test sources that heuristically look like benchmark functions. 
Benchmarks []*TestFunc `json:"benchmarks,omitempty"` // Testable examples. Extracted using "go/doc" package. Examples []*Example `json:"examples,omitempty"` // True if the sources already contain a `TestMain` function (which is the entry point for test binaries). TestMain *TestFunc `json:"test_main,omitempty"` } // isTestFunc tells whether fn has the type of a testing function. arg // specifies the parameter type we look for: B, M or T. func isTestFunc(fn *ast.FuncDecl, arg string) bool { if (fn.Type.Results != nil && len(fn.Type.Results.List) > 0) || fn.Type.Params.List == nil || len(fn.Type.Params.List) != 1 || len(fn.Type.Params.List[0].Names) > 1 { return false } ptr, ok := fn.Type.Params.List[0].Type.(*ast.StarExpr) if !ok { return false } // We can't easily check that the type is *testing.M // because we don't know how testing has been imported, // but at least check that it's *M or *something.M. // Same applies for B and T. if name, ok := ptr.X.(*ast.Ident); ok && name.Name == arg { return true } if sel, ok := ptr.X.(*ast.SelectorExpr); ok && sel.Sel.Name == arg { return true } return false } // isTest tells whether name looks like a test (or benchmark, according to prefix). // It is a test if there is a character after Test that is not a lower-case letter. // This avoids, for example, Testify matching. 
func isTest(name, prefix string) bool { if !strings.HasPrefix(name, prefix) { return false } if len(name) == len(prefix) { // "Test" is ok return true } r, _ := utf8.DecodeRuneInString(name[len(prefix):]) return !unicode.IsLower(r) } func checkTestFunc(fileSet *token.FileSet, fn *ast.FuncDecl, arg string) error { if !isTestFunc(fn, arg) { name := fn.Name.String() pos := fileSet.Position(fn.Pos()) return fmt.Errorf("%s: wrong signature for %s, must be: func %s(%s *testing.%s)", pos, name, name, strings.ToLower(arg), arg) } return nil } func processFile(fileSet *token.FileSet, pkgName string, filename string) (*TestSourcesMetadata, error) { p, err := parser.ParseFile(fileSet, filename, nil, parser.ParseComments) if err != nil { return nil, fmt.Errorf("failed to parse: %s", err) } var metadata TestSourcesMetadata for _, e := range doc.Examples(p) { if e.Output == "" && !e.EmptyOutput { // Don't run examples with no output directive. continue } metadata.Examples = append(metadata.Examples, &Example{ Name: "Example" + e.Name, Package: pkgName, Output: strconv.Quote(e.Output), Unordered: e.Unordered, }) } for _, decl := range p.Decls { fn, ok := decl.(*ast.FuncDecl) if !ok { continue } if fn.Recv != nil { continue } // The following test/benchmark heuristic is based on the code in the `go` tool. // https://github.com/golang/go/blob/94323206aee1363471a4ae3b8d40dd4ae7a5cd9c/src/cmd/go/internal/load/test.go#L626-L665 name := fn.Name.String() switch { case name == "TestMain": if isTestFunc(fn, "T") { // Handle a TestMain function that is actually a test and not a true TestMain. 
metadata.Tests = append(metadata.Tests, &TestFunc{ Name: fn.Name.Name, Package: pkgName, }) continue } err := checkTestFunc(fileSet, fn, "M") if err != nil { return nil, err } if metadata.TestMain != nil { return nil, errors.New("multiple definitions of TestMain") } metadata.TestMain = &TestFunc{ Name: fn.Name.Name, Package: pkgName, } case isTest(name, "Test"): err := checkTestFunc(fileSet, fn, "T") if err != nil { return nil, err } metadata.Tests = append(metadata.Tests, &TestFunc{ Name: fn.Name.Name, Package: pkgName, }) case isTest(name, "Benchmark"): err := checkTestFunc(fileSet, fn, "B") if err != nil { return nil, err } metadata.Benchmarks = append(metadata.Benchmarks, &TestFunc{ Name: fn.Name.Name, Package: pkgName, }) } } return &metadata, nil } func main() { var allMetadata TestSourcesMetadata fileSet := token.NewFileSet() for _, arg := range os.Args[1:] { parts := strings.SplitN(arg, ":", 2) fileMetadata, err := processFile(fileSet, parts[0], parts[1]) if err != nil { fmt.Fprintf(os.Stderr, "%s: %s\n", parts[1], err) os.Exit(1) } // TODO: Flag duplicate test and benchmark names. allMetadata.Tests = append(allMetadata.Tests, fileMetadata.Tests...) allMetadata.Benchmarks = append(allMetadata.Benchmarks, fileMetadata.Benchmarks...) allMetadata.Examples = append(allMetadata.Examples, fileMetadata.Examples...) if fileMetadata.TestMain != nil { if allMetadata.TestMain != nil { fmt.Fprintf(os.Stderr, "multiple definitions of TestMain\n") os.Exit(1) } allMetadata.TestMain = fileMetadata.TestMain } } output, err := json.Marshal(&allMetadata) if err != nil { fmt.Fprintf(os.Stderr, "Unable to marshall JSON output: %s\n", err) os.Exit(1) } output = append(output, []byte{'\n'}...) amtWritten := 0 for amtWritten < len(output) { n, err := os.Stdout.Write(output[amtWritten:]) if err != nil { fmt.Fprintf(os.Stderr, "Failed to write output: %s\n", err) os.Exit(1) } amtWritten += n } os.Exit(0) }<|fim▁end|>
"go/doc"
<|file_name|>trace.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Utilities for tracing JS-managed values. //! //! The lifetime of DOM objects is managed by the SpiderMonkey Garbage //! Collector. A rooted DOM object implementing the interface `Foo` is traced //! as follows: //! //! 1. The GC calls `_trace` defined in `FooBinding` during the marking //! phase. (This happens through `JSClass.trace` for non-proxy bindings, and //! through `ProxyTraps.trace` otherwise.) //! 2. `_trace` calls `Foo::trace()` (an implementation of `JSTraceable`). //! This is typically derived via a `#[dom_struct]` //! (implies `#[derive(JSTraceable)]`) annotation. //! Non-JS-managed types have an empty inline `trace()` method, //! achieved via `no_jsmanaged_fields!` or similar. //! 3. For all fields, `Foo::trace()` //! calls `trace()` on the field. //! For example, for fields of type `JS<T>`, `JS<T>::trace()` calls //! `trace_reflector()`. //! 4. `trace_reflector()` calls `JS_CallUnbarrieredObjectTracer()` with a //! pointer to the `JSObject` for the reflector. This notifies the GC, which //! will add the object to the graph, and will trace that object as well. //! 5. When the GC finishes tracing, it [`finalizes`](../index.html#destruction) //! any reflectors that were not reachable. //! //! The `no_jsmanaged_fields!()` macro adds an empty implementation of `JSTraceable` to //! a datatype. 
use canvas_traits::WebGLError; use canvas_traits::{CanvasGradientStop, LinearGradientStyle, RadialGradientStyle}; use canvas_traits::{CompositionOrBlending, LineCapStyle, LineJoinStyle, RepetitionStyle}; use cssparser::RGBA; use devtools_traits::WorkerId; use dom::bindings::js::{JS, Root}; use dom::bindings::refcounted::Trusted; use dom::bindings::reflector::{Reflectable, Reflector}; use dom::bindings::utils::WindowProxyHandler; use encoding::types::EncodingRef; use euclid::length::Length as EuclidLength; use euclid::matrix2d::Matrix2D;<|fim▁hole|>use hyper::header::Headers; use hyper::method::Method; use hyper::mime::Mime; use ipc_channel::ipc::{IpcReceiver, IpcSender}; use js::jsapi::JS_CallUnbarrieredObjectTracer; use js::jsapi::{GCTraceKindToAscii, Heap, JSGCTraceKind, JSObject, JSTracer, JS_CallObjectTracer, JS_CallValueTracer}; use js::jsval::JSVal; use js::rust::Runtime; use layout_interface::{LayoutChan, LayoutRPC}; use libc; use msg::constellation_msg::ConstellationChan; use msg::constellation_msg::{PipelineId, SubpageId, WindowSizeData}; use net_traits::Metadata; use net_traits::image::base::Image; use net_traits::image_cache_task::{ImageCacheChan, ImageCacheTask}; use net_traits::storage_task::StorageType; use profile_traits::mem::ProfilerChan as MemProfilerChan; use profile_traits::time::ProfilerChan as TimeProfilerChan; use script_task::ScriptChan; use script_traits::{LayoutMsg, ScriptMsg, TimerEventId, TimerSource, UntrustedNodeAddress}; use selectors::parser::PseudoElement; use selectors::states::*; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::boxed::FnBox; use std::cell::{Cell, UnsafeCell}; use std::collections::hash_state::HashState; use std::collections::{HashMap, HashSet}; use std::ffi::CString; use std::hash::{Hash, Hasher}; use std::intrinsics::return_address; use std::iter::{FromIterator, IntoIterator}; use std::mem; use std::ops::{Deref, DerefMut}; use std::rc::Rc; use std::sync::Arc; use 
std::sync::atomic::AtomicBool; use std::sync::mpsc::{Receiver, Sender}; use string_cache::{Atom, Namespace, QualName}; use style::attr::{AttrIdentifier, AttrValue}; use style::properties::PropertyDeclarationBlock; use style::restyle_hints::ElementSnapshot; use style::values::specified::Length; use url::Url; use util::str::{DOMString, LengthOrPercentageOrAuto}; use uuid::Uuid; /// A trait to allow tracing (only) DOM objects. pub trait JSTraceable { /// Trace `self`. fn trace(&self, trc: *mut JSTracer); } no_jsmanaged_fields!(EncodingRef); no_jsmanaged_fields!(Reflector); /// Trace a `JSVal`. pub fn trace_jsval(tracer: *mut JSTracer, description: &str, val: &Heap<JSVal>) { unsafe { if !val.get().is_markable() { return; } let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ = !0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing value {}", description); JS_CallValueTracer(tracer, val.ptr.get() as *mut _, GCTraceKindToAscii(val.get().trace_kind())); } } /// Trace the `JSObject` held by `reflector`. #[allow(unrooted_must_root)] pub fn trace_reflector(tracer: *mut JSTracer, description: &str, reflector: &Reflector) { unsafe { let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ = !0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing reflector {}", description); JS_CallUnbarrieredObjectTracer(tracer, reflector.rootable(), GCTraceKindToAscii(JSGCTraceKind::JSTRACE_OBJECT)); } } /// Trace a `JSObject`. 
pub fn trace_object(tracer: *mut JSTracer, description: &str, obj: &Heap<*mut JSObject>) { unsafe { let name = CString::new(description).unwrap(); (*tracer).debugPrinter_ = None; (*tracer).debugPrintIndex_ = !0; (*tracer).debugPrintArg_ = name.as_ptr() as *const libc::c_void; debug!("tracing {}", description); JS_CallObjectTracer(tracer, obj.ptr.get() as *mut _, GCTraceKindToAscii(JSGCTraceKind::JSTRACE_OBJECT)); } } impl<T: JSTraceable> JSTraceable for Rc<T> { fn trace(&self, trc: *mut JSTracer) { (**self).trace(trc) } } impl<T: JSTraceable> JSTraceable for Box<T> { fn trace(&self, trc: *mut JSTracer) { (**self).trace(trc) } } impl<T: JSTraceable + Copy> JSTraceable for Cell<T> { fn trace(&self, trc: *mut JSTracer) { self.get().trace(trc) } } impl<T: JSTraceable> JSTraceable for UnsafeCell<T> { fn trace(&self, trc: *mut JSTracer) { unsafe { (*self.get()).trace(trc) } } } impl JSTraceable for Heap<*mut JSObject> { fn trace(&self, trc: *mut JSTracer) { if self.get().is_null() { return; } trace_object(trc, "object", self); } } impl JSTraceable for Heap<JSVal> { fn trace(&self, trc: *mut JSTracer) { trace_jsval(trc, "val", self); } } // XXXManishearth Check if the following three are optimized to no-ops // if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type) impl<T: JSTraceable> JSTraceable for Vec<T> { #[inline] fn trace(&self, trc: *mut JSTracer) { for e in &*self { e.trace(trc); } } } // XXXManishearth Check if the following three are optimized to no-ops // if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type) impl<T: JSTraceable + 'static> JSTraceable for SmallVec<[T; 1]> { #[inline] fn trace(&self, trc: *mut JSTracer) { for e in self.iter() { e.trace(trc); } } } impl<T: JSTraceable> JSTraceable for Option<T> { #[inline] fn trace(&self, trc: *mut JSTracer) { self.as_ref().map(|e| e.trace(trc)); } } impl<T: JSTraceable, U: JSTraceable> JSTraceable for Result<T, U> { #[inline] fn trace(&self, trc: *mut JSTracer) { match *self { Ok(ref inner) => 
inner.trace(trc), Err(ref inner) => inner.trace(trc), } } } impl<K, V, S> JSTraceable for HashMap<K, V, S> where K: Hash + Eq + JSTraceable, V: JSTraceable, S: HashState, <S as HashState>::Hasher: Hasher, { #[inline] fn trace(&self, trc: *mut JSTracer) { for (k, v) in &*self { k.trace(trc); v.trace(trc); } } } impl<A: JSTraceable, B: JSTraceable> JSTraceable for (A, B) { #[inline] fn trace(&self, trc: *mut JSTracer) { let (ref a, ref b) = *self; a.trace(trc); b.trace(trc); } } no_jsmanaged_fields!(bool, f32, f64, String, Url, AtomicBool, Uuid); no_jsmanaged_fields!(usize, u8, u16, u32, u64); no_jsmanaged_fields!(isize, i8, i16, i32, i64); no_jsmanaged_fields!(Sender<T>); no_jsmanaged_fields!(Receiver<T>); no_jsmanaged_fields!(Rect<T>); no_jsmanaged_fields!(Size2D<T>); no_jsmanaged_fields!(Arc<T>); no_jsmanaged_fields!(Image, ImageCacheChan, ImageCacheTask); no_jsmanaged_fields!(Metadata); no_jsmanaged_fields!(Atom, Namespace, QualName); no_jsmanaged_fields!(Trusted<T: Reflectable>); no_jsmanaged_fields!(PropertyDeclarationBlock); no_jsmanaged_fields!(HashSet<T>); // These three are interdependent, if you plan to put jsmanaged data // in one of these make sure it is propagated properly to containing structs no_jsmanaged_fields!(SubpageId, WindowSizeData, PipelineId); no_jsmanaged_fields!(TimerEventId, TimerSource); no_jsmanaged_fields!(WorkerId); no_jsmanaged_fields!(QuirksMode); no_jsmanaged_fields!(Runtime); no_jsmanaged_fields!(Headers, Method); no_jsmanaged_fields!(LayoutChan); no_jsmanaged_fields!(WindowProxyHandler); no_jsmanaged_fields!(UntrustedNodeAddress); no_jsmanaged_fields!(LengthOrPercentageOrAuto); no_jsmanaged_fields!(RGBA); no_jsmanaged_fields!(EuclidLength<Unit, T>); no_jsmanaged_fields!(Matrix2D<T>); no_jsmanaged_fields!(StorageType); no_jsmanaged_fields!(CanvasGradientStop, LinearGradientStyle, RadialGradientStyle); no_jsmanaged_fields!(LineCapStyle, LineJoinStyle, CompositionOrBlending); no_jsmanaged_fields!(RepetitionStyle); 
no_jsmanaged_fields!(WebGLError); no_jsmanaged_fields!(TimeProfilerChan); no_jsmanaged_fields!(MemProfilerChan); no_jsmanaged_fields!(PseudoElement); no_jsmanaged_fields!(Length); no_jsmanaged_fields!(ElementState); no_jsmanaged_fields!(DOMString); no_jsmanaged_fields!(Mime); no_jsmanaged_fields!(AttrIdentifier); no_jsmanaged_fields!(AttrValue); no_jsmanaged_fields!(ElementSnapshot); impl JSTraceable for ConstellationChan<ScriptMsg> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for ConstellationChan<LayoutMsg> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<ScriptChan + Send> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<FnBox(f64, )> { #[inline] fn trace(&self, _trc: *mut JSTracer) { // Do nothing } } impl<'a> JSTraceable for &'a str { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<A, B> JSTraceable for fn(A) -> B { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<T> JSTraceable for IpcSender<T> where T: Deserialize + Serialize { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl JSTraceable for Box<LayoutRPC + 'static> { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl JSTraceable for () { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } impl<T> JSTraceable for IpcReceiver<T> where T: Deserialize + Serialize { #[inline] fn trace(&self, _: *mut JSTracer) { // Do nothing } } /// Homemade trait object for JSTraceable things struct TraceableInfo { pub ptr: *const libc::c_void, pub trace: fn(obj: *const libc::c_void, tracer: *mut JSTracer), } /// Holds a set of JSTraceables that need to be rooted pub struct RootedTraceableSet { set: Vec<TraceableInfo>, } #[allow(missing_docs)] // FIXME mod dummy { // Attributes don’t apply through the macro. 
use std::cell::RefCell; use std::rc::Rc; use super::RootedTraceableSet; /// TLV Holds a set of JSTraceables that need to be rooted thread_local!(pub static ROOTED_TRACEABLES: Rc<RefCell<RootedTraceableSet>> = Rc::new(RefCell::new(RootedTraceableSet::new()))); } pub use self::dummy::ROOTED_TRACEABLES; impl RootedTraceableSet { fn new() -> RootedTraceableSet { RootedTraceableSet { set: vec![], } } unsafe fn remove<T: JSTraceable>(traceable: &T) { ROOTED_TRACEABLES.with(|ref traceables| { let mut traceables = traceables.borrow_mut(); let idx = match traceables.set.iter() .rposition(|x| x.ptr == traceable as *const T as *const _) { Some(idx) => idx, None => unreachable!(), }; traceables.set.remove(idx); }); } unsafe fn add<T: JSTraceable>(traceable: &T) { ROOTED_TRACEABLES.with(|ref traceables| { fn trace<T: JSTraceable>(obj: *const libc::c_void, tracer: *mut JSTracer) { let obj: &T = unsafe { &*(obj as *const T) }; obj.trace(tracer); } let mut traceables = traceables.borrow_mut(); let info = TraceableInfo { ptr: traceable as *const T as *const libc::c_void, trace: trace::<T>, }; traceables.set.push(info); }) } unsafe fn trace(&self, tracer: *mut JSTracer) { for info in &self.set { (info.trace)(info.ptr, tracer); } } } /// Roots any JSTraceable thing /// /// If you have a valid Reflectable, use Root. /// If you have GC things like *mut JSObject or JSVal, use jsapi::Rooted. /// If you have an arbitrary number of Reflectables to root, use RootedVec<JS<T>> /// If you know what you're doing, use this. 
#[derive(JSTraceable)] pub struct RootedTraceable<'a, T: 'a + JSTraceable> { ptr: &'a T, } impl<'a, T: JSTraceable> RootedTraceable<'a, T> { /// Root a JSTraceable thing for the life of this RootedTraceable pub fn new(traceable: &'a T) -> RootedTraceable<'a, T> { unsafe { RootedTraceableSet::add(traceable); } RootedTraceable { ptr: traceable, } } } impl<'a, T: JSTraceable> Drop for RootedTraceable<'a, T> { fn drop(&mut self) { unsafe { RootedTraceableSet::remove(self.ptr); } } } /// A vector of items that are rooted for the lifetime of this struct. #[allow(unrooted_must_root)] #[no_move] #[derive(JSTraceable)] #[allow_unrooted_interior] pub struct RootedVec<T: JSTraceable> { v: Vec<T>, } impl<T: JSTraceable> RootedVec<T> { /// Create a vector of items of type T that is rooted for /// the lifetime of this struct pub fn new() -> RootedVec<T> { let addr = unsafe { return_address() as *const libc::c_void }; unsafe { RootedVec::new_with_destination_address(addr) } } /// Create a vector of items of type T. This constructor is specific /// for RootTraceableSet. pub unsafe fn new_with_destination_address(addr: *const libc::c_void) -> RootedVec<T> { RootedTraceableSet::add::<RootedVec<T>>(&*(addr as *const _)); RootedVec::<T> { v: vec![], } } } impl<T: JSTraceable + Reflectable> RootedVec<JS<T>> { /// Obtain a safe slice of references that can't outlive that RootedVec. 
pub fn r(&self) -> &[&T] { unsafe { mem::transmute(&self.v[..]) } } } impl<T: JSTraceable> Drop for RootedVec<T> { fn drop(&mut self) { unsafe { RootedTraceableSet::remove(self); } } } impl<T: JSTraceable> Deref for RootedVec<T> { type Target = Vec<T>; fn deref(&self) -> &Vec<T> { &self.v } } impl<T: JSTraceable> DerefMut for RootedVec<T> { fn deref_mut(&mut self) -> &mut Vec<T> { &mut self.v } } impl<A: JSTraceable + Reflectable> FromIterator<Root<A>> for RootedVec<JS<A>> { #[allow(moved_no_move)] fn from_iter<T>(iterable: T) -> RootedVec<JS<A>> where T: IntoIterator<Item = Root<A>> { let mut vec = unsafe { RootedVec::new_with_destination_address(return_address() as *const libc::c_void) }; vec.extend(iterable.into_iter().map(|item| JS::from_rooted(&item))); vec } } /// SM Callback that traces the rooted traceables pub unsafe fn trace_traceables(tracer: *mut JSTracer) { ROOTED_TRACEABLES.with(|ref traceables| { let traceables = traceables.borrow(); traceables.trace(tracer); }); }<|fim▁end|>
use euclid::rect::Rect; use euclid::size::Size2D; use html5ever::tree_builder::QuirksMode;
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>extern crate sodiumoxide; extern crate serialize; extern crate sync; extern crate extra; use std::io::{Listener, Acceptor}; use std::io::{MemReader, BufReader}; use std::io::net::ip::{SocketAddr}; use std::io::net::tcp::{TcpListener, TcpStream}; use std::from_str::{from_str}; use capnp::message::{MallocMessageBuilder, MessageBuilder, DEFAULT_READER_OPTIONS, MessageReader}; use capnp::serialize_packed; use sync::Arc; use sodiumoxide::crypto::asymmetricbox::{PublicKey, SecretKey, open, seal, gen_keypair}; use helpers::{KeyBytes, bytes_to_pubkey, bytes_to_nonce}; use super::comm; fn process_new_connection(client: TcpStream, my_pubkey: Arc<PublicKey>, my_privkey: Arc<SecretKey>) { use capnp::serialize_packed; use capnp::message::{MallocMessageBuilder, MessageBuilder, DEFAULT_READER_OPTIONS, MessageReader}; let mut client = client;<|fim▁hole|> let hellopack = client.read_bytes(9).unwrap(); if (hellopack.as_slice() == comm::bare_msgs::HELLOBYTES) { println!("Connection hello received"); } else { fail!("sad :("); } client.write(comm::bare_msgs::HELLOBYTES); client.write(my_pubkey.get().key_bytes()); let client_pubkey = ~{ let reader = serialize_packed::new_reader_unbuffered(&mut client, DEFAULT_READER_OPTIONS).unwrap(); let pack = reader.get_root::<comm::pack_capnp::Pack::Reader>(); let mut bufreader = BufReader::new(pack.get_data()); let datareader = serialize_packed::new_reader_unbuffered(&mut bufreader, DEFAULT_READER_OPTIONS).unwrap(); let packdata = datareader.get_root::<comm::pack_capnp::PackData::Reader>(); match packdata.which() { Some(comm::pack_capnp::PackData::Pubkey(key)) => bytes_to_pubkey(key), _ => { println!("Client didn't send pubkey, disconnecting"); return; } } }; println!("got client key:\n{:?}", client_pubkey); let mut clientname = ~""; loop { let reader = serialize_packed::new_reader_unbuffered(&mut client, DEFAULT_READER_OPTIONS).unwrap(); let pack = 
reader.get_root::<comm::pack_capnp::Pack::Reader>(); let nonce = bytes_to_nonce(pack.get_nonce()); let databytes = match open(pack.get_data(), &nonce, client_pubkey, my_privkey.get()) { Some(bytes) => bytes, None => { println!("WARNING! Decrypt failed! "); continue; } }; let mut bufreader = BufReader::new(databytes); let datareader = serialize_packed::new_reader_unbuffered(&mut bufreader, DEFAULT_READER_OPTIONS).unwrap(); let packdata = datareader.get_root::<comm::pack_capnp::PackData::Reader>(); match packdata.which() { Some(comm::pack_capnp::PackData::Login(login)) => { println!("{:s} logged in", login.get_name()); clientname = login.get_name().to_owned(); }, Some(comm::pack_capnp::PackData::Message(message)) => { println!("<{:s}>{:s}", clientname, message.get_message()); }, Some(comm::pack_capnp::PackData::Quit(reason)) => { println!("quitreason: {:s}", reason); break; }, _ => println!("wut") } } } fn writer_task() { } fn setup_sync_task() { } pub fn main() { sodiumoxide::init(); let (s_pubkey, s_privkey) = gen_keypair(); let a_pubkey = Arc::new(s_pubkey); let a_privkey = Arc::new(s_privkey); let bind_addr : SocketAddr = from_str("0.0.0.0:44944").unwrap(); let s_listener = TcpListener::bind(bind_addr).ok().expect("Failed to bind"); let mut s_acceptor = s_listener.listen().ok().expect("Failed to create connection listener"); loop { let pub_clone = a_pubkey.clone(); let priv_clone = a_privkey.clone(); match s_acceptor.accept() { Ok(c_sock) => spawn(proc(){process_new_connection(c_sock, pub_clone, priv_clone)}), Err(err) => println!("Failed connection attempt: {:?}", err) } } }<|fim▁end|>
println!("New connection from '{:s}'", client.peer_name().unwrap().to_str());
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># coding: utf-8 import sys from setuptools import setup, find_packages NAME = "pollster" VERSION = "2.0.2" # To install the library, run the following # # python setup.py install #<|fim▁hole|>REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil", "pandas >= 0.19.1"] setup( name=NAME, version=VERSION, description="Pollster API", author_email="Adam Hooper <[email protected]>", url="https://github.com/huffpostdata/python-pollster", keywords=["Pollster API"], install_requires=REQUIRES, packages=find_packages(), include_package_data=True, long_description="""Download election-related polling data from Pollster.""" )<|fim▁end|>
# prerequisite: setuptools # http://pypi.python.org/pypi/setuptools
<|file_name|>DashboardController.js<|end_file_name|><|fim▁begin|>app.controller('DashboardController',function($scope,$http,Article){ //Pagination configuration $scope.maxSize = 5; $scope.numPerPage = 5; $scope.currentPage = '1'; $scope.isEdit = false; $scope.isError= false; $scope.newarticle = {}; // Edit/New panel model $scope.curArticle = {}; // Currently selected article model $scope.errors = []; //Load all articles. Article.query(function(data){ $scope.articles = data; },function(error){ console.log(error); alert('Loading data failed.'); }); //Shows validation errors function errorHandler(error){ $scope.isError=true; //Show validator error angular.forEach(error.data,function(key,value){ $scope.errors.push(value + ': ' + key); }); } //Open New panel $scope.newArticle = function(article){ $scope.isEdit=true; $scope.isError=false; //Initialize with Article resource $scope.newarticle = new Article(); }; //Open Edit panel with data on edit button click $scope.editArticle = function(article){ $scope.isEdit=true; $scope.isError=false; // Store selected data for future use $scope.curArticle = article; //Copy data to panel $scope.newarticle = angular.copy(article); }; //Update and New article $scope.addArticle = function(article){ //TODO error handling on requests //Check if update or new if($scope.curArticle.id){ //Send put resource request article.$update(function(data){ // Update values to selected article angular.extend($scope.curArticle,$scope.curArticle,data); //Hide edit/new panel $scope.isEdit = false; },errorHandler); }else{ //Send post resource request article.$save(function(data){ //Add newly add article to articles json $scope.articles.push(data); //Hide edit/new panel $scope.isEdit = false; },errorHandler); } //Remove old values //$scope.newarticle = new Article(); }; //Delete button $scope.deleteArticle = function(article){ if(confirm('Are you sure ?')){ article.$delete(function(data){ alert(data.msg); //Get selected article index then remove from 
articles json var curIndex = $scope.articles.indexOf(article); $scope.articles.splice(curIndex,1); },function(error){ alert('Item not deleted'); console.log(error); }); } }; //Cancel panel button $scope.cancelArticle = function(article){ $scope.isEdit=false;<|fim▁hole|> //Remove old values $scope.newarticle= new Article(); }; });<|fim▁end|>
$scope.isError=false;
<|file_name|>sys.rs<|end_file_name|><|fim▁begin|>/// This module is copied from `libstd/sys/common/stack.rs`. #[allow(dead_code)] pub mod stack { pub const RED_ZONE: usize = 20 * 1024; #[inline(always)] pub unsafe fn record_rust_managed_stack_bounds(stack_lo: usize, stack_hi: usize) { // When the old runtime had segmented stacks, it used a calculation that was // "limit + RED_ZONE + FUDGE". The red zone was for things like dynamic // symbol resolution, llvm function calls, etc. In theory this red zone // value is 0, but it matters far less when we have gigantic stacks because // we don't need to be so exact about our stack budget. The "fudge factor" // was because LLVM doesn't emit a stack check for functions < 256 bytes in // size. Again though, we have giant stacks, so we round all these // calculations up to the nice round number of 20k. record_sp_limit(stack_lo + RED_ZONE); return target_record_stack_bounds(stack_lo, stack_hi); #[cfg(not(windows))] #[inline(always)] unsafe fn target_record_stack_bounds(_stack_lo: usize, _stack_hi: usize) {} #[cfg(all(windows, target_arch = "x86"))] #[inline(always)] unsafe fn target_record_stack_bounds(stack_lo: usize, stack_hi: usize) { // stack range is at TIB: %fs:0x04 (top) and %fs:0x08 (bottom) asm!("mov $0, %fs:0x04" :: "r"(stack_hi) :: "volatile"); asm!("mov $0, %fs:0x08" :: "r"(stack_lo) :: "volatile"); } #[cfg(all(windows, target_arch = "x86_64"))] #[inline(always)] unsafe fn target_record_stack_bounds(stack_lo: usize, stack_hi: usize) { // stack range is at TIB: %gs:0x08 (top) and %gs:0x10 (bottom) asm!("mov $0, %gs:0x08" :: "r"(stack_hi) :: "volatile"); asm!("mov $0, %gs:0x10" :: "r"(stack_lo) :: "volatile"); } } /// Records the current limit of the stack as specified by `end`. /// /// This is stored in an OS-dependent location, likely inside of the thread /// local storage. The location that the limit is stored is a pre-ordained /// location because it's where LLVM has emitted code to check. 
/// /// Note that this cannot be called under normal circumstances. This function is /// changing the stack limit, so upon returning any further function calls will /// possibly be triggering the morestack logic if you're not careful. /// /// Also note that this and all of the inside functions are all flagged as /// "inline(always)" because they're messing around with the stack limits. This /// would be unfortunate for the functions themselves to trigger a morestack /// invocation (if they were an actual function call). #[inline(always)] pub unsafe fn record_sp_limit(limit: usize) { return target_record_sp_limit(limit); // x86-64 #[cfg(all(target_arch = "x86_64", any(target_os = "macos", target_os = "ios")))] #[inline(always)] unsafe fn target_record_sp_limit(limit: usize) { asm!("movq $$0x60+90*8, %rsi movq $0, %gs:(%rsi)" :: "r"(limit) : "rsi" : "volatile") } #[cfg(all(target_arch = "x86_64", target_os = "linux"))] #[inline(always)] unsafe fn target_record_sp_limit(limit: usize) { asm!("movq $0, %fs:112" :: "r"(limit) :: "volatile") } #[cfg(all(target_arch = "x86_64", target_os = "windows"))] #[inline(always)] unsafe fn target_record_sp_limit(_: usize) { } #[cfg(all(target_arch = "x86_64", target_os = "freebsd"))] #[inline(always)] unsafe fn target_record_sp_limit(limit: usize) { asm!("movq $0, %fs:24" :: "r"(limit) :: "volatile") } #[cfg(all(target_arch = "x86_64", target_os = "dragonfly"))] #[inline(always)] unsafe fn target_record_sp_limit(limit: usize) { asm!("movq $0, %fs:32" :: "r"(limit) :: "volatile") } // x86 #[cfg(all(target_arch = "x86", any(target_os = "macos", target_os = "ios")))] #[inline(always)] unsafe fn target_record_sp_limit(limit: usize) { asm!("movl $$0x48+90*4, %eax movl $0, %gs:(%eax)" :: "r"(limit) : "eax" : "volatile") } #[cfg(all(target_arch = "x86", any(target_os = "linux", target_os = "freebsd")))] #[inline(always)] unsafe fn target_record_sp_limit(limit: usize) { asm!("movl $0, %gs:48" :: "r"(limit) :: "volatile") } 
#[cfg(all(target_arch = "x86", target_os = "windows"))] #[inline(always)] unsafe fn target_record_sp_limit(_: usize) { } // mips, arm - Some brave soul can port these to inline asm, but it's over // my head personally #[cfg(any(target_arch = "mips", target_arch = "mipsel", all(target_arch = "arm", not(target_os = "ios"))))] #[inline(always)] unsafe fn target_record_sp_limit(limit: usize) { use libc::c_void; return record_sp_limit(limit as *const c_void); extern { fn record_sp_limit(limit: *const c_void); } } // aarch64 - FIXME(AARCH64): missing... // powerpc - FIXME(POWERPC): missing... // arm-ios - iOS segmented stack is disabled for now, see related notes // openbsd - segmented stack is disabled #[cfg(any(target_arch = "aarch64", target_arch = "powerpc", all(target_arch = "arm", target_os = "ios"), target_os = "bitrig", target_os = "openbsd"))] unsafe fn target_record_sp_limit(_: usize) { } } /// The counterpart of the function above, this function will fetch the current /// stack limit stored in TLS. /// /// Note that all of these functions are meant to be exact counterparts of their /// brethren above, except that the operands are reversed. /// /// As with the setter, this function does not have a __morestack header and can /// therefore be called in a "we're out of stack" situation. 
#[inline(always)] pub unsafe fn get_sp_limit() -> usize { return target_get_sp_limit(); // x86-64 #[cfg(all(target_arch = "x86_64", any(target_os = "macos", target_os = "ios")))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { let limit; asm!("movq $$0x60+90*8, %rsi movq %gs:(%rsi), $0" : "=r"(limit) :: "rsi" : "volatile"); return limit; } #[cfg(all(target_arch = "x86_64", target_os = "linux"))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { let limit; asm!("movq %fs:112, $0" : "=r"(limit) ::: "volatile"); return limit; } #[cfg(all(target_arch = "x86_64", target_os = "windows"))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { return 1024; } #[cfg(all(target_arch = "x86_64", target_os = "freebsd"))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { let limit; asm!("movq %fs:24, $0" : "=r"(limit) ::: "volatile"); return limit; } #[cfg(all(target_arch = "x86_64", target_os = "dragonfly"))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { let limit; asm!("movq %fs:32, $0" : "=r"(limit) ::: "volatile"); return limit; } // x86 #[cfg(all(target_arch = "x86", any(target_os = "macos", target_os = "ios")))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { let limit; asm!("movl $$0x48+90*4, %eax movl %gs:(%eax), $0" : "=r"(limit) :: "eax" : "volatile"); return limit; } #[cfg(all(target_arch = "x86", any(target_os = "linux", target_os = "freebsd")))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { let limit; asm!("movl %gs:48, $0" : "=r"(limit) ::: "volatile"); return limit; } #[cfg(all(target_arch = "x86", target_os = "windows"))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { return 1024; } // mips, arm - Some brave soul can port these to inline asm, but it's over // my head personally #[cfg(any(target_arch = "mips", target_arch = "mipsel", all(target_arch = "arm", not(target_os = "ios"))))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { use libc::c_void; 
return get_sp_limit() as usize; extern { fn get_sp_limit() -> *const c_void; } } // aarch64 - FIXME(AARCH64): missing... // powerpc - FIXME(POWERPC): missing... // arm-ios - iOS doesn't support segmented stacks yet. // openbsd - OpenBSD doesn't support segmented stacks. // // This function might be called by runtime though // so it is unsafe to unreachable, let's return a fixed constant.<|fim▁hole|> target_os = "openbsd"))] #[inline(always)] unsafe fn target_get_sp_limit() -> usize { 1024 } } }<|fim▁end|>
#[cfg(any(target_arch = "aarch64", target_arch = "powerpc", all(target_arch = "arm", target_os = "ios"), target_os = "bitrig",
<|file_name|>20141102214329_settings_invoice_number.js<|end_file_name|><|fim▁begin|>'use strict'; exports.up = function(knex, Promise) { return knex.schema.table('settings', function(table) { table.integer('next_invoice_number');<|fim▁hole|> return knex.schema.table('settings', function(t) { t.dropColumn('next_invoice_number'); }); };<|fim▁end|>
}); }; exports.down = function(knex, Promise) {
<|file_name|>syntax-extension-source-utils.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // This test is brittle!<|fim▁hole|> pub mod m1 { pub mod m2 { pub fn where_am_i() -> String { (module_path!()).to_string() } } } macro_rules! indirect_line { () => ( line!() ) } pub fn main() { assert_eq!(line!(), 25); //assert!((column!() == 11)); assert_eq!(indirect_line!(), 27); assert!((file!().ends_with("syntax-extension-source-utils.rs"))); assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string()); assert!(include!("syntax-extension-source-utils-files/includeme.\ fragment").to_string() == "victory robot 6".to_string()); assert!( include_str!("syntax-extension-source-utils-files/includeme.\ fragment").to_string() .as_slice() .starts_with("/* this is for ")); assert!( include_bytes!("syntax-extension-source-utils-files/includeme.fragment") [1] == (42 as u8)); // '*' // The Windows tests are wrapped in an extra module for some reason assert!((m1::m2::where_am_i().ends_with("m1::m2"))); assert!(match (45, "( 2 * 3 ) + 5") { (line!(), stringify!((2*3) + 5)) => true, _ => false }) }<|fim▁end|>
// ignore-pretty - the pretty tests lose path information, breaking include!
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate pcap; use pcap::{Active, Activated, Offline, Capture}; use std::path::Path; #[test] fn read_packet_with_full_data() { let mut capture = capture_from_test_file("packet_snaplen_65535.pcap"); assert_eq!(capture.next().unwrap().len(), 98); } #[test] fn read_packet_with_truncated_data() { let mut capture = capture_from_test_file("packet_snaplen_20.pcap"); assert_eq!(capture.next().unwrap().len(), 20); } fn capture_from_test_file(file_name: &str) -> Capture<Offline> { let path = Path::new("tests/data/").join(file_name); Capture::from_file(path).unwrap() } #[test] fn unify_activated() { #![allow(dead_code)] fn test1() -> Capture<Active> {<|fim▁hole|> fn test2() -> Capture<Offline> { loop{} } fn maybe(a: bool) -> Capture<Activated> { if a { test1().into() } else { test2().into() } } fn also_maybe(a: &mut Capture<Activated>) { a.filter("whatever filter string, this won't be run anyway").unwrap(); } }<|fim▁end|>
loop{} }
<|file_name|>histogram.py<|end_file_name|><|fim▁begin|>import datetime from pystogram.tree import PrefixTree SECOND = 1 MINUTE = SECOND * 60 HOUR = MINUTE * 60 DAY = HOUR * 24 MONTH = DAY * 30 YEAR = DAY * 365 # The multiplier applied when testing timestamp interval to guess a resolution. # A value of 2.0 means the timestamp interval must be greater than 24 months in # order to use a resolution of years RESOLUTION_SCALE = 2.0 # FIXME: Where to put this? def prefix(timestamp, resolution): """ Compute and return a key prefix for this timestamp. """ # FIXME: Improve? length = 1 if resolution < YEAR: length += 1 if resolution < MONTH: length += 1 if resolution < DAY: length += 1 if resolution < HOUR: length += 1 if resolution < MINUTE: length += 1 return timestamp.timetuple()[:length] # FIXME: Missing domain concepts: timestamp (essentially a datetime), key (essentially a time.struct_time tuple) class Histogram(object): """ An informal histogram useful for counting time-series data, dividing samples into equally-sized intervals (buckets), and computing aggregate counts of the samples within each bucket. """ def __init__(self): """ Construct a Histogram instance. """ self.tree = PrefixTree() def count(self, timestamp): """ Increment the count for this timestamp. """ self.tree.incr(timestamp) @property def first_sample(self): # FIXME: Subclass PrefixTree into DateTimePrefixTree so we don't have to do this conversion here? return datetime.datetime(*self.tree.least()) @property def last_sample(self): # FIXME: Subclass PrefixTree into DateTimePrefixTree so we don't have to do this conversion here? return datetime.datetime(*self.tree.greatest()) @property def sample_interval(self): return (self.last_sample - self.first_sample).total_seconds() @property def sample_resolution(self): """ Compute a reasonable bucket resolution based on the sample interval. """ # FIXME: Improve? 
interval = self.sample_interval if interval > YEAR * RESOLUTION_SCALE: return YEAR elif interval > MONTH * RESOLUTION_SCALE:<|fim▁hole|> elif interval > DAY * RESOLUTION_SCALE: return DAY elif interval > HOUR * RESOLUTION_SCALE: return HOUR elif interval > MINUTE * RESOLUTION_SCALE: return MINUTE else: return SECOND def buckets(self, resolution=None): """ Generate and yield buckets sized according to the passed or guessed resolution. """ # Cache these properties locally first_sample = self.first_sample last_sample = self.last_sample # Compute the bucket resolution and interval (width) if resolution is None: resolution = self.sample_resolution bucket_interval = datetime.timedelta(seconds=resolution) timestamp = first_sample while timestamp <= last_sample: node = self.tree.insert(prefix(timestamp, resolution)) bucket = Bucket(timestamp, node) yield bucket timestamp += bucket_interval class Bucket(object): """ Histogram bucket for a given time interval. """ def __init__(self, start, node): self.start = start self.node = node self.count = node.sum()<|fim▁end|>
return MONTH
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^books/$', views.BookListView.as_view(), name='books'), url(r'^books/(?P<pk>\d+)$', views.BookDetailView.as_view(), name='book-detail'), url(r'^books/(?P<pk>[-\w]+)/renew/$', views.renew_book_librarian, name='renew-book-librarian'), url(r'^books/create/$', views.BookCreate.as_view(), name='book-create'), url(r'^books/(?P<pk>\d+)/update/$', views.BookUpdate.as_view(), name='book-update'), url(r'^books/(?P<pk>\d+)/delete/$', views.BookDelete.as_view(), name='book-delete'), url(r'^authors/$', views.AuthorListView.as_view(), name='authors'), url(r'^authors/(?P<pk>\d+)$', views.AuthorDetailView.as_view(), name='author-detail'), url(r'^authors/create/$', views.AuthorCreate.as_view(), name='author-create'), url(r'^authors/(?P<pk>\d+)/update/$', views.AuthorUpdate.as_view(), name='author-update'), url(r'^authors/(?P<pk>\d+)/delete/$', views.AuthorDelete.as_view(), name='author-delete'), url(r'^mybooks/$', views.LoanedBooksByUserListView.as_view(), name='my-borrowed'), url(r'^borrowed/$', views.BorrowedBooksListView.as_view(), name='books-borrowed'),<|fim▁hole|><|fim▁end|>
]
<|file_name|>test_survey_integration.py<|end_file_name|><|fim▁begin|># coding=utf-8 from datetime import datetime from euphorie.client import model from euphorie.client.tests.utils import addAccount from euphorie.client.tests.utils import addSurvey from euphorie.content.tests.utils import BASIC_SURVEY from euphorie.testing import EuphorieIntegrationTestCase from lxml import html from plone import api from Products.Five.browser.metaconfigure import ViewNotCallableError from time import sleep from zope.event import notify from zope.lifecycleevent import ObjectModifiedEvent <|fim▁hole|> def test_survey_publication_date_views(self): """We have some views to display and set the published column for a survey session """ with api.env.adopt_user("admin"): survey = addSurvey(self.portal, BASIC_SURVEY) account = addAccount(password="secret") survey_session = model.SurveySession( id=123, title=u"Dummy session", created=datetime(2012, 4, 22, 23, 5, 12), modified=datetime(2012, 4, 23, 11, 50, 30), zodb_path="nl/ict/software-development", account=account, company=model.Company(country="nl", employees="1-9", referer="other"), ) model.Session.add(survey_session) survey = self.portal.client.nl.ict["software-development"] session_id = "++session++%d" % survey_session.id traversed_survey_session = survey.restrictedTraverse(session_id) with api.env.adopt_user(user=survey_session.account): with self._get_view( "publication_date", traversed_survey_session, survey_session ) as view: # The view is not callable but # has traversable allowed attributes self.assertRaises(ViewNotCallableError, view) # We have some default values that will be changed # when publishing/unpublishing the session self.assertEqual(survey_session.last_publisher, None) self.assertEqual(survey_session.published, None) self.assertEqual(survey_session.last_modifier, None) self.assertEqual(survey_session.review_state, "private") # Calling set_date will result in having this session published # and the publication time 
and the publisher will be recorded # If no referer is set, # the methods will redirect to the context url self.assertEqual( view.set_date(), "{url}/{session_id}".format( url=survey.absolute_url(), session_id=session_id ), ) self.assertEqual(survey_session.last_publisher, survey_session.account) self.assertIsInstance(survey_session.published, datetime) self.assertEqual(survey_session.review_state, "published") old_modified = survey_session.modified old_published = survey_session.published old_modifier = survey_session.last_modifier # Changing the HTTP_REFERER will redirect there # and calling reset_date will update the published date view.request.set("HTTP_REFERER", "foo") # We need to wait at least one second because the datetime # is stored with that accuracy sleep(1) self.assertEqual(view.reset_date(), "foo") self.assertEqual(survey_session.last_publisher, survey_session.account) # The publisher and publication dates are set. The modification date # is not touched. self.assertEqual(survey_session.modified, old_modified) self.assertEqual(survey_session.last_modifier, old_modifier) self.assertTrue(survey_session.published > old_published) # Calling unset_date will restore the publication info self.assertEqual(view.unset_date(), "foo") self.assertEqual(survey_session.last_publisher, None) self.assertEqual(survey_session.published, None) self.assertEqual(survey_session.review_state, "private") # We also have a menu view with self._get_view( "publication_menu", traversed_survey_session, survey_session ) as view: soup = html.fromstring(view()) self.assertListEqual( ["publication_date/set_date#content"], [ el.attrib["action"].rpartition("@@")[-1] for el in soup.cssselect("form") ], ) # We trigger the session to be private survey_session.published = "foo" soup = html.fromstring(view()) self.assertListEqual( [ "publication_date/unset_date#content", "publication_date/reset_date#content", ], [ el.attrib["action"].rpartition("@@")[-1] for el in soup.cssselect("form") ], ) 
def test_modify_updates_last_modifier(self): account = addAccount(password="secret") survey_session = model.SurveySession( title=u"Dummy session", account=account, zodb_path="" ) self.assertEqual(survey_session.modified, None) self.assertEqual(survey_session.last_modifier, None) with api.env.adopt_user(user=account): notify(ObjectModifiedEvent(survey_session)) self.assertIsInstance(survey_session.modified, datetime) self.assertEqual(survey_session.last_modifier, account)<|fim▁end|>
class TestSurveyViews(EuphorieIntegrationTestCase):
<|file_name|>sysmon.py<|end_file_name|><|fim▁begin|># Gnome15 - Suite of tools for the Logitech G series keyboards and headsets # Copyright (C) 2010 Brett Smith <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import gnome15.g15locale as g15locale _ = g15locale.get_translation("sysmon", modfile = __file__).ugettext import gnome15.util.g15convert as g15convert import gnome15.util.g15uigconf as g15uigconf import gnome15.util.g15gconf as g15gconf import gnome15.util.g15cairo as g15cairo import gnome15.util.g15icontools as g15icontools import gnome15.g15driver as g15driver import gnome15.g15plugin as g15plugin import time import logging logger=logging.getLogger(__name__) try: import gtop except Exception as e: logger.debug("Could not import gtop. Falling back to g15top", exc_info = e) # API compatible work around for Ubuntu 12.10 import gnome15.g15top as gtop import gtk import os import sys import socket id = "sysmon" name = _("System Monitor") description = _("Display CPU, Memory, and Network statistics. 
Either a summary of each system's stats is displayed, or \ you may cycle through the CPU and Network interfaces.") author = "Brett Smith <[email protected]>" copyright = _("Copyright (C)2010 Brett Smith") site = "http://www.gnome15.org" default_enabled = True has_preferences = True actions={ g15driver.PREVIOUS_SELECTION : _("Toggle Monitored CPU"), g15driver.NEXT_SELECTION : _("Toggle Monitored Network\nInterface") } unsupported_models = [ g15driver.MODEL_G110, g15driver.MODEL_G11, g15driver.MODEL_G930, g15driver.MODEL_G35 ] # Various constants GRAPH_SIZE = 50 CPU_ICONS = [ "utilities-system-monitor","gnome-cpu-frequency-applet", "computer" ] ''' This plugin displays system statistics ''' def create(gconf_key, gconf_client, screen): return G15SysMon(gconf_key, gconf_client, screen) def show_preferences(parent, driver, gconf_client, gconf_key): widget_tree = gtk.Builder() widget_tree.add_from_file(os.path.join(os.path.dirname(__file__), "sysmon.ui")) dialog = widget_tree.get_object("SysmonDialog") dialog.set_transient_for(parent) g15uigconf.configure_checkbox_from_gconf(gconf_client, gconf_key + "/show_cpu_on_panel", "ShowCPUUsageOnPanel", True, widget_tree) dialog.run() dialog.hide() class Net(): def __init__(self, net_no, name): self.net_no = net_no self.name = name self.recv_bps = 0.0 self.send_bps = 0.0 self.last_net_list = None self.max_send = 0.0001 self.max_recv = 0.0001 self.send_history = [0] * GRAPH_SIZE self.recv_history = [0] * GRAPH_SIZE self.last_net_list = None self.last_time = 0 def new_data(self, this_net_list):<|fim▁hole|> ''' Net ''' self.recv_bps = 0.0 self.send_bps = 0.0 if self.last_net_list != None: time_taken = now - self.last_time if self.net_no == 0: this_total = self._get_net_total(this_net_list) last_total = self._get_net_total(self.last_net_list) else: this_total = self._get_net(this_net_list[self.name]) last_total = self._get_net(self.last_net_list[self.name]) # How many bps self.recv_bps = (this_total[0] - last_total[0]) / time_taken 
self.send_bps = (this_total[1] - last_total[1]) / time_taken # Adjust the maximums if necessary if self.recv_bps > self.max_recv: self.max_recv = self.recv_bps if self.send_bps > self.max_send: self.max_send = self.send_bps # History self.send_history.append(self.recv_bps) while len(self.send_history) > GRAPH_SIZE: del self.send_history[0] self.recv_history.append(self.send_bps) while len(self.recv_history) > GRAPH_SIZE: del self.recv_history[0] self.last_net_list = this_net_list self.last_time = now def _get_net(self, card): totals = (card[0], card[1]) return totals def _get_net_total(self, net_list): totals = (0, 0) for l in net_list: card = net_list[l] totals = (totals[0] + card[0], totals[1]) totals = (totals[0], totals[1] + card[1]) return totals class CPU(): def __init__(self, number): self.number = number self.name = "cpu%d" % number if number >= 0 else "cpu" self.history = [0] * GRAPH_SIZE self.value = 0 self.times = None self.last_times = None def new_times(self, time_list): if self.last_times is not None: working_list = list(time_list) ''' Work out the number of time units the CPU has spent on each task type since the last time we checked ''' for i in range(len(self.last_times)): working_list[i] -= self.last_times[i] self.pc = self.get_pc(working_list) else: self.pc = 0 self.last_times = time_list # Update the history and trim it to the graph data size self.history.append(self.pc) while len(self.history) > GRAPH_SIZE: del self.history[0] def get_pc(self, times): sum_l = sum(times) val = times[len(times)- 1] if sum_l > 0: return 100 - (val * 100.00 / sum_l) return 0 class G15SysMon(g15plugin.G15RefreshingPlugin): """ Plugin implementation """ def __init__(self, gconf_key, gconf_client, screen): g15plugin.G15RefreshingPlugin.__init__(self, gconf_client, gconf_key, screen, CPU_ICONS, id, name) self.only_refresh_when_visible = False def activate(self): self._net_icon = g15icontools.get_icon_path([ "network-transmit-receive", "gnome-fs-network", 
"network-server" ], self.screen.height) self._cpu_icon = g15icontools.get_icon_path( CPU_ICONS, self.screen.height) self._mem_icon = g15icontools.get_icon_path( [ "media-memory", "media-flash" ], self.screen.height) self._thumb_icon = g15cairo.load_surface_from_file(self._cpu_icon) self.variant = 0 self.graphs = {} self.last_time_list = None self.last_times_list = [] self.last_time = 0 # CPU self.selected_cpu = None self.cpu_no = 0 self.cpu_data = [] selected_cpu_name = self.gconf_client.get_string(self.gconf_key + "/cpu") cpus = gtop.cpu().cpus for i in range(-1, len(cpus)): cpu = CPU(i) self.cpu_data.append(cpu) if cpu.name == selected_cpu_name: self.selected_cpu = cpu if self.selected_cpu is None: self.selected_cpu = self.cpu_data[0] # Net self.selected_net = None _, self.net_list = self._get_net_stats() net_name = self.gconf_client.get_string(self.gconf_key + "/net") self.net_data = [] for idx, n in enumerate(self.net_list): net = Net(idx, n) self.net_data.append(net) if net.name == net_name: self.selected_net = net if self.selected_net is None and len(self.net_data) > 0: self.selected_net = self.net_data[0] # Memory self.max_total_mem = 0 self.total = 1.0 self.cached = 0 self.free = 0 self.used = 0 self.cached_history = [0] * GRAPH_SIZE self.used_history = [0] * GRAPH_SIZE g15plugin.G15RefreshingPlugin.activate(self) self._set_panel() self.watch(["show_cpu_on_panel","theme"], self._config_changed) self.screen.key_handler.action_listeners.append(self) # Start refreshing self.do_refresh() def reload_theme(self): g15plugin.G15RefreshingPlugin.reload_theme(self) self._set_panel() def deactivate(self): g15plugin.G15RefreshingPlugin.deactivate(self) self.screen.key_handler.action_listeners.remove(self) def action_performed(self, binding): if self.page and self.page.is_visible(): if binding.action == g15driver.PREVIOUS_SELECTION: idx = self.cpu_data.index(self.selected_cpu) idx += 1 if idx >= len(self.cpu_data): idx = 0 self.gconf_client.set_string(self.gconf_key + 
"/cpu", self.cpu_data[idx].name) self.selected_cpu = self.cpu_data[idx] self.do_refresh() return True elif binding.action == g15driver.NEXT_SELECTION: if self.selected_net is not None: idx = self.net_data.index(self.selected_net) idx += 1 if idx >= len(self.net_data): idx = 0 self.gconf_client.set_string(self.gconf_key + "/net", self.net_data[idx].name) self.selected_net = self.net_data[idx] self.do_refresh() return True def refresh(self): # Memory mem = self._get_mem_info() now = time.time() ''' CPU ''' for c in self.cpu_data: c.new_times(self._get_time_list(c)) ''' Net ''' # Current net status this_net_list, self.net_list = self._get_net_stats() for n in self.net_data: n.new_data(this_net_list) ''' Memory ''' self.total = float(mem.total) self.max_total_mem = max(self.max_total_mem, self.total) self.free = float(mem.free) self.used = self.total - self.free self.cached = float(mem.cached) self.noncached = self.total - self.free - self.cached self.used_history.append(self.used + self.cached) while len(self.used_history) > GRAPH_SIZE: del self.used_history[0] self.cached_history.append(self.cached) while len(self.cached_history) > GRAPH_SIZE: del self.cached_history[0] self.last_time = now ''' Private ''' def _config_changed(self, client, connection_id, entry, args): self.reload_theme() self._reschedule_refresh() def _set_panel(self, client = None, connection_id = None, entry = None, args = None): self.page.panel_painter = self._paint_panel if g15gconf.get_bool_or_default(self.gconf_client, self.gconf_key + "/show_cpu_on_panel", True) else None def _refresh(self): if self.page is not None: if self.screen.is_visible(self.page): self.refresh() self.screen.redraw(self.page) elif self.page.panel_painter is not None: self.refresh() self.screen.redraw(redraw_content = False) self._schedule_refresh() def get_theme_properties(self): properties = {} properties["cpu_pc"] = "%3d" % self.selected_cpu.pc properties["mem_total"] = "%f" % ( self.total / 1024 ) 
properties["mem_free_k"] = "%f" % ( self.free / 1024 ) properties["mem_used_k"] = "%f" % ( self.used / 1024 ) properties["mem_cached_k"] = "%f" % ( self.cached / 1024 ) properties["mem_noncached_k"] = "%f" % ( self.noncached / 1024 ) properties["mem_total_mb"] = "%.2f" % ( self.total / 1024 / 1024 ) properties["mem_free_mb"] = "%.2f" % ( self.free / 1024 / 1024 ) properties["mem_used_mb"] = "%.2f" % ( self.used / 1024 / 1024 ) properties["mem_cached_mb" ] = "%3d" % ( self.cached / 1024 / 1024 ) properties["mem_noncached_mb" ] = "%3d" % ( self.noncached / 1024 / 1024 ) properties["mem_total_gb"] = "%.1f" % ( self.total / 1024 / 1024 / 1024 ) properties["mem_free_gb"] = "%.1f" % ( self.free / 1024 / 1024 / 1024 ) properties["mem_used_gb"] = "%.1f" % ( self.used / 1024 / 1024 / 1024 ) properties["mem_cached_gb" ] = "%.1f" % ( self.cached / 1024 / 1024 / 1024 ) properties["mem_noncached_gb"] = "%.1f" % ( self.noncached / 1024 / 1024 / 1024 ) properties["mem_used_pc"] = int(self.used * 100.0 / self.total) properties["mem_cached_pc"] = int(self.cached * 100.0 / self.total) properties["mem_noncached_pc"] = int(self.noncached * 100.0 / self.total) if self.selected_net is not None: properties["net_recv_pc"] = int(self.selected_net.recv_bps * 100.0 / self.selected_net.max_recv) properties["net_send_pc"] = int(self.selected_net.send_bps * 100.0 / self.selected_net.max_send) properties["net_recv_mbps"] = "%.2f" % (self.selected_net.recv_bps / 1024 / 1024) properties["net_send_mbps"] = "%.2f" % (self.selected_net.send_bps / 1024 / 1024) properties["net_no"] = self.selected_net.name.upper() idx = self.net_data.index(self.selected_net) properties["next_net_no"] = self.net_list[idx + 1].upper() if idx < ( len(self.net_list) - 1) else self.net_list[0].upper() else: for c in ["net_recv_pc","net_send_pc","net_recv_mbps","net_send_mbps"]: properties[c] = "" # TODO we should ship some more appropriate default icons properties["net_icon"] = self._net_icon properties["cpu_icon"] = 
self._cpu_icon properties["mem_icon"] = self._mem_icon try : properties["info"] = socket.gethostname() except Exception as e: logger.debug("Could not get hostname. Falling back to 'System'", exc_info = e) properties["info"] = "System" properties["cpu_no"] = self.selected_cpu.name.upper() idx = self.cpu_data.index(self.selected_cpu) properties["next_cpu_no"] = self.cpu_data[idx + 1].name.upper() if idx < ( len(self.cpu_data) - 1) else self.cpu_data[0].name.upper() return properties def _paint_thumbnail(self, canvas, allocated_size, horizontal): if self.page != None and self._thumb_icon != None and self.screen.driver.get_bpp() == 16: return g15cairo.paint_thumbnail_image(allocated_size, self._thumb_icon, canvas) def _paint_panel(self, canvas, allocated_size, horizontal): if self.page != None and self.screen.driver.get_bpp() == 16: canvas.save() no_cpus = len(self.cpu_data) - 1 if no_cpus < 2: bar_width = 16 elif no_cpus < 3: bar_width = 8 elif no_cpus < 5: bar_width = 6 elif no_cpus < 9: bar_width = 4 else: bar_width = 2 total_width = ( bar_width + 1 ) * no_cpus available_height = allocated_size - 4 r, g, b = self.screen.driver.get_color_as_ratios(g15driver.HINT_FOREGROUND, (0,0,0)) canvas.set_line_width(1.0) canvas.set_source_rgba(r, g, b, 0.3) canvas.rectangle(0, 0, total_width + 4, allocated_size ) canvas.stroke() canvas.set_source_rgb(*self.screen.driver.get_color_as_ratios(g15driver.HINT_HIGHLIGHT, (0,0,0))) canvas.translate(2, 0) for i in self.cpu_data: if i.number >= 0: bar_height = float(available_height) * ( float(i.pc) / 100.0 ) canvas.rectangle(0, available_height - bar_height + 2, bar_width, bar_height ) canvas.fill() canvas.translate(bar_width + 1, 0) canvas.restore() return 4 + total_width def _get_net_stats(self): ifs = { } nets = gtop.netlist() for net in nets: netload = gtop.netload(net) ifs[net] = [ netload.bytes_in, netload.bytes_out ] nets.insert(0, "Net") return ifs, nets def _get_time_list(self, cpu): ''' Returns a 4 element list containing the 
amount of time the CPU has spent performing the different types of work 0 user 1 nice 2 system 3 idle Values are in USER_HZ or Jiffies ''' if cpu.number == -1: cpu_times = gtop.cpu() else: cpu_times = gtop.cpu().cpus[cpu.number] return [cpu_times.user, cpu_times.nice, cpu_times.sys, cpu_times.idle] def _get_mem_info(self): return gtop.mem()<|fim▁end|>
now = time.time()
<|file_name|>testChemotaxis.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import JeevesLib class TestAuction(unittest.TestCase): def setUp(self): JeevesLib.init() def test_something(self): r = RSphaeroides() pass<|fim▁end|>
import JeevesLib from smt.Z3 import * import unittest from RSphaeroides import RSphaeroides
<|file_name|>ItemGourmetChicken.java<|end_file_name|><|fim▁begin|>package gerhard2202.culinaromancy.item; <|fim▁hole|>import net.minecraft.potion.PotionEffect; import net.minecraft.world.World; import java.util.Random; public class ItemGourmetChicken extends ItemCulinaromancyFood { public ItemGourmetChicken() { super("gourmetChicken", 7, 8.2F, false); } @Override public int getMaxItemUseDuration(ItemStack stack) { return 32; } @Override public void onFoodEaten(ItemStack stack, World worldIn, EntityPlayer player) { Random buffChance = new Random(); int result = buffChance.nextInt(100) + 1; if (result <= 50) { player.addPotionEffect(new PotionEffect(MobEffects.SPEED, 1600, 0, false, false)); } } }<|fim▁end|>
import net.minecraft.entity.player.EntityPlayer; import net.minecraft.init.MobEffects; import net.minecraft.item.ItemStack;
<|file_name|>movie.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # inbuild - Exception - python exception - parent clas # InvalidAgeException - Child class class InvalidAgeException(Exception): def __init__(self,age): self.age = age def validate_age(age): if age > 18:<|fim▁hole|> return "welcome to the movie!!!" else: raise InvalidAgeException(age) if __name__ == '__main__': age = input("please enter your age:") try: validate_age(age) except InvalidAgeException as e: print "Buddy!! Go home and sleep you are still {}".format(e.age) else: print validate_age(age)<|fim▁end|>
<|file_name|>read_write_lock.py<|end_file_name|><|fim▁begin|>from kazoo.exceptions import NoNodeError from sys import maxsize from .mutex import Mutex from .internals import LockDriver from .utils import lazyproperty READ_LOCK_NAME = "__READ__" WRITE_LOCK_NAME = "__WRIT__" class _LockDriver(LockDriver): def sort_key(self, string, _lock_name): string = super(_LockDriver, self).sort_key(string, READ_LOCK_NAME) string = super(_LockDriver, self).sort_key(string, WRITE_LOCK_NAME) return string class _ReadLockDriver(_LockDriver): def __init__(self, predicate): super(_ReadLockDriver, self).__init__() self._predicate = predicate def is_acquirable(self, children, sequence_node_name, max_leases): return self._predicate(children, sequence_node_name) class _Mutex(Mutex): def __init__(self, client, path, name, max_leases, driver, timeout): super(_Mutex, self).__init__( client, path, max_leases, name=name, driver=driver, timeout=timeout ) def get_participant_nodes(self): nodes = super(_Mutex, self).get_participant_nodes() return list(filter(lambda node: self.name in node, nodes)) class ReadWriteLock(object): def __init__(self, client, path, timeout=None): self._client = client self._path = path self._timeout = timeout @property def path(self): return self._path @property def timeout(self): return self._timeout @timeout.setter def timeout(self, value): self._timeout = value self.read_lock.timeout = value self.write_lock.timeout = value @lazyproperty def read_lock(self): def predicate(children, sequence_node_name): return self._read_is_acquirable_predicate( children, sequence_node_name ) return _Mutex( self._client, self.path, READ_LOCK_NAME, maxsize, _ReadLockDriver(predicate), self.timeout )<|fim▁hole|> @lazyproperty def write_lock(self): return _Mutex( self._client, self.path, WRITE_LOCK_NAME, 1, _LockDriver(), self.timeout ) def get_participant_nodes(self): nodes = self.read_lock.get_participant_nodes() nodes.extend(self.write_lock.get_participant_nodes()) return nodes def 
_read_is_acquirable_predicate(self, children, sequence_node_name): if self.write_lock.is_owned_by_current_thread: return (None, True) index = 0 write_index = maxsize our_index = -1 for node in children: if WRITE_LOCK_NAME in node: write_index = min(index, write_index) elif node.startswith(sequence_node_name): our_index = index break index += 1 if our_index < 0: raise NoNodeError acquirable = our_index < write_index path = None if acquirable else children[write_index] return (path, acquirable)<|fim▁end|>
<|file_name|>path.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2010 Mikhail Gusarov # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # """ path.py - An object representing a path to a file or directory. Original author: Jason Orendorff <jason.orendorff\x40gmail\x2ecom> Current maintainer: Jason R. Coombs <[email protected]> Contributors: Mikhail Gusarov <[email protected]> Marc Abramowitz <[email protected]> Jason R. Coombs <[email protected]> Jason Chu <[email protected]> Vojislav Stojkovic <[email protected]> Example:: from path import path d = path('/home/guido/bin') for f in d.files('*.py'): f.chmod(0o755) path.py requires Python 2.5 or later. 
""" from __future__ import print_function, division, absolute_import import sys import warnings import os import fnmatch import glob import shutil import codecs import hashlib import errno import tempfile import functools import operator import re import contextlib try: import win32security except ImportError: pass try: import pwd except ImportError: pass ################################ # Monkey patchy python 3 support try: basestring except NameError: basestring = str try: unicode except NameError: unicode = str try: getcwdu = os.getcwdu except AttributeError: getcwdu = os.getcwd if sys.version < '3': def u(x): return codecs.unicode_escape_decode(x)[0] else: def u(x): return x o777 = 511 o766 = 502 o666 = 438 o554 = 364 ################################ ########################## # Python 2.5 compatibility try: from functools import reduce except ImportError: pass ########################## __version__ = '5.1' __all__ = ['path', 'CaseInsensitivePattern'] class TreeWalkWarning(Warning): pass def simple_cache(func): """ Save results for the 'using_module' classmethod. When Python 3.2 is available, use functools.lru_cache instead. """ saved_results = {} def wrapper(cls, module): if module in saved_results: return saved_results[module] saved_results[module] = func(cls, module) return saved_results[module] return wrapper class ClassProperty(property): def __get__(self, cls, owner): return self.fget.__get__(None, owner)() class multimethod(object): """ Acts like a classmethod when invoked from the class and like an instancemethod when invoked from the instance. """ def __init__(self, func): self.func = func def __get__(self, instance, owner): return ( functools.partial(self.func, owner) if instance is None else functools.partial(self.func, owner, instance) ) class path(unicode): """ Represents a filesystem path. For documentation on individual methods, consult their counterparts in os.path. """ module = os.path """ The path module to use for path operations. .. 
seealso:: :mod:`os.path` """ def __init__(self, other=''): if other is None: raise TypeError("Invalid initial value for path: None") @classmethod @simple_cache def using_module(cls, module): subclass_name = cls.__name__ + '_' + module.__name__ bases = (cls,) ns = {'module': module} return type(subclass_name, bases, ns) @ClassProperty @classmethod def _next_class(cls): """ What class should be used to construct new instances from this class """ return cls # --- Special Python methods. def __repr__(self): return '%s(%s)' % (type(self).__name__, super(path, self).__repr__()) # Adding a path and a string yields a path. def __add__(self, more): try: return self._next_class(super(path, self).__add__(more)) except TypeError: # Python bug return NotImplemented def __radd__(self, other): if not isinstance(other, basestring): return NotImplemented return self._next_class(other.__add__(self)) # The / operator joins paths. def __div__(self, rel): """ fp.__div__(rel) == fp / rel == fp.joinpath(rel) Join two path components, adding a separator character if needed. .. seealso:: :func:`os.path.join` """ return self._next_class(self.module.join(self, rel)) # Make the / operator work even when true division is enabled. __truediv__ = __div__ def __enter__(self): self._old_dir = self.getcwd() os.chdir(self) return self def __exit__(self, *_): os.chdir(self._old_dir) @classmethod def getcwd(cls): """ Return the current working directory as a path object. .. seealso:: :func:`os.getcwdu` """ return cls(getcwdu()) # # --- Operations on path strings. def abspath(self): """ .. seealso:: :func:`os.path.abspath` """ return self._next_class(self.module.abspath(self)) def normcase(self): """ .. seealso:: :func:`os.path.normcase` """ return self._next_class(self.module.normcase(self)) def normpath(self): """ .. seealso:: :func:`os.path.normpath` """ return self._next_class(self.module.normpath(self)) def realpath(self): """ .. 
seealso:: :func:`os.path.realpath` """ return self._next_class(self.module.realpath(self)) def expanduser(self): """ .. seealso:: :func:`os.path.expanduser` """ return self._next_class(self.module.expanduser(self)) def expandvars(self): """ .. seealso:: :func:`os.path.expandvars` """ return self._next_class(self.module.expandvars(self)) def dirname(self): """ .. seealso:: :attr:`parent`, :func:`os.path.dirname` """ return self._next_class(self.module.dirname(self)) def basename(self): """ .. seealso:: :attr:`name`, :func:`os.path.basename` """ return self._next_class(self.module.basename(self)) def expand(self): """ Clean up a filename by calling :meth:`expandvars()`, :meth:`expanduser()`, and :meth:`normpath()` on it. This is commonly everything needed to clean up a filename read from a configuration file, for example. """ return self.expandvars().expanduser().normpath() @property def namebase(self): """ The same as :meth:`name`, but with one file extension stripped off. For example, ``path('/home/guido/python.tar.gz').name == 'python.tar.gz'``, but ``path('/home/guido/python.tar.gz').namebase == 'python.tar'``. """ base, ext = self.module.splitext(self.name) return base @property def ext(self): """ The file extension, for example ``'.py'``. """ f, ext = self.module.splitext(self) return ext @property def drive(self): """ The drive specifier, for example ``'C:'``. This is always empty on systems that don't use drive specifiers. """ drive, r = self.module.splitdrive(self) return self._next_class(drive) parent = property( dirname, None, None, """ This path's parent directory, as a new path object. For example, ``path('/usr/local/lib/libpython.so').parent == path('/usr/local/lib')`` .. seealso:: :meth:`dirname`, :func:`os.path.dirname` """) name = property( basename, None, None, """ The name of this file or directory without the full path. For example, ``path('/usr/local/lib/libpython.so').name == 'libpython.so'`` .. 
seealso:: :meth:`basename`, :func:`os.path.basename` """) def splitpath(self): """ p.splitpath() -> Return ``(p.parent, p.name)``. .. seealso:: :attr:`parent`, :attr:`name`, :func:`os.path.split` """ parent, child = self.module.split(self) return self._next_class(parent), child def splitdrive(self): """ p.splitdrive() -> Return ``(p.drive, <the rest of p>)``. Split the drive specifier from this path. If there is no drive specifier, p.drive is empty, so the return value is simply ``(path(''), p)``. This is always the case on Unix. .. seealso:: :func:`os.path.splitdrive` """ drive, rel = self.module.splitdrive(self) return self._next_class(drive), rel def splitext(self): """ p.splitext() -> Return ``(p.stripext(), p.ext)``. Split the filename extension from this path and return the two parts. Either part may be empty. The extension is everything from ``'.'`` to the end of the last path segment. This has the property that if ``(a, b) == p.splitext()``, then ``a + b == p``. .. seealso:: :func:`os.path.splitext` """ filename, ext = self.module.splitext(self) return self._next_class(filename), ext def stripext(self): """ p.stripext() -> Remove one file extension from the path. For example, ``path('/home/guido/python.tar.gz').stripext()`` returns ``path('/home/guido/python.tar')``. """ return self.splitext()[0] def splitunc(self): """ .. seealso:: :func:`os.path.splitunc` """ unc, rest = self.module.splitunc(self) return self._next_class(unc), rest @property def uncshare(self): """ The UNC mount point for this path. This is empty for paths on local drives. """ unc, r = self.module.splitunc(self) return self._next_class(unc) @multimethod def joinpath(cls, first, *others): """ Join first to zero or more path components, adding a separator character (``first.module.sep``) if needed. Returns a new instance of ``first._next_class``. .. 
seealso:: :func:`os.path.join` """ if not isinstance(first, cls): first = cls(first) return first._next_class(first.module.join(first, *others)) def splitall(self): r""" Return a list of the path components in this path. The first item in the list will be a path. Its value will be either :data:`os.curdir`, :data:`os.pardir`, empty, or the root directory of this path (for example, ``'/'`` or ``'C:\\'``). The other items in the list will be strings. ``path.path.joinpath(*result)`` will yield the original path. """ parts = [] loc = self while loc != os.curdir and loc != os.pardir: prev = loc loc, child = prev.splitpath() if loc == prev: break parts.append(child) parts.append(loc) parts.reverse() return parts def relpath(self, start='.'): """ Return this path as a relative path, based from `start`, which defaults to the current working directory. """ cwd = self._next_class(start) return cwd.relpathto(self) def relpathto(self, dest): """ Return a relative path from `self` to `dest`. If there is no relative path from `self` to `dest`, for example if they reside on different drives in Windows, then this returns ``dest.abspath()``. """ origin = self.abspath() dest = self._next_class(dest).abspath() orig_list = origin.normcase().splitall() # Don't normcase dest! We want to preserve the case. dest_list = dest.splitall() if orig_list[0] != self.module.normcase(dest_list[0]): # Can't get here from there. return dest # Find the location where the two paths start to differ. i = 0 for start_seg, dest_seg in zip(orig_list, dest_list): if start_seg != self.module.normcase(dest_seg): break i += 1 # Now i is the point where the two paths diverge. # Need a certain number of "os.pardir"s to work up # from the origin to the point of divergence. segments = [os.pardir] * (len(orig_list) - i) # Need to add the diverging part of dest_list. segments += dest_list[i:] if len(segments) == 0: # If they happen to be identical, use os.curdir. 
relpath = os.curdir else: relpath = self.module.join(*segments) return self._next_class(relpath) # --- Listing, searching, walking, and matching def listdir(self, pattern=None): """ D.listdir() -> List of items in this directory. Use :meth:`files` or :meth:`dirs` instead if you want a listing of just files or just subdirectories. The elements of the list are path objects. With the optional `pattern` argument, this only lists items whose names match the given pattern. .. seealso:: :meth:`files`, :meth:`dirs` """ if pattern is None: pattern = '*' return [ self / child for child in os.listdir(self) if self._next_class(child).fnmatch(pattern) ] def dirs(self, pattern=None): """ D.dirs() -> List of this directory's subdirectories. The elements of the list are path objects. This does not walk recursively into subdirectories (but see :meth:`walkdirs`). With the optional `pattern` argument, this only lists directories whose names match the given pattern. For example, ``d.dirs('build-*')``. """ return [p for p in self.listdir(pattern) if p.isdir()] def files(self, pattern=None): """ D.files() -> List of the files in this directory. The elements of the list are path objects. This does not walk into subdirectories (see :meth:`walkfiles`). With the optional `pattern` argument, this only lists files whose names match the given pattern. For example, ``d.files('*.pyc')``. """ return [p for p in self.listdir(pattern) if p.isfile()] def walk(self, pattern=None, errors='strict'): """ D.walk() -> iterator over files and subdirs, recursively. The iterator yields path objects naming each child item of this directory and its descendants. This requires that D.isdir(). This performs a depth-first traversal of the directory tree. Each directory is returned just before all its children. The `errors=` keyword argument controls behavior when an error occurs. The default is 'strict', which causes an exception. 
The other allowed values are 'warn', which reports the error via ``warnings.warn()``, and 'ignore'. """ if errors not in ('strict', 'warn', 'ignore'): raise ValueError("invalid errors parameter") try: childList = self.listdir() except Exception: if errors == 'ignore': return elif errors == 'warn': warnings.warn( "Unable to list directory '%s': %s" % (self, sys.exc_info()[1]), TreeWalkWarning) return else: raise for child in childList: if pattern is None or child.fnmatch(pattern): yield child try: isdir = child.isdir() except Exception: if errors == 'ignore': isdir = False elif errors == 'warn': warnings.warn( "Unable to access '%s': %s" % (child, sys.exc_info()[1]), TreeWalkWarning) isdir = False else: raise if isdir: for item in child.walk(pattern, errors): yield item def walkdirs(self, pattern=None, errors='strict'): """ D.walkdirs() -> iterator over subdirs, recursively. With the optional `pattern` argument, this yields only directories whose names match the given pattern. For example, ``mydir.walkdirs('*test')`` yields only directories with names ending in 'test'. The `errors=` keyword argument controls behavior when an error occurs. The default is 'strict', which causes an exception. The other allowed values are 'warn', which reports the error via ``warnings.warn()``, and 'ignore'. """ if errors not in ('strict', 'warn', 'ignore'): raise ValueError("invalid errors parameter") try: dirs = self.dirs() except Exception: if errors == 'ignore': return elif errors == 'warn': warnings.warn( "Unable to list directory '%s': %s" % (self, sys.exc_info()[1]), TreeWalkWarning) return else: raise for child in dirs: if pattern is None or child.fnmatch(pattern): yield child for subsubdir in child.walkdirs(pattern, errors): yield subsubdir def walkfiles(self, pattern=None, errors='strict'): """ D.walkfiles() -> iterator over files in D, recursively. The optional argument, `pattern`, limits the results to files with names that match the pattern. 
For example, ``mydir.walkfiles('*.tmp')`` yields only files with the .tmp extension. """ if errors not in ('strict', 'warn', 'ignore'): raise ValueError("invalid errors parameter") try: childList = self.listdir() except Exception: if errors == 'ignore': return elif errors == 'warn': warnings.warn( "Unable to list directory '%s': %s" % (self, sys.exc_info()[1]), TreeWalkWarning)<|fim▁hole|> else: raise for child in childList: try: isfile = child.isfile() isdir = not isfile and child.isdir() except: if errors == 'ignore': continue elif errors == 'warn': warnings.warn( "Unable to access '%s': %s" % (self, sys.exc_info()[1]), TreeWalkWarning) continue else: raise if isfile: if pattern is None or child.fnmatch(pattern): yield child elif isdir: for f in child.walkfiles(pattern, errors): yield f def fnmatch(self, pattern, normcase=None): """ Return ``True`` if `self.name` matches the given pattern. pattern - A filename pattern with wildcards, for example ``'*.py'``. If the pattern contains a `normcase` attribute, it is applied to the name and path prior to comparison. normcase - (optional) A function used to normalize the pattern and filename before matching. Defaults to self.module which defaults to os.path.normcase. .. seealso:: :func:`fnmatch.fnmatch` """ default_normcase = getattr(pattern, 'normcase', self.module.normcase) normcase = normcase or default_normcase name = normcase(self.name) pattern = normcase(pattern) return fnmatch.fnmatchcase(name, pattern) def glob(self, pattern): """ Return a list of path objects that match the pattern. `pattern` - a path relative to this directory, with wildcards. For example, ``path('/users').glob('*/bin/*')`` returns a list of all the files users have in their bin directories. .. seealso:: :func:`glob.glob` """ cls = self._next_class return [cls(s) for s in glob.glob(self / pattern)] # # --- Reading or writing an entire file at once. def open(self, *args, **kwargs): """ Open this file. Return a file object. .. 
seealso:: :func:`python:open` """ return open(self, *args, **kwargs) def bytes(self): """ Open this file, read all bytes, return them as a string. """ with self.open('rb') as f: return f.read() def chunks(self, size, *args, **kwargs): """ Returns a generator yielding chunks of the file, so it can be read piece by piece with a simple for loop. Any argument you pass after `size` will be passed to `open()`. :example: >>> hash = hashlib.md5() >>> for chunk in path("path.py").chunks(8192, mode='rb'): ... hash.update(chunk) This will read the file by chunks of 8192 bytes. """ with open(self, *args, **kwargs) as f: while True: d = f.read(size) if not d: break yield d def write_bytes(self, bytes, append=False): """ Open this file and write the given bytes to it. Default behavior is to overwrite any existing file. Call ``p.write_bytes(bytes, append=True)`` to append instead. """ if append: mode = 'ab' else: mode = 'wb' with self.open(mode) as f: f.write(bytes) def text(self, encoding=None, errors='strict'): r""" Open this file, read it in, return the content as a string. This method uses ``'U'`` mode, so ``'\r\n'`` and ``'\r'`` are automatically translated to ``'\n'``. Optional arguments: `encoding` - The Unicode encoding (or character set) of the file. If present, the content of the file is decoded and returned as a unicode object; otherwise it is returned as an 8-bit str. `errors` - How to handle Unicode errors; see :meth:`str.decode` for the options. Default is 'strict'. .. seealso:: :meth:`lines` """ if encoding is None: # 8-bit with self.open('U') as f: return f.read() else: # Unicode with codecs.open(self, 'r', encoding, errors) as f: # (Note - Can't use 'U' mode here, since codecs.open # doesn't support 'U' mode.) 
t = f.read() return (t.replace(u('\r\n'), u('\n')) .replace(u('\r\x85'), u('\n')) .replace(u('\r'), u('\n')) .replace(u('\x85'), u('\n')) .replace(u('\u2028'), u('\n'))) def write_text(self, text, encoding=None, errors='strict', linesep=os.linesep, append=False): r""" Write the given text to this file. The default behavior is to overwrite any existing file; to append instead, use the `append=True` keyword argument. There are two differences between :meth:`write_text` and :meth:`write_bytes`: newline handling and Unicode handling. See below. Parameters: `text` - str/unicode - The text to be written. `encoding` - str - The Unicode encoding that will be used. This is ignored if 'text' isn't a Unicode string. `errors` - str - How to handle Unicode encoding errors. Default is 'strict'. See help(unicode.encode) for the options. This is ignored if 'text' isn't a Unicode string. `linesep` - keyword argument - str/unicode - The sequence of characters to be used to mark end-of-line. The default is :data:`os.linesep`. You can also specify ``None``; this means to leave all newlines as they are in `text`. `append` - keyword argument - bool - Specifies what to do if the file already exists (``True``: append to the end of it; ``False``: overwrite it.) The default is ``False``. --- Newline handling. write_text() converts all standard end-of-line sequences (``'\n'``, ``'\r'``, and ``'\r\n'``) to your platform's default end-of-line sequence (see :data:`os.linesep`; on Windows, for example, the end-of-line marker is ``'\r\n'``). If you don't like your platform's default, you can override it using the `linesep=` keyword argument. If you specifically want write_text() to preserve the newlines as-is, use ``linesep=None``. This applies to Unicode text the same as to 8-bit text, except there are three additional standard Unicode end-of-line sequences: ``u'\x85'``, ``u'\r\x85'``, and ``u'\u2028'``. 
(This is slightly different from when you open a file for writing with ``fopen(filename, "w")`` in C or ``open(filename, 'w')`` in Python.) --- Unicode If `text` isn't Unicode, then apart from newline handling, the bytes are written verbatim to the file. The `encoding` and `errors` arguments are not used and must be omitted. If `text` is Unicode, it is first converted to bytes using the specified 'encoding' (or the default encoding if `encoding` isn't specified). The `errors` argument applies only to this conversion. """ if isinstance(text, unicode): if linesep is not None: # Convert all standard end-of-line sequences to # ordinary newline characters. text = (text.replace(u('\r\n'), u('\n')) .replace(u('\r\x85'), u('\n')) .replace(u('\r'), u('\n')) .replace(u('\x85'), u('\n')) .replace(u('\u2028'), u('\n'))) text = text.replace(u('\n'), linesep) if encoding is None: encoding = sys.getdefaultencoding() bytes = text.encode(encoding, errors) else: # It is an error to specify an encoding if 'text' is # an 8-bit string. assert encoding is None if linesep is not None: text = (text.replace('\r\n', '\n') .replace('\r', '\n')) bytes = text.replace('\n', linesep) self.write_bytes(bytes, append) def lines(self, encoding=None, errors='strict', retain=True): r""" Open this file, read all lines, return them in a list. Optional arguments: `encoding` - The Unicode encoding (or character set) of the file. The default is None, meaning the content of the file is read as 8-bit characters and returned as a list of (non-Unicode) str objects. `errors` - How to handle Unicode errors; see help(str.decode) for the options. Default is 'strict' `retain` - If true, retain newline characters; but all newline character combinations (``'\r'``, ``'\n'``, ``'\r\n'``) are translated to ``'\n'``. If false, newline characters are stripped off. Default is True. This uses ``'U'`` mode. .. 
seealso:: :meth:`text` """ if encoding is None and retain: with self.open('U') as f: return f.readlines() else: return self.text(encoding, errors).splitlines(retain) def write_lines(self, lines, encoding=None, errors='strict', linesep=os.linesep, append=False): r""" Write the given lines of text to this file. By default this overwrites any existing file at this path. This puts a platform-specific newline sequence on every line. See `linesep` below. `lines` - A list of strings. `encoding` - A Unicode encoding to use. This applies only if `lines` contains any Unicode strings. `errors` - How to handle errors in Unicode encoding. This also applies only to Unicode strings. linesep - The desired line-ending. This line-ending is applied to every line. If a line already has any standard line ending (``'\r'``, ``'\n'``, ``'\r\n'``, ``u'\x85'``, ``u'\r\x85'``, ``u'\u2028'``), that will be stripped off and this will be used instead. The default is os.linesep, which is platform-dependent (``'\r\n'`` on Windows, ``'\n'`` on Unix, etc.). Specify ``None`` to write the lines as-is, like :meth:`file.writelines`. Use the keyword argument append=True to append lines to the file. The default is to overwrite the file. Warning: When you use this with Unicode data, if the encoding of the existing data in the file is different from the encoding you specify with the encoding= parameter, the result is mixed-encoding data, which can really confuse someone trying to read the file later. """ if append: mode = 'ab' else: mode = 'wb' with self.open(mode) as f: for line in lines: isUnicode = isinstance(line, unicode) if linesep is not None: # Strip off any existing line-end and add the # specified linesep string. 
if isUnicode: if line[-2:] in (u('\r\n'), u('\x0d\x85')): line = line[:-2] elif line[-1:] in (u('\r'), u('\n'), u('\x85'), u('\u2028')): line = line[:-1] else: if line[-2:] == '\r\n': line = line[:-2] elif line[-1:] in ('\r', '\n'): line = line[:-1] line += linesep if isUnicode: if encoding is None: encoding = sys.getdefaultencoding() line = line.encode(encoding, errors) f.write(line) def read_md5(self): """ Calculate the md5 hash for this file. This reads through the entire file. .. seealso:: :meth:`read_hash` """ return self.read_hash('md5') def _hash(self, hash_name): """ Returns a hash object for the file at the current path. `hash_name` should be a hash algo name such as 'md5' or 'sha1' that's available in the :mod:`hashlib` module. """ m = hashlib.new(hash_name) for chunk in self.chunks(8192, mode="rb"): m.update(chunk) return m def read_hash(self, hash_name): """ Calculate given hash for this file. List of supported hashes can be obtained from :mod:`hashlib` package. This reads the entire file. .. seealso:: :meth:`hashlib.hash.digest` """ return self._hash(hash_name).digest() def read_hexhash(self, hash_name): """ Calculate given hash for this file, returning hexdigest. List of supported hashes can be obtained from :mod:`hashlib` package. This reads the entire file. .. seealso:: :meth:`hashlib.hash.hexdigest` """ return self._hash(hash_name).hexdigest() # --- Methods for querying the filesystem. # N.B. On some platforms, the os.path functions may be implemented in C # (e.g. isdir on Windows, Python 3.2.2), and compiled functions don't get # bound. Playing it safe and wrapping them all in method calls. def isabs(self): """ .. seealso:: :func:`os.path.isabs` """ return self.module.isabs(self) def exists(self): """ .. seealso:: :func:`os.path.exists` """ return self.module.exists(self) def isdir(self): """ .. seealso:: :func:`os.path.isdir` """ return self.module.isdir(self) def isfile(self): """ .. 
seealso:: :func:`os.path.isfile` """ return self.module.isfile(self) def islink(self): """ .. seealso:: :func:`os.path.islink` """ return self.module.islink(self) def ismount(self): """ .. seealso:: :func:`os.path.ismount` """ return self.module.ismount(self) def samefile(self, other): """ .. seealso:: :func:`os.path.samefile` """ return self.module.samefile(self, other) def getatime(self): """ .. seealso:: :attr:`atime`, :func:`os.path.getatime` """ return self.module.getatime(self) atime = property( getatime, None, None, """ Last access time of the file. .. seealso:: :meth:`getatime`, :func:`os.path.getatime` """) def getmtime(self): """ .. seealso:: :attr:`mtime`, :func:`os.path.getmtime` """ return self.module.getmtime(self) mtime = property( getmtime, None, None, """ Last-modified time of the file. .. seealso:: :meth:`getmtime`, :func:`os.path.getmtime` """) def getctime(self): """ .. seealso:: :attr:`ctime`, :func:`os.path.getctime` """ return self.module.getctime(self) ctime = property( getctime, None, None, """ Creation time of the file. .. seealso:: :meth:`getctime`, :func:`os.path.getctime` """) def getsize(self): """ .. seealso:: :attr:`size`, :func:`os.path.getsize` """ return self.module.getsize(self) size = property( getsize, None, None, """ Size of the file, in bytes. .. seealso:: :meth:`getsize`, :func:`os.path.getsize` """) if hasattr(os, 'access'): def access(self, mode): """ Return true if current user has access to this path. mode - One of the constants :data:`os.F_OK`, :data:`os.R_OK`, :data:`os.W_OK`, :data:`os.X_OK` .. seealso:: :func:`os.access` """ return os.access(self, mode) def stat(self): """ Perform a ``stat()`` system call on this path. .. seealso:: :meth:`lstat`, :func:`os.stat` """ return os.stat(self) def lstat(self): """ Like :meth:`stat`, but do not follow symbolic links. .. 
seealso:: :meth:`stat`, :func:`os.lstat` """ return os.lstat(self) def __get_owner_windows(self): r""" Return the name of the owner of this file or directory. Follow symbolic links. Return a name of the form ``ur'DOMAIN\User Name'``; may be a group. .. seealso:: :attr:`owner` """ desc = win32security.GetFileSecurity( self, win32security.OWNER_SECURITY_INFORMATION) sid = desc.GetSecurityDescriptorOwner() account, domain, typecode = win32security.LookupAccountSid(None, sid) return domain + u('\\') + account def __get_owner_unix(self): """ Return the name of the owner of this file or directory. Follow symbolic links. .. seealso:: :attr:`owner` """ st = self.stat() return pwd.getpwuid(st.st_uid).pw_name def __get_owner_not_implemented(self): raise NotImplementedError("Ownership not available on this platform.") if 'win32security' in globals(): get_owner = __get_owner_windows elif 'pwd' in globals(): get_owner = __get_owner_unix else: get_owner = __get_owner_not_implemented owner = property( get_owner, None, None, """ Name of the owner of this file or directory. .. seealso:: :meth:`get_owner`""") if hasattr(os, 'statvfs'): def statvfs(self): """ Perform a ``statvfs()`` system call on this path. .. seealso:: :func:`os.statvfs` """ return os.statvfs(self) if hasattr(os, 'pathconf'): def pathconf(self, name): """ .. seealso:: :func:`os.pathconf` """ return os.pathconf(self, name) # # --- Modifying operations on files and directories def utime(self, times): """ Set the access and modified times of this file. .. seealso:: :func:`os.utime` """ os.utime(self, times) return self def chmod(self, mode): """ .. seealso:: :func:`os.chmod` """ os.chmod(self, mode) return self if hasattr(os, 'chown'): def chown(self, uid=-1, gid=-1): """ .. seealso:: :func:`os.chown` """ os.chown(self, uid, gid) return self def rename(self, new): """ .. seealso:: :func:`os.rename` """ os.rename(self, new) return self._next_class(new) def renames(self, new): """ .. 
seealso:: :func:`os.renames` """ os.renames(self, new) return self._next_class(new) # # --- Create/delete operations on directories def mkdir(self, mode=o777): """ .. seealso:: :func:`os.mkdir` """ os.mkdir(self, mode) return self def mkdir_p(self, mode=o777): """ Like :meth:`mkdir`, but does not raise an exception if the directory already exists. """ try: self.mkdir(mode) except OSError: _, e, _ = sys.exc_info() if e.errno != errno.EEXIST: raise return self def makedirs(self, mode=o777): """ .. seealso:: :func:`os.makedirs` """ os.makedirs(self, mode) return self def makedirs_p(self, mode=o777): """ Like :meth:`makedirs`, but does not raise an exception if the directory already exists. """ try: self.makedirs(mode) except OSError: _, e, _ = sys.exc_info() if e.errno != errno.EEXIST: raise return self def rmdir(self): """ .. seealso:: :func:`os.rmdir` """ os.rmdir(self) return self def rmdir_p(self): """ Like :meth:`rmdir`, but does not raise an exception if the directory is not empty or does not exist. """ try: self.rmdir() except OSError: _, e, _ = sys.exc_info() if e.errno != errno.ENOTEMPTY and e.errno != errno.EEXIST: raise return self def removedirs(self): """ .. seealso:: :func:`os.removedirs` """ os.removedirs(self) return self def removedirs_p(self): """ Like :meth:`removedirs`, but does not raise an exception if the directory is not empty or does not exist. """ try: self.removedirs() except OSError: _, e, _ = sys.exc_info() if e.errno != errno.ENOTEMPTY and e.errno != errno.EEXIST: raise return self # --- Modifying operations on files def touch(self): """ Set the access/modified times of this file to the current time. Create the file if it does not exist. """ fd = os.open(self, os.O_WRONLY | os.O_CREAT, o666) os.close(fd) os.utime(self, None) return self def remove(self): """ .. seealso:: :func:`os.remove` """ os.remove(self) return self def remove_p(self): """ Like :meth:`remove`, but does not raise an exception if the file does not exist. 
""" try: self.unlink() except OSError: _, e, _ = sys.exc_info() if e.errno != errno.ENOENT: raise return self def unlink(self): """ .. seealso:: :func:`os.unlink` """ os.unlink(self) return self def unlink_p(self): """ Like :meth:`unlink`, but does not raise an exception if the file does not exist. """ self.remove_p() return self # --- Links if hasattr(os, 'link'): def link(self, newpath): """ Create a hard link at `newpath`, pointing to this file. .. seealso:: :func:`os.link` """ os.link(self, newpath) return self._next_class(newpath) if hasattr(os, 'symlink'): def symlink(self, newlink): """ Create a symbolic link at `newlink`, pointing here. .. seealso:: :func:`os.symlink` """ os.symlink(self, newlink) return self._next_class(newlink) if hasattr(os, 'readlink'): def readlink(self): """ Return the path to which this symbolic link points. The result may be an absolute or a relative path. .. seealso:: :meth:`readlinkabs`, :func:`os.readlink` """ return self._next_class(os.readlink(self)) def readlinkabs(self): """ Return the path to which this symbolic link points. The result is always an absolute path. .. seealso:: :meth:`readlink`, :func:`os.readlink` """ p = self.readlink() if p.isabs(): return p else: return (self.parent / p).abspath() # # --- High-level functions from shutil copyfile = shutil.copyfile copymode = shutil.copymode copystat = shutil.copystat copy = shutil.copy copy2 = shutil.copy2 copytree = shutil.copytree if hasattr(shutil, 'move'): move = shutil.move rmtree = shutil.rmtree def rmtree_p(self): """ Like :meth:`rmtree`, but does not raise an exception if the directory does not exist. """ try: self.rmtree() except OSError: _, e, _ = sys.exc_info() if e.errno != errno.ENOENT: raise return self def chdir(self): """ .. seealso:: :func:`os.chdir` """ os.chdir(self) cd = chdir # # --- Special stuff from os if hasattr(os, 'chroot'): def chroot(self): """ .. 
seealso:: :func:`os.chroot` """ os.chroot(self) if hasattr(os, 'startfile'): def startfile(self): """ .. seealso:: :func:`os.startfile` """ os.startfile(self) return self # in-place re-writing, courtesy of Martijn Pieters # http://www.zopatista.com/python/2013/11/26/inplace-file-rewriting/ @contextlib.contextmanager def in_place(self, mode='r', buffering=-1, encoding=None, errors=None, newline=None, backup_extension=None): """ A context in which a file may be re-written in-place with new content. Yields a tuple of (readable, writable) file objects, where writable replaces readable. If an exception occurs, the old file is restored, removing the written data. Mode *must not* use 'w', 'a' or '+'; only read-only-modes are allowed. A ValueError is raised on invalid modes. For example, to add line numbers to a file:: p = path(filename) assert p.isfile() with p.in_place() as reader, writer: for number, line in enumerate(reader, 1): writer.write('{0:3}: '.format(number))) writer.write(line) Thereafter, the file at filename will have line numbers in it. 
""" import io if set(mode).intersection('wa+'): raise ValueError('Only read-only file modes can be used') # move existing file to backup, create new file with same permissions # borrowed extensively from the fileinput module backup_fn = self + (backup_extension or os.extsep + 'bak') try: os.unlink(backup_fn) except os.error: pass os.rename(self, backup_fn) readable = io.open(backup_fn, mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline) try: perm = os.fstat(readable.fileno()).st_mode except OSError: writable = open(self, 'w' + mode.replace('r', ''), buffering=buffering, encoding=encoding, errors=errors, newline=newline) else: os_mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC if hasattr(os, 'O_BINARY'): os_mode |= os.O_BINARY fd = os.open(self, os_mode, perm) writable = io.open(fd, "w" + mode.replace('r', ''), buffering=buffering, encoding=encoding, errors=errors, newline=newline) try: if hasattr(os, 'chmod'): os.chmod(self, perm) except OSError: pass try: yield readable, writable except Exception: # move backup back readable.close() writable.close() try: os.unlink(self) except os.error: pass os.rename(backup_fn, self) raise else: readable.close() writable.close() finally: try: os.unlink(backup_fn) except os.error: pass class tempdir(path): """ A temporary directory via tempfile.mkdtemp, and constructed with the same parameters that you can use as a context manager. Example: with tempdir() as d: # do stuff with the path object "d" # here the directory is deleted automatically .. 
seealso:: :func:`tempfile.mkdtemp` """ @ClassProperty @classmethod def _next_class(cls): return path def __new__(cls, *args, **kwargs): dirname = tempfile.mkdtemp(*args, **kwargs) return super(tempdir, cls).__new__(cls, dirname) def __init__(self, *args, **kwargs): pass def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): if not exc_value: self.rmtree() def _permission_mask(mode): """ Convert a Unix chmod symbolic mode like 'ugo+rwx' to a function suitable for applying to a mask to affect that change. >>> mask = _permission_mask('ugo+rwx') >>> mask(o554) == o777 True >>> _permission_mask('go-x')(o777) == o766 True """ parsed = re.match('(?P<who>[ugo]+)(?P<op>[-+])(?P<what>[rwx]+)$', mode) if not parsed: raise ValueError("Unrecognized symbolic mode", mode) spec_map = dict(r=4, w=2, x=1) spec = reduce(operator.or_, [spec_map[perm] for perm in parsed.group('what')]) # now apply spec to each in who shift_map = dict(u=6, g=3, o=0) mask = reduce(operator.or_, [spec << shift_map[subj] for subj in parsed.group('who')]) op = parsed.group('op') # if op is -, invert the mask if op == '-': mask ^= o777 op_map = {'+': operator.or_, '-': operator.and_} return functools.partial(op_map[op], mask) class CaseInsensitivePattern(unicode): """ A string with a 'normcase' property, suitable for passing to :meth:`listdir`, :meth:`dirs`, :meth:`files`, :meth:`walk`, :meth:`walkdirs`, or :meth:`walkfiles` to match case-insensitive. For example, to get all files ending in .py, .Py, .pY, or .PY in the current directory:: from path import path, CaseInsensitivePattern as ci path('.').files(ci('*.py')) """ @property def normcase(self): return __import__('ntpath').normcase<|fim▁end|>
return
<|file_name|>sensor.py<|end_file_name|><|fim▁begin|>"""Support for Subaru sensors.""" import subarulink.const as sc from homeassistant.components.sensor import DEVICE_CLASSES from homeassistant.const import ( DEVICE_CLASS_BATTERY, DEVICE_CLASS_TEMPERATURE, DEVICE_CLASS_VOLTAGE, LENGTH_KILOMETERS, LENGTH_MILES, PERCENTAGE, PRESSURE_HPA, TEMP_CELSIUS, TIME_MINUTES, VOLT, VOLUME_GALLONS, VOLUME_LITERS, ) from homeassistant.util.distance import convert as dist_convert from homeassistant.util.unit_system import ( IMPERIAL_SYSTEM, LENGTH_UNITS, PRESSURE_UNITS, TEMPERATURE_UNITS, ) from homeassistant.util.volume import convert as vol_convert from .const import ( API_GEN_2, DOMAIN, ENTRY_COORDINATOR, ENTRY_VEHICLES, VEHICLE_API_GEN, VEHICLE_HAS_EV, VEHICLE_HAS_SAFETY_SERVICE, VEHICLE_STATUS, ) from .entity import SubaruEntity L_PER_GAL = vol_convert(1, VOLUME_GALLONS, VOLUME_LITERS) KM_PER_MI = dist_convert(1, LENGTH_MILES, LENGTH_KILOMETERS) # Fuel Economy Constants FUEL_CONSUMPTION_L_PER_100KM = "L/100km" FUEL_CONSUMPTION_MPG = "mi/gal" FUEL_CONSUMPTION_UNITS = [FUEL_CONSUMPTION_L_PER_100KM, FUEL_CONSUMPTION_MPG] SENSOR_TYPE = "type" SENSOR_CLASS = "class" SENSOR_FIELD = "field" SENSOR_UNITS = "units" # Sensor data available to "Subaru Safety Plus" subscribers with Gen1 or Gen2 vehicles SAFETY_SENSORS = [ { SENSOR_TYPE: "Odometer", SENSOR_CLASS: None, SENSOR_FIELD: sc.ODOMETER, SENSOR_UNITS: LENGTH_KILOMETERS, }, ] # Sensor data available to "Subaru Safety Plus" subscribers with Gen2 vehicles API_GEN_2_SENSORS = [ { SENSOR_TYPE: "Avg Fuel Consumption", SENSOR_CLASS: None, SENSOR_FIELD: sc.AVG_FUEL_CONSUMPTION, SENSOR_UNITS: FUEL_CONSUMPTION_L_PER_100KM, }, { SENSOR_TYPE: "Range", SENSOR_CLASS: None, SENSOR_FIELD: sc.DIST_TO_EMPTY, SENSOR_UNITS: LENGTH_KILOMETERS, }, { SENSOR_TYPE: "Tire Pressure FL", SENSOR_CLASS: None, SENSOR_FIELD: sc.TIRE_PRESSURE_FL, SENSOR_UNITS: PRESSURE_HPA, }, { SENSOR_TYPE: "Tire Pressure FR", SENSOR_CLASS: None, SENSOR_FIELD: 
sc.TIRE_PRESSURE_FR, SENSOR_UNITS: PRESSURE_HPA, }, { SENSOR_TYPE: "Tire Pressure RL", SENSOR_CLASS: None, SENSOR_FIELD: sc.TIRE_PRESSURE_RL, SENSOR_UNITS: PRESSURE_HPA, }, { SENSOR_TYPE: "Tire Pressure RR", SENSOR_CLASS: None, SENSOR_FIELD: sc.TIRE_PRESSURE_RR, SENSOR_UNITS: PRESSURE_HPA, }, { SENSOR_TYPE: "External Temp", SENSOR_CLASS: DEVICE_CLASS_TEMPERATURE, SENSOR_FIELD: sc.EXTERNAL_TEMP, SENSOR_UNITS: TEMP_CELSIUS, }, { SENSOR_TYPE: "12V Battery Voltage", SENSOR_CLASS: DEVICE_CLASS_VOLTAGE, SENSOR_FIELD: sc.BATTERY_VOLTAGE, SENSOR_UNITS: VOLT, }, ] # Sensor data available to "Subaru Safety Plus" subscribers with PHEV vehicles EV_SENSORS = [ { SENSOR_TYPE: "EV Range", SENSOR_CLASS: None, SENSOR_FIELD: sc.EV_DISTANCE_TO_EMPTY, SENSOR_UNITS: LENGTH_MILES, }, { SENSOR_TYPE: "EV Battery Level", SENSOR_CLASS: DEVICE_CLASS_BATTERY, SENSOR_FIELD: sc.EV_STATE_OF_CHARGE_PERCENT, SENSOR_UNITS: PERCENTAGE, }, { SENSOR_TYPE: "EV Time to Full Charge", SENSOR_CLASS: None, SENSOR_FIELD: sc.EV_TIME_TO_FULLY_CHARGED, SENSOR_UNITS: TIME_MINUTES, }, ] async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Subaru sensors by config_entry.""" coordinator = hass.data[DOMAIN][config_entry.entry_id][ENTRY_COORDINATOR] vehicle_info = hass.data[DOMAIN][config_entry.entry_id][ENTRY_VEHICLES] entities = [] for vin in vehicle_info.keys(): entities.extend(create_vehicle_sensors(vehicle_info[vin], coordinator)) async_add_entities(entities, True) def create_vehicle_sensors(vehicle_info, coordinator): """Instantiate all available sensors for the vehicle.""" sensors_to_add = [] if vehicle_info[VEHICLE_HAS_SAFETY_SERVICE]: sensors_to_add.extend(SAFETY_SENSORS) if vehicle_info[VEHICLE_API_GEN] == API_GEN_2: sensors_to_add.extend(API_GEN_2_SENSORS) if vehicle_info[VEHICLE_HAS_EV]: sensors_to_add.extend(EV_SENSORS) return [ SubaruSensor( vehicle_info, coordinator, s[SENSOR_TYPE], s[SENSOR_CLASS], s[SENSOR_FIELD], s[SENSOR_UNITS], ) for s in sensors_to_add ] class 
SubaruSensor(SubaruEntity): """Class for Subaru sensors.""" def __init__( self, vehicle_info, coordinator, entity_type, sensor_class, data_field, api_unit<|fim▁hole|> """Initialize the sensor.""" super().__init__(vehicle_info, coordinator) self.hass_type = "sensor" self.current_value = None self.entity_type = entity_type self.sensor_class = sensor_class self.data_field = data_field self.api_unit = api_unit @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" if self.sensor_class in DEVICE_CLASSES: return self.sensor_class return super().device_class @property def state(self): """Return the state of the sensor.""" self.current_value = self.get_current_value() if self.current_value is None: return None if self.api_unit in TEMPERATURE_UNITS: return round( self.hass.config.units.temperature(self.current_value, self.api_unit), 1 ) if self.api_unit in LENGTH_UNITS: return round( self.hass.config.units.length(self.current_value, self.api_unit), 1 ) if self.api_unit in PRESSURE_UNITS: if self.hass.config.units == IMPERIAL_SYSTEM: return round( self.hass.config.units.pressure(self.current_value, self.api_unit), 1, ) if self.api_unit in FUEL_CONSUMPTION_UNITS: if self.hass.config.units == IMPERIAL_SYSTEM: return round((100.0 * L_PER_GAL) / (KM_PER_MI * self.current_value), 1) return self.current_value @property def unit_of_measurement(self): """Return the unit_of_measurement of the device.""" if self.api_unit in TEMPERATURE_UNITS: return self.hass.config.units.temperature_unit if self.api_unit in LENGTH_UNITS: return self.hass.config.units.length_unit if self.api_unit in PRESSURE_UNITS: if self.hass.config.units == IMPERIAL_SYSTEM: return self.hass.config.units.pressure_unit return PRESSURE_HPA if self.api_unit in FUEL_CONSUMPTION_UNITS: if self.hass.config.units == IMPERIAL_SYSTEM: return FUEL_CONSUMPTION_MPG return FUEL_CONSUMPTION_L_PER_100KM return self.api_unit @property def available(self): """Return if entity is 
available.""" last_update_success = super().available if last_update_success and self.vin not in self.coordinator.data: return False return last_update_success def get_current_value(self): """Get raw value from the coordinator.""" value = self.coordinator.data[self.vin][VEHICLE_STATUS].get(self.data_field) if value in sc.BAD_SENSOR_VALUES: value = None if isinstance(value, str): if "." in value: value = float(value) else: value = int(value) return value<|fim▁end|>
):
<|file_name|>LicenceValidationResult.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2016 the original author or authors. * * This file is part of jGrades Application Project. * * Licensed under the Apache License, Version 2.0 (the "License"); * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 */ package org.jgrades.lic.api.model; <|fim▁hole|>import lombok.Setter; @Getter @Setter @AllArgsConstructor public class LicenceValidationResult { private boolean valid; private String errorMessage; public LicenceValidationResult() { valid = true; errorMessage = null; } }<|fim▁end|>
import lombok.AllArgsConstructor; import lombok.Getter;
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from functools import wraps import logging import time try: from collections import OrderedDict except ImportError: try: # ordereddict available on pypi for Python < 2.7 from ordereddict import OrderedDict except ImportError: # Otherwise fall back on normal dict OrderedDict = dict def cached_property(func): """ Wraps a method on a class to make it a property and caches the result the first time it is evaluated """ attr_name = '_cached_prop_' + func.__name__ @property @wraps(func) def get(self): try: return getattr(self, attr_name) except AttributeError: value = func(self) setattr(self, attr_name, value) return value return get class Timer(object): """ Context manager for logging the time taken for an operation """ def __init__(self, log, description): self._enabled = log.isEnabledFor(logging.INFO) self._log = log self._description = description self._start_time = None def __enter__(self): if not self._enabled: return self try: self._start_time = time.perf_counter() except AttributeError: # Python < 3.3 self._start_time = time.clock() return self def __exit__(self, exc_type, exc_val, exc_tb): if not self._enabled: return try: end_time = time.perf_counter() except AttributeError: # Python < 3.3 end_time = time.clock() <|fim▁hole|> elapsed_time = (end_time - self._start_time) * 1.0e3 self._log.info("{0}: Took {1} ms".format(self._description, elapsed_time))<|fim▁end|>
<|file_name|>qos_specs.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_db import exception as db_exc from oslo_log import log as logging from cinder import db from cinder import exception from cinder.i18n import _ from cinder import objects from cinder.objects import base from cinder.objects import fields as c_fields from oslo_versionedobjects import fields LOG = logging.getLogger(__name__) @base.CinderObjectRegistry.register class QualityOfServiceSpecs(base.CinderPersistentObject, base.CinderObject, base.CinderObjectDictCompat, base.CinderComparableObject): # Version # 1.0: Initial version VERSION = "1.0" OPTIONAL_FIELDS = ['volume_types'] fields = { 'id': fields.UUIDField(), 'name': fields.StringField(), 'consumer': c_fields.QoSConsumerField( default=c_fields.QoSConsumerValues.BACK_END), 'specs': fields.DictOfNullableStringsField(nullable=True), 'volume_types': fields.ObjectField('VolumeTypeList', nullable=True), } def __init__(self, *args, **kwargs): super(QualityOfServiceSpecs, self).__init__(*args, **kwargs) self._init_specs = {} def __setattr__(self, name, value): try: super(QualityOfServiceSpecs, self).__setattr__(name, value) except ValueError: if name == 'consumer': # Give more descriptive error message for invalid 'consumer' msg = (_("Valid consumer of QoS specs are: %s") % c_fields.QoSConsumerField()) raise exception.InvalidQoSSpecs(reason=msg) else: raise def obj_reset_changes(self, fields=None, recursive=False): 
super(QualityOfServiceSpecs, self).obj_reset_changes(fields, recursive) if fields is None or 'specs' in fields: self._init_specs = self.specs.copy() if self.specs else {} def obj_what_changed(self): changes = super(QualityOfServiceSpecs, self).obj_what_changed() # Do comparison of what's in the dict vs. reference to the specs object if self.obj_attr_is_set('id'): if self.specs != self._init_specs: changes.add('specs') else: # If both dicts are equal don't consider anything gets changed if 'specs' in changes: changes.remove('specs') return changes def obj_get_changes(self): changes = super(QualityOfServiceSpecs, self).obj_get_changes() if 'specs' in changes: # For specs, we only want what has changed in the dictionary, # because otherwise we'll individually overwrite the DB value for # every key in 'specs' even if it hasn't changed specs_changes = {} for key, val in self.specs.items(): if val != self._init_specs.get(key): specs_changes[key] = val changes['specs'] = specs_changes specs_keys_removed = (set(self._init_specs.keys()) - set(self.specs.keys())) if specs_keys_removed: # Special key notifying which specs keys have been deleted changes['specs_keys_removed'] = specs_keys_removed return changes def obj_load_attr(self, attrname): if attrname not in self.OPTIONAL_FIELDS: raise exception.ObjectActionError( action='obj_load_attr', reason=_('attribute %s not lazy-loadable') % attrname) if not self._context: raise exception.OrphanedObjectError(method='obj_load_attr', objtype=self.obj_name()) if attrname == 'volume_types': self.volume_types = objects.VolumeTypeList.get_all_types_for_qos( self._context, self.id) @classmethod def _from_db_object(cls, context, qos_spec, db_qos_spec, expected_attrs=None): if expected_attrs is None: expected_attrs = [] for name, field in qos_spec.fields.items(): if name not in cls.OPTIONAL_FIELDS: value = db_qos_spec.get(name) # 'specs' could be null if only a consumer is given, so make # it an empty dict instead of None if not value and 
isinstance(field, fields.DictOfStringsField): value = {} setattr(qos_spec, name, value) if 'volume_types' in expected_attrs: volume_types = objects.VolumeTypeList.get_all_types_for_qos( context, db_qos_spec['id']) qos_spec.volume_types = volume_types qos_spec._context = context qos_spec.obj_reset_changes() return qos_spec def create(self): if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = self.cinder_obj_get_changes() try: create_ret = db.qos_specs_create(self._context, updates) except db_exc.DBDataError: msg = _('Error writing field to database') LOG.exception(msg) raise exception.Invalid(msg) except db_exc.DBError: LOG.exception('DB error occurred when creating QoS specs.') raise exception.QoSSpecsCreateFailed(name=self.name, qos_specs=self.specs) # Save ID with the object updates['id'] = create_ret['id'] self._from_db_object(self._context, self, updates) def save(self): updates = self.cinder_obj_get_changes() if updates: if 'specs_keys_removed' in updates.keys(): for specs_key_to_remove in updates['specs_keys_removed']: db.qos_specs_item_delete( self._context, self.id, specs_key_to_remove) del updates['specs_keys_removed'] db.qos_specs_update(self._context, self.id, updates) self.obj_reset_changes()<|fim▁hole|> :param force: when force is True, all volume_type mappings for this QoS are deleted. 
When force is False and volume_type mappings still exist, a QoSSpecsInUse exception is thrown """ if self.volume_types: if not force: raise exception.QoSSpecsInUse(specs_id=self.id) # remove all association db.qos_specs_disassociate_all(self._context, self.id) updated_values = db.qos_specs_delete(self._context, self.id) self.update(updated_values) self.obj_reset_changes(updated_values.keys()) @base.CinderObjectRegistry.register class QualityOfServiceSpecsList(base.ObjectListBase, base.CinderObject): VERSION = '1.0' fields = { 'objects': fields.ListOfObjectsField('QualityOfServiceSpecs'), } @classmethod def get_all(cls, context, *args, **kwargs): specs = db.qos_specs_get_all(context, *args, **kwargs) return base.obj_make_list(context, cls(context), objects.QualityOfServiceSpecs, specs)<|fim▁end|>
def destroy(self, force=False): """Deletes the QoS spec.
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import re import hashlib FNAME_MATCH = re.compile(r'/([^/]+)$') # From the last slash to the end of the string PREFIX = re.compile(r'([^:]+://)(/)?(.+)') # Check for a prefix like data:// def getParentAndBase(path): match = PREFIX.match(path)<|fim▁hole|> stripped_path = path base = FNAME_MATCH.search(stripped_path) if base is None: raise ValueError('Invalid path') parent = FNAME_MATCH.sub('', stripped_path) return parent, base.group(1) else: prefix, leading_slash, uri = match.groups() parts = uri.split('/') parent_path = '/'.join(parts[:-1]) if leading_slash is not None: parent_path = '{prefix}/{uri}'.format(prefix=prefix, uri='/'.join(parts[:-1])) else: parent_path = '{prefix}{uri}'.format(prefix=prefix, uri='/'.join(parts[:-1])) return parent_path, parts[-1] def pathJoin(parent, base): if parent.endswith('/'): return parent + base return parent + '/' + base def md5_for_file(fname): hash_md5 = hashlib.md5() with open(fname, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hash_md5.update(chunk) return str(hash_md5.hexdigest()) def md5_for_str(content): hash_md5 = hashlib.md5() hash_md5.update(content.encode()) return str(hash_md5.hexdigest())<|fim▁end|>
if match is None: if path.endswith('/'): stripped_path = path[:-1] else:
<|file_name|>DataFilter.java<|end_file_name|><|fim▁begin|>package com.instaclick.filter; /** * Defines a behavior that should be implement by all filter * * @author Fabio B. Silva <[email protected]> */ public interface DataFilter { /** * Adds the given {@link Data} if it does not exists * * @param data * * @return <b>TRUE</b> if the the {@link Data} does not exists; <b>FALSE</b> otherwise */ public boolean add(Data data); /** * Check if the given {@link Data} exists *<|fim▁hole|> * @param data * * @return <b>TRUE</b> if the the {@link Data} does not exists; <b>FALSE</b> otherwise */ public boolean contains(Data data); /** * Flushes the filter data, this operation should be invoked at the end of the filter */ public void flush(); }<|fim▁end|>
<|file_name|>search-control-properties.js<|end_file_name|><|fim▁begin|>'use strict'; module.exports = { skip: { type: 'integer', format: 'int32', minimum: 0, maximum: 500, default: 0, description: 'The number of results to skip before returning matches. Use this to support paging. Maximum of 500' }, limit: { type: 'integer', format: 'int32', minimum: 1,<|fim▁hole|> default: 20, description: 'Limit the number of results returned. Defaults to 20. Maximum of 200' } };<|fim▁end|>
maximum: 200,
<|file_name|>CellError.hpp<|end_file_name|><|fim▁begin|>// CellError.hpp // // Copyright (C) 2006-2007 Peter Graves <[email protected]> // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. #ifndef __CELL_ERROR_HPP #define __CELL_ERROR_HPP class CellError : public Condition { private: static Layout * get_layout_for_class(); public: CellError() : Condition(WIDETAG_CONDITION, get_layout_for_class()) { set_slot_value(S_name, NIL); } CellError(Value name) : Condition(WIDETAG_CONDITION, get_layout_for_class()) { set_slot_value(S_name, name); }<|fim▁hole|> void initialize(Value initargs); virtual Value type_of() const { return S_cell_error; } virtual Value class_of() const { return C_cell_error; } virtual bool typep(Value type) const; }; #endif // CellError.hpp<|fim▁end|>
<|file_name|>transport_test.go<|end_file_name|><|fim▁begin|>// Copyright Project Harbor Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and<|fim▁hole|>import ( "fmt" "net/http" "testing" "github.com/goharbor/harbor/src/common/utils/test" ) type simpleModifier struct { } func (s *simpleModifier) Modify(req *http.Request) error { req.Header.Set("Authorization", "token") return nil } func TestRoundTrip(t *testing.T) { server := test.NewServer( &test.RequestHandlerMapping{ Method: "GET", Pattern: "/", Handler: test.Handler(nil), }) transport := NewTransport(&http.Transport{}, &simpleModifier{}) client := &http.Client{ Transport: transport, } req, err := http.NewRequest("GET", fmt.Sprintf("%s/", server.URL), nil) if err != nil { t.Fatalf("failed to create request: %v", err) } if _, err := client.Do(req); err != nil { t.Fatalf("failed to send request: %s", err) } header := req.Header.Get("Authorization") if header != "token" { t.Errorf("unexpected header: %s != %s", header, "token") } }<|fim▁end|>
// limitations under the License. package notary
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig <|fim▁hole|><|fim▁end|>
class LibraryConfig(AppConfig): name = 'library'
<|file_name|>calibration.py<|end_file_name|><|fim▁begin|>import numpy as np from scipy import sparse from scipy.interpolate import interp1d class calibration(object): ''' some useful tools for manual calibration ''' def normalize_zdata(self,z_data,cal_z_data): return z_data/cal_z_data def normalize_amplitude(self,z_data,cal_ampdata): return z_data/cal_ampdata def normalize_phase(self,z_data,cal_phase): return z_data*np.exp(-1j*cal_phase) def normalize_by_func(self,f_data,z_data,func): return z_data/func(f_data) def _baseline_als(self,y, lam, p, niter=10): ''' see http://zanran_storage.s3.amazonaws.com/www.science.uva.nl/ContentPages/443199618.pdf "Asymmetric Least Squares Smoothing" by P. Eilers and H. Boelens in 2005. http://stackoverflow.com/questions/29156532/python-baseline-correction-library "There are two parameters: p for asymmetry and lambda for smoothness. Both have to be tuned to the data at hand. We found that generally 0.001<=p<=0.1 is a good choice (for a trace with positive peaks) and 10e2<=lambda<=10e9, but exceptions may occur." 
''' L = len(y) D = sparse.csc_matrix(np.diff(np.eye(L), 2)) w = np.ones(L) for i in range(niter): W = sparse.spdiags(w, 0, L, L) Z = W + lam * D.dot(D.transpose()) z = sparse.linalg.spsolve(Z, w*y) w = p * (y > z) + (1-p) * (y < z) return z def fit_baseline_amp(self,z_data,lam,p,niter=10): ''' for this to work, you need to analyze a large part of the baseline tune lam and p until you get the desired result ''' return self._baseline_als(np.absolute(z_data),lam,p,niter=niter) def baseline_func_amp(self,z_data,f_data,lam,p,niter=10):<|fim▁hole|> returns the baseline as a function the points in between the datapoints are computed by cubic interpolation ''' return interp1d(f_data, self._baseline_als(np.absolute(z_data),lam,p,niter=niter), kind='cubic') def baseline_func_phase(self,z_data,f_data,lam,p,niter=10): ''' for this to work, you need to analyze a large part of the baseline tune lam and p until you get the desired result returns the baseline as a function the points in between the datapoints are computed by cubic interpolation ''' return interp1d(f_data, self._baseline_als(np.angle(z_data),lam,p,niter=niter), kind='cubic') def fit_baseline_phase(self,z_data,lam,p,niter=10): ''' for this to work, you need to analyze a large part of the baseline tune lam and p until you get the desired result ''' return self._baseline_als(np.angle(z_data),lam,p,niter=niter) def GUIbaselinefit(self): ''' A GUI to help you fit the baseline ''' self.__lam = 1e6 self.__p = 0.9 niter = 10 self.__baseline = self._baseline_als(np.absolute(self.z_data_raw),self.__lam,self.__p,niter=niter) import matplotlib.pyplot as plt from matplotlib.widgets import Slider fig, (ax0,ax1) = plt.subplots(nrows=2) plt.suptitle('Use the sliders to make the green curve match the baseline.') plt.subplots_adjust(left=0.25, bottom=0.25) l0, = ax0.plot(np.absolute(self.z_data_raw)) l0b, = ax0.plot(np.absolute(self.__baseline)) l1, = ax1.plot(np.absolute(self.z_data_raw/self.__baseline)) ax0.set_ylabel('amp, 
rawdata vs. baseline') ax1.set_ylabel('amp, corrected') axcolor = 'lightgoldenrodyellow' axSmooth = plt.axes([0.25, 0.1, 0.65, 0.03], axisbg=axcolor) axAsym = plt.axes([0.25, 0.15, 0.65, 0.03], axisbg=axcolor) axbcorr = plt.axes([0.25, 0.05, 0.65, 0.03], axisbg=axcolor) sSmooth = Slider(axSmooth, 'Smoothness', 0.1, 10., valinit=np.log10(self.__lam),valfmt='1E%f') sAsym = Slider(axAsym, 'Asymmetry', 1e-4,0.99999, valinit=self.__p,valfmt='%f') sbcorr = Slider(axbcorr, 'vertical shift',0.7,1.1,valinit=1.) def update(val): self.__lam = 10**sSmooth.val self.__p = sAsym.val self.__baseline = sbcorr.val*self._baseline_als(np.absolute(self.z_data_raw),self.__lam,self.__p,niter=niter) l0.set_ydata(np.absolute(self.z_data_raw)) l0b.set_ydata(np.absolute(self.__baseline)) l1.set_ydata(np.absolute(self.z_data_raw/self.__baseline)) fig.canvas.draw_idle() sSmooth.on_changed(update) sAsym.on_changed(update) sbcorr.on_changed(update) plt.show() self.z_data_raw /= self.__baseline plt.close()<|fim▁end|>
''' for this to work, you need to analyze a large part of the baseline tune lam and p until you get the desired result
<|file_name|>containers.component.ts<|end_file_name|><|fim▁begin|>import {ChangeDetectorRef, Component, OnInit} from '@angular/core'; import {DockerService} from "../../services/docker.service"; import 'rxjs/Rx'; import {toast} from "angular2-materialize"; import {Transform, Writable} from "stream"; declare let $: any; declare let jQuery: any; @Component({ selector: 'dm-containers', templateUrl: './containers.component.html', styleUrls: ['./containers.component.scss'], }) export class ContainersComponent implements OnInit { containers: any[]; terminalOut: string = ''; constructor(private dockerService: DockerService, private ref: ChangeDetectorRef) { this.containers = new Array(); // this.terminalOut = new Array(); } ngOnInit() { $(function () { $('#modal-terminal').modal({ dismissible: true, opacity: .3, inDuration: 300, outDuration: 200, startingTop: '4%', endingTop: '10%', ready: function (modal, trigger) { }, complete: function () { } } ); /* * jQuery.dockmodal - jQuery dockable modal dialog widget * * Copyright 2014, uxMine * Dual licensed under the MIT or GPL Version 2 licenses. 
* Date: 2/11/2014 * @author Tarafder Ashek E Elahi * @version 1.1 * Depends: * jquery.js * */ (function ($) { var defaults = { width: 900, height: "65%", minimizedWidth: 200, gutter: 10, poppedOutDistance: "6%", title: "", dialogClass: "", buttons: [], /* id, html, buttonClass, click */ animationSpeed: 400, opacity: 1, initialState: 'modal', /* "modal", "docked", "minimized" */ showClose: true, showPopout: true, showMinimize: true, create: undefined, open: undefined, beforeClose: undefined, close: undefined, beforeMinimize: undefined, minimize: undefined, beforeRestore: undefined, restore: undefined, beforePopout: undefined, popout: undefined }; var dClass = "dockmodal"; var windowWidth = $(window).width(); function setAnimationCSS($this, $el) { var aniSpeed = $this.options.animationSpeed / 1000; $el.css({"transition": aniSpeed + "s right, " + aniSpeed + "s left, " + aniSpeed + "s top, " + aniSpeed + "s bottom, " + aniSpeed + "s height, " + aniSpeed + "s width"}); return true; } function removeAnimationCSS($el) { $el.css({"transition": "none"}); return true; } var methods = { init: function (options) { return this.each(function () { var $this = $(this); var data = $this.data('dockmodal'); $this.options = $.extend({}, defaults, options); // If the plugin hasn't been initialized yet if (!data) { $this.data('dockmodal', $this); } else { $("body").append($this.closest("." 
+ dClass).show()); //methods.restore.apply($this); methods.refreshLayout(); setTimeout(function () { methods.restore.apply($this); }, $this.options.animationSpeed); return; } // create modal var $body = $("body"); var $window = $(window); var $dockModal = $('<div/>').addClass(dClass).addClass($this.options.dialogClass); if ($this.options.initialState == "modal") { $dockModal.addClass("popped-out"); } else if ($this.options.initialState == "minimized") { $dockModal.addClass("minimized"); } //$dockModal.width($this.options.width); $dockModal.height(0); setAnimationCSS($this, $dockModal); // create title var $dockHeader = $('<div></div>').addClass(dClass + "-header"); if ($this.options.showClose) { $('<a href="#" class="header-action action-close" title="Close"><i class="icon-dockmodal-close"></i></a>').appendTo($dockHeader).click(function (e) { methods.destroy.apply($this); return false; }); } if ($this.options.showPopout) { $('<a href="#" class="header-action action-popout" title="Pop out"><i class="icon-dockmodal-popout"></i></a>').appendTo($dockHeader).click(function (e) { if ($dockModal.hasClass("popped-out")) { methods.restore.apply($this); } else { methods.popout.apply($this); } return false; }); } if ($this.options.showMinimize) { $('<a href="#" class="header-action action-minimize" title="Minimize"><i class="icon-dockmodal-minimize"></i></a>').appendTo($dockHeader).click(function (e) { if ($dockModal.hasClass("minimized")) { if ($dockModal.hasClass("popped-out")) { methods.popout.apply($this); } else { methods.restore.apply($this); } } else { methods.minimize.apply($this); } return false; }); } if ($this.options.showMinimize && $this.options.showPopout) { $dockHeader.click(function () { if ($dockModal.hasClass("minimized")) { if ($dockModal.hasClass("popped-out")) { methods.popout.apply($this); } else { methods.restore.apply($this); } } else { methods.minimize.apply($this); }<|fim▁hole|> }); } $dockHeader.append('<div class="title-text">' + 
($this.options.title || $this.attr("title")) + '</div>'); $dockModal.append($dockHeader); // create body section var $placeholder = $('<div class="modal-placeholder"></div>').insertAfter($this); $this.placeholder = $placeholder; var $dockBody = $('<div></div>').addClass(dClass + "-body").append($this); $dockModal.append($dockBody); // create footer if ($this.options.buttons.length) { var $dockFooter = $('<div></div>').addClass(dClass + "-footer"); var $dockFooterButtonset = $('<div></div>').addClass(dClass + "-footer-buttonset"); $dockFooter.append($dockFooterButtonset); $.each($this.options.buttons, function (indx, el) { var $btn = $('<a href="#" class="btn"></a>'); $btn.attr({ "id": el.id, "class": el.buttonClass }); $btn.html(el.html); $btn.click(function (e) { el.click(e, $this); return false; }); $dockFooterButtonset.append($btn); }); $dockModal.append($dockFooter); } else { $dockModal.addClass("no-footer"); } // create overlay var $overlay = $("." + dClass + "-overlay"); if (!$overlay.length) { $overlay = $('<div/>').addClass(dClass + "-overlay"); } // raise create event if ($.isFunction($this.options.create)) { $this.options.create($this); } $body.append($dockModal); $dockModal.after($overlay); $dockBody.focus(); // raise open event if ($.isFunction($this.options.open)) { setTimeout(function () { $this.options.open($this); }, $this.options.animationSpeed); } //methods.restore.apply($this); if ($dockModal.hasClass("minimized")) { $dockModal.find(".dockmodal-body, .dockmodal-footer").hide(); methods.minimize.apply($this); } else { if ($dockModal.hasClass("popped-out")) { methods.popout.apply($this); } else { methods.restore.apply($this); } } // attach resize event // track width, set to window width $body.data("windowWidth", $window.width()); $window.unbind("resize.dockmodal").bind("resize.dockmodal", function () { // do nothing if the width is the same // update new width value if ($window.width() == $body.data("windowWidth")) { return; } 
$body.data("windowWidth", $window.width()); methods.refreshLayout(); }); }); }, destroy: function () { return this.each(function () { var $this = $(this).data('dockmodal'); if (!$this) return; // raise beforeClose event if ($.isFunction($this.options.beforeClose)) { if ($this.options.beforeClose($this) === false) { return; } } try { var $dockModal = $this.closest("." + dClass); if ($dockModal.hasClass("popped-out") && !$dockModal.hasClass("minimized")) { $dockModal.css({ "left": "50%", "right": "50%", "top": "50%", "bottom": "50%" }); } else { $dockModal.css({ "width": "0", "height": "0" }); } setTimeout(function () { $this.removeData('dockmodal'); $this.placeholder.replaceWith($this); $dockModal.remove(); $("." + dClass + "-overlay").hide(); methods.refreshLayout(); // raise close event if ($.isFunction($this.options.close)) { $this.options.close($this); } }, $this.options.animationSpeed); } catch (err) { alert(err.message); } // other destroy routines }) }, close: function () { methods.destroy.apply(this); }, minimize: function () { return this.each(function () { var $this = $(this).data('dockmodal'); if (!$this) return; // raise beforeMinimize event if ($.isFunction($this.options.beforeMinimize)) { if ($this.options.beforeMinimize($this) === false) { return; } } var $dockModal = $this.closest("." + dClass); var headerHeight = $dockModal.find(".dockmodal-header").outerHeight(); $dockModal.addClass("minimized").css({ "width": $this.options.minimizedWidth + "px", "height": headerHeight + "px", "left": "auto", "right": "auto", "top": "auto", "bottom": "0" }); setTimeout(function () { // for safty, hide the body and footer $dockModal.find(".dockmodal-body, .dockmodal-footer").hide(); // raise minimize event if ($.isFunction($this.options.minimize)) { $this.options.minimize($this); } }, $this.options.animationSpeed); $("." 
+ dClass + "-overlay").hide(); $dockModal.find(".action-minimize").attr("title", "Restore"); methods.refreshLayout(); }) }, restore: function () { return this.each(function () { var $this = $(this).data('dockmodal'); if (!$this) return; // raise beforeRestore event if ($.isFunction($this.options.beforeRestore)) { if ($this.options.beforeRestore($this) === false) { return; } } var $dockModal = $this.closest("." + dClass); $dockModal.removeClass("minimized popped-out"); $dockModal.find(".dockmodal-body, .dockmodal-footer").show(); $dockModal.css({ "width": $this.options.width + "px", "height": $this.options.height, "left": "auto", "right": "auto", "top": "auto", "bottom": "0" }); $("." + dClass + "-overlay").hide(); $dockModal.find(".action-minimize").attr("title", "Minimize"); $dockModal.find(".action-popout").attr("title", "Pop-out"); setTimeout(function () { // raise restore event if ($.isFunction($this.options.restore)) { $this.options.restore($this); } }, $this.options.animationSpeed); methods.refreshLayout(); }) }, popout: function () { return this.each(function () { var $this = $(this).data('dockmodal'); if (!$this) return; // raise beforePopout event if ($.isFunction($this.options.beforePopout)) { if ($this.options.beforePopout($this) === false) { return; } } var $dockModal = $this.closest("." 
+ dClass); $dockModal.find(".dockmodal-body, .dockmodal-footer").show(); // prepare element for animation removeAnimationCSS($dockModal); var offset = $dockModal.position(); var windowWidth = $(window).width(); $dockModal.css({ "width": "auto", "height": "auto", "left": offset.left + "px", "right": (windowWidth - offset.left - $dockModal.outerWidth(true)) + "px", "top": offset.top + "px", "bottom": 0 }); setAnimationCSS($this, $dockModal); setTimeout(function () { $dockModal.removeClass("minimized").addClass("popped-out").css({ "width": "auto", "height": "auto", "left": $this.options.poppedOutDistance, "right": $this.options.poppedOutDistance, "top": $this.options.poppedOutDistance, "bottom": $this.options.poppedOutDistance }); $("." + dClass + "-overlay").show(); $dockModal.find(".action-popout").attr("title", "Pop-in"); methods.refreshLayout(); }, 10); setTimeout(function () { // raise popout event if ($.isFunction($this.options.popout)) { $this.options.popout($this); } }, $this.options.animationSpeed); }); }, refreshLayout: function () { var right = 0; var windowWidth = $(window).width(); $.each($("." + dClass).toArray().reverse(), function (i, val) { var $dockModal = $(this); var $this = $dockModal.find("." 
+ dClass + "-body > div").data("dockmodal"); if ($dockModal.hasClass("popped-out") && !$dockModal.hasClass("minimized")) { return; } right += $this.options.gutter; $dockModal.css({ "right": right + "px" }); if ($dockModal.hasClass("minimized")) { right += $this.options.minimizedWidth; } else { right += $this.options.width; } if (right > windowWidth) { $dockModal.hide(); } else { setTimeout(function () { $dockModal.show(); }, $this.options.animationSpeed); } }); } }; $.fn.dockmodal = function (method) { if (methods[method]) { return methods[method].apply(this, Array.prototype.slice.call(arguments, 1)); } else if (typeof method === 'object' || !method) { return methods.init.apply(this, arguments); } else { $.error('Method ' + method + ' does not exist on jQuery.dockmodal'); } }; })(jQuery); }); this.updateContainers(); } playPause(container) { switch (container.State) { case 'running': this.dockerService.containerPause(container.Id).subscribe( () => { this.updateContainers(); }, error => { toast(error, 3000); } ); break; case 'paused': this.dockerService.containerResume(container.Id).subscribe( () => { this.updateContainers(); }, error => { toast(error, 3000); } ); break; default: this.dockerService.containerStart(container.Id).subscribe( () => { this.updateContainers(); }, error => { console.log(error); toast(error, 3000); } ); break; } } stop(container) { if (container.State == 'paused') { this.dockerService.containerResume(container.Id).subscribe( () => { this.dockerService.containerStop(container.Id).subscribe(() => { this.updateContainers(); }); }, error => { toast(error.json().message, 3000); } ); } else { this.dockerService.containerStop(container.Id).subscribe(() => { this.updateContainers(); }); } } info(container) { this.dockerService.containerInfo(container.Id).subscribe(info => { console.log('info', info); let ports = new Array(); for(let port in info.Config.ExposedPorts) { ports.push(port); } info.Config['ports'] = ports; container.info = info; }); } 
openTerminal(container) { if($('#terminal_' + container.Id).length == 1) { $('#terminal_' + container.Id).dockmodal('restore'); return; } this.dockerService.containerAttach(container).subscribe((stream: any) => { // stream.pipe(process.stdout); var WriteStream = require('stream').Writable; var TransformStream = require('stream').Transform; // console.log(stream); // stream.write = function(chunk, encoding, callback) { // console.log('write: ' + JSON.stringify(chunk)); // console.log('waiting 2 seconds'); // setTimeout(function() { // console.log('finished waiting'); // callback(); // },2000); // }; // var ws = new WriteStream(); // var ts = new TransformStream(); // stream = stream.pipe(ts).pipe(ws); $('.terminal-item, .dockmodal').remove(); var $terminal = $('<div class="terminal-item" id="terminal_' + container.Id + '" data-container-id="' + container.Id + '" style="height:100%;"></div>'); $('#modal-terminal .modal-content').append($terminal); var Terminal = require('sh.js/build/sh'); var terminal = new Terminal(); $terminal.dockmodal({ title: container.Id, open: function ($content) { terminal.open($content[0]); stream.on('data', function (e) { if(!terminal.write(e.toString())) { terminal.write(''); } }); terminal.on('data', function (data) { stream.write(data); }); setTimeout(function () { terminal.sizeToFit(); terminal.focus(); stream.write('\r'); }, 1000); }, close: function ($term) { $('#terminal_' + container.Id).remove(); }, popout: function ($term) { setTimeout(function () { terminal.sizeToFit(); terminal.focus(); }, 1000); }, restore: function ($term) { setTimeout(function () { terminal.sizeToFit(); terminal.focus(); }, 1000); } }); }); } remove(container) { this.dockerService.containerRemove(container.Id).subscribe(result => { this.updateContainers(); }); } updateContainers() { this.dockerService.containers().subscribe(containers => { this.containers = containers; console.log(containers); }); } }<|fim▁end|>
return false;
<|file_name|>unittest_pool.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------- # The Azure Batch Apps Python Client # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# #-------------------------------------------------------------------------- """Unit tests for Pool and PoolSpecifier""" import sys try: import unittest2 as unittest except ImportError: import unittest try: from unittest import mock except ImportError: import mock from batchapps.pool import ( Pool, PoolSpecifier) from batchapps.api import ( BatchAppsApi, Response) from batchapps.exceptions import RestCallException # pylint: disable=W0212 class TestPool(unittest.TestCase): """Unit tests for Pool""" def test_pool_create(self): """Test Pool object""" api = mock.create_autospec(BatchAppsApi) pool = Pool(api) self.assertIsNone(pool.id) self.assertIsNone(pool.created) self.assertEqual(pool.target_size, 0) pool_spec = { 'id': 'abc', 'creationTime': '', 'targetDedicated': '5', 'state': 'active', 'communication': True } pool = Pool(api, **pool_spec) self.assertEqual(pool.id, 'abc') self.assertEqual(pool.created, '') self.assertEqual(pool.target_size, 5) self.assertEqual(pool.communication, True) def test_pool_delete(self): """Test delete""" api = mock.create_autospec(BatchAppsApi) api.delete_pool.return_value = mock.create_autospec(Response) api.delete_pool.return_value.success = True pool = Pool(api) pool.delete() api.delete_pool.assert_called_with(None) api.delete_pool.return_value.success = False api.delete_pool.return_value.result = RestCallException(None, "Test", None) with self.assertRaises(RestCallException): pool.delete() <|fim▁hole|> @mock.patch.object(Pool, 'update') def test_pool_resize(self, mock_update): """Test resize""" api = mock.create_autospec(BatchAppsApi) api.resize_pool.return_value = mock.create_autospec(Response) api.resize_pool.return_value.success = True pool = Pool(api) pool.resize(5) api.resize_pool.assert_called_with(None, 5) mock_update.assert_called_with() with self.assertRaises(ValueError): pool.resize("test") api.resize_pool.return_value.success = False api.resize_pool.return_value.result = RestCallException(None, "Test", None) 
mock_update.called = False with self.assertRaises(RestCallException): pool.resize(1) self.assertFalse(mock_update.called) def test_pool_update(self): """Test delete""" api = mock.create_autospec(BatchAppsApi) pool = Pool(api) api.get_pool.return_value = mock.create_autospec(Response) api.get_pool.return_value.success = True api.get_pool.return_value.result = { 'targetDedicated':'5', 'currentDedicated':'4', 'state':'active', 'allocationState':'test', } self.assertEqual(pool.target_size, 0) self.assertEqual(pool.current_size, 0) self.assertEqual(pool.state, None) self.assertEqual(pool.allocation_state, None) self.assertEqual(pool.resize_error, '') pool.update() api.get_pool.assert_called_with(pool_id=None) self.assertEqual(pool.target_size, 5) self.assertEqual(pool.current_size, 4) self.assertEqual(pool.state, 'active') self.assertEqual(pool.allocation_state, 'test') self.assertEqual(pool.resize_error, '') api.get_pool.return_value.success = False api.get_pool.return_value.result = RestCallException(None, "Test", None) with self.assertRaises(RestCallException): pool.update() class TestPoolSpecifier(unittest.TestCase): """Unit tests for PoolSpecifier""" def test_poolspecifier_create(self): """Test PoolSpecifier object""" api = mock.create_autospec(BatchAppsApi) pool = PoolSpecifier(api) self.assertEqual(pool.target_size, 0) self.assertEqual(pool.max_tasks, 1) self.assertEqual(pool.communication, False) self.assertEqual(pool.certificates, []) pool = PoolSpecifier(api, target_size=5, max_tasks=2, communication=True) self.assertEqual(pool.target_size, 5) self.assertEqual(pool.max_tasks, 2) self.assertEqual(pool.communication, True) self.assertEqual(pool.certificates, []) def test_poolspecifier_start(self): """Test start""" api = mock.create_autospec(BatchAppsApi) api.add_pool.return_value.success = True api.add_pool.return_value.result = { 'poolId':'abc', 'link':{'href':'test.com'}} pool = PoolSpecifier(api) new_pool = pool.start() self.assertEqual(new_pool, {'id':'abc', 
'link':'test.com'}) api.add_pool.assert_called_with(0, 1, False, []) api.add_pool.return_value.success = False api.add_pool.return_value.result = RestCallException(None, "Test", None) with self.assertRaises(RestCallException): pool.start() def test_poolspecifier_add_cert(self): api = mock.create_autospec(BatchAppsApi) pool = PoolSpecifier(api) pool.add_cert("test_thumb") self.assertEqual(pool.certificates, [{ 'thumbprint':'test_thumb', 'thumbprintAlgorithm':'SHA1', 'storeLocation':'CurrentUser', 'storeName':'My'}]) pool.add_cert("test_thumb", store_location="test", store_name=None) self.assertEqual(pool.certificates, [{ 'thumbprint':'test_thumb', 'thumbprintAlgorithm':'SHA1', 'storeLocation':'CurrentUser', 'storeName':'My'},{ 'thumbprint':'test_thumb', 'thumbprintAlgorithm':'SHA1', 'storeLocation':'test', 'storeName':'None'}]) pool.id = None pool.certificates = [0,1,2,3,4,5,6,7,8,9] pool.add_cert("new_cert") self.assertEqual(pool.certificates, [0,1,2,3,4,5,6,7,8,9]) if __name__ == '__main__': unittest.main()<|fim▁end|>
<|file_name|>CustomFieldServiceInterfaceupdateCustomFieldOptions.java<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.admanager.jaxws.v202202; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * * Updates the specified {@link CustomFieldOption} objects. * * @param customFieldOptions the custom field options to update * @return the updated custom field options * * * <p>Java class for updateCustomFieldOptions element declaration. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;element name="updateCustomFieldOptions"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="customFieldOptions" type="{https://www.google.com/apis/ads/publisher/v202202}CustomFieldOption" maxOccurs="unbounded" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * </pre> * * <|fim▁hole|>@XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "customFieldOptions" }) @XmlRootElement(name = "updateCustomFieldOptions") public class CustomFieldServiceInterfaceupdateCustomFieldOptions { protected List<CustomFieldOption> customFieldOptions; /** * Gets the value of the customFieldOptions property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the customFieldOptions property. * * <p> * For example, to add a new item, do as follows: * <pre> * getCustomFieldOptions().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link CustomFieldOption } * * */ public List<CustomFieldOption> getCustomFieldOptions() { if (customFieldOptions == null) { customFieldOptions = new ArrayList<CustomFieldOption>(); } return this.customFieldOptions; } }<|fim▁end|>
*/
<|file_name|>compiler.py<|end_file_name|><|fim▁begin|>from .execute import GraphNode from . import preprocess def compile(layout_dict): preprocess.proprocess(layout_dict)<|fim▁hole|> out = [GraphNode.from_layout(root_node, layout_dict, graph_dict) for root_node in root_nodes] return out<|fim▁end|>
# get nodes without any outputs root_nodes = layout_dict["nodes"].keys() - {l[0] for l in layout_dict["links"]} graph_dict = {}
<|file_name|>map.ts<|end_file_name|><|fim▁begin|>module core {<|fim▁hole|> export interface IMap<T> { [pKey: string]: T; } export class Map<T> implements IMap<T> { [pKey: string]: T; } }<|fim▁end|>
<|file_name|>test.js<|end_file_name|><|fim▁begin|>//@flow var x = 42;<|fim▁hole|> y = "hello world"; } (42: string); // should still have some errors!<|fim▁end|>
x = "true"; var y = 42; if (x) {
<|file_name|>date-formatter.min.js<|end_file_name|><|fim▁begin|>/** * angular-strap * @version v2.1.6 - 2015-01-11 * @link http://mgcrea.github.io/angular-strap * @author Olivier Louvignes ([email protected])<|fim▁hole|> * @license MIT License, http://www.opensource.org/licenses/MIT */ "use strict";angular.module("mgcrea.ngStrap.helpers.dateFormatter",[]).service("$dateFormatter",["$locale","dateFilter",function(t,e){function r(t){return/(h+)([:\.])?(m+)[ ]?(a?)/i.exec(t).slice(1)}this.getDefaultLocale=function(){return t.id},this.getDatetimeFormat=function(e){return t.DATETIME_FORMATS[e]||e},this.weekdaysShort=function(){return t.DATETIME_FORMATS.SHORTDAY},this.hoursFormat=function(t){return r(t)[0]},this.minutesFormat=function(t){return r(t)[2]},this.timeSeparator=function(t){return r(t)[1]},this.showAM=function(t){return!!r(t)[3]},this.formatDate=function(t,r){return e(t,r)}}]); //# sourceMappingURL=date-formatter.min.js.map<|fim▁end|>
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for react-particles-js v3.0.0 // Project: https://github.com/wufe/react-particles-js // Definitions by: Simone Bembi <https://github.com/wufe> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped <|fim▁hole|> export type IParticlesParams = ISourceOptions; export * from 'tsparticles/Enums'; export * from "tsparticles/Plugins/Absorbers/Enums"; export * from "tsparticles/Plugins/Emitters/Enums"; export * from "tsparticles/Plugins/PolygonMask/Enums"; export interface ParticlesProps { width?: string; height?: string; params?: IParticlesParams; style?: any; className?: string; canvasClassName?: string; particlesRef?: React.RefObject<Container>; } type Particles = ComponentClass<ParticlesProps>; declare const Particles: Particles; export default Particles;<|fim▁end|>
/// <reference types="react" /> import { ComponentClass } from "react"; import { Container } from "tsparticles/Core/Container"; import { ISourceOptions } from "tsparticles";
<|file_name|>legend.py<|end_file_name|><|fim▁begin|>""" Defines a legend for displaying components. :copyright: 2015 Agile Geoscience :license: Apache 2.0 """ # from builtins import object from io import StringIO import csv import warnings import random import re import itertools try: from functools import partialmethod except: # Python 2 from utils import partialmethod import numpy as np from matplotlib import patches import matplotlib.pyplot as plt from .component import Component from . import utils from .defaults import LEGEND__NSDOE from .defaults import LEGEND__Canstrat from .defaults import LEGEND__NAGMDM__6_2 from .defaults import LEGEND__NAGMDM__6_1 from .defaults import LEGEND__NAGMDM__4_3 from .defaults import LEGEND__SGMC from .defaults import TIMESCALE__ISC from .defaults import TIMESCALE__USGS_ISC from .defaults import TIMESCALE__DNAG ############################################### # This module is not used directly, but must # be imported in order to register new hatches. from . import hatches # DO NOT DELETE ############################################### class LegendError(Exception): """ Generic error class. """ pass class Decor(object): """ A single display style. A Decor describes how to display a given set of Component properties. In general, you will not usually use a Decor on its own. Instead, you will want to use a Legend, which is just a list of Decors, and leave the Decors to the Legend. Args: params (dict): The parameters you want in the Decor. There must be a Component to attach the decoration to, and at least 1 other attribute. It's completely up to you, but you probably want at least a colour (hex names like #AAA or #d3d3d3, or matplotlib's English-language names listed at http://ageo.co/modelrcolour are acceptable. The only other parameter the class recognizes for now is 'width', which is the width of the striplog element. Example: my_rock = Component({ ... 
}) d = {'component': my_rock, 'colour': 'red'} my_decor = Decor(d) """ def __init__(self, *params, **kwargs): """ Supports the passing in of a single dictionary, or the passing of keyword arguments. Possibly a bad idea; review later. """ for p in params: params = p for k, v in kwargs.items() or params.items(): k = k.lower().replace(' ', '_') if k in ['colour', 'color']: k = 'colour' if not v: v = '#eeeeee' try: v = v.lower() except AttributeError: v = v setattr(self, k, v) if (getattr(self, 'component', None) is None) and (getattr(self, 'curve', None) is None): raise LegendError("You must provide a Component to decorate.") if len(self.__dict__) < 2: raise LegendError("You must provide at least one decoration.") # Make sure we have a width, and it's a float, even if it's None. try: self.width = float(getattr(self, 'width', None)) except (TypeError, ValueError): self.width = None # Make sure we have a hatch, even if it's None. And correct 'none's. self.hatch = getattr(self, 'hatch', None) if self.hatch == 'none': self.hatch = None def __repr__(self): s = repr(self.__dict__) return "Decor({0})".format(s) def __str__(self): s = str(self.__dict__) return "Decor({0})".format(s) def __add__(self, other): if isinstance(other, self.__class__): result = [self, other] return Legend(result) elif isinstance(other, Legend): return other + self else: raise LegendError("You can only add legends or decors.") def __eq__(self, other): if not isinstance(other, self.__class__): return False # Weed out empty elements s = {k: v for k, v in self.__dict__.items() if v} o = {k: v for k, v in other.__dict__.items() if v} # Compare if s == o: return True else: return False def __ne__(self, other): return not self.__eq__(other) # If we define __eq__ we also need __hash__ otherwise the object # becomes unhashable. All this does is hash the frozenset of the # keys. (You can only hash immutables.) 
def __hash__(self): return hash(frozenset(self.__dict__.keys())) def _repr_html_(self): """ Jupyter Notebook magic repr function. """ rows, c = '', '' s = '<tr><td><strong>{k}</strong></td><td style="{stl}">{v}</td></tr>' for k, v in self.__dict__.items(): if k == '_colour': k = 'colour' c = utils.text_colour_for_hex(v) style = 'color:{}; background-color:{}'.format(c, v) else: style = 'color:black; background-color:white' if k == 'component': try: v = v._repr_html_() except AttributeError: v = v.__repr__() rows += s.format(k=k, v=v, stl=style) html = '<table>{}</table>'.format(rows) return html def _repr_html_row_(self, keys):<|fim▁hole|> """ tr, th, c = '', '', '' r = '<td style="{stl}">{v}</td>' h = '<th>{k}</th>' for k in keys: v = self.__dict__.get(k) if k == '_colour': k = 'colour' c = utils.text_colour_for_hex(v) style = 'color:{}; background-color:{}'.format(c, v) else: style = 'color:black; background-color:white' if k == 'component': try: v = v._repr_html_() except AttributeError: v = v.__repr__() tr += r.format(v=v, stl=style) th += h.format(k=k) return th, tr @property def colour(self): return self._colour @colour.setter def colour(self, c): numbers = r'([\.0-9]+), ?([\.0-9]+), ?([\.0-9]+)' pattern = re.compile(r'[\(\[]?' 
+ numbers + r'[\)\]]?') try: x = pattern.search(c) except: x = None if x is not None: try: x = list(map(float, x.groups())) if x[0] > 1 or x[1] > 1 or x[2] > 1: x = [int(i) for i in x] colour = utils.rgb_to_hex(x) except KeyError: raise LegendError("Colour not recognized: " + c) elif not c: colour = '#eeeeee' elif type(c) in [list, tuple]: try: colour = utils.rgb_to_hex(c) except TypeError: raise LegendError("Colour not recognized: " + c) elif c[0] != '#': try: colour = utils.name_to_hex(c) except KeyError: raise LegendError("Colour not recognized: " + c) elif (c[0] == '#') and (len(c) == 4): # Three-letter hex colour = c[:2] + c[1] + 2*c[2] + 2*c[3] elif (c[0] == '#') and (len(c) == 8): # 8-letter hex colour = c[:-2] else: colour = c self._colour = colour @property def rgb(self): """ Returns an RGB triple equivalent to the hex colour. """ return utils.hex_to_rgb(self.colour) @property def keys(self): """ Returns the keys of the Decor's dict. """ return list(self.__dict__.keys()) @classmethod def random(cls, component, match_only=None): """ Returns a minimal Decor with a random colour. """ c = component.__dict__.copy() if match_only is None: match_only = c.keys() for k in list(c.keys()): if k not in match_only: _ = c.pop(k) colour = random.sample([i for i in range(256)], 3) return cls({'colour': colour, 'component': Component(c), 'width': 1.0}) def plot(self, fmt=None, fig=None, ax=None): """ Make a simple plot of the Decor. Args: fmt (str): A Python format string for the component summaries. fig (Pyplot figure): A figure, optional. Use either fig or ax, not both. ax (Pyplot axis): An axis, optional. Use either fig or ax, not both. Returns: fig or ax or None. If you pass in an ax, you get it back. If you pass in a fig, you get it. If you pass nothing, the function creates a plot object as a side-effect. 
""" u = 4 # aspect ratio of decor plot v = 0.25 # ratio of decor tile width r = None if (fig is None) and (ax is None): fig = plt.figure(figsize=(u, 1)) else: r = fig if ax is None: ax = fig.add_axes([0.1*v, 0.1, 0.8*v, 0.8]) else: r = ax rect1 = patches.Rectangle((0, 0), u*v, u*v, color=self.colour, lw=1, hatch=self.hatch, ec='k') ax.add_patch(rect1) ax.text(1.0+0.1*v*u, u*v*0.5, self.component.summary(fmt=fmt), fontsize=max(u, 15), verticalalignment='center', horizontalalignment='left') ax.set_xlim([0, u*v]) ax.set_ylim([0, u*v]) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.invert_yaxis() return r class Legend(object): """ A look-up table to assist in the conversion of Components to a plot colour. Args: list_of_Decors (list): The decors to collect into a legend. In general, you will want to leave legend building to the constructor class methods, `Legend.default()`, and `Legend.from_csv(text=string)`. We can add others over time, such as `from_xls` and so on. """ def __init__(self, list_of_Decors): self.table = [d.__dict__ for d in list_of_Decors] self.__list = list_of_Decors self.__index = 0 self._iter = iter(self.__list) # Set up iterable. def __repr__(self): s = [repr(d) for d in self.__list] return "Legend({0})".format('\n'.join(s)) def __str__(self): s = [str(d) for d in self.__list] return '\n'.join(s) def __getitem__(self, key): if type(key) is slice: i = key.indices(len(self.__list)) result = [self.__list[n] for n in range(*i)] return Legend(result) elif type(key) is list: result = [] for j in key: result.append(self.__list[j]) return Legend(result) else: return self.__list[key] def __setitem__(self, key, value): self.__list[key] = value def __iter__(self): return self def __next__(self): try: result = self.__list[self.__index] except IndexError: self.__index = 0 raise StopIteration self.__index += 1 return result def next(self): """ Retains Python 2 compatibility. 
""" return self.__next__() def __len__(self): return len(self.__list) def __contains__(self, item): if isinstance(item, Decor): for d in self.__list: if item == d: return True if isinstance(item, Component): for d in self.__list: if item == d.component: return True return False def __add__(self, other): if isinstance(other, self.__class__): result = self.__list + other.__list return Legend(result) elif isinstance(other, Decor): result = self.__list + [other] return Legend(result) else: raise LegendError("You can only add legends or decors.") def _repr_html_(self): """ Jupyter Notebook magic repr function. """ all_keys = list(set(itertools.chain(*[d.keys for d in self]))) rows = '' for decor in self: th, tr = decor._repr_html_row_(keys=all_keys) rows += '<tr>{}</tr>'.format(tr) header = '<tr>{}</tr>'.format(th) html = '<table>{}{}</table>'.format(header, rows) return html @classmethod def builtin(cls, name): """ Generate a default legend. Args: name (str): The name of the legend you want. Not case sensitive. 'nsdoe': Nova Scotia Dept. of Energy 'canstrat': Canstrat 'nagmdm__6_2': USGS N. Am. Geol. Map Data Model 6.2 'nagmdm__6_1': USGS N. Am. Geol. Map Data Model 6.1 'nagmdm__4_3': USGS N. Am. Geol. Map Data Model 4.3 'sgmc': USGS State Geologic Map Compilation Default 'nagmdm__6_2'. Returns: Legend: The legend stored in `defaults.py`. """ names = { 'nsdoe': LEGEND__NSDOE, 'canstrat': LEGEND__Canstrat, 'nagmdm__6_2': LEGEND__NAGMDM__6_2, 'nagmdm__6_1': LEGEND__NAGMDM__6_1, 'nagmdm__4_3': LEGEND__NAGMDM__4_3, 'sgmc': LEGEND__SGMC, } return cls.from_csv(text=names[name.lower()]) @classmethod def builtin_timescale(cls, name): """ Generate a default timescale legend. No arguments. Returns: Legend: The timescale stored in `defaults.py`. """ names = { 'isc': TIMESCALE__ISC, 'usgs_isc': TIMESCALE__USGS_ISC, 'dnag': TIMESCALE__DNAG, } return cls.from_csv(text=names[name.lower()]) # Curry. 
default = partialmethod(builtin, name="NAGMDM__6_2") default_timescale = partialmethod(builtin_timescale, name='ISC') @classmethod def random(cls, components, width=False, colour=None, match_only=None, ): """ Generate a random legend for a given list of components. Args: components (list or Striplog): A list of components. If you pass a Striplog, it will use the primary components. If you pass a component on its own, you will get a random Decor. width (bool): Also generate widths for the components, based on the order in which they are encountered. colour (str): If you want to give the Decors all the same colour, provide a hex string. match_only (list): A list of Component properties to use. Returns: Legend or Decor: A legend (or Decor) with random colours. TODO: It might be convenient to have a partial method to generate an 'empty' legend. Might be an easy way for someone to start with a template, since it'll have the components in it already. """ try: # Treating as a Striplog. list_of_Decors = [Decor.random(c, match_only=match_only) for c in [i[0] for i in components.unique if i[0]] ] except: try: list_of_Decors = [Decor.random(c, match_only=match_only) for c in components.copy()] except: # It's a single component. list_of_Decors = [Decor.random(components, match_only=match_only)] if match_only is not None: # We might have duplicate components. comps, keeps = [], [] for d in list_of_Decors: if d.component not in comps: comps.append(d.component) keeps.append(d) list_of_Decors = keeps if colour is not None: for d in list_of_Decors: d.colour = colour if width: for i, d in enumerate(list_of_Decors): d.width = i + 1 return cls(list_of_Decors) @classmethod def from_image(cls, filename, components, ignore=None, col_offset=0.1, row_offset=2): """ A slightly easier way to make legends from images. Args: filename (str) components (list) ignore (list): Colours to ignore, e.g. "#FFFFFF" to ignore white. 
col_offset (Number): If < 1, interpreted as proportion of way across the image. If > 1, interpreted as pixels from left. row_offset (int): Number of pixels to skip at the top of each interval. """ if ignore is None: ignore = [] rgb = utils.loglike_from_image(filename, offset=col_offset) loglike = np.array([utils.rgb_to_hex(t) for t in rgb]) # Get the pixels and colour values at 'tops' (i.e. changes). _, hexes = utils.tops_from_loglike(loglike, offset=row_offset) # Reduce to unique colours. hexes_reduced = [] for h in hexes: if h not in hexes_reduced: if h not in ignore: hexes_reduced.append(h) list_of_Decors = [] for i, c in enumerate(components): d = Decor({'colour': hexes_reduced[i], 'component': c}) list_of_Decors.append(d) return cls(list_of_Decors) @classmethod def from_striplog(cls, strip, colour='colour', width='width', hatch='hatch', fields=None, ): """ Creates a legend for a striplog whose components already contain. Args: components (list): list of components that need to be in the legend Returns: legend (striplog.Legend) """ components = [i.primary for i in strip] list_of_Decors = [] for component in components: f = {} if fields is None: fields = component.__dict__.keys() for field in fields: f[field] = component[field] d = {'component': Component(properties=f)} d['colour'] = component[colour] d['width'] = component[width] d['hatch'] = component[hatch] decor = Decor(d) if decor not in list_of_Decors: list_of_Decors.append(decor) return cls(list_of_Decors) @classmethod def from_csv(cls, filename=None, text=None): """ Read CSV text and generate a Legend. Args: string (str): The CSV string. In the first row, list the properties. Precede the properties of the component with 'comp ' or 'component '. For example: colour, width, comp lithology, comp colour #FFFFFF, 0, , #F7E9A6, 3, Sandstone, Grey #FF99CC, 2, Anhydrite, ... etc Note: To edit a legend, the easiest thing to do is probably this: - `legend.to_csv()` - Edit the legend, call it `new_legend`. 
- `legend = Legend.from_csv(text=new_legend)` """ if (filename is None) and (text is None): raise LegendError("You must provide a filename or CSV text.") if (filename is not None): with open(filename, 'r') as f: text = f.read() try: f = StringIO(text) # Python 3 except TypeError: f = StringIO(unicode(text)) # Python 2 r = csv.DictReader(f, skipinitialspace=True) list_of_Decors, components = [], [] kind = 'component' for row in r: d, component = {}, {} for (k, v) in row.items(): if (k in [None, '']): continue if (v in [None, '']): if k.lower() not in ['color', 'colour']: continue if k[:4].lower() == 'comp': prop = ' '.join(k.split()[1:]) if v.lower() == 'true': component[prop] = True elif v.lower() == 'false': component[prop] = False else: try: component[prop] = float(v) except ValueError: component[prop] = v.lower() elif k[:5].lower() == 'curve': prop = ' '.join(k.split()[1:]) component[prop] = v.lower() kind = 'curve' else: try: d[k] = float(v) except ValueError: d[k] = v.lower() this_component = Component(component) d[kind] = this_component # Check for duplicates and warn. if this_component in components: with warnings.catch_warnings(): warnings.simplefilter("always") w = "This legend contains duplicate components." warnings.warn(w) components.append(this_component) # Append to the master list and continue. list_of_Decors.append(Decor(d)) return cls(list_of_Decors) def to_csv(self): """ Renders a legend as a CSV string. No arguments. Returns: str: The legend as a CSV. """ # We can't delegate this to Decor because we need to know the superset # of all Decor properties. There may be lots of blanks. 
header = [] component_header = [] for row in self: for j in row.__dict__.keys(): if j == '_colour': j = 'colour' header.append(j) for k in row.component.__dict__.keys(): component_header.append(k) header = set(header) component_header = set(component_header) header.remove('component') header_row = '' if 'colour' in header: header_row += 'colour,' header.remove('colour') has_colour = True for item in header: header_row += item + ',' for item in component_header: header_row += 'component ' + item + ',' # Now we have a header row! Phew. # Next we'll go back over the legend and collect everything. result = header_row.strip(',') + '\n' for row in self: if has_colour: result += row.__dict__.get('_colour', '') + ',' for item in header: result += str(row.__dict__.get(item, '')) + ',' for item in component_header: result += str(row.component.__dict__.get(item, '')) + ',' result += '\n' return result @property def max_width(self): """ The maximum width of all the Decors in the Legend. This is needed to scale a Legend or Striplog when plotting with widths turned on. """ try: maximum = max([row.width for row in self.__list if row.width is not None]) return maximum except: return 0 def get_decor(self, c, match_only=None): """ Get the decor for a component. Args: c (component): The component to look up. match_only (list of str): The component attributes to include in the comparison. Default: All of them. Returns: Decor. The matching Decor from the Legend, or None if not found. 
""" if isinstance(c, Component): if c: if match_only: # Filter the component only those attributes c = Component({k: getattr(c, k, None) for k in match_only}) for decor in self.__list: try: if c == decor.component: return decor except AttributeError: continue else: for decor in self.__list: try: if getattr(c, 'mnemonic').lower() == decor.curve.mnemonic: return decor if getattr(c, '_alias').lower() == decor.curve.mnemonic: return decor except AttributeError: continue return Decor({'colour': '#eeeeee', 'component': Component()}) def getattr(self, c, attr, default=None, match_only=None): """ Get the attribute of a component. Args: c (component): The component to look up. attr (str): The attribute to get. default (str): What to return in the event of no match. match_only (list of str): The component attributes to include in the comparison. Default: All of them. Returns: obj. The specified attribute of the matching Decor in the Legend. """ matching_decor = self.get_decor(c, match_only=match_only) try: return getattr(matching_decor, attr) except AttributeError: return default def get_colour(self, c, default='#eeeeee', match_only=None): """ Get the display colour of a component. Wraps `getattr()`. Development note: Cannot define this as a `partial()` because I want to maintain the order of arguments in `getattr()`. Args: c (component): The component to look up. default (str): The colour to return in the event of no match. match_only (list of str): The component attributes to include in the comparison. Default: All of them. Returns: str. The hex string of the matching Decor in the Legend. """ return self.getattr(c=c, attr='colour', default=default, match_only=match_only) def get_width(self, c, default=0, match_only=None): """ Get the display width of a component. Wraps `getattr()`. Development note: Cannot define this as a `partial()` because I want to maintain the order of arguments in `getattr()`. Args: c (component): The component to look up. 
default (float): The width to return in the event of no match. match_only (list of str): The component attributes to include in the comparison. Default: All of them. Returns: float. The width of the matching Decor in the Legend. """ return self.getattr(c=c, attr='width', default=default, match_only=match_only) def get_component(self, colour, tolerance=0, default=None): """ Get the component corresponding to a display colour. This is for generating a Striplog object from a colour image of a striplog. Args: colour (str): The hex colour string to look up. tolerance (float): The colourspace distance within which to match. default (component or None): The component to return in the event of no match. Returns: component. The component best matching the provided colour. """ if not (0 <= tolerance <= np.sqrt(195075)): raise LegendError('Tolerance must be between 0 and 441.67') for decor in self.__list: if colour.lower() == decor.colour: return decor.component # If we're here, we didn't find one yet. r1, g1, b1 = utils.hex_to_rgb(colour) # Start with a best match of black. best_match = Component() best_match_colour = '#000000' best_match_dist = np.sqrt(r1**2. + g1**2. + b1**2.) # Now compare to each colour in the legend. for decor in self.__list: r2, g2, b2 = decor.rgb distance = np.sqrt((r2-r1)**2. + (g2-g1)**2. + (b2-b1)**2.) if distance < best_match_dist: best_match = decor.component best_match_dist = distance best_match_colour = decor.colour if best_match_dist <= tolerance: return best_match else: with warnings.catch_warnings(): warnings.simplefilter("always") w = "No match found for {0} ".format(colour.lower()) w += "with tolerance of {0}. Best match is ".format(tolerance) w += "{0}, {1}".format(best_match.summary(), best_match_colour) w += ", d={0}".format(best_match_dist) warnings.warn(w) return default def plot(self, fmt=None, ax=None): """ Make a simple plot of the legend. Calls Decor.plot() on all of its members. TODO: Build a more attractive plot. 
""" if ax is None: fig, ax = plt.subplots() return_ax = False else: return_ax = True height = width = (0.9 / len(self)) h_incr = 1 / len(self) left_pos = 0.1 bot_pos = 0.0 for decor in self: cax = utils.add_subplot_axes(ax, [left_pos, bot_pos, width, height]) cax = decor.plot(ax=cax) bot_pos += h_incr ax.axis('off') if return_ax: return ax else: plt.show()<|fim▁end|>
""" Jupyter Notebook magic repr function as a row – used by ``Legend._repr_html_()``.
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># Generated by Django 3.1 on 2020-08-25 12:15 import django.db.models.deletion from django.db import migrations, models import s3upload.fields class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="Cat", fields=[ (<|fim▁hole|> auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "custom_filename", s3upload.fields.S3UploadField(blank=True, dest="custom_filename"), ), ], ), migrations.CreateModel( name="Kitten", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("video", s3upload.fields.S3UploadField(blank=True, dest="vids")), ("image", s3upload.fields.S3UploadField(blank=True, dest="imgs")), ("pdf", s3upload.fields.S3UploadField(blank=True, dest="files")), ( "mother", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="example.cat" ), ), ], ), ]<|fim▁end|>
"id", models.AutoField(
<|file_name|>folder.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ (c) 2014 - Copyright Red Hat Inc Authors: Pierre-Yves Chibon <[email protected]> """ from anitya.lib.backends import ( BaseBackend, get_versions_by_regex_for_text, REGEX) from anitya.lib.exceptions import AnityaPluginException import six DEFAULT_REGEX = 'href="([0-9][0-9.]*)/"' class FolderBackend(BaseBackend): ''' The custom class for project having a special hosting. This backend allows to specify a version_url and a regex that will be used to retrieve the version information. ''' name = 'folder' examples = [ 'http://ftp.gnu.org/pub/gnu/gnash/', 'http://subsurface.hohndel.org/downloads/', ] @classmethod def get_version(cls, project): ''' Method called to retrieve the latest version of the projects provided, project that relies on the backend of this plugin. :arg Project project: a :class:`model.Project` object whose backend corresponds to the current plugin. :return: the latest version found upstream :return type: str :raise AnityaPluginException: a :class:`anitya.lib.exceptions.AnityaPluginException` exception when the version cannot be retrieved correctly ''' return cls.get_ordered_versions(project)[-1] <|fim▁hole|> this plugin. :arg Project project: a :class:`model.Project` object whose backend corresponds to the current plugin. 
:return: a list of all the possible releases found :return type: list :raise AnityaPluginException: a :class:`anitya.lib.exceptions.AnityaPluginException` exception when the versions cannot be retrieved correctly ''' url = project.version_url try: req = cls.call_url(url, insecure=project.insecure) except Exception as err: raise AnityaPluginException( 'Could not call : "%s" of "%s", with error: %s' % ( url, project.name, str(err))) versions = None if not isinstance(req, six.string_types): req = req.text try: regex = REGEX % {'name': project.name.replace('+', '\+')} versions = get_versions_by_regex_for_text( req, url, regex, project) except AnityaPluginException: versions = get_versions_by_regex_for_text( req, url, DEFAULT_REGEX, project) return versions<|fim▁end|>
@classmethod def get_versions(cls, project): ''' Method called to retrieve all the versions (that can be found) of the projects provided, project that relies on the backend of
<|file_name|>test_rich_string03.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Tests for XlsxWriter. # # Copyright (c), 2013-2016, John McNamara, [email protected] # from ..excel_comparsion_test import ExcelComparisonTest from ...workbook import Workbook class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.maxDiff = None filename = 'rich_string03.xlsx' test_dir = 'xlsxwriter/test/comparison/' self.got_filename = test_dir + '_test_' + filename self.exp_filename = test_dir + 'xlsx_files/' + filename self.ignore_files = [] self.ignore_elements = {} def test_create_file(self): """Test the creation of a simple XlsxWriter file.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() bold = workbook.add_format({'bold': 1}) italic = workbook.add_format({'italic': 1}) worksheet.write('A1', 'Foo', bold) worksheet.write('A2', 'Bar', italic) worksheet.write_rich_string('A3', bold, 'abc', 'defg') workbook.close() self.assertExcelEqual()<|fim▁end|>
############################################################################### #
<|file_name|>listener.py<|end_file_name|><|fim▁begin|># # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from conveyor.conveyorheat.common import exception from conveyor.conveyorheat.engine import attributes from conveyor.conveyorheat.engine import constraints from conveyor.conveyorheat.engine import properties from conveyor.conveyorheat.engine.resources.huawei.elb import elb_res_base from conveyor.i18n import _ class Listener(elb_res_base.ElbBaseResource): """A resource for ELB Listener. Listener resource for Elastic Load Balance Service. 
""" PROPERTIES = ( NAME, DESCRIPTION, LB_ID, PROTOCOL, PORT, BACKEND_PROTOCOL, BACKEND_PORT, LB_ALGORITHM, SESSION_STICKY, STICKY_SESSION_TYPE, COOKIE_TIMEOUT, CERTIFICATE, TCP_TIMEOUT, ) = ( 'name', 'description', 'loadbalancer_id', 'protocol', 'port', 'backend_protocol', 'backend_port', 'lb_algorithm', 'session_sticky', 'sticky_session_type', 'cookie_timeout', 'certificate_id', 'tcp_timeout', ) _BACKEND_PROTOCOLS = ( HTTP, TCP, ) = ( 'HTTP', 'TCP', ) HTTPS = ('HTTPS') _PROTOCOLS = _BACKEND_PROTOCOLS + (HTTPS,) _LB_ALGORITHMS = ( ROUND_ROBIN, LEAST_CONNECTIONS, SOURCE_IP, ) = ( 'roundrobin', 'leastconn', 'source', ) ATTRIBUTES = ( MEMBER_NUMBER_ATTR, STATUS_ATTR, ) = ( 'member_number', 'status', ) properties_schema = { NAME: properties.Schema( properties.Schema.STRING, _('The name of the listener.'), required=True,<|fim▁hole|> constraints=[ constraints.AllowedPattern('^[0-9a-zA-Z-_]{1,64}$')] ), DESCRIPTION: properties.Schema( properties.Schema.STRING, _('The description of the listener.'), update_allowed=True, constraints=[constraints.AllowedPattern('^[^<>]{1,128}$')] ), LB_ID: properties.Schema( properties.Schema.STRING, _('The ID of load balancer associated.'), required=True, constraints=[ constraints.CustomConstraint('elb.lb') ] ), PROTOCOL: properties.Schema( properties.Schema.STRING, _('The protocol of the listener.'), constraints=[ constraints.AllowedValues(_PROTOCOLS) ], required=True ), BACKEND_PROTOCOL: properties.Schema( properties.Schema.STRING, _('The backend protocol of the listener.'), constraints=[ constraints.AllowedValues(_BACKEND_PROTOCOLS) ], required=True ), PORT: properties.Schema( properties.Schema.INTEGER, _('The port of the listener.'), constraints=[ constraints.Range(min=1, max=65535) ], required=True, update_allowed=True, ), BACKEND_PORT: properties.Schema( properties.Schema.INTEGER, _('The backend port of the listener.'), constraints=[ constraints.Range(min=1, max=65535) ], required=True, update_allowed=True, ), LB_ALGORITHM: 
properties.Schema( properties.Schema.STRING, _('The algorithm used to distribute load.'), constraints=[ constraints.AllowedValues(_LB_ALGORITHMS) ], required=True, update_allowed=True, ), SESSION_STICKY: properties.Schema( properties.Schema.BOOLEAN, _('Whether to keep the session.'), update_allowed=True ), STICKY_SESSION_TYPE: properties.Schema( properties.Schema.STRING, _('The way of handing cookie.'), constraints=[ constraints.AllowedValues(['insert']) ], ), COOKIE_TIMEOUT: properties.Schema( properties.Schema.INTEGER, _('The timeout of cookie in minute.'), constraints=[ constraints.Range(min=1, max=1440) ], update_allowed=True ), CERTIFICATE: properties.Schema( properties.Schema.STRING, _('The ID of certificate.'), constraints=[ constraints.CustomConstraint('elb.cert') ] ), TCP_TIMEOUT: properties.Schema( properties.Schema.INTEGER, _('The timeout of TCP session in minute.'), constraints=[ constraints.Range(min=1, max=5) ], update_allowed=True ), } attributes_schema = { MEMBER_NUMBER_ATTR: attributes.Schema( _('The number of the members listened by this listener.'), ), STATUS_ATTR: attributes.Schema( _('The status of the listener.'), ), } def validate(self): super(Listener, self).validate() protocol = self.properties[self.PROTOCOL] session_sticky = self.properties[self.SESSION_STICKY] sticky_type = self.properties[self.STICKY_SESSION_TYPE] certificate = self.properties[self.CERTIFICATE] tcp_timeout = self.properties[self.TCP_TIMEOUT] if protocol == self.HTTP and session_sticky: if sticky_type != 'insert': msg = (_('Property %(sticky_type)s should be "insert" ' 'when %(protocol)s is %(http)s and ' '%(session_sticky)s is enabled.') % {'sticky_type': self.STICKY_SESSION_TYPE, 'protocol': self.PROTOCOL, 'http': self.HTTP, 'session_sticky': self.SESSION_STICKY}) raise exception.StackValidationFailed(message=msg) if protocol == self.HTTPS: if not certificate: msg = (_('Property %(cert)s is required when %(protocol)s ' 'is %(https)s') % {'cert': self.CERTIFICATE, 
'protocol': self.PROTOCOL, 'https': self.HTTPS}) raise exception.StackValidationFailed(message=msg) if tcp_timeout and protocol != self.TCP: msg = (_('Property %(tcpt)s is valid when %(protocol)s ' 'is %(tcp)s') % {'tcpt': self.TCP_TIMEOUT, 'protocol': self.PROTOCOL, 'tcp': self.TCP}) raise exception.StackValidationFailed(message=msg) def _resolve_attribute(self, name): if not self.resource_id: return ls = self.client().listener.get(self.resource_id) if name == self.MEMBER_NUMBER_ATTR: return ls.extra['member_number'] if name == self.STATUS_ATTR: return ls.status def FnGetRefId(self): return self.resource_id def handle_create(self): props = self._prepare_properties(self.properties) ls = self.client().listener.create(**props) self.resource_id_set(ls.id) return ls.status def handle_update(self, json_snippet, tmpl_diff, prop_diff): if prop_diff: if self.COOKIE_TIMEOUT in prop_diff: if prop_diff[self.COOKIE_TIMEOUT] is None: prop_diff.pop(self.COOKIE_TIMEOUT) if self.TCP_TIMEOUT in prop_diff: if prop_diff[self.TCP_TIMEOUT] is None: prop_diff.pop(self.TCP_TIMEOUT) if self.SESSION_STICKY in prop_diff: if prop_diff[self.SESSION_STICKY] is None: prop_diff.pop(self.SESSION_STICKY) self.client().listener.update(listener_id=self.resource_id, **prop_diff) def handle_delete(self): if not self.resource_id: return try: self.client().listener.delete(self.resource_id) except Exception as e: # here we don't use ignore_not_found, because elb raises: # BadRequest("Bad Request {'message': 'find listener failed', # 'code': 'ELB.6030'}",) if 'ELB.6030' in e.message: return raise def check_create_complete(self, ls_status): return self._check_active(ls_status) def needs_replace_failed(self): if not self.resource_id: return True with self.client_plugin().ignore_not_found: ls = self.client().listener.get(self.resource_id) return ls.status == 'ERROR' return True def resource_mapping(): return { 'OSE::ELB::Listener': Listener, }<|fim▁end|>
update_allowed=True,
<|file_name|>cookie_http_state.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use hyper::header::{Header, SetCookie}; use net::cookie::Cookie; use net::cookie_storage::CookieStorage; use net_traits::CookieSource; use servo_url::ServoUrl; fn run(set_location: &str, set_cookies: &[&str], final_location: &str) -> String { let mut storage = CookieStorage::new(150); let url = ServoUrl::parse(set_location).unwrap(); let source = CookieSource::HTTP; // Add all cookies to the store for str_cookie in set_cookies { let bytes = str_cookie.to_string().into_bytes(); let header = Header::parse_header(&[bytes]); if let Ok(SetCookie(cookies)) = header { for bare_cookie in cookies { if let Some(cookie) = Cookie::new_wrapped(bare_cookie, &url, source) { storage.push(cookie, source); } } } } // Get cookies for the test location let url = ServoUrl::parse(final_location).unwrap(); storage.cookies_for_url(&url, source).unwrap_or("".to_string()) } // Following are all tests extracted from https://github.com/abarth/http-state.git // They are generated by `./mach update-net-cookies` // Test listing #[test] fn test_0001() { let r = run("http://home.example.org:8888/cookie-parser?0001", &["foo=bar"], "http://home.example.org:8888/cookie-parser-result?0001"); assert_eq!(&r, "foo=bar"); } #[test] fn test_0002() { let r = run("http://home.example.org:8888/cookie-parser?0002", &["foo=bar; Expires=Fri, 07 Aug 2019 08:04:19 GMT"], "http://home.example.org:8888/cookie-parser-result?0002"); assert_eq!(&r, "foo=bar"); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_0003() { let r = run("http://home.example.org:8888/cookie-parser?0003", &["foo=bar; Expires=Fri, 07 Aug 2007 08:04:19 GMT", "foo2=bar2; Expires=Fri, 07 Aug 2017 08:04:19 GMT"], 
"http://home.example.org:8888/cookie-parser-result?0003");
    assert_eq!(&r, "foo2=bar2");
}

// Generated http-state conformance cases — regenerate with
// cookie_http_state_utils.py rather than editing by hand.  Each test stores
// the given Set-Cookie header(s) against the first URL, then asserts the
// exact Cookie header that would be sent when fetching the second URL.

#[test]
fn test_0004() {
    // A lone token with no `=` is not a valid cookie-pair; nothing is stored.
    let r = run("http://home.example.org:8888/cookie-parser?0004",
                &["foo"],
                "http://home.example.org:8888/cookie-parser-result?0004");
    assert_eq!(&r, "");
}

#[test]
fn test_0005() {
    // An unexpired max-age cookie is returned.
    let r = run("http://home.example.org:8888/cookie-parser?0005",
                &["foo=bar; max-age=10000;"],
                "http://home.example.org:8888/cookie-parser-result?0005");
    assert_eq!(&r, "foo=bar");
}

#[test]
#[should_panic] // Look at cookie_http_state_utils.py if this test fails
fn test_0006() {
    // max-age=0 should expire the cookie immediately.
    let r = run("http://home.example.org:8888/cookie-parser?0006",
                &["foo=bar; max-age=0;"],
                "http://home.example.org:8888/cookie-parser-result?0006");
    assert_eq!(&r, "");
}

#[test]
fn test_0007() {
    // Unknown attributes (RFC 2109 leftovers such as `version`) are ignored.
    let r = run("http://home.example.org:8888/cookie-parser?0007",
                &["foo=bar; version=1;"],
                "http://home.example.org:8888/cookie-parser-result?0007");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_0008() {
    let r = run("http://home.example.org:8888/cookie-parser?0008",
                &["foo=bar; version=1000;"],
                "http://home.example.org:8888/cookie-parser-result?0008");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_0009() {
    let r = run("http://home.example.org:8888/cookie-parser?0009",
                &["foo=bar; customvalue=1000;"],
                "http://home.example.org:8888/cookie-parser-result?0009");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_0010() {
    // `secure` cookie requested over plain http: it must not be returned.
    let r = run("http://home.example.org:8888/cookie-parser?0010",
                &["foo=bar; secure;"],
                "http://home.example.org:8888/cookie-parser-result?0010");
    assert_eq!(&r, "");
}

#[test]
fn test_0011() {
    let r = run("http://home.example.org:8888/cookie-parser?0011",
                &["foo=bar; customvalue=\"1000 or more\";"],
                "http://home.example.org:8888/cookie-parser-result?0011");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_0012() {
    let r = run("http://home.example.org:8888/cookie-parser?0012",
                &["foo=bar; customvalue=\"no trailing semicolon\""],
                "http://home.example.org:8888/cookie-parser-result?0012");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_0013() {
    // A later cookie with the same name overwrites the earlier one.
    let r = run("http://home.example.org:8888/cookie-parser?0013",
                &["foo=bar", "foo=qux"],
                "http://home.example.org:8888/cookie-parser-result?0013");
    assert_eq!(&r, "foo=qux");
}

#[test]
fn test_0014() {
    // Distinct names are both kept and echoed in creation order.
    let r = run("http://home.example.org:8888/cookie-parser?0014",
                &["foo1=bar", "foo2=qux"],
                "http://home.example.org:8888/cookie-parser-result?0014");
    assert_eq!(&r, "foo1=bar; foo2=qux");
}

#[test]
fn test_0015() {
    let r = run("http://home.example.org:8888/cookie-parser?0015",
                &["a=b", "z=y"],
                "http://home.example.org:8888/cookie-parser-result?0015");
    assert_eq!(&r, "a=b; z=y");
}

#[test]
fn test_0016() {
    // Reverse insertion order is preserved too (no sorting by name).
    let r = run("http://home.example.org:8888/cookie-parser?0016",
                &["z=y", "a=b"],
                "http://home.example.org:8888/cookie-parser-result?0016");
    assert_eq!(&r, "z=y; a=b");
}

#[test]
fn test_0017() {
    // A comma inside a cookie VALUE is kept verbatim (no header splitting).
    let r = run("http://home.example.org:8888/cookie-parser?0017",
                &["z=y, a=b"],
                "http://home.example.org:8888/cookie-parser-result?0017");
    assert_eq!(&r, "z=y, a=b");
}

#[test]
fn test_0018() {
    let r = run("http://home.example.org:8888/cookie-parser?0018",
                &["z=y; foo=bar, a=b"],
                "http://home.example.org:8888/cookie-parser-result?0018");
    assert_eq!(&r, "z=y");
}

#[test]
fn test_0019() {
    let r = run("http://home.example.org:8888/cookie-parser?0019",
                &["foo=b;max-age=3600, c=d;path=/"],
                "http://home.example.org:8888/cookie-parser-result?0019");
    assert_eq!(&r, "foo=b");
}

#[test]
fn test_0020() {
    // An empty pair ("=") is skipped without affecting its neighbours.
    let r = run("http://home.example.org:8888/cookie-parser?0020",
                &["a=b", "=", "c=d"],
                "http://home.example.org:8888/cookie-parser-result?0020");
    assert_eq!(&r, "a=b; c=d");
}

#[test]
fn test_0021() {
    // A value with an empty name ("=x") is rejected.
    let r = run("http://home.example.org:8888/cookie-parser?0021",
                &["a=b", "=x", "c=d"],
                "http://home.example.org:8888/cookie-parser-result?0021");
    assert_eq!(&r, "a=b; c=d");
}

#[test]
fn test_0022() {
    // A name with an empty value ("x=") is kept.
    let r = run("http://home.example.org:8888/cookie-parser?0022",
                &["a=b", "x=", "c=d"],
                "http://home.example.org:8888/cookie-parser-result?0022");
    assert_eq!(&r, "a=b; x=; c=d");
}

#[test]
fn test_0023() {
    let r = run("http://home.example.org:8888/cookie-parser?0023",
                &["foo"],
                "http://home.example.org:8888/cookie-parser-result?0023");
    assert_eq!(&r, "");
}

#[test]
fn test_0024() {
    let r = run("http://home.example.org:8888/cookie-parser?0024",
                &["foo", "="],
                "http://home.example.org:8888/cookie-parser-result?0024");
    assert_eq!(&r, "");
}

#[test]
fn test_0025() {
    let r = run("http://home.example.org:8888/cookie-parser?0025",
                &["foo", "; bar"],
                "http://home.example.org:8888/cookie-parser-result?0025");
    assert_eq!(&r, "");
}

#[test]
fn test_0026() {
    let r = run("http://home.example.org:8888/cookie-parser?0026",
                &["foo"],
                "http://home.example.org:8888/cookie-parser-result?0026");
    assert_eq!(&r, "");
}

#[test]
fn test_0027() {
    let r = run("http://home.example.org:8888/cookie-parser?0027",
                &["foo", "bar"],
                "http://home.example.org:8888/cookie-parser-result?0027");
    assert_eq!(&r, "");
}

#[test]
fn test_0028() {
    let r = run("http://home.example.org:8888/cookie-parser?0028",
                &["foo"],
                "http://home.example.org:8888/cookie-parser-result?0028");
    assert_eq!(&r, "");
}

// Secure attribute: how the attribute name/value is parsed, and that secure
// cookies are never sent back over plain http.

#[test]
fn test_attribute0001() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0001",
                &["foo=bar; Secure"],
                "http://home.example.org:8888/cookie-parser-result?attribute0001");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0002() {
    // Attribute names are case-insensitive.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0002",
                &["foo=bar; seCURe"],
                "http://home.example.org:8888/cookie-parser-result?attribute0002");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0003() {
    // A quoted "Secure" is an unknown attribute, so the cookie stays plain.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0003",
                &["foo=bar; \"Secure\""],
                "http://home.example.org:8888/cookie-parser-result?attribute0003");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_attribute0004() {
    // Secure with an (empty) value still counts as Secure.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0004",
                &["foo=bar; Secure="],
                "http://home.example.org:8888/cookie-parser-result?attribute0004");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0005() {
    // Secure with an arbitrary value still counts as Secure.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0005",
                &["foo=bar; Secure=aaaa"],
                "http://home.example.org:8888/cookie-parser-result?attribute0005");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0006() {
    // "Secure qux" is a single unknown attribute token, not Secure.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0006",
                &["foo=bar; Secure qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0006");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_attribute0007() {
    // Whitespace around the attribute name/`=` does not defeat Secure.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0007",
                &["foo=bar; Secure =aaaaa"],
                "http://home.example.org:8888/cookie-parser-result?attribute0007");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0008() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0008",
                &["foo=bar; Secure= aaaaa"],
                "http://home.example.org:8888/cookie-parser-result?attribute0008");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0009() {
    // Secure still applies when followed by unknown attributes.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0009",
                &["foo=bar; Secure; qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0009");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0010() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0010",
                &["foo=bar; Secure;qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0010");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0011() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0011",
                &["foo=bar; Secure ; qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0011");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0012() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0012",
                &["foo=bar; Secure"],
                "http://home.example.org:8888/cookie-parser-result?attribute0012");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0013() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0013",
                &["foo=bar; Secure ;"],
                "http://home.example.org:8888/cookie-parser-result?attribute0013");
    assert_eq!(&r, "");
}
// Path attribute: a cookie is only returned when its path path-matches the
// request URL (the result URLs here live under /cookie-parser-result).

#[test]
fn test_attribute0014() {
    // A valueless Path is ignored, so the default path still matches.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0014",
                &["foo=bar; Path"],
                "http://home.example.org:8888/cookie-parser-result?attribute0014");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_attribute0015() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0015",
                &["foo=bar; Path="],
                "http://home.example.org:8888/cookie-parser-result?attribute0015");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_attribute0016() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0016",
                &["foo=bar; Path=/"],
                "http://home.example.org:8888/cookie-parser-result?attribute0016");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_attribute0017() {
    // /qux does not path-match /cookie-parser-result.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0017",
                &["foo=bar; Path=/qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0017");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0018() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0018",
                &["foo=bar; Path =/qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0018");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0019() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0019",
                &["foo=bar; Path= /qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0019");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0020() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0020",
                &["foo=bar; Path=/qux ; taz"],
                "http://home.example.org:8888/cookie-parser-result?attribute0020");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0021() {
    // When Path appears twice, the last occurrence wins.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0021",
                &["foo=bar; Path=/qux; Path=/"],
                "http://home.example.org:8888/cookie-parser-result?attribute0021");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_attribute0022() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0022",
                &["foo=bar; Path=/; Path=/qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0022");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0023() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0023",
                &["foo=bar; Path=/qux; Path=/cookie-parser-result"],
                "http://home.example.org:8888/cookie-parser-result?attribute0023");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_attribute0024() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0024",
                &["foo=bar; Path=/cookie-parser-result; Path=/qux"],
                "http://home.example.org:8888/cookie-parser-result?attribute0024");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0025() {
    // Secure still applies when mixed with unknown attributes.
    let r = run("http://home.example.org:8888/cookie-parser?attribute0025",
                &["foo=bar; qux; Secure"],
                "http://home.example.org:8888/cookie-parser-result?attribute0025");
    assert_eq!(&r, "");
}

#[test]
fn test_attribute0026() {
    let r = run("http://home.example.org:8888/cookie-parser?attribute0026",
                &["foo=bar; qux=\"aaa;bbb\"; Secure"],
                "http://home.example.org:8888/cookie-parser-result?attribute0026");
    assert_eq!(&r, "");
}

// Non-ASCII (UTF-8) names and values must round-trip unchanged.

#[test]
fn test_charset0001() {
    let r = run("http://home.example.org:8888/cookie-parser?charset0001",
                &["foo=\u{6625}\u{8282}\u{56de}\u{5bb6}\u{8def}\u{b7}\u{6625}\u{8fd0}\u{5b8c}\
                   \u{5168}\u{624b}\u{518c}"],
                "http://home.example.org:8888/cookie-parser-result?charset0001");
    assert_eq!(&r,
               "foo=\u{6625}\u{8282}\u{56de}\u{5bb6}\u{8def}\u{b7}\u{6625}\u{8fd0}\u{5b8c}\
                \u{5168}\u{624b}\u{518c}");
}

#[test]
fn test_charset0002() {
    // Non-ASCII cookie NAME as well as value.
    let r = run("http://home.example.org:8888/cookie-parser?charset0002",
                &["\u{6625}\u{8282}\u{56de}=\u{5bb6}\u{8def}\u{b7}\u{6625}\u{8fd0}\u{5b8c}\
                   \u{5168}\u{624b}\u{518c}"],
                "http://home.example.org:8888/cookie-parser-result?charset0002");
    assert_eq!(&r,
               "\u{6625}\u{8282}\u{56de}=\u{5bb6}\u{8def}\u{b7}\u{6625}\u{8fd0}\u{5b8c}\
                \u{5168}\u{624b}\u{518c}");
}

#[test]
fn test_charset0003() {
    // Everything after `;` is attribute data, even for non-ASCII values.
    let r = run("http://home.example.org:8888/cookie-parser?charset0003",
                &["\u{6625}\u{8282}\u{56de}=\u{5bb6}\u{8def}\u{b7}\u{6625}\u{8fd0}; \u{5b8c}\
                   \u{5168}\u{624b}\u{518c}"],
                "http://home.example.org:8888/cookie-parser-result?charset0003");
    assert_eq!(&r, "\u{6625}\u{8282}\u{56de}=\u{5bb6}\u{8def}\u{b7}\u{6625}\u{8fd0}");
}

#[test]
fn test_charset0004() {
    // Quoted non-ASCII value round-trips with the quotes preserved.
    let r = run("http://home.example.org:8888/cookie-parser?charset0004",
                &["foo=\"\u{6625}\u{8282}\u{56de}\u{5bb6}\u{8def}\u{b7}\u{6625}\u{8fd0}\u{5b8c}\
                   \u{5168}\u{624b}\u{518c}\""],
                "http://home.example.org:8888/cookie-parser-result?charset0004");
    assert_eq!(&r,
               "foo=\"\u{6625}\u{8282}\u{56de}\u{5bb6}\u{8def}\u{b7}\u{6625}\u{8fd0}\u{5b8c}\
                \u{5168}\u{624b}\u{518c}\"");
}

// Cases imported from Chromium's cookie-parser test corpus.

#[test]
fn test_chromium0001() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0001",
                &["a=b"],
                "http://home.example.org:8888/cookie-parser-result?chromium0001");
    assert_eq!(&r, "a=b");
}

#[test]
fn test_chromium0002() {
    // Double quotes are part of the value; trailing space before `;` is not.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0002",
                &["aBc=\"zzz \" ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0002");
    assert_eq!(&r, "aBc=\"zzz \"");
}

#[test]
fn test_chromium0003() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0003",
                &["aBc=\"zzz \" ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0003");
    assert_eq!(&r, "aBc=\"zzz \"");
}

#[test]
fn test_chromium0004() {
    // The value is cut at the first `;`, even inside unbalanced quotes.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0004",
                &["aBc=\"zz;pp\" ; ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0004");
    assert_eq!(&r, "aBc=\"zz");
}

#[test]
fn test_chromium0005() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0005",
                &["aBc=\"zz ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0005");
    assert_eq!(&r, "aBc=\"zz");
}

#[test]
fn test_chromium0006() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0006",
                &["aBc=\"zzz \" \"ppp\" ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0006");
    assert_eq!(&r, "aBc=\"zzz \" \"ppp\"");
}

#[test]
fn test_chromium0007() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0007",
                &["aBc=\"zzz \" \"ppp\" ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0007");
    assert_eq!(&r, "aBc=\"zzz \" \"ppp\"");
}

#[test]
fn test_chromium0008() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0008",
                &["aBc=A\"B ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0008");
    assert_eq!(&r, "aBc=A\"B");
}

#[test]
fn test_chromium0009() {
    // No `=` before the first `;` → not a cookie-pair, dropped.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0009",
                &["BLAHHH; path=/;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0009");
    assert_eq!(&r, "");
}

#[test]
fn test_chromium0010() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0010",
                &["\"BLA\\\"HHH\"; path=/;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0010");
    assert_eq!(&r, "");
}

#[test]
fn test_chromium0011() {
    // An unterminated quote is kept verbatim in the value.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0011",
                &["a=\"B"],
                "http://home.example.org:8888/cookie-parser-result?chromium0011");
    assert_eq!(&r, "a=\"B");
}

#[test]
fn test_chromium0012() {
    // Empty cookie name is rejected.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0012",
                &["=ABC"],
                "http://home.example.org:8888/cookie-parser-result?chromium0012");
    assert_eq!(&r, "");
}

#[test]
fn test_chromium0013() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0013",
                &["ABC=; path = /"],
                "http://home.example.org:8888/cookie-parser-result?chromium0013");
    assert_eq!(&r, "ABC=");
}

#[test]
fn test_chromium0014() {
    // Whitespace around the name and value is trimmed.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0014",
                &[" A = BC ;foo;;; bar"],
                "http://home.example.org:8888/cookie-parser-result?chromium0014");
    assert_eq!(&r, "A=BC");
}

#[test]
fn test_chromium0015() {
    // Extra `=` characters stay in the value verbatim.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0015",
                &[" A=== BC ;foo;;; bar"],
                "http://home.example.org:8888/cookie-parser-result?chromium0015");
    assert_eq!(&r, "A=== BC");
}
#[test]
fn test_chromium0016() {
    // Quoted value plus far-future Expires and path=/: cookie survives.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0016",
                &["foo=\"zohNumRKgI0oxyhSsV3Z7D\" ; expires=Sun, 18-Apr-2027 21:06:29 GMT\
                   ; path=/ ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0016");
    assert_eq!(&r, "foo=\"zohNumRKgI0oxyhSsV3Z7D\"");
}

#[test]
fn test_chromium0017() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0017",
                &["foo=zohNumRKgI0oxyhSsV3Z7D ; expires=Sun, 18-Apr-2027 21:06:29 GMT ; p\
                   ath=/ ;"],
                "http://home.example.org:8888/cookie-parser-result?chromium0017");
    assert_eq!(&r, "foo=zohNumRKgI0oxyhSsV3Z7D");
}

#[test]
fn test_chromium0018() {
    // No Set-Cookie headers at all → empty Cookie header.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0018",
                &[],
                "http://home.example.org:8888/cookie-parser-result?chromium0018");
    assert_eq!(&r, "");
}

#[test]
fn test_chromium0019() {
    // A very large cookie (value is one long run of `a`s — presumably sized
    // to Chromium's 4 KiB limit; TODO confirm against the generator) must
    // round-trip unchanged.
    let r = run("http://home.example.org:8888/cookie-parser?chromium0019",
                &["a=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"],
                "http://home.example.org:8888/cookie-parser-result?chromium0019");
    assert_eq!(&r,
               "a=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\
                aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
}

#[test]
fn test_chromium0021() {
    let r = run("http://home.example.org:8888/cookie-parser?chromium0021",
                &[],
                "http://home.example.org:8888/cookie-parser-result?chromium0021");
    assert_eq!(&r, "");
}

// Comma handling: commas are legal inside cookie values but also separate
// date fields inside Expires, so these probe both sides.

#[test]
fn test_comma0001() {
    let r = run("http://home.example.org:8888/cookie-parser?comma0001",
                &["foo=bar, baz=qux"],
                "http://home.example.org:8888/cookie-parser-result?comma0001");
    assert_eq!(&r, "foo=bar, baz=qux");
}

#[test]
fn test_comma0002() {
    let r = run("http://home.example.org:8888/cookie-parser?comma0002",
                &["foo=\"bar, baz=qux\""],
                "http://home.example.org:8888/cookie-parser-result?comma0002");
    assert_eq!(&r, "foo=\"bar, baz=qux\"");
}

#[test]
fn test_comma0003() {
    // A comma in an ATTRIBUTE name/value does not split the header.
    let r = run("http://home.example.org:8888/cookie-parser?comma0003",
                &["foo=bar; b,az=qux"],
                "http://home.example.org:8888/cookie-parser-result?comma0003");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_comma0004() {
    let r = run("http://home.example.org:8888/cookie-parser?comma0004",
                &["foo=bar; baz=q,ux"],
                "http://home.example.org:8888/cookie-parser-result?comma0004");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_comma0005() {
    let r = run("http://home.example.org:8888/cookie-parser?comma0005",
                &["foo=bar; Max-Age=50,399"],
                "http://home.example.org:8888/cookie-parser-result?comma0005");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_comma0006() {
    // NOTE(review): this Expires date (2019) is now in the past; the case
    // was generated when it was a future date — verify against the suite.
    let r = run("http://home.example.org:8888/cookie-parser?comma0006",
                &["foo=bar; Expires=Fri, 07 Aug 2019 08:04:19 GMT"],
                "http://home.example.org:8888/cookie-parser-result?comma0006");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_comma0007() {
    let r = run("http://home.example.org:8888/cookie-parser?comma0007",
                &["foo=bar; Expires=Fri 07 Aug 2019 08:04:19 GMT, baz=qux"],
                "http://home.example.org:8888/cookie-parser-result?comma0007");
    assert_eq!(&r, "foo=bar");
}

// Domain attribute: domain-matching between the cookie's Domain and the
// request host (home/sibling/subdomain hosts under example.org).

#[test]
fn test_domain0001() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0001",
                &["foo=bar; domain=home.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0001");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0002() {
    // A sibling host never domain-matches home.example.org.
    let r = run("http://home.example.org:8888/cookie-parser?domain0002",
                &["foo=bar; domain=home.example.org"],
                "http://sibling.example.org:8888/cookie-parser-result?domain0002");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0003() {
    // A leading dot on the Domain value is accepted (and ignored).
    let r = run("http://home.example.org:8888/cookie-parser?domain0003",
                &["foo=bar; domain=.home.example.org"],
"http://home.example.org:8888/cookie-parser-result?domain0003");
    assert_eq!(&r, "foo=bar");
}

// Domain-matching continued: subdomain/sibling hosts, malformed Domain
// values, and repeated Domain attributes (the last valid one wins).

#[test]
fn test_domain0004() {
    // domain=home.example.org also matches its subdomains.
    let r = run("http://home.example.org:8888/cookie-parser?domain0004",
                &["foo=bar; domain=home.example.org"],
                "http://subdomain.home.example.org:8888/cookie-parser-result?domain0004");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0005() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0005",
                &["foo=bar; domain=.home.example.org"],
                "http://subdomain.home.example.org:8888/cookie-parser-result?domain0005");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0006() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0006",
                &["foo=bar; domain=.home.example.org"],
                "http://sibling.example.org:8888/cookie-parser-result?domain0006");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0007() {
    // Setting a cookie for a non-matching (sibling) domain is rejected.
    let r = run("http://home.example.org:8888/cookie-parser?domain0007",
                &["foo=bar; domain=sibling.example.org"],
                "http://sibling.example.org:8888/cookie-parser-result?domain0007");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0008() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0008",
                &["foo=bar; domain=.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0008");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0009() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0009",
                &["foo=bar; domain=example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0009");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0010() {
    // An empty label (double dot) makes the Domain value invalid.
    let r = run("http://home.example.org:8888/cookie-parser?domain0010",
                &["foo=bar; domain=..home.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0010");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0011() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0011",
                &["foo=bar; domain=home..example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0011");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0012() {
    // Leading whitespace before the Domain value is trimmed.
    let r = run("http://home.example.org:8888/cookie-parser?domain0012",
                &["foo=bar; domain= .home.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0012");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0013() {
    // Internal whitespace in the Domain value is not allowed.
    let r = run("http://home.example.org:8888/cookie-parser?domain0013",
                &["foo=bar; domain= . home.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0013");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0014() {
    // A trailing dot makes the Domain value invalid.
    let r = run("http://home.example.org:8888/cookie-parser?domain0014",
                &["foo=bar; domain=home.example.org."],
                "http://home.example.org:8888/cookie-parser-result?domain0014");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0015() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0015",
                &["foo=bar; domain=home.example.org.."],
                "http://home.example.org:8888/cookie-parser-result?domain0015");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0016() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0016",
                &["foo=bar; domain=home.example.org ."],
                "http://home.example.org:8888/cookie-parser-result?domain0016");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0017() {
    // Setting a cookie on a bare TLD (.org) is rejected.
    let r = run("http://home.example.org:8888/cookie-parser?domain0017",
                &["foo=bar; domain=.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0017");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0018() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0018",
                &["foo=bar; domain=.org."],
                "http://home.example.org:8888/cookie-parser-result?domain0018");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0019() {
    // Both cookies match; they are returned in creation order.
    let r = run("http://home.example.org:8888/cookie-parser?domain0019",
                &["foo=bar; domain=home.example.org",
                  "foo2=bar2; domain=.home.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0019");
    assert_eq!(&r, "foo=bar; foo2=bar2");
}

#[test]
fn test_domain0020() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0020",
                &["foo2=bar2; domain=.home.example.org",
                  "foo=bar; domain=home.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0020");
    assert_eq!(&r, "foo2=bar2; foo=bar");
}

#[test]
fn test_domain0021() {
    // A quoted Domain value is invalid.
    let r = run("http://home.example.org:8888/cookie-parser?domain0021",
                &["foo=bar; domain=\"home.example.org\""],
                "http://home.example.org:8888/cookie-parser-result?domain0021");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0022() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0022",
                &["foo=bar; domain=home.example.org",
                  "foo2=bar2; domain=.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0022");
    assert_eq!(&r, "foo=bar; foo2=bar2");
}

#[test]
fn test_domain0023() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0023",
                &["foo2=bar2; domain=.example.org",
                  "foo=bar; domain=home.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0023");
    assert_eq!(&r, "foo2=bar2; foo=bar");
}

#[test]
fn test_domain0024() {
    // With a repeated Domain attribute the LAST occurrence wins:
    // home.example.org does not match the sibling host.
    let r = run("http://home.example.org:8888/cookie-parser?domain0024",
                &["foo=bar; domain=.example.org; domain=home.example.org"],
                "http://sibling.example.org:8888/cookie-parser-result?domain0024");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0025() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0025",
                &["foo=bar; domain=home.example.org; domain=.example.org"],
                "http://sibling.example.org:8888/cookie-parser-result?domain0025");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0026() {
    // Domain values compare case-insensitively.
    let r = run("http://home.example.org:8888/cookie-parser?domain0026",
                &["foo=bar; domain=home.eXaMpLe.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0026");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0027() {
    // A port in the Domain value makes it invalid.
    let r = run("http://home.example.org:8888/cookie-parser?domain0027",
                &["foo=bar; domain=home.example.org:8888"],
                "http://home.example.org:8888/cookie-parser-result?domain0027");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0028() {
    // The Domain must domain-match the SETTING host (home.example.org),
    // not just the host the cookie is later requested from.
    let r = run("http://home.example.org:8888/cookie-parser?domain0028",
                &["foo=bar; domain=subdomain.home.example.org"],
                "http://subdomain.home.example.org:8888/cookie-parser-result?domain0028");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0029() {
    // Without a Domain attribute the cookie is host-only: subdomains miss it.
    let r = run("http://home.example.org:8888/cookie-parser?domain0029",
                &["foo=bar"],
                "http://subdomain.home.example.org:8888/cookie-parser-result?domain0029");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0031() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0031",
                &["foo=bar; domain=home.example.org; domain=.example.org"],
                "http://sibling.example.org:8888/cookie-parser-result?domain0031");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0033() {
    // Host names compare case-insensitively too.
    let r = run("http://home.example.org:8888/cookie-parser?domain0033",
                &["foo=bar; domain=home.example.org"],
                "http://hoMe.eXaMplE.org:8888/cookie-parser-result?domain0033");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0034() {
    // Last Domain wins: .com cannot be set from an .org host.
    let r = run("http://home.example.org:8888/cookie-parser?domain0034",
                &["foo=bar; domain=home.example.org; domain=home.example.com"],
                "http://home.example.org:8888/cookie-parser-result?domain0034");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0035() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0035",
                &["foo=bar; domain=home.example.com; domain=home.example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0035");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0036() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0036",
                &["foo=bar; domain=home.example.org; domain=home.example.com; domain=home.\
                   example.org"],
                "http://home.example.org:8888/cookie-parser-result?domain0036");
    assert_eq!(&r, "foo=bar");
}

#[test]
fn test_domain0037() {
    let r = run("http://home.example.org:8888/cookie-parser?domain0037",
                &["foo=bar; domain=home.example.com; domain=home.example.org; domain=home.\
                   example.com"],
                "http://home.example.org:8888/cookie-parser-result?domain0037");
    assert_eq!(&r, "");
}

#[test]
fn test_domain0038() {
    let r =
run("http://home.example.org:8888/cookie-parser?domain0038", &["foo=bar; domain=home.example.org; domain=home.example.org"], "http://home.example.org:8888/cookie-parser-result?domain0038"); assert_eq!(&r, "foo=bar"); } #[test] fn test_domain0039() { let r = run("http://home.example.org:8888/cookie-parser?domain0039", &["foo=bar; domain=home.example.org; domain=example.org"], "http://home.example.org:8888/cookie-parser-result?domain0039"); assert_eq!(&r, "foo=bar"); } #[test] fn test_domain0040() { let r = run("http://home.example.org:8888/cookie-parser?domain0040", &["foo=bar; domain=example.org; domain=home.example.org"], "http://home.example.org:8888/cookie-parser-result?domain0040"); assert_eq!(&r, "foo=bar"); } #[test] fn test_domain0041() { let r = run("http://home.example.org:8888/cookie-parser?domain0041", &["foo=bar; domain=.sibling.example.org"], "http://sibling.example.org:8888/cookie-parser-result?domain0041"); assert_eq!(&r, ""); } #[test] fn test_domain0042() { let r = run("http://home.example.org:8888/cookie-parser?domain0042", &["foo=bar; domain=.sibling.home.example.org"], "http://sibling.home.example.org:8888/cookie-parser-result?domain0042"); assert_eq!(&r, ""); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_mozilla0001() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0001", &["foo=bar; max-age=-1"], "http://home.example.org:8888/cookie-parser-result?mozilla0001"); assert_eq!(&r, ""); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_mozilla0002() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0002", &["foo=bar; max-age=0"], "http://home.example.org:8888/cookie-parser-result?mozilla0002"); assert_eq!(&r, ""); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_mozilla0003() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0003", &["foo=bar; expires=Thu, 10 Apr 1980 16:33:12 
GMT"], "http://home.example.org:8888/cookie-parser-result?mozilla0003"); assert_eq!(&r, ""); } #[test] fn test_mozilla0004() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0004", &["foo=bar; max-age=60"], "http://home.example.org:8888/cookie-parser-result?mozilla0004"); assert_eq!(&r, "foo=bar"); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_mozilla0005() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0005", &["foo=bar; max-age=-20"], "http://home.example.org:8888/cookie-parser-result?mozilla0005"); assert_eq!(&r, ""); } #[test] fn test_mozilla0006() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0006", &["foo=bar; max-age=60"], "http://home.example.org:8888/cookie-parser-result?mozilla0006"); assert_eq!(&r, "foo=bar"); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_mozilla0007() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0007", &["foo=bar; expires=Thu, 10 Apr 1980 16:33:12 GMT"], "http://home.example.org:8888/cookie-parser-result?mozilla0007"); assert_eq!(&r, ""); } #[test] fn test_mozilla0008() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0008", &["foo=bar; max-age=60", "foo1=bar; max-age=60"], "http://home.example.org:8888/cookie-parser-result?mozilla0008"); assert_eq!(&r, "foo=bar; foo1=bar"); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_mozilla0009() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0009", &["foo=bar; max-age=60", "foo1=bar; max-age=60", "foo=differentvalue; max-age=0"], "http://home.example.org:8888/cookie-parser-result?mozilla0009"); assert_eq!(&r, "foo1=bar"); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_mozilla0010() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0010", &["foo=bar; max-age=60", "foo1=bar; max-age=60", 
"foo=differentvalue; max-age=0", "foo2=evendifferentvalue; max-age=0"], "http://home.example.org:8888/cookie-parser-result?mozilla0010"); assert_eq!(&r, "foo1=bar"); } #[test] fn test_mozilla0011() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0011", &["test=parser; domain=.parser.test; ;; ;=; ,,, ===,abc,=; abracadabra! ma\ x-age=20;=;;"], "http://home.example.org:8888/cookie-parser-result?mozilla0011"); assert_eq!(&r, ""); } #[test] fn test_mozilla0012() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0012", &["test=\"fubar! = foo;bar\\\";\" parser; max-age=6", "five; max-age=2.63,"], "http://home.example.org:8888/cookie-parser-result?mozilla0012"); assert_eq!(&r, "test=\"fubar! = foo"); } #[test] #[should_panic] // Look at cookie_http_state_utils.py if this test fails fn test_mozilla0013() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0013", &["test=kill; max-age=0", "five; max-age=0"], "http://home.example.org:8888/cookie-parser-result?mozilla0013"); assert_eq!(&r, ""); } #[test] fn test_mozilla0014() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0014", &["six"], "http://home.example.org:8888/cookie-parser-result?mozilla0014"); assert_eq!(&r, ""); } #[test] fn test_mozilla0015() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0015", &["six", "seven"], "http://home.example.org:8888/cookie-parser-result?mozilla0015"); assert_eq!(&r, ""); } #[test] fn test_mozilla0016() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0016", &["six", "seven", " =eight"], "http://home.example.org:8888/cookie-parser-result?mozilla0016"); assert_eq!(&r, ""); } #[test] fn test_mozilla0017() { let r = run("http://home.example.org:8888/cookie-parser?mozilla0017", &["six", "seven", " =eight", "test=six"], "http://home.example.org:8888/cookie-parser-result?mozilla0017"); assert_eq!(&r, "test=six"); } #[test] fn test_name0001() { let r = 
run("http://home.example.org:8888/cookie-parser?name0001", &["a=bar"], "http://home.example.org:8888/cookie-parser-result?name0001"); assert_eq!(&r, "a=bar"); } #[test] fn test_name0002() { let r = run("http://home.example.org:8888/cookie-parser?name0002", &["1=bar"], "http://home.example.org:8888/cookie-parser-result?name0002"); assert_eq!(&r, "1=bar"); } #[test] fn test_name0003() { let r = run("http://home.example.org:8888/cookie-parser?name0003", &["$=bar"], "http://home.example.org:8888/cookie-parser-result?name0003"); assert_eq!(&r, "$=bar"); } #[test] fn test_name0004() { let r = run("http://home.example.org:8888/cookie-parser?name0004", &["!a=bar"], "http://home.example.org:8888/cookie-parser-result?name0004"); assert_eq!(&r, "!a=bar"); } #[test] fn test_name0005() { let r = run("http://home.example.org:8888/cookie-parser?name0005", &["@a=bar"], "http://home.example.org:8888/cookie-parser-result?name0005"); assert_eq!(&r, "@a=bar"); } #[test] fn test_name0006() { let r = run("http://home.example.org:8888/cookie-parser?name0006", &["#a=bar"], "http://home.example.org:8888/cookie-parser-result?name0006"); assert_eq!(&r, "#a=bar"); } #[test] fn test_name0007() { let r = run("http://home.example.org:8888/cookie-parser?name0007", &["$a=bar"], "http://home.example.org:8888/cookie-parser-result?name0007"); assert_eq!(&r, "$a=bar"); } #[test] fn test_name0008() { let r = run("http://home.example.org:8888/cookie-parser?name0008", &["%a=bar"], "http://home.example.org:8888/cookie-parser-result?name0008"); assert_eq!(&r, "%a=bar"); } #[test] fn test_name0009() { let r = run("http://home.example.org:8888/cookie-parser?name0009", &["^a=bar"], "http://home.example.org:8888/cookie-parser-result?name0009"); assert_eq!(&r, "^a=bar"); } #[test] fn test_name0010() { let r = run("http://home.example.org:8888/cookie-parser?name0010", &["&a=bar"], "http://home.example.org:8888/cookie-parser-result?name0010"); assert_eq!(&r, "&a=bar"); } #[test] fn test_name0011() { let r = 
run("http://home.example.org:8888/cookie-parser?name0011", &["*a=bar"], "http://home.example.org:8888/cookie-parser-result?name0011"); assert_eq!(&r, "*a=bar"); } #[test] fn test_name0012() { let r = run("http://home.example.org:8888/cookie-parser?name0012", &["(a=bar"], "http://home.example.org:8888/cookie-parser-result?name0012"); assert_eq!(&r, "(a=bar"); } #[test] fn test_name0013() { let r = run("http://home.example.org:8888/cookie-parser?name0013", &[")a=bar"], "http://home.example.org:8888/cookie-parser-result?name0013"); assert_eq!(&r, ")a=bar"); } #[test] fn test_name0014() { let r = run("http://home.example.org:8888/cookie-parser?name0014", &["-a=bar"], "http://home.example.org:8888/cookie-parser-result?name0014"); assert_eq!(&r, "-a=bar"); } #[test] fn test_name0015() { let r = run("http://home.example.org:8888/cookie-parser?name0015", &["_a=bar"], "http://home.example.org:8888/cookie-parser-result?name0015"); assert_eq!(&r, "_a=bar"); } #[test] fn test_name0016() { let r = run("http://home.example.org:8888/cookie-parser?name0016", &["+=bar"], "http://home.example.org:8888/cookie-parser-result?name0016"); assert_eq!(&r, "+=bar"); } #[test] fn test_name0017() { let r = run("http://home.example.org:8888/cookie-parser?name0017", &["=a=bar"], "http://home.example.org:8888/cookie-parser-result?name0017"); assert_eq!(&r, ""); } #[test] fn test_name0018() { let r = run("http://home.example.org:8888/cookie-parser?name0018", &["a =bar"], "http://home.example.org:8888/cookie-parser-result?name0018"); assert_eq!(&r, "a=bar"); } #[test] fn test_name0019() { let r = run("http://home.example.org:8888/cookie-parser?name0019", &["\"a=bar"], "http://home.example.org:8888/cookie-parser-result?name0019"); assert_eq!(&r, "\"a=bar"); } #[test] fn test_name0020() { let r = run("http://home.example.org:8888/cookie-parser?name0020", &["\"a=b\"=bar"], "http://home.example.org:8888/cookie-parser-result?name0020"); assert_eq!(&r, "\"a=b\"=bar"); } #[test] fn test_name0021() { let 
r = run("http://home.example.org:8888/cookie-parser?name0021", &["\"a=b\"=bar", "\"a=qux"], "http://home.example.org:8888/cookie-parser-result?name0021"); assert_eq!(&r, "\"a=qux"); } #[test] fn test_name0022() { let r = run("http://home.example.org:8888/cookie-parser?name0022", &[" foo=bar"], "http://home.example.org:8888/cookie-parser-result?name0022"); assert_eq!(&r, "foo=bar"); } #[test] fn test_name0023() { let r = run("http://home.example.org:8888/cookie-parser?name0023", &["foo;bar=baz"], "http://home.example.org:8888/cookie-parser-result?name0023"); assert_eq!(&r, ""); } #[test] fn test_name0024() { let r = run("http://home.example.org:8888/cookie-parser?name0024", &["$Version=1; foo=bar"], "http://home.example.org:8888/cookie-parser-result?name0024"); assert_eq!(&r, "$Version=1"); } #[test] fn test_name0025() { let r = run("http://home.example.org:8888/cookie-parser?name0025", &["===a=bar"], "http://home.example.org:8888/cookie-parser-result?name0025"); assert_eq!(&r, ""); } #[test] fn test_name0026() { let r = run("http://home.example.org:8888/cookie-parser?name0026", &["foo=bar"], "http://home.example.org:8888/cookie-parser-result?name0026"); assert_eq!(&r, "foo=bar"); } #[test] fn test_name0027() { let r = run("http://home.example.org:8888/cookie-parser?name0027", &["foo=bar ;"], "http://home.example.org:8888/cookie-parser-result?name0027"); assert_eq!(&r, "foo=bar"); } #[test] fn test_name0028() { let r = run("http://home.example.org:8888/cookie-parser?name0028", &["=a"], "http://home.example.org:8888/cookie-parser-result?name0028"); assert_eq!(&r, ""); } #[test] fn test_name0029() { let r = run("http://home.example.org:8888/cookie-parser?name0029", &["="], "http://home.example.org:8888/cookie-parser-result?name0029"); assert_eq!(&r, ""); } #[test] fn test_name0030() { let r = run("http://home.example.org:8888/cookie-parser?name0030", &["foo bar=baz"], "http://home.example.org:8888/cookie-parser-result?name0030"); assert_eq!(&r, "foo bar=baz"); } 
#[test] fn test_name0031() { let r = run("http://home.example.org:8888/cookie-parser?name0031", &["\"foo;bar\"=baz"], "http://home.example.org:8888/cookie-parser-result?name0031"); assert_eq!(&r, ""); } #[test] fn test_name0032() { let r = run("http://home.example.org:8888/cookie-parser?name0032", &["\"foo\\\"bar;baz\"=qux"], "http://home.example.org:8888/cookie-parser-result?name0032"); assert_eq!(&r, ""); } #[test] fn test_name0033() { let r = run("http://home.example.org:8888/cookie-parser?name0033", &["=foo=bar", "aaa"], "http://home.example.org:8888/cookie-parser-result?name0033"); assert_eq!(&r, ""); } #[test] fn test_optional_domain0030() { let r = run("http://home.example.org:8888/cookie-parser?optional-domain0030", &["foo=bar; domain="], "http://home.example.org:8888/cookie-parser-result?optional-domain0030"); assert_eq!(&r, "foo=bar"); } #[test] fn test_optional_domain0041() { let r = run("http://home.example.org:8888/cookie-parser?optional-domain0041", &["foo=bar; domain=example.org; domain="], "http://home.example.org:8888/cookie-parser-result?optional-domain0041"); assert_eq!(&r, "foo=bar"); } #[test] fn test_optional_domain0042() { let r = run("http://home.example.org:8888/cookie-parser?optional-domain0042", &["foo=bar; domain=foo.example.org; domain="], "http://home.example.org:8888/cookie-parser-result?optional-domain0042"); assert_eq!(&r, ""); } #[test] fn test_optional_domain0043() { let r = run("http://home.example.org:8888/cookie-parser?optional-domain0043", &["foo=bar; domain=foo.example.org; domain="], "http://subdomain.home.example.org:8888/cookie-parser-result?optional-do\ main0043"); assert_eq!(&r, ""); } #[test] fn test_ordering0001() { let r = run("http://home.example.org:8888/cookie-parser?ordering0001", &["key=val0;", "key=val1; path=/cookie-parser-result", "key=val2; path=/", "key=val3; path=/bar", "key=val4; domain=.example.org", "key=val5; domain=.example.org; path=/cookie-parser-result/foo"], 
"http://home.example.org:8888/cookie-parser-result/foo/baz?ordering0001"); assert_eq!(&r, "key=val5; key=val1; key=val2; key=val4"); } #[test] fn test_path0001() { let r = run("http://home.example.org:8888/cookie-parser?path0001", &["a=b; path=/", "x=y; path=/cookie-parser-result"], "http://home.example.org:8888/cookie-parser-result?path0001"); assert_eq!(&r, "x=y; a=b"); } #[test] fn test_path0002() { let r = run("http://home.example.org:8888/cookie-parser?path0002", &["a=b; path=/cookie-parser-result", "x=y; path=/"], "http://home.example.org:8888/cookie-parser-result?path0002"); assert_eq!(&r, "a=b; x=y"); } #[test] fn test_path0003() { let r = run("http://home.example.org:8888/cookie-parser?path0003", &["x=y; path=/", "a=b; path=/cookie-parser-result"], "http://home.example.org:8888/cookie-parser-result?path0003"); assert_eq!(&r, "a=b; x=y"); } #[test] fn test_path0004() { let r = run("http://home.example.org:8888/cookie-parser?path0004", &["x=y; path=/cookie-parser-result", "a=b; path=/"], "http://home.example.org:8888/cookie-parser-result?path0004"); assert_eq!(&r, "x=y; a=b"); } #[test] fn test_path0005() { let r = run("http://home.example.org:8888/cookie-parser?path0005", &["foo=bar; path=/cookie-parser-result/foo"], "http://home.example.org:8888/cookie-parser-result?path0005"); assert_eq!(&r, ""); }<|fim▁hole|> &["foo=bar", "foo=qux; path=/cookie-parser-result/foo"], "http://home.example.org:8888/cookie-parser-result?path0006"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0007() { let r = run("http://home.example.org:8888/cookie-parser?path0007", &["foo=bar; path=/cookie-parser-result/foo"], "http://home.example.org:8888/cookie-parser-result/foo?path0007"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0008() { let r = run("http://home.example.org:8888/cookie-parser?path0008", &["foo=bar; path=/cookie-parser-result/foo"], "http://home.example.org:8888/cookie-parser-result/bar?path0008"); assert_eq!(&r, ""); } #[test] fn test_path0009() { let r = 
run("http://home.example.org:8888/cookie-parser?path0009", &["foo=bar; path=/cookie-parser-result/foo/qux"], "http://home.example.org:8888/cookie-parser-result/foo?path0009"); assert_eq!(&r, ""); } #[test] fn test_path0010() { let r = run("http://home.example.org:8888/cookie-parser?path0010", &["foo=bar; path=/cookie-parser-result/foo/qux"], "http://home.example.org:8888/cookie-parser-result/foo/qux?path0010"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0011() { let r = run("http://home.example.org:8888/cookie-parser?path0011", &["foo=bar; path=/cookie-parser-result/foo/qux"], "http://home.example.org:8888/cookie-parser-result/bar/qux?path0011"); assert_eq!(&r, ""); } #[test] fn test_path0012() { let r = run("http://home.example.org:8888/cookie-parser?path0012", &["foo=bar; path=/cookie-parser-result/foo/qux"], "http://home.example.org:8888/cookie-parser-result/foo/baz?path0012"); assert_eq!(&r, ""); } #[test] fn test_path0013() { let r = run("http://home.example.org:8888/cookie-parser?path0013", &["foo=bar; path=/cookie-parser-result/foo/qux/"], "http://home.example.org:8888/cookie-parser-result/foo/baz?path0013"); assert_eq!(&r, ""); } #[test] fn test_path0014() { let r = run("http://home.example.org:8888/cookie-parser?path0014", &["foo=bar; path=/cookie-parser-result/foo/qux/"], "http://home.example.org:8888/cookie-parser-result/foo/qux?path0014"); assert_eq!(&r, ""); } #[test] fn test_path0015() { let r = run("http://home.example.org:8888/cookie-parser?path0015", &["foo=bar; path=/cookie-parser-result/foo/qux/"], "http://home.example.org:8888/cookie-parser-result/foo/qux/?path0015"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0016() { let r = run("http://home.example.org:8888/cookie-parser?path0016", &["foo=bar; path=/cookie-parser-result/foo/"], "http://home.example.org:8888/cookie-parser-result/foo/qux?path0016"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0017() { let r = run("http://home.example.org:8888/cookie-parser?path0017", 
&["foo=bar; path=/cookie-parser-result/foo/"], "http://home.example.org:8888/cookie-parser-result/foo//qux?path0017"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0018() { let r = run("http://home.example.org:8888/cookie-parser?path0018", &["foo=bar; path=/cookie-parser-result/foo/"], "http://home.example.org:8888/cookie-parser-result/fooqux?path0018"); assert_eq!(&r, ""); } #[test] fn test_path0019() { let r = run("http://home.example.org:8888/cookie-parser?path0019", &["foo=bar; path"], "http://home.example.org:8888/cookie-parser-result?path0019"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0020() { let r = run("http://home.example.org:8888/cookie-parser?path0020", &["foo=bar; path="], "http://home.example.org:8888/cookie-parser-result?path0020"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0021() { let r = run("http://home.example.org:8888/cookie-parser?path0021", &["foo=bar; path=/"], "http://home.example.org:8888/cookie-parser-result?path0021"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0022() { let r = run("http://home.example.org:8888/cookie-parser?path0022", &["foo=bar; path= /"], "http://home.example.org:8888/cookie-parser-result?path0022"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0023() { let r = run("http://home.example.org:8888/cookie-parser?path0023", &["foo=bar; Path=/cookie-PARSER-result"], "http://home.example.org:8888/cookie-parser-result?path0023"); assert_eq!(&r, ""); } #[test] fn test_path0024() { let r = run("http://home.example.org:8888/cookie-parser?path0024", &["foo=bar; path=/cookie-parser-result/foo/qux?"], "http://home.example.org:8888/cookie-parser-result/foo/qux?path0024"); assert_eq!(&r, ""); } #[test] fn test_path0025() { let r = run("http://home.example.org:8888/cookie-parser?path0025", &["foo=bar; path=/cookie-parser-result/foo/qux#"], "http://home.example.org:8888/cookie-parser-result/foo/qux?path0025"); assert_eq!(&r, ""); } #[test] fn test_path0026() { let r = 
run("http://home.example.org:8888/cookie-parser?path0026", &["foo=bar; path=/cookie-parser-result/foo/qux;"], "http://home.example.org:8888/cookie-parser-result/foo/qux?path0026"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0027() { let r = run("http://home.example.org:8888/cookie-parser?path0027", &["foo=bar; path=\"/cookie-parser-result/foo/qux;\""], "http://home.example.org:8888/cookie-parser-result/foo/qux?path0027"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0028() { let r = run("http://home.example.org:8888/cookie-parser?path0028", &["foo=bar; path=/cookie-parser-result/f%6Fo/bar"], "http://home.example.org:8888/cookie-parser-result/foo/bar?path0028"); assert_eq!(&r, ""); } #[test] fn test_path0029() { let r = run("http://home.example.org:8888/cookie-parser?path0029", &["a=b; \tpath\t=\t/cookie-parser-result", "x=y; \tpath\t=\t/book"], "http://home.example.org:8888/cookie-parser-result?path0029"); assert_eq!(&r, "a=b"); } #[test] fn test_path0030() { let r = run("http://home.example.org:8888/cookie-parser?path0030", &["foo=bar; path=/dog; path="], "http://home.example.org:8888/cookie-parser-result?path0030"); assert_eq!(&r, "foo=bar"); } #[test] fn test_path0031() { let r = run("http://home.example.org:8888/cookie-parser?path0031", &["foo=bar; path=; path=/dog"], "http://home.example.org:8888/cookie-parser-result?path0031"); assert_eq!(&r, ""); } #[test] fn test_path0032() { let r = run("http://home.example.org:8888/cookie-parser?path0032", &["foo=bar; path=/cookie-parser-result", "foo=qux; path=/cookie-parser-result/"], "http://home.example.org:8888/cookie-parser-result/dog?path0032"); assert_eq!(&r, "foo=qux; foo=bar"); } #[test] fn test_value0001() { let r = run("http://home.example.org:8888/cookie-parser?value0001", &["foo= bar"], "http://home.example.org:8888/cookie-parser-result?value0001"); assert_eq!(&r, "foo=bar"); } #[test] fn test_value0002() { let r = run("http://home.example.org:8888/cookie-parser?value0002", &["foo=\"bar\""], 
"http://home.example.org:8888/cookie-parser-result?value0002"); assert_eq!(&r, "foo=\"bar\""); } #[test] fn test_value0003() { let r = run("http://home.example.org:8888/cookie-parser?value0003", &["foo=\" bar \""], "http://home.example.org:8888/cookie-parser-result?value0003"); assert_eq!(&r, "foo=\" bar \""); } #[test] fn test_value0004() { let r = run("http://home.example.org:8888/cookie-parser?value0004", &["foo=\"bar;baz\""], "http://home.example.org:8888/cookie-parser-result?value0004"); assert_eq!(&r, "foo=\"bar"); } #[test] fn test_value0005() { let r = run("http://home.example.org:8888/cookie-parser?value0005", &["foo=\"bar=baz\""], "http://home.example.org:8888/cookie-parser-result?value0005"); assert_eq!(&r, "foo=\"bar=baz\""); } #[test] fn test_value0006() { let r = run("http://home.example.org:8888/cookie-parser?value0006", &["\tfoo\t=\tbar\t \t;\tttt"], "http://home.example.org:8888/cookie-parser-result?value0006"); assert_eq!(&r, "foo=bar"); }<|fim▁end|>
#[test] fn test_path0006() { let r = run("http://home.example.org:8888/cookie-parser?path0006",
<|file_name|>HttpTypes.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> export const enum HttpMethod { Get = 'get', Post = 'post', Delete = 'delete', Patch = 'patch', Put = 'put' } export type ProgressFunction = (loaded: number, total: number) => void; export type LoadedProgressFunction = (loaded: number) => void; export interface HttpRequest<T extends ResponseBodyDataTypes> { responseType: T; body: RequestBody; url: string; method: HttpMethod; query?: Record<string, string>; progress?: ProgressFunction; headers?: Record<string, string>; credentials?: boolean; } export interface HttpResponse<T extends ResponseBody> { headers: Record<string, string>; statusCode: number; body: T; } export type JwtToken = string; export type JwtTokenFactory = (fresh: boolean) => FutureResult<JwtToken, HttpError>; type Omit<T, K> = Pick<T, Exclude<keyof T, K>>; export type PostPutInit <T extends ResponseBodyDataTypes> = Omit<HttpRequest<T>, 'method'>; export type GetDelInit <T extends ResponseBodyDataTypes> = Omit<HttpRequest<T>, 'method' | 'body'>; export interface DownloadHttpRequest { url: string; progress?: LoadedProgressFunction; headers?: Record<string, string>; credentials?: boolean; }<|fim▁end|>
import { FutureResult } from '@ephox/katamari'; import { ResponseBodyDataTypes, RequestBody, ResponseBody } from './HttpData'; import { HttpError } from './HttpError';
<|file_name|>import_panel_example.py<|end_file_name|><|fim▁begin|># This file is part of the pyqualtrics package. # For copyright and licensing information about this package, see the # NOTICE.txt and LICENSE.txt files in its top-level directory; they are # available at https://github.com/Baguage/pyqualtrics # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pyqualtrics import Qualtrics import os user = None # os.environ["QUALTRICS_USER"] token = None # os.environ["QUALTRICS_TOKEN"] if __name__ == "__main__": print "This is an example of panel import" print "Make sure you have set QUALTRICS_USER, QUALTRICS_TOKEN and QUALTRICS_LIBRARY_ID enviroment variable" # Note is user and token are None, QUALTRICS_USER and QUALTRICS_TOKEN environment variables will be used instead qualtrics = Qualtrics(user, token) library_id = os.environ["QUALTRICS_LIBRARY_ID"] panel_id = qualtrics.importJsonPanel(<|fim▁hole|> library_id, Name="New Panel Created by PyQualtrics library (DELETE ME)", panel=[ {"Email": "[email protected]", "FirstName": "PyQualtrics", "LastName": "Library", "SubjectID": "123"}, {"Email": "[email protected]", "FirstName": "PyQualtrics2", "LastName": "Library2"} ], headers=["Email", "FirstName", "LastName", "ExternalRef", "SubjectID"], AllED=1) if qualtrics.last_error_message: print "Error creating panel: " + qualtrics.last_error_message else: print "Panel created successfully, PanelID: " + panel_id<|fim▁end|>
<|file_name|>sass.js<|end_file_name|><|fim▁begin|>define("ace/snippets/sass",["require","exports","module"], function(require, exports, module) { "use strict"; <|fim▁hole|> }); (function() { window.require(["ace/snippets/sass"], function(m) { if (typeof module == "object" && typeof exports == "object" && module) { module.exports = m; } }); })();<|fim▁end|>
exports.snippetText = ""; exports.scope = "sass";
<|file_name|>struct_create_packets_1_1__2.js<|end_file_name|><|fim▁begin|>var struct_create_packets_1_1__2 = [<|fim▁hole|> [ "angle", "struct_create_packets_1_1__2.html#a425d33bd27790066ff7edb4a608a8149", null ], [ "buttons", "struct_create_packets_1_1__2.html#a6b7d2d6c0a3a063f873420c010063b33", null ], [ "distance", "struct_create_packets_1_1__2.html#afb30de28ec41190d0cb278640d4782ab", null ], [ "ir", "struct_create_packets_1_1__2.html#ac834057741105e898b3d4613b96c6eb1", null ] ];<|fim▁end|>
<|file_name|>test_dellscapi.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Dell Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from cinder import context from cinder import exception from cinder.openstack.common import log as logging from cinder import test from cinder.volume.drivers.dell import dell_storagecenter_api import mock from requests import models import uuid LOG = logging.getLogger(__name__) # We patch these here as they are used by every test to keep # from trying to contact a Dell Storage Center. @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '__init__', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'open_connection') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'close_connection') class DellSCSanAPITestCase(test.TestCase): '''DellSCSanAPITestCase Class to test the Storage Center API using Mock. 
''' SC = {u'IPv6ManagementIPPrefix': 128, u'connectionError': u'', u'instanceId': u'64702', u'scSerialNumber': 64702, u'dataProgressionRunning': False, u'hostOrIpAddress': u'192.168.0.80', u'userConnected': True, u'portsBalanced': True, u'managementIp': u'192.168.0.80', u'version': u'6.5.1.269', u'location': u'', u'objectType': u'StorageCenter', u'instanceName': u'Storage Center 64702', u'statusMessage': u'', u'status': u'Up', u'flashOptimizedConfigured': False, u'connected': True, u'operationMode': u'Normal', u'userName': u'Admin', u'nonFlashOptimizedConfigured': True, u'name': u'Storage Center 64702', u'scName': u'Storage Center 64702', u'notes': u'', u'serialNumber': 64702, u'raidRebalanceRunning': False, u'userPasswordExpired': False, u'contact': u'', u'IPv6ManagementIP': u'::'} VOLUME = {u'instanceId': u'64702.3494', u'scSerialNumber': 64702, u'replicationSource': False, u'liveVolume': False, u'vpdId': 3496, u'objectType': u'ScVolume', u'index': 3494, u'volumeFolderPath': u'devstackvol/fcvm/', u'hostCacheEnabled': False, u'usedByLegacyFluidFsNasVolume': False, u'inRecycleBin': False, u'volumeFolderIndex': 17, u'instanceName': u'volume-37883deb-85cd-426a-9a98-62eaad8671ea', u'statusMessage': u'', u'status': u'Up', u'storageType': {u'instanceId': u'64702.1', u'instanceName': u'Assigned - Redundant - 2 MB', u'objectType': u'ScStorageType'}, u'cmmDestination': False, u'replicationDestination': False, u'volumeFolder': {u'instanceId': u'64702.17', u'instanceName': u'fcvm', u'objectType': u'ScVolumeFolder'}, u'deviceId': u'6000d31000fcbe000000000000000da8', u'active': True, u'portableVolumeDestination': False, u'deleteAllowed': True, u'name': u'volume-37883deb-85cd-426a-9a98-62eaad8671ea', u'scName': u'Storage Center 64702', u'secureDataUsed': False, u'serialNumber': u'0000fcbe-00000da8', u'replayAllowed': True, u'flashOptimized': False, u'configuredSize': u'1.073741824E9 Bytes', u'mapped': False, u'cmmSource': False} INACTIVE_VOLUME = \ {u'instanceId': 
u'64702.3494', u'scSerialNumber': 64702, u'replicationSource': False, u'liveVolume': False, u'vpdId': 3496, u'objectType': u'ScVolume', u'index': 3494, u'volumeFolderPath': u'devstackvol/fcvm/', u'hostCacheEnabled': False, u'usedByLegacyFluidFsNasVolume': False, u'inRecycleBin': False, u'volumeFolderIndex': 17, u'instanceName': u'volume-37883deb-85cd-426a-9a98-62eaad8671ea', u'statusMessage': u'', u'status': u'Up', u'storageType': {u'instanceId': u'64702.1', u'instanceName': u'Assigned - Redundant - 2 MB', u'objectType': u'ScStorageType'}, u'cmmDestination': False, u'replicationDestination': False, u'volumeFolder': {u'instanceId': u'64702.17', u'instanceName': u'fcvm', u'objectType': u'ScVolumeFolder'}, u'deviceId': u'6000d31000fcbe000000000000000da8', u'active': False, u'portableVolumeDestination': False, u'deleteAllowed': True, u'name': u'volume-37883deb-85cd-426a-9a98-62eaad8671ea', u'scName': u'Storage Center 64702', u'secureDataUsed': False, u'serialNumber': u'0000fcbe-00000da8', u'replayAllowed': True, u'flashOptimized': False, u'configuredSize': u'1.073741824E9 Bytes', u'mapped': False, u'cmmSource': False} SCSERVER = {u'scName': u'Storage Center 64702', u'volumeCount': 0, u'removeHbasAllowed': True, u'legacyFluidFs': False, u'serverFolderIndex': 4, u'alertOnConnectivity': True, u'objectType': u'ScPhysicalServer', u'instanceName': u'Server_21000024ff30441d', u'instanceId': u'64702.47', u'serverFolderPath': u'devstacksrv/', u'portType': [u'FibreChannel'], u'type': u'Physical', u'statusMessage': u'Only 5 of 6 expected paths are up', u'status': u'Degraded', u'scSerialNumber': 64702, u'serverFolder': {u'instanceId': u'64702.4', u'instanceName': u'devstacksrv', u'objectType': u'ScServerFolder'}, u'parentIndex': 0, u'connectivity': u'Partial', u'hostCacheIndex': 0, u'deleteAllowed': True, u'pathCount': 5, u'name': u'Server_21000024ff30441d', u'hbaPresent': True, u'hbaCount': 2, u'notes': u'Created by Dell Cinder Driver', u'mapped': False, u'operatingSystem': 
{u'instanceId': u'64702.38', u'instanceName': u'Red Hat Linux 6.x', u'objectType': u'ScServerOperatingSystem'} } # ScServer where deletedAllowed=False (not allowed to be deleted) SCSERVER_NO_DEL = {u'scName': u'Storage Center 64702', u'volumeCount': 0, u'removeHbasAllowed': True, u'legacyFluidFs': False, u'serverFolderIndex': 4, u'alertOnConnectivity': True, u'objectType': u'ScPhysicalServer', u'instanceName': u'Server_21000024ff30441d', u'instanceId': u'64702.47', u'serverFolderPath': u'devstacksrv/', u'portType': [u'FibreChannel'], u'type': u'Physical', u'statusMessage': u'Only 5 of 6 expected paths are up', u'status': u'Degraded', u'scSerialNumber': 64702, u'serverFolder': {u'instanceId': u'64702.4', u'instanceName': u'devstacksrv', u'objectType': u'ScServerFolder'}, u'parentIndex': 0, u'connectivity': u'Partial', u'hostCacheIndex': 0, u'deleteAllowed': False, u'pathCount': 5, u'name': u'Server_21000024ff30441d', u'hbaPresent': True, u'hbaCount': 2, u'notes': u'Created by Dell Cinder Driver', u'mapped': False, u'operatingSystem': {u'instanceId': u'64702.38', u'instanceName': u'Red Hat Linux 6.x', u'objectType': u'ScServerOperatingSystem'} } SCSERVERS = [{u'scName': u'Storage Center 64702', u'volumeCount': 5, u'removeHbasAllowed': True, u'legacyFluidFs': False, u'serverFolderIndex': 0, u'alertOnConnectivity': True, u'objectType': u'ScPhysicalServer', u'instanceName': u'openstack4', u'instanceId': u'64702.1', u'serverFolderPath': u'', u'portType': [u'Iscsi'], u'type': u'Physical', u'statusMessage': u'', u'status': u'Up', u'scSerialNumber': 64702, u'serverFolder': {u'instanceId': u'64702.0', u'instanceName': u'Servers', u'objectType': u'ScServerFolder'}, u'parentIndex': 0, u'connectivity': u'Up', u'hostCacheIndex': 0, u'deleteAllowed': True, u'pathCount': 0, u'name': u'openstack4', u'hbaPresent': True, u'hbaCount': 1, u'notes': u'', u'mapped': True, u'operatingSystem': {u'instanceId': u'64702.3', u'instanceName': u'Other Multipath', u'objectType': 
u'ScServerOperatingSystem'}}, {u'scName': u'Storage Center 64702', u'volumeCount': 1, u'removeHbasAllowed': True, u'legacyFluidFs': False, u'serverFolderIndex': 0, u'alertOnConnectivity': True, u'objectType': u'ScPhysicalServer', u'instanceName': u'openstack5', u'instanceId': u'64702.2', u'serverFolderPath': u'', u'portType': [u'Iscsi'], u'type': u'Physical', u'statusMessage': u'', u'status': u'Up', u'scSerialNumber': 64702, u'serverFolder': {u'instanceId': u'64702.0', u'instanceName': u'Servers', u'objectType': u'ScServerFolder'}, u'parentIndex': 0, u'connectivity': u'Up', u'hostCacheIndex': 0, u'deleteAllowed': True, u'pathCount': 0, u'name': u'openstack5', u'hbaPresent': True, u'hbaCount': 1, u'notes': u'', u'mapped': True, u'operatingSystem': {u'instanceId': u'64702.2', u'instanceName': u'Other Singlepath', u'objectType': u'ScServerOperatingSystem'}}] # ScServers list where status = Down SCSERVERS_DOWN = \ [{u'scName': u'Storage Center 64702', u'volumeCount': 5, u'removeHbasAllowed': True, u'legacyFluidFs': False, u'serverFolderIndex': 0, u'alertOnConnectivity': True, u'objectType': u'ScPhysicalServer', u'instanceName': u'openstack4', u'instanceId': u'64702.1', u'serverFolderPath': u'', u'portType': [u'Iscsi'], u'type': u'Physical', u'statusMessage': u'', u'status': u'Down', u'scSerialNumber': 64702, u'serverFolder': {u'instanceId': u'64702.0', u'instanceName': u'Servers', u'objectType': u'ScServerFolder'}, u'parentIndex': 0, u'connectivity': u'Up', u'hostCacheIndex': 0, u'deleteAllowed': True, u'pathCount': 0, u'name': u'openstack4', u'hbaPresent': True, u'hbaCount': 1, u'notes': u'', u'mapped': True, u'operatingSystem': {u'instanceId': u'64702.3', u'instanceName': u'Other Multipath', u'objectType': u'ScServerOperatingSystem'}}] MAP_PROFILES = [{u'instanceId': u'64702.2941', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64703', u'instanceName': u'SN 64703', u'objectType': u'ScController'}, u'lunUsed': [1], 
u'server': {u'instanceId': u'64702.47', u'instanceName': u'Server_21000024ff30441d', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.6025', u'instanceName': u'Server_21000024ff30441d Test Vol', u'objectType': u'ScVolume'}, u'connectivity': u'Up', u'readOnly': False, u'objectType': u'ScMappingProfile', u'hostCache': False, u'mappedVia': u'Server', u'mapCount': 3, u'instanceName': u'6025-47', u'lunRequested': u'N/A'}] MAP_PROFILE = {u'instanceId': u'64702.2941', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64703', u'instanceName': u'SN 64703', u'objectType': u'ScController'}, u'lunUsed': [1], u'server': {u'instanceId': u'64702.47', u'instanceName': u'Server_21000024ff30441d', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.6025', u'instanceName': u'Server_21000024ff30441d Test Vol', u'objectType': u'ScVolume'}, u'connectivity': u'Up', u'readOnly': False, u'objectType': u'ScMappingProfile', u'hostCache': False, u'mappedVia': u'Server', u'mapCount': 3, u'instanceName': u'6025-47', u'lunRequested': u'N/A'} MAPPINGS = [{u'profile': {u'instanceId': u'64702.104', u'instanceName': u'92-30', u'objectType': u'ScMappingProfile'}, u'status': u'Down', u'statusMessage': u'', u'instanceId': u'64702.969.64702', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64702', u'instanceName': u'SN 64702', u'objectType': u'ScController'}, u'server': {u'instanceId': u'64702.30', u'instanceName': u'Server_iqn.1993-08.org.debian:01:3776df826e4f', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.92', u'instanceName': u'volume-74a21934-60ad-4cf2-b89b-1f0dda309ddf', u'objectType': u'ScVolume'}, u'readOnly': False, u'lun': 1, u'lunUsed': [1], u'serverHba': {u'instanceId': u'64702.3454975614', u'instanceName': u'iqn.1993-08.org.debian:01:3776df826e4f', u'objectType': u'ScServerHba'}, u'path': {u'instanceId': 
u'64702.64702.64702.31.8', u'instanceName': u'iqn.1993-08.org.debian:' '01:3776df826e4f-5000D31000FCBE43', u'objectType': u'ScServerHbaPath'}, u'controllerPort': {u'instanceId': u'64702.5764839588723736131.91', u'instanceName': u'5000D31000FCBE43', u'objectType': u'ScControllerPort'}, u'instanceName': u'64702-969', u'transport': u'Iscsi', u'objectType': u'ScMapping'}] # Multiple mappings to test find_iscsi_properties with multiple portals MAPPINGS_MULTI_PORTAL = \ [{u'profile': {u'instanceId': u'64702.104', u'instanceName': u'92-30', u'objectType': u'ScMappingProfile'}, u'status': u'Down', u'statusMessage': u'', u'instanceId': u'64702.969.64702', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64702', u'instanceName': u'SN 64702', u'objectType': u'ScController'}, u'server': {u'instanceId': u'64702.30', u'instanceName': u'Server_iqn.1993-08.org.debian:01:3776df826e4f', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.92', u'instanceName': u'volume-74a21934-60ad-4cf2-b89b-1f0dda309ddf', u'objectType': u'ScVolume'}, u'readOnly': False, u'lun': 1, u'lunUsed': [1], u'serverHba': {u'instanceId': u'64702.3454975614', u'instanceName': u'iqn.1993-08.org.debian:01:3776df826e4f', u'objectType': u'ScServerHba'}, u'path': {u'instanceId': u'64702.64702.64702.31.8', u'instanceName': u'iqn.1993-08.org.debian:' '01:3776df826e4f-5000D31000FCBE43', u'objectType': u'ScServerHbaPath'}, u'controllerPort': {u'instanceId': u'64702.5764839588723736131.91', u'instanceName': u'5000D31000FCBE43', u'objectType': u'ScControllerPort'}, u'instanceName': u'64702-969', u'transport': u'Iscsi', u'objectType': u'ScMapping'}, {u'profile': {u'instanceId': u'64702.104', u'instanceName': u'92-30', u'objectType': u'ScMappingProfile'}, u'status': u'Down', u'statusMessage': u'', u'instanceId': u'64702.969.64702', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64702', 
u'instanceName': u'SN 64702', u'objectType': u'ScController'}, u'server': {u'instanceId': u'64702.30', u'instanceName': u'Server_iqn.1993-08.org.debian:01:3776df826e4f', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.92', u'instanceName': u'volume-74a21934-60ad-4cf2-b89b-1f0dda309ddf', u'objectType': u'ScVolume'}, u'readOnly': False, u'lun': 1, u'lunUsed': [1], u'serverHba': {u'instanceId': u'64702.3454975614', u'instanceName': u'iqn.1993-08.org.debian:01:3776df826e4f', u'objectType': u'ScServerHba'}, u'path': {u'instanceId': u'64702.64702.64702.31.8', u'instanceName': u'iqn.1993-08.org.debian:' '01:3776df826e4f-5000D31000FCBE43', u'objectType': u'ScServerHbaPath'}, u'controllerPort': {u'instanceId': u'64702.5764839588723736131.91', u'instanceName': u'5000D31000FCBE43', u'objectType': u'ScControllerPort'}, u'instanceName': u'64702-969', u'transport': u'Iscsi', u'objectType': u'ScMapping'}] MAPPINGS_READ_ONLY = \ [{u'profile': {u'instanceId': u'64702.104', u'instanceName': u'92-30', u'objectType': u'ScMappingProfile'}, u'status': u'Down', u'statusMessage': u'', u'instanceId': u'64702.969.64702', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64702', u'instanceName': u'SN 64702', u'objectType': u'ScController'}, u'server': {u'instanceId': u'64702.30', u'instanceName': u'Server_iqn.1993-08.org.debian:01:3776df826e4f', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.92', u'instanceName': u'volume-74a21934-60ad-4cf2-b89b-1f0dda309ddf', u'objectType': u'ScVolume'}, u'readOnly': True, u'lun': 1, u'lunUsed': [1], u'serverHba': {u'instanceId': u'64702.3454975614', u'instanceName': u'iqn.1993-08.org.debian:01:3776df826e4f', u'objectType': u'ScServerHba'}, u'path': {u'instanceId': u'64702.64702.64702.31.8', u'instanceName': u'iqn.1993-08.org.debian:' '01:3776df826e4f-5000D31000FCBE43', u'objectType': u'ScServerHbaPath'}, u'controllerPort': {u'instanceId': 
u'64702.5764839588723736131.91', u'instanceName': u'5000D31000FCBE43', u'objectType': u'ScControllerPort'}, u'instanceName': u'64702-969', u'transport': u'Iscsi', u'objectType': u'ScMapping'}] FC_MAPPINGS = [{u'profile': {u'instanceId': u'64702.2941', u'instanceName': u'6025-47', u'objectType': u'ScMappingProfile'}, u'status': u'Up', u'statusMessage': u'', u'instanceId': u'64702.7639.64702', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64703', u'instanceName': u'SN 64703', u'objectType': u'ScController'}, u'server': {u'instanceId': u'64702.47', u'instanceName': u'Server_21000024ff30441d', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.6025', u'instanceName': u'Server_21000024ff30441d Test Vol', u'objectType': u'ScVolume'}, u'readOnly': False, u'lun': 1, u'serverHba': {u'instanceId': u'64702.3282218607', u'instanceName': u'21000024FF30441C', u'objectType': u'ScServerHba'}, u'path': {u'instanceId': u'64702.64702.64703.27.73', u'instanceName': u'21000024FF30441C-5000D31000FCBE36', u'objectType': u'ScServerHbaPath'}, u'controllerPort': {u'instanceId': u'64702.5764839588723736118.50', u'instanceName': u'5000D31000FCBE36', u'objectType': u'ScControllerPort'}, u'instanceName': u'64702-7639', u'transport': u'FibreChannel', u'objectType': u'ScMapping'}, {u'profile': {u'instanceId': u'64702.2941', u'instanceName': u'6025-47', u'objectType': u'ScMappingProfile'}, u'status': u'Up', u'statusMessage': u'', u'instanceId': u'64702.7640.64702', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64703', u'instanceName': u'SN 64703', u'objectType': u'ScController'}, u'server': {u'instanceId': u'64702.47', u'instanceName': u'Server_21000024ff30441d', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.6025', u'instanceName': u'Server_21000024ff30441d Test Vol', u'objectType': u'ScVolume'}, u'readOnly': False, u'lun': 1, u'serverHba': 
{u'instanceId': u'64702.3282218606', u'instanceName': u'21000024FF30441D', u'objectType': u'ScServerHba'}, u'path': {u'instanceId': u'64702.64702.64703.27.78', u'instanceName': u'21000024FF30441D-5000D31000FCBE36', u'objectType': u'ScServerHbaPath'}, u'controllerPort': {u'instanceId': u'64702.5764839588723736118.50', u'instanceName': u'5000D31000FCBE36', u'objectType': u'ScControllerPort'}, u'instanceName': u'64702-7640', u'transport': u'FibreChannel', u'objectType': u'ScMapping'}, {u'profile': {u'instanceId': u'64702.2941', u'instanceName': u'6025-47', u'objectType': u'ScMappingProfile'}, u'status': u'Up', u'statusMessage': u'', u'instanceId': u'64702.7638.64702', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'controller': {u'instanceId': u'64702.64703', u'instanceName': u'SN 64703', u'objectType': u'ScController'}, u'server': {u'instanceId': u'64702.47', u'instanceName': u'Server_21000024ff30441d', u'objectType': u'ScPhysicalServer'}, u'volume': {u'instanceId': u'64702.6025', u'instanceName': u'Server_21000024ff30441d Test Vol', u'objectType': u'ScVolume'}, u'readOnly': False, u'lun': 1, u'serverHba': {u'instanceId': u'64702.3282218606', u'instanceName': u'21000024FF30441D', u'objectType': u'ScServerHba'}, u'path': {u'instanceId': u'64702.64702.64703.28.76', u'instanceName': u'21000024FF30441D-5000D31000FCBE3E', u'objectType': u'ScServerHbaPath'}, u'controllerPort': {u'instanceId': u'64702.5764839588723736126.60', u'instanceName': u'5000D31000FCBE3E', u'objectType': u'ScControllerPort'}, u'instanceName': u'64702-7638', u'transport': u'FibreChannel', u'objectType': u'ScMapping'}] RPLAY = {u'scSerialNumber': 64702, u'globalIndex': u'64702-46-250', u'description': u'Cinder Clone Replay', u'parent': {u'instanceId': u'64702.46.249', u'instanceName': u'64702-46-249', u'objectType': u'ScReplay'}, u'instanceId': u'64702.46.250', u'scName': u'Storage Center 64702', u'consistent': False, u'expires': True, u'freezeTime': u'12/09/2014 03:52:08 PM', 
u'createVolume': {u'instanceId': u'64702.46', u'instanceName': u'volume-ff9589d3-2d41-48d5-9ef5-2713a875e85b', u'objectType': u'ScVolume'}, u'expireTime': u'12/09/2014 04:52:08 PM', u'source': u'Manual', u'spaceRecovery': False, u'writesHeldDuration': 7910, u'active': False, u'markedForExpiration': False, u'objectType': u'ScReplay', u'instanceName': u'12/09/2014 03:52:08 PM', u'size': u'0.0 Bytes' } RPLAYS = [{u'scSerialNumber': 64702, u'globalIndex': u'64702-6025-5', u'description': u'Manually Created', u'parent': {u'instanceId': u'64702.6025.4', u'instanceName': u'64702-6025-4', u'objectType': u'ScReplay'}, u'instanceId': u'64702.6025.5', u'scName': u'Storage Center 64702', u'consistent': False, u'expires': True, u'freezeTime': u'02/02/2015 08:23:55 PM', u'createVolume': {u'instanceId': u'64702.6025', u'instanceName': u'Server_21000024ff30441d Test Vol', u'objectType': u'ScVolume'}, u'expireTime': u'02/02/2015 09:23:55 PM', u'source': u'Manual', u'spaceRecovery': False, u'writesHeldDuration': 7889, u'active': False, u'markedForExpiration': False, u'objectType': u'ScReplay', u'instanceName': u'02/02/2015 08:23:55 PM', u'size': u'0.0 Bytes'}, {u'scSerialNumber': 64702, u'globalIndex': u'64702-6025-4', u'description': u'Cinder Test Replay012345678910', u'parent': {u'instanceId': u'64702.6025.3', u'instanceName': u'64702-6025-3', u'objectType': u'ScReplay'}, u'instanceId': u'64702.6025.4', u'scName': u'Storage Center 64702', u'consistent': False, u'expires': True, u'freezeTime': u'02/02/2015 08:23:47 PM', u'createVolume': {u'instanceId': u'64702.6025', u'instanceName': u'Server_21000024ff30441d Test Vol', u'objectType': u'ScVolume'}, u'expireTime': u'02/02/2015 09:23:47 PM', u'source': u'Manual', u'spaceRecovery': False, u'writesHeldDuration': 7869, u'active': False, u'markedForExpiration': False, u'objectType': u'ScReplay', u'instanceName': u'02/02/2015 08:23:47 PM', u'size': u'0.0 Bytes'}] TST_RPLAY = {u'scSerialNumber': 64702, u'globalIndex': u'64702-6025-4', 
u'description': u'Cinder Test Replay012345678910', u'parent': {u'instanceId': u'64702.6025.3', u'instanceName': u'64702-6025-3', u'objectType': u'ScReplay'}, u'instanceId': u'64702.6025.4', u'scName': u'Storage Center 64702', u'consistent': False, u'expires': True, u'freezeTime': u'02/02/2015 08:23:47 PM', u'createVolume': {u'instanceId': u'64702.6025', u'instanceName': u'Server_21000024ff30441d Test Vol', u'objectType': u'ScVolume'}, u'expireTime': u'02/02/2015 09:23:47 PM', u'source': u'Manual', u'spaceRecovery': False, u'writesHeldDuration': 7869, u'active': False, u'markedForExpiration': False, u'objectType': u'ScReplay', u'instanceName': u'02/02/2015 08:23:47 PM', u'size': u'0.0 Bytes'} FLDR = {u'status': u'Up', u'instanceName': u'opnstktst', u'name': u'opnstktst', u'parent': {u'instanceId': u'64702.0', u'instanceName': u'Volumes', u'objectType': u'ScVolumeFolder'}, u'instanceId': u'64702.43', u'scName': u'Storage Center 64702', u'notes': u'Folder for OpenStack Cinder Driver', u'scSerialNumber': 64702, u'parentIndex': 0, u'okToDelete': True, u'folderPath': u'', u'root': False, u'statusMessage': u'', u'objectType': u'ScVolumeFolder'} SVR_FLDR = {u'status': u'Up', u'instanceName': u'devstacksrv', u'name': u'devstacksrv', u'parent': {u'instanceId': u'64702.0', u'instanceName': u'Servers', u'objectType': u'ScServerFolder'}, u'instanceId': u'64702.4', u'scName': u'Storage Center 64702', u'notes': u'Folder for OpenStack Cinder Driver', u'scSerialNumber': 64702, u'parentIndex': 0, u'okToDelete': False, u'folderPath': u'', u'root': False, u'statusMessage': u'', u'objectType': u'ScServerFolder'} ISCSI_HBA = {u'portWwnList': [], u'iscsiIpAddress': u'0.0.0.0', u'pathCount': 1, u'name': u'iqn.1993-08.org.debian:01:52332b70525', u'connectivity': u'Down', u'instanceId': u'64702.3786433166', u'scName': u'Storage Center 64702', u'notes': u'', u'scSerialNumber': 64702, u'server': {u'instanceId': u'64702.38', u'instanceName': u'Server_iqn.1993-08.org.debian:01:52332b70525', 
u'objectType': u'ScPhysicalServer'}, u'remoteStorageCenter': False, u'iscsiName': u'', u'portType': u'Iscsi', u'instanceName': u'iqn.1993-08.org.debian:01:52332b70525', u'objectType': u'ScServerHba'} FC_HBAS = [{u'portWwnList': [], u'iscsiIpAddress': u'0.0.0.0', u'pathCount': 2, u'name': u'21000024FF30441C', u'connectivity': u'Up', u'instanceId': u'64702.3282218607', u'scName': u'Storage Center 64702', u'notes': u'', u'scSerialNumber': 64702, u'server': {u'instanceId': u'64702.47', u'instanceName': u'Server_21000024ff30441d', u'objectType': u'ScPhysicalServer'}, u'remoteStorageCenter': False, u'iscsiName': u'', u'portType': u'FibreChannel', u'instanceName': u'21000024FF30441C', u'objectType': u'ScServerHba'}, {u'portWwnList': [], u'iscsiIpAddress': u'0.0.0.0', u'pathCount': 3, u'name': u'21000024FF30441D', u'connectivity': u'Partial', u'instanceId': u'64702.3282218606', u'scName': u'Storage Center 64702', u'notes': u'', u'scSerialNumber': 64702, u'server': {u'instanceId': u'64702.47', u'instanceName': u'Server_21000024ff30441d', u'objectType': u'ScPhysicalServer'}, u'remoteStorageCenter': False, u'iscsiName': u'', u'portType': u'FibreChannel', u'instanceName': u'21000024FF30441D', u'objectType': u'ScServerHba'}] FC_HBA = {u'portWwnList': [], u'iscsiIpAddress': u'0.0.0.0', u'pathCount': 3, u'name': u'21000024FF30441D', u'connectivity': u'Partial', u'instanceId': u'64702.3282218606', u'scName': u'Storage Center 64702', u'notes': u'', u'scSerialNumber': 64702, u'server': {u'instanceId': u'64702.47', u'instanceName': u'Server_21000024ff30441d', u'objectType': u'ScPhysicalServer'}, u'remoteStorageCenter': False, u'iscsiName': u'', u'portType': u'FibreChannel', u'instanceName': u'21000024FF30441D', u'objectType': u'ScServerHba'} SVR_OS_S = [{u'allowsLunGaps': True, u'product': u'Red Hat Linux', u'supportsActiveMappingDeletion': True, u'version': u'6.x', u'requiresLunZero': False, u'scName': u'Storage Center 64702', u'virtualMachineGuest': True, u'virtualMachineHost': 
False, u'allowsCrossTransportMapping': False, u'objectType': u'ScServerOperatingSystem', u'instanceId': u'64702.38', u'lunCanVaryAcrossPaths': False, u'scSerialNumber': 64702, u'maximumVolumeSize': u'0.0 Bytes', u'multipath': True, u'instanceName': u'Red Hat Linux 6.x', u'supportsActiveMappingCreation': True, u'name': u'Red Hat Linux 6.x'}] ISCSI_FLT_DOMAINS = [{u'headerDigestEnabled': False, u'classOfServicePriority': 0, u'wellKnownIpAddress': u'192.168.0.21', u'scSerialNumber': 64702, u'iscsiName': u'iqn.2002-03.com.compellent:5000d31000fcbe42', u'portNumber': 3260, u'subnetMask': u'255.255.255.0', u'gateway': u'192.168.0.1', u'objectType': u'ScIscsiFaultDomain', u'chapEnabled': False, u'instanceId': u'64702.6.5.3', u'childStatus': u'Up', u'defaultTimeToRetain': u'SECONDS_20', u'dataDigestEnabled': False, u'instanceName': u'iSCSI 10G 2', u'statusMessage': u'', u'status': u'Up', u'transportType': u'Iscsi', u'vlanId': 0, u'windowSize': u'131072.0 Bytes', u'defaultTimeToWait': u'SECONDS_2', u'scsiCommandTimeout': u'MINUTES_1', u'deleteAllowed': False, u'name': u'iSCSI 10G 2', u'immediateDataWriteEnabled': False, u'scName': u'Storage Center 64702', u'notes': u'', u'mtu': u'MTU_1500', u'bidirectionalChapSecret': u'', u'keepAliveTimeout': u'SECONDS_30'}] # For testing find_iscsi_properties where multiple portals are found ISCSI_FLT_DOMAINS_MULTI_PORTALS = \ [{u'headerDigestEnabled': False, u'classOfServicePriority': 0, u'wellKnownIpAddress': u'192.168.0.21', u'scSerialNumber': 64702, u'iscsiName': u'iqn.2002-03.com.compellent:5000d31000fcbe42', u'portNumber': 3260, u'subnetMask': u'255.255.255.0', u'gateway': u'192.168.0.1', u'objectType': u'ScIscsiFaultDomain', u'chapEnabled': False, u'instanceId': u'64702.6.5.3', u'childStatus': u'Up', u'defaultTimeToRetain': u'SECONDS_20', u'dataDigestEnabled': False, u'instanceName': u'iSCSI 10G 2', u'statusMessage': u'', u'status': u'Up', u'transportType': u'Iscsi', u'vlanId': 0, u'windowSize': u'131072.0 Bytes', 
u'defaultTimeToWait': u'SECONDS_2', u'scsiCommandTimeout': u'MINUTES_1', u'deleteAllowed': False, u'name': u'iSCSI 10G 2', u'immediateDataWriteEnabled': False, u'scName': u'Storage Center 64702', u'notes': u'', u'mtu': u'MTU_1500', u'bidirectionalChapSecret': u'', u'keepAliveTimeout': u'SECONDS_30'}, {u'headerDigestEnabled': False, u'classOfServicePriority': 0, u'wellKnownIpAddress': u'192.168.0.25', u'scSerialNumber': 64702, u'iscsiName': u'iqn.2002-03.com.compellent:5000d31000fcbe42', u'portNumber': 3260, u'subnetMask': u'255.255.255.0', u'gateway': u'192.168.0.1', u'objectType': u'ScIscsiFaultDomain', u'chapEnabled': False, u'instanceId': u'64702.6.5.3', u'childStatus': u'Up', u'defaultTimeToRetain': u'SECONDS_20', u'dataDigestEnabled': False, u'instanceName': u'iSCSI 10G 2', u'statusMessage': u'', u'status': u'Up', u'transportType': u'Iscsi', u'vlanId': 0, u'windowSize': u'131072.0 Bytes', u'defaultTimeToWait': u'SECONDS_2', u'scsiCommandTimeout': u'MINUTES_1', u'deleteAllowed': False, u'name': u'iSCSI 10G 2', u'immediateDataWriteEnabled': False, u'scName': u'Storage Center 64702', u'notes': u'', u'mtu': u'MTU_1500', u'bidirectionalChapSecret': u'', u'keepAliveTimeout': u'SECONDS_30'}] ISCSI_FLT_DOMAIN = {u'headerDigestEnabled': False, u'classOfServicePriority': 0, u'wellKnownIpAddress': u'192.168.0.21', u'scSerialNumber': 64702, u'iscsiName': u'iqn.2002-03.com.compellent:5000d31000fcbe42', u'portNumber': 3260, u'subnetMask': u'255.255.255.0', u'gateway': u'192.168.0.1', u'objectType': u'ScIscsiFaultDomain', u'chapEnabled': False, u'instanceId': u'64702.6.5.3', u'childStatus': u'Up', u'defaultTimeToRetain': u'SECONDS_20', u'dataDigestEnabled': False, u'instanceName': u'iSCSI 10G 2', u'statusMessage': u'', u'status': u'Up', u'transportType': u'Iscsi', u'vlanId': 0, u'windowSize': u'131072.0 Bytes', u'defaultTimeToWait': u'SECONDS_2', u'scsiCommandTimeout': u'MINUTES_1', u'deleteAllowed': False, u'name': u'iSCSI 10G 2', u'immediateDataWriteEnabled': False, 
u'scName': u'Storage Center 64702', u'notes': u'', u'mtu': u'MTU_1500', u'bidirectionalChapSecret': u'', u'keepAliveTimeout': u'SECONDS_30'} CTRLR_PORT = {u'status': u'Up', u'iscsiIpAddress': u'0.0.0.0', u'WWN': u'5000D31000FCBE06', u'name': u'5000D31000FCBE06', u'iscsiGateway': u'0.0.0.0', u'instanceId': u'64702.5764839588723736070.51', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'transportType': u'FibreChannel', u'virtual': False, u'controller': {u'instanceId': u'64702.64702', u'instanceName': u'SN 64702', u'objectType': u'ScController'}, u'iscsiName': u'', u'purpose': u'FrontEnd', u'iscsiSubnetMask': u'0.0.0.0', u'faultDomain': {u'instanceId': u'64702.4.3', u'instanceName': u'Domain 1', u'objectType': u'ScControllerPortFaultDomain'}, u'instanceName': u'5000D31000FCBE06', u'statusMessage': u'', u'objectType': u'ScControllerPort'} ISCSI_CTRLR_PORT = {u'preferredParent': {u'instanceId': u'64702.5764839588723736074.69', u'instanceName': u'5000D31000FCBE0A', u'objectType': u'ScControllerPort'}, u'status': u'Up', u'iscsiIpAddress': u'10.23.8.235', u'WWN': u'5000D31000FCBE43', u'name': u'5000D31000FCBE43', u'parent': {u'instanceId': u'64702.5764839588723736074.69', u'instanceName': u'5000D31000FCBE0A', u'objectType': u'ScControllerPort'}, u'iscsiGateway': u'0.0.0.0', u'instanceId': u'64702.5764839588723736131.91', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'transportType': u'Iscsi', u'virtual': True, u'controller': {u'instanceId': u'64702.64702', u'instanceName': u'SN 64702', u'objectType': u'ScController'}, u'iscsiName': u'iqn.2002-03.com.compellent:5000d31000fcbe43', u'purpose': u'FrontEnd', u'iscsiSubnetMask': u'0.0.0.0', u'faultDomain': {u'instanceId': u'64702.6.5', u'instanceName': u'iSCSI 10G 2', u'objectType': u'ScControllerPortFaultDomain'}, u'instanceName': u'5000D31000FCBE43', u'childStatus': u'Up', u'statusMessage': u'', u'objectType': u'ScControllerPort'} FC_CTRLR_PORT = {u'preferredParent': {u'instanceId': 
u'64702.5764839588723736093.57', u'instanceName': u'5000D31000FCBE1D', u'objectType': u'ScControllerPort'}, u'status': u'Up', u'iscsiIpAddress': u'0.0.0.0', u'WWN': u'5000D31000FCBE36', u'name': u'5000D31000FCBE36', u'parent': {u'instanceId': u'64702.5764839588723736093.57', u'instanceName': u'5000D31000FCBE1D', u'objectType': u'ScControllerPort'}, u'iscsiGateway': u'0.0.0.0', u'instanceId': u'64702.5764839588723736118.50', u'scName': u'Storage Center 64702', u'scSerialNumber': 64702, u'transportType': u'FibreChannel', u'virtual': True, u'controller': {u'instanceId': u'64702.64703', u'instanceName': u'SN 64703', u'objectType': u'ScController'}, u'iscsiName': u'', u'purpose': u'FrontEnd', u'iscsiSubnetMask': u'0.0.0.0', u'faultDomain': {u'instanceId': u'64702.1.0', u'instanceName': u'Domain 0', u'objectType': u'ScControllerPortFaultDomain'}, u'instanceName': u'5000D31000FCBE36', u'childStatus': u'Up', u'statusMessage': u'', u'objectType': u'ScControllerPort'} STRG_USAGE = {u'systemSpace': u'7.38197504E8 Bytes', u'freeSpace': u'1.297659461632E13 Bytes', u'oversubscribedSpace': u'0.0 Bytes', u'instanceId': u'64702', u'scName': u'Storage Center 64702', u'savingVsRaidTen': u'1.13737990144E11 Bytes', u'allocatedSpace': u'1.66791217152E12 Bytes', u'usedSpace': u'3.25716017152E11 Bytes', u'configuredSpace': u'9.155796533248E12 Bytes', u'alertThresholdSpace': u'1.197207956992E13 Bytes', u'availableSpace': u'1.3302310633472E13 Bytes', u'badSpace': u'0.0 Bytes', u'time': u'02/02/2015 02:23:39 PM', u'scSerialNumber': 64702, u'instanceName': u'Storage Center 64702', u'storageAlertThreshold': 10, u'objectType': u'StorageCenterStorageUsage'} IQN = 'iqn.2002-03.com.compellent:5000D31000000001' WWN = u'21000024FF30441C' WWNS = [u'21000024FF30441C', u'21000024FF30441D'] FLDR_PATH = 'StorageCenter/ScVolumeFolder/' # Create a Response object that indicates OK response_ok = models.Response() response_ok.status_code = 200 response_ok.reason = u'ok' RESPONSE_200 = response_ok # Create a 
Response object that indicates created response_created = models.Response() response_created.status_code = 201 response_created.reason = u'created' RESPONSE_201 = response_created # Create a Response object that indicates a failure (no content) response_nc = models.Response() response_nc.status_code = 204 response_nc.reason = u'duplicate' RESPONSE_204 = response_nc def setUp(self): super(DellSCSanAPITestCase, self).setUp() # Configuration is a mock. A mock is pretty much a blank # slate. I believe mock's done in setup are not happy time # mocks. So we just do a few things like driver config here. self.configuration = mock.Mock() self.configuration.san_is_local = False self.configuration.san_ip = "192.168.0.1" self.configuration.san_login = "admin" self.configuration.san_password = "mmm" self.configuration.dell_sc_ssn = 12345 self.configuration.dell_sc_server_folder = 'opnstktst' self.configuration.dell_sc_volume_folder = 'opnstktst' self.configuration.dell_sc_api_port = 3033 self.configuration.iscsi_ip_address = '192.168.1.1' self.configuration.iscsi_port = 3260 self._context = context.get_admin_context() # Set up the StorageCenterApi self.scapi = dell_storagecenter_api.StorageCenterApi( self.configuration.san_ip, self.configuration.dell_sc_api_port, self.configuration.san_login, self.configuration.san_password) self.volid = str(uuid.uuid4()) self.volume_name = "volume" + self.volid def test_path_to_array(self, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._path_to_array(u'folder1/folder2/folder3') expected = [u'folder1', u'folder2', u'folder3'] self.assertEqual(expected, res, 'Unexpected folder path') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_result', return_value=SC) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_sc(self, mock_get, mock_get_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.find_sc(64702) 
mock_get.assert_called_once_with('StorageCenter/StorageCenter') mock_get_result.assert_called() self.assertEqual(u'64702', res, 'Unexpected SSN') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_result', return_value=None) def test_find_sc_failure(self, mock_get_result, mock_get, mock_close_connection, mock_open_connection, mock_init): self.assertRaises(exception.VolumeBackendAPIException, self.scapi.find_sc, 12345) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_folder(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._create_folder( 'StorageCenter/ScVolumeFolder', 12345, '', self.configuration.dell_sc_volume_folder) mock_post.assert_called() mock_first_result.assert_called() self.assertEqual(self.FLDR, res, 'Unexpected Folder') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_folder_with_parent(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): # Test case where parent folder name is specified res = self.scapi._create_folder( 'StorageCenter/ScVolumeFolder', 12345, 'parentFolder', self.configuration.dell_sc_volume_folder) mock_post.assert_called() mock_first_result.assert_called() self.assertEqual(self.FLDR, res, 'Unexpected Folder') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_create_folder_failure(self, mock_post, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._create_folder( 'StorageCenter/ScVolumeFolder', 12345, '', self.configuration.dell_sc_volume_folder) self.assertIsNone(res, 'Test 
Create folder - None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_folder', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_path_to_array', return_value=['Cinder_Test_Folder']) def test_create_folder_path(self, mock_path_to_array, mock_find_folder, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._create_folder_path( 'StorageCenter/ScVolumeFolder', 12345, self.configuration.dell_sc_volume_folder) mock_path_to_array.assert_called_once_with( self.configuration.dell_sc_volume_folder) mock_find_folder.assert_called() self.assertEqual(self.FLDR, res, 'Unexpected ScFolder') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_folder', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_path_to_array', return_value=['Cinder_Test_Folder']) def test_create_folder_path_create_fldr(self, mock_path_to_array, mock_find_folder, mock_create_folder, mock_close_connection, mock_open_connection, mock_init): # Test case where folder is not found and must be created res = self.scapi._create_folder_path( 'StorageCenter/ScVolumeFolder', 12345, self.configuration.dell_sc_volume_folder) mock_path_to_array.assert_called_once_with( self.configuration.dell_sc_volume_folder) mock_find_folder.assert_called() mock_create_folder.assert_called() self.assertEqual(self.FLDR, res, 'Unexpected ScFolder') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_path_to_array', return_value=['Cinder_Test_Folder']) def test_create_folder_path_failure(self, mock_path_to_array, mock_find_folder, mock_create_folder, mock_close_connection, mock_open_connection, mock_init): # Test case 
where folder is not found, must be created # and creation fails res = self.scapi._create_folder_path( 'StorageCenter/ScVolumeFolder', 12345, self.configuration.dell_sc_volume_folder) mock_path_to_array.assert_called_once_with( self.configuration.dell_sc_volume_folder) mock_find_folder.assert_called() mock_create_folder.assert_called() self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_result', return_value=u'devstackvol/fcvm/') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_find_folder(self, mock_post, mock_get_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_folder( 'StorageCenter/ScVolumeFolder', 12345, self.configuration.dell_sc_volume_folder) mock_post.assert_called() mock_get_result.assert_called() self.assertEqual(u'devstackvol/fcvm/', res, 'Unexpected folder') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_result', return_value=u'devstackvol/fcvm/') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_find_folder_multi_fldr(self, mock_post, mock_get_result, mock_close_connection, mock_open_connection, mock_init): # Test case for folder path with multiple folders res = self.scapi._find_folder( 'StorageCenter/ScVolumeFolder', 12345, u'testParentFolder/opnstktst') mock_post.assert_called() mock_get_result.assert_called() self.assertEqual(u'devstackvol/fcvm/', res, 'Unexpected folder') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_find_folder_failure(self, mock_post, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_folder( 'StorageCenter/ScVolumeFolder', 12345, self.configuration.dell_sc_volume_folder) self.assertIsNone(res, 'Test find folder - None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_folder_path', return_value=FLDR) def 
test_create_volume_folder_path(self, mock_create_vol_fldr_path, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._create_volume_folder_path( 12345, self.configuration.dell_sc_volume_folder) mock_create_vol_fldr_path.assert_called_once_with( 'StorageCenter/ScVolumeFolder', 12345, self.configuration.dell_sc_volume_folder) self.assertEqual(self.FLDR, res, 'Unexpected ScFolder') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_folder', return_value=FLDR) def test_find_volume_folder(self, mock_find_folder, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_volume_folder( 12345, self.configuration.dell_sc_volume_folder) mock_find_folder.assert_called_once_with( 'StorageCenter/ScVolumeFolder/GetList', 12345, self.configuration.dell_sc_volume_folder) self.assertEqual(self.FLDR, res, 'Unexpected Folder') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'unmap_volume', return_value=True) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'map_volume', return_value=MAPPINGS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=SCSERVERS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_init_volume(self, mock_post, mock_get_json, mock_map_volume, mock_unmap_volume, mock_close_connection, mock_open_connection, mock_init): self.scapi._init_volume(self.VOLUME) mock_map_volume.assert_called() mock_unmap_volume.assert_called() @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_init_volume_failure(self, mock_post, mock_close_connection, mock_open_connection, mock_init): # Test case where ScServer list fails self.scapi._init_volume(self.VOLUME) mock_post.assert_called() @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'unmap_volume', return_value=True) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'map_volume', return_value=MAPPINGS) 
@mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=SCSERVERS_DOWN) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_init_volume_servers_down(self, mock_post, mock_get_json, mock_map_volume, mock_unmap_volume, mock_close_connection, mock_open_connection, mock_init): # Test case where ScServer Status = Down self.scapi._init_volume(self.VOLUME) mock_map_volume.assert_called() mock_unmap_volume.assert_called() @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_volume_folder', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_volume(self, mock_post, mock_find_volume_folder, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_volume( self.volume_name, 1, 12345, self.configuration.dell_sc_volume_folder) mock_post.assert_called() mock_get_json.assert_called() mock_find_volume_folder.assert_called_once_with( 12345, self.configuration.dell_sc_volume_folder) self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_volume_folder_path', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_volume_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_vol_and_folder(self, mock_post, mock_find_volume_folder, mock_create_vol_folder_path, mock_get_json, mock_close_connection, mock_open_connection, mock_init): # Test calling create_volume where volume folder has to be created res = self.scapi.create_volume( self.volume_name, 1, 12345, self.configuration.dell_sc_volume_folder) mock_post.assert_called() 
mock_get_json.assert_called() mock_create_vol_folder_path.assert_called_once_with( 12345, self.configuration.dell_sc_volume_folder) mock_find_volume_folder.assert_called_once_with( 12345, self.configuration.dell_sc_volume_folder) self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_volume_folder_path', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_volume_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_vol_folder_fail(self, mock_post, mock_find_volume_folder, mock_create_vol_folder_path, mock_get_json, mock_close_connection, mock_open_connection, mock_init): # Test calling create_volume where volume folder does not exist and # fails to be created res = self.scapi.create_volume( self.volume_name, 1, 12345, self.configuration.dell_sc_volume_folder) mock_post.assert_called() mock_get_json.assert_called() mock_create_vol_folder_path.assert_called_once_with( 12345, self.configuration.dell_sc_volume_folder) mock_find_volume_folder.assert_called_once_with( 12345, self.configuration.dell_sc_volume_folder) self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_volume_folder', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_create_volume_failure(self, mock_post, mock_find_volume_folder, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_volume( self.volume_name, 1, 12345, self.configuration.dell_sc_volume_folder) mock_find_volume_folder.assert_called_once_with( 12345, self.configuration.dell_sc_volume_folder) 
self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_find_volume_by_name(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): # Test case to find volume by name res = self.scapi.find_volume(12345, self.volume_name) mock_post.assert_called() mock_first_result.assert_called() self.assertEqual(self.VOLUME, res, 'Unexpected volume') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) # Test case to find volume by InstancedId def test_find_volume_by_instanceid(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.find_volume(12345, None, '64702.3494') mock_post.assert_called() mock_first_result.assert_called() self.assertEqual(self.VOLUME, res, 'Unexpected volume') def test_find_volume_no_name_or_instance(self, mock_close_connection, mock_open_connection, mock_init): # Test calling find_volume with no name or instanceid res = self.scapi.find_volume(12345) self.assertEqual(res, None, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_find_volume_not_found(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): # Test calling find_volume with result of no volume found res = self.scapi.find_volume(12345, self.volume_name) self.assertEqual(None, res, 'None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=True) @mock.patch.object(dell_storagecenter_api.HttpClient, 'delete', return_value=RESPONSE_200) 
@mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'find_volume', return_value=VOLUME) def test_delete_volume(self, mock_find_volume, mock_delete, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.delete_volume(12345, self.volume_name) mock_delete.assert_called() mock_find_volume.assert_called_once_with(12345, self.volume_name, None) mock_get_json.assert_called() self.assertTrue(res, 'Expected True') @mock.patch.object(dell_storagecenter_api.HttpClient, 'delete', return_value=RESPONSE_204) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'find_volume', return_value=VOLUME) def test_delete_volume_failure(self, mock_find_volume, mock_delete, mock_close_connection, mock_open_connection, mock_init): self.assertRaises(exception.VolumeBackendAPIException, self.scapi.delete_volume, 12345, self.volume_name) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'find_volume', return_value=None) def test_delete_volume_no_vol_found(self, mock_find_volume, mock_close_connection, mock_open_connection, mock_init): # Test case where volume to be deleted does not exist res = self.scapi.delete_volume(12345, self.volume_name) self.assertTrue(res, 'Expected True') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_folder_path', return_value=SVR_FLDR) def test_create_server_folder_path(self, mock_create_svr_fldr_path, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._create_server_folder_path( 12345, self.configuration.dell_sc_server_folder) mock_create_svr_fldr_path.assert_called_once_with( 'StorageCenter/ScServerFolder', 12345, self.configuration.dell_sc_server_folder) self.assertEqual(self.SVR_FLDR, res, 'Unexpected server folder') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_folder', return_value=SVR_FLDR) def test_find_server_folder(self, mock_find_folder, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_server_folder( 
12345, self.configuration.dell_sc_server_folder) mock_find_folder.assert_called_once_with( 'StorageCenter/ScServerFolder/GetList', 12345, self.configuration.dell_sc_server_folder) self.assertEqual(self.SVR_FLDR, res, 'Unexpected server folder') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_add_hba(self, mock_post, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._add_hba(self.SCSERVER, self.IQN, False) mock_post.assert_called() self.assertTrue(res, 'Expected True') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_add_hba_fc(self, mock_post, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._add_hba(self.SCSERVER, self.WWN, True) mock_post.assert_called() self.assertTrue(res, 'Expected True') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_add_hba_failure(self, mock_post, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._add_hba(self.SCSERVER, self.IQN, False) mock_post.assert_called() self.assertFalse(res, 'Expected False') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=SVR_OS_S) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_find_serveros(self, mock_post, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_serveros(12345, 'Red Hat Linux 6.x') mock_get_json.assert_called() mock_post.assert_called() self.assertEqual('64702.38', res, 'Wrong InstanceId') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=SVR_OS_S) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_find_serveros_not_found(self, mock_post, mock_get_json, mock_close_connection, mock_open_connection, mock_init): # Test requesting a Server OS that will not be found res = 
self.scapi._find_serveros(12345, 'Non existent OS') mock_get_json.assert_called() mock_post.assert_called() self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_find_serveros_failed(self, mock_post, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_serveros(12345, 'Red Hat Linux 6.x') self.assertEqual(None, res, 'None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_add_hba', return_value=FC_HBA) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'create_server', return_value=SCSERVER) def test_create_server_multiple_hbas(self, mock_create_server, mock_add_hba, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_server_multiple_hbas( 12345, self.configuration.dell_sc_server_folder, self.WWNS) mock_create_server.assert_called() mock_add_hba.assert_called() self.assertEqual(self.SCSERVER, res, 'Unexpected ScServer') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_add_hba', return_value=True) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=SCSERVER) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_server_folder', return_value=SVR_FLDR) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serveros', return_value='64702.38') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_server(self, mock_post, mock_find_serveros, mock_find_server_folder, mock_first_result, mock_add_hba, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_server( 12345, self.configuration.dell_sc_server_folder, self.IQN, False) mock_find_serveros.assert_called() mock_find_server_folder.assert_called() mock_first_result.assert_called() mock_add_hba.assert_called() self.assertEqual(self.SCSERVER, res, 'Unexpected ScServer') 
@mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_add_hba', return_value=True) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=SCSERVER) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_server_folder', return_value=SVR_FLDR) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serveros', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_server_os_not_found(self, mock_post, mock_find_serveros, mock_find_server_folder, mock_first_result, mock_add_hba, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_server( 12345, self.configuration.dell_sc_server_folder, self.IQN, False) mock_find_serveros.assert_called() self.assertEqual(self.SCSERVER, res, 'Unexpected ScServer') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_add_hba', return_value=True) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=SCSERVER) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_server_folder_path', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_server_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serveros', return_value='64702.38') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_server_fldr_not_found(self, mock_post, mock_find_serveros, mock_find_server_folder, mock_create_svr_fldr_path, mock_first_result, mock_add_hba, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_server( 12345, self.configuration.dell_sc_server_folder, self.IQN, False) mock_find_server_folder.assert_called() mock_create_svr_fldr_path.assert_called() self.assertEqual(self.SCSERVER, res, 'Unexpected ScServer') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_add_hba', 
return_value=True) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=SCSERVER) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_server_folder_path', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_server_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serveros', return_value='64702.38') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_create_server_failure(self, mock_post, mock_find_serveros, mock_find_server_folder, mock_create_svr_fldr_path, mock_first_result, mock_add_hba, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_server( 12345, self.configuration.dell_sc_server_folder, self.IQN, False) self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_add_hba', return_value=True) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_server_folder_path', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_server_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serveros', return_value='64702.38') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_server_not_found(self, mock_post, mock_find_serveros, mock_find_server_folder, mock_create_svr_fldr_path, mock_first_result, mock_add_hba, mock_close_connection, mock_open_connection, mock_init): # Test create server where _first_result is None res = self.scapi.create_server( 12345, self.configuration.dell_sc_server_folder, self.IQN, False) self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_delete_server', return_value=None) 
@mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_add_hba', return_value=False) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=SCSERVER) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_server_folder', return_value=SVR_FLDR) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serveros', return_value='64702.38') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_201) def test_create_server_addhba_fail(self, mock_post, mock_find_serveros, mock_find_server_folder, mock_first_result, mock_add_hba, mock_delete_server, mock_close_connection, mock_open_connection, mock_init): # Tests create server where add hba fails res = self.scapi.create_server( 12345, self.configuration.dell_sc_server_folder, self.IQN, False) mock_delete_server.assert_called() self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=SCSERVER) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serverhba', return_value=ISCSI_HBA) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_find_server(self, mock_post, mock_find_serverhba, mock_first_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.find_server(12345, self.IQN) mock_find_serverhba.assert_called() mock_first_result.assert_called() self.assertIsNotNone(res, 'Expected ScServer') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serverhba', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_find_server_no_hba(self, mock_post, mock_find_serverhba, mock_close_connection, mock_open_connection, mock_init): # Test case where a ScServer HBA does not exist with the specified IQN # or WWN res = self.scapi.find_server(12345, self.IQN) mock_find_serverhba.assert_called() self.assertIsNone(res, 
'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_serverhba', return_value=ISCSI_HBA) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_find_server_failure(self, mock_post, mock_find_serverhba, mock_close_connection, mock_open_connection, mock_init): # Test case where a ScServer does not exist with the specified # ScServerHba res = self.scapi.find_server(12345, self.IQN) mock_find_serverhba.assert_called() self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=ISCSI_HBA) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_find_serverhba(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.find_server(12345, self.IQN) mock_post.assert_called() mock_first_result.assert_called() self.assertIsNotNone(res, 'Expected ScServerHba') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_find_serverhba_failure(self, mock_post, mock_close_connection, mock_open_connection, mock_init): # Test case where a ScServer does not exist with the specified # ScServerHba res = self.scapi.find_server(12345, self.IQN) self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=ISCSI_FLT_DOMAINS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_domains(self, mock_get, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_domains(u'64702.5764839588723736074.69') mock_get .assert_called() mock_get_json.assert_called() self.assertEqual( self.ISCSI_FLT_DOMAINS, res, 'Unexpected ScIscsiFaultDomain') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) def test_find_domains_error(self, mock_get, 
mock_close_connection, mock_open_connection, mock_init): # Test case where get of ScControllerPort FaultDomainList fails res = self.scapi._find_domains(u'64702.5764839588723736074.69') self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=ISCSI_FLT_DOMAINS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_domain(self, mock_get, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_domain(u'64702.5764839588723736074.69', u'192.168.0.21') mock_get .assert_called() mock_get_json.assert_called() self.assertIsNotNone(res, 'Expected ScIscsiFaultDomain') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) def test_find_domain_error(self, mock_get, mock_close_connection, mock_open_connection, mock_init): # Test case where get of ScControllerPort FaultDomainList fails res = self.scapi._find_domain(u'64702.5764839588723736074.69', u'192.168.0.21') self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=ISCSI_FLT_DOMAINS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_domain_not_found(self, mock_get, mock_get_json, mock_close_connection, mock_open_connection, mock_init): # Test case where domainip does not equal any WellKnownIpAddress # of the fault domains res = self.scapi._find_domain(u'64702.5764839588723736074.69', u'192.168.0.22') self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=FC_HBAS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_fc_initiators(self, mock_get, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_fc_initiators(self.SCSERVER) mock_get.assert_called() 
mock_get_json.assert_called() self.assertIsNotNone(res, 'Expected WWN list') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) def test_find_fc_initiators_error(self, mock_get, mock_close_connection, mock_open_connection, mock_init): # Test case where get of ScServer HbaList fails res = self.scapi._find_fc_initiators(self.SCSERVER) self.assertListEqual([], res, 'Expected empty list') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=MAPPINGS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_get_volume_count(self, mock_get, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.get_volume_count(self.SCSERVER) mock_get.assert_called() mock_get_json.assert_called() self.assertEqual(len(self.MAPPINGS), res, 'Mapping count mismatch') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) def test_get_volume_count_failure(self, mock_get, mock_close_connection, mock_open_connection, mock_init): # Test case of where get of ScServer MappingList fails res = self.scapi.get_volume_count(self.SCSERVER) mock_get.assert_called() self.assertEqual(-1, res, 'Mapping count not -1') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=[]) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_get_volume_count_no_volumes(self, mock_get, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.get_volume_count(self.SCSERVER) mock_get.assert_called() mock_get_json.assert_called() self.assertEqual(len([]), res, 'Mapping count mismatch') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=MAPPINGS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_mappings(self, mock_get, mock_get_json, mock_close_connection, 
mock_open_connection, mock_init): res = self.scapi._find_mappings(self.VOLUME) mock_get.assert_called() mock_get_json.assert_called() self.assertEqual(self.MAPPINGS, res, 'Mapping mismatch') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_mappings_inactive_vol(self, mock_get, mock_close_connection, mock_open_connection, mock_init): # Test getting volume mappings on inactive volume res = self.scapi._find_mappings(self.INACTIVE_VOLUME) mock_get.assert_called() self.assertEqual([], res, 'No mappings expected') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) def test_find_mappings_failure(self, mock_get, mock_close_connection, mock_open_connection, mock_init): # Test case of where get of ScVolume MappingList fails res = self.scapi._find_mappings(self.VOLUME) mock_get.assert_called() self.assertEqual([], res, 'Mapping count not empty') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=[]) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_mappings_no_mappings(self, mock_get, mock_get_json, mock_close_connection, mock_open_connection, mock_init):<|fim▁hole|> mock_get_json.assert_called() self.assertEqual([], res, 'Mapping count mismatch') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=CTRLR_PORT) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_controller_port(self, mock_get, mock_first_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._find_controller_port(u'64702.5764839588723736070.51') mock_get.assert_called() mock_first_result.assert_called() self.assertEqual(self.CTRLR_PORT, res, 'ScControllerPort mismatch') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) def test_find_controller_port_failure(self, mock_get, 
mock_close_connection, mock_open_connection, mock_init): # Test case where get of ScVolume MappingList fails res = self.scapi._find_controller_port(self.VOLUME) mock_get.assert_called() self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_controller_port', return_value=FC_CTRLR_PORT) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=FC_MAPPINGS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_fc_initiators', return_value=WWNS) def test_find_wwns(self, mock_find_fc_initiators, mock_find_mappings, mock_find_controller_port, mock_close_connection, mock_open_connection, mock_init): lun, wwns, itmap = self.scapi.find_wwns(self.VOLUME, self.SCSERVER) mock_find_fc_initiators.assert_called() mock_find_mappings.assert_called() mock_find_controller_port.assert_called() # The _find_controller_port is Mocked, so all mapping pairs # will have the same WWN for the ScControllerPort itmapCompare = {u'21000024FF30441C': [u'5000D31000FCBE36'], u'21000024FF30441D': [u'5000D31000FCBE36', u'5000D31000FCBE36']} self.assertEqual(1, lun, 'Incorrect LUN') self.assertIsNotNone(wwns, 'WWNs is None') self.assertEqual(itmapCompare, itmap, 'WWN mapping incorrect') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=[]) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_fc_initiators', return_value=FC_HBAS) def test_find_wwns_no_mappings(self, mock_find_fc_initiators, mock_find_mappings, mock_close_connection, mock_open_connection, mock_init): # Test case where there are no ScMapping(s) lun, wwns, itmap = self.scapi.find_wwns(self.VOLUME, self.SCSERVER) mock_find_fc_initiators.assert_called() mock_find_mappings.assert_called() self.assertEqual(None, lun, 'Incorrect LUN') self.assertEqual([], wwns, 'WWNs is not empty') self.assertEqual({}, itmap, 'WWN mapping not empty') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 
'_find_controller_port', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=FC_MAPPINGS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_fc_initiators', return_value=WWNS) def test_find_wwns_no_ctlr_port(self, mock_find_fc_initiators, mock_find_mappings, mock_find_controller_port, mock_close_connection, mock_open_connection, mock_init): # Test case where ScControllerPort is none lun, wwns, itmap = self.scapi.find_wwns(self.VOLUME, self.SCSERVER) mock_find_fc_initiators.assert_called() mock_find_mappings.assert_called() mock_find_controller_port.assert_called() self.assertEqual(None, lun, 'Incorrect LUN') self.assertEqual([], wwns, 'WWNs is not empty') self.assertEqual({}, itmap, 'WWN mapping not empty') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_controller_port', return_value=ISCSI_CTRLR_PORT) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_domains', return_value=ISCSI_FLT_DOMAINS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=MAPPINGS) def test_find_iscsi_properties_mappings(self, mock_find_mappings, mock_find_domain, mock_find_ctrl_port, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.find_iscsi_properties(self.VOLUME) mock_find_mappings.assert_called() mock_find_domain.assert_called() mock_find_ctrl_port.assert_called() expected = {'access_mode': 'rw', 'target_discovered': False, 'target_iqns': [u'iqn.2002-03.com.compellent:5000d31000fcbe43'], 'target_luns': [1], 'target_portals': [u'192.168.0.21:3260']} self.assertEqual(expected, res, 'Wrong Target Info') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_controller_port', return_value=ISCSI_CTRLR_PORT) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_domains', return_value=ISCSI_FLT_DOMAINS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', 
return_value=MAPPINGS) def test_find_iscsi_properties_by_address(self, mock_find_mappings, mock_find_domain, mock_find_ctrl_port, mock_close_connection, mock_open_connection, mock_init): # Test case to find iSCSI mappings by IP Address & port res = self.scapi.find_iscsi_properties( self.VOLUME, '192.168.0.21', 3260) mock_find_mappings.assert_called() mock_find_domain.assert_called() mock_find_ctrl_port.assert_called() expected = {'access_mode': 'rw', 'target_discovered': False, 'target_iqns': [u'iqn.2002-03.com.compellent:5000d31000fcbe43'], 'target_luns': [1], 'target_portals': [u'192.168.0.21:3260']} self.assertEqual(expected, res, 'Wrong Target Info') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_controller_port', return_value=ISCSI_CTRLR_PORT) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_domains', return_value=ISCSI_FLT_DOMAINS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=MAPPINGS) def test_find_iscsi_properties_by_address_not_found(self, mock_find_mappings, mock_find_domain, mock_find_ctrl_port, mock_close_connection, mock_open_connection, mock_init): # Test case to find iSCSI mappings by IP Address & port are not found res = self.scapi.find_iscsi_properties( self.VOLUME, '192.168.1.21', 3260) mock_find_mappings.assert_called() mock_find_domain.assert_called() mock_find_ctrl_port.assert_called() expected = {'access_mode': 'rw', 'target_discovered': False, 'target_iqns': [], 'target_luns': [], 'target_portals': []} self.assertEqual(expected, res, 'Wrong Target Info') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=[]) def test_find_iscsi_properties_no_mapping(self, mock_find_mappings, mock_close_connection, mock_open_connection, mock_init): # Test case where there are no ScMapping(s) res = self.scapi.find_iscsi_properties(self.VOLUME) mock_find_mappings.assert_called() expected = {'access_mode': 'rw', 'target_discovered': False, 
'target_iqns': [], 'target_luns': [], 'target_portals': []} self.assertEqual(expected, res, 'Expected empty Target Info') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_controller_port', return_value=ISCSI_CTRLR_PORT) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_domains', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=MAPPINGS) def test_find_iscsi_properties_no_domain(self, mock_find_mappings, mock_find_domain, mock_find_ctrl_port, mock_close_connection, mock_open_connection, mock_init): # Test case where there are no ScFaultDomain(s) res = self.scapi.find_iscsi_properties(self.VOLUME) mock_find_mappings.assert_called() mock_find_domain.assert_called() mock_find_ctrl_port.assert_called() expected = {'access_mode': 'rw', 'target_discovered': False, 'target_iqns': [], 'target_luns': [], 'target_portals': []} self.assertEqual(expected, res, 'Expected empty Target Info') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_controller_port', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_domains', return_value=ISCSI_FLT_DOMAINS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=MAPPINGS) def test_find_iscsi_properties_no_ctrl_port(self, mock_find_mappings, mock_find_domain, mock_find_ctrl_port, mock_close_connection, mock_open_connection, mock_init): # Test case where there are no ScFaultDomain(s) res = self.scapi.find_iscsi_properties(self.VOLUME) mock_find_mappings.assert_called() mock_find_domain.assert_called() mock_find_ctrl_port.assert_called() expected = {'access_mode': 'rw', 'target_discovered': False, 'target_iqns': [], 'target_luns': [], 'target_portals': []} self.assertEqual(expected, res, 'Expected empty Target Info') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_controller_port', return_value=ISCSI_CTRLR_PORT) 
@mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_domains', return_value=ISCSI_FLT_DOMAINS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=MAPPINGS_READ_ONLY) def test_find_iscsi_properties_ro(self, mock_find_mappings, mock_find_domain, mock_find_ctrl_port, mock_close_connection, mock_open_connection, mock_init): # Test case where Read Only mappings are found res = self.scapi.find_iscsi_properties(self.VOLUME) mock_find_mappings.assert_called() mock_find_domain.assert_called() mock_find_ctrl_port.assert_called() expected = {'access_mode': 'ro', 'target_discovered': False, 'target_iqns': [u'iqn.2002-03.com.compellent:5000d31000fcbe43'], 'target_luns': [1], 'target_portals': [u'192.168.0.21:3260']} self.assertEqual(expected, res, 'Wrong Target Info') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_controller_port', return_value=ISCSI_CTRLR_PORT) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_domains', return_value=ISCSI_FLT_DOMAINS_MULTI_PORTALS) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_mappings', return_value=MAPPINGS_MULTI_PORTAL) def test_find_iscsi_properties_multi_portals(self, mock_find_mappings, mock_find_domain, mock_find_ctrl_port, mock_close_connection, mock_open_connection, mock_init): # Test case where there are multiple portals res = self.scapi.find_iscsi_properties(self.VOLUME) mock_find_mappings.assert_called() mock_find_domain.assert_called() mock_find_ctrl_port.assert_called() expected = {'access_mode': 'rw', 'target_discovered': False, 'target_iqns': [u'iqn.2002-03.com.compellent:5000d31000fcbe43'], 'target_luns': [1], 'target_portals': [u'192.168.0.21:3260', u'192.168.0.25:3260']} self.assertEqual(expected, res, 'Wrong Target Info') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=MAP_PROFILE) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def 
test_map_volume(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.map_volume(self.VOLUME, self.SCSERVER) mock_post.assert_called() mock_first_result.assert_called() self.assertEqual(self.MAP_PROFILE, res, 'Incorrect ScMappingProfile') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_map_volume_failure(self, mock_post, mock_close_connection, mock_open_connection, mock_init): # Test case where mapping volume to server fails res = self.scapi.map_volume(self.VOLUME, self.SCSERVER) mock_post.assert_called() self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.HttpClient, 'delete', return_value=RESPONSE_200) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=MAP_PROFILES) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_unmap_volume(self, mock_get, mock_get_json, mock_delete, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.unmap_volume(self.VOLUME, self.SCSERVER) mock_get.assert_called() mock_get_json.assert_called() mock_delete.assert_called() self.assertTrue(res, 'Expected True') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) def test_unmap_volume_failure(self, mock_get, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.unmap_volume(self.VOLUME, self.SCSERVER) mock_get.assert_called() self.assertFalse(res, 'Expected False') @mock.patch.object(dell_storagecenter_api.HttpClient, 'delete', return_value=RESPONSE_200) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=[]) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_unmap_volume_no_map_profile(self, mock_get, mock_get_json, mock_delete, mock_close_connection, mock_open_connection, mock_init): res = 
self.scapi.unmap_volume(self.VOLUME, self.SCSERVER) mock_get.assert_called() mock_get_json.assert_called() mock_delete.assert_called() self.assertTrue(res, 'Expected True') @mock.patch.object(dell_storagecenter_api.HttpClient, 'delete', return_value=RESPONSE_204) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=MAP_PROFILES) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_unmap_volume_del_fail(self, mock_get, mock_get_json, mock_delete, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.unmap_volume(self.VOLUME, self.SCSERVER) mock_get.assert_called() mock_get_json.assert_called() mock_delete.assert_called() self.assertFalse(res, 'Expected False') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=STRG_USAGE) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_get_storage_usage(self, mock_get, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.get_storage_usage(64702) mock_get.assert_called() mock_get_json.assert_called() self.assertEqual(self.STRG_USAGE, res, 'Unexpected ScStorageUsage') def test_get_storage_usage_no_ssn(self, mock_close_connection, mock_open_connection, mock_init): # Test case where SSN is none res = self.scapi.get_storage_usage(None) self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) # Test case where get of Storage Usage fails def test_get_storage_usage_failure(self, mock_get, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.get_storage_usage(64702) mock_get.assert_called() self.assertIsNone(res, 'None expected') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=RPLAY) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def 
test_create_replay(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_replay(self.VOLUME, 'Test Replay', 60) mock_post.assert_called() mock_first_result.assert_called() self.assertEqual(self.RPLAY, res, 'Unexpected ScReplay') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=RPLAY) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_init_volume') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_create_replay_inact_vol(self, mock_post, mock_init_volume, mock_first_result, mock_close_connection, mock_open_connection, mock_init): # Test case where the specified volume is inactive res = self.scapi.create_replay(self.INACTIVE_VOLUME, 'Test Replay', 60) mock_post.assert_called() mock_init_volume.assert_called_once_with(self.INACTIVE_VOLUME) mock_first_result.assert_called() self.assertEqual(self.RPLAY, res, 'Unexpected ScReplay') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=RPLAY) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_create_replay_no_expire(self, mock_post, mock_first_result, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.create_replay(self.VOLUME, 'Test Replay', 0) mock_post.assert_called() mock_first_result.assert_called() self.assertEqual(self.RPLAY, res, 'Unexpected ScReplay') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_create_replay_no_volume(self, mock_post, mock_close_connection, mock_open_connection, mock_init): # Test case where no ScVolume is specified res = self.scapi.create_replay(None, 'Test Replay', 60) self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_create_replay_failure(self, mock_post, mock_close_connection, 
mock_open_connection, mock_init): # Test case where create ScReplay fails res = self.scapi.create_replay(self.VOLUME, 'Test Replay', 60) mock_post.assert_called() self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=RPLAYS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_replay(self, mock_post, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.find_replay(self.VOLUME, u'Cinder Test Replay012345678910') mock_post.assert_called() mock_get_json.assert_called() self.assertEqual(self.TST_RPLAY, res, 'Unexpected ScReplay') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=[]) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_200) def test_find_replay_no_replays(self, mock_post, mock_get_json, mock_close_connection, mock_open_connection, mock_init): # Test case where no replays are found res = self.scapi.find_replay(self.VOLUME, u'Cinder Test Replay012345678910') mock_post.assert_called() mock_get_json.assert_called() self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'get', return_value=RESPONSE_204) def test_find_replay_failure(self, mock_post, mock_get_json, mock_close_connection, mock_open_connection, mock_init): # Test case where None is returned for replays res = self.scapi.find_replay(self.VOLUME, u'Cinder Test Replay012345678910') mock_post.assert_called() mock_get_json.assert_called() self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'find_replay', return_value=RPLAYS) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_delete_replay(self, mock_post, mock_find_replay, mock_close_connection, 
mock_open_connection, mock_init): replayId = u'Cinder Test Replay012345678910' res = self.scapi.delete_replay(self.VOLUME, replayId) mock_post.assert_called() mock_find_replay.assert_called_once_with(self.VOLUME, replayId) self.assertTrue(res, 'Expected True') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'find_replay', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_delete_replay_no_replay(self, mock_post, mock_find_replay, mock_close_connection, mock_open_connection, mock_init): # Test case where specified ScReplay does not exist replayId = u'Cinder Test Replay012345678910' res = self.scapi.delete_replay(self.VOLUME, replayId) mock_post.assert_called() mock_find_replay.assert_called_once_with(self.VOLUME, replayId) self.assertTrue(res, 'Expected True') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'find_replay', return_value=TST_RPLAY) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_delete_replay_failure(self, mock_post, mock_find_replay, mock_close_connection, mock_open_connection, mock_init): # Test case where delete ScReplay results in an error replayId = u'Cinder Test Replay012345678910' res = self.scapi.delete_replay(self.VOLUME, replayId) mock_post.assert_called() mock_find_replay.assert_called_once_with(self.VOLUME, replayId) self.assertFalse(res, 'Expected False') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_volume_folder', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_create_view_volume(self, mock_post, mock_find_volume_folder, mock_first_result, mock_close_connection, mock_open_connection, mock_init): vol_name = u'Test_create_vol' res = self.scapi.create_view_volume( vol_name, self.configuration.dell_sc_volume_folder, 
self.TST_RPLAY) mock_post.assert_called() mock_find_volume_folder.assert_called_once_with( 64702, self.configuration.dell_sc_volume_folder) mock_first_result.assert_called() self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_volume_folder_path', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_volume_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_create_view_volume_create_fldr(self, mock_post, mock_find_volume_folder, mock_create_volume_folder, mock_first_result, mock_close_connection, mock_open_connection, mock_init): # Test case where volume folder does not exist and must be created vol_name = u'Test_create_vol' res = self.scapi.create_view_volume( vol_name, self.configuration.dell_sc_volume_folder, self.TST_RPLAY) mock_post.assert_called() mock_find_volume_folder.assert_called_once_with( 64702, self.configuration.dell_sc_volume_folder) mock_create_volume_folder.assert_called_once_with( 64702, self.configuration.dell_sc_volume_folder) mock_first_result.assert_called() self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_first_result', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_create_volume_folder_path', return_value=None) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_volume_folder', return_value=None) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_create_view_volume_no_vol_fldr(self, mock_post, mock_find_volume_folder, mock_create_volume_folder, mock_first_result, mock_close_connection, mock_open_connection, mock_init): # Test case where volume folder does not exist and cannot be created vol_name = 
u'Test_create_vol' res = self.scapi.create_view_volume( vol_name, self.configuration.dell_sc_volume_folder, self.TST_RPLAY) mock_post.assert_called() mock_find_volume_folder.assert_called_once_with( 64702, self.configuration.dell_sc_volume_folder) mock_create_volume_folder.assert_called_once_with( 64702, self.configuration.dell_sc_volume_folder) mock_first_result.assert_called() self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_find_volume_folder', return_value=FLDR) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_create_view_volume_failure(self, mock_post, mock_find_volume_folder, mock_close_connection, mock_open_connection, mock_init): # Test case where view volume create fails vol_name = u'Test_create_vol' res = self.scapi.create_view_volume( vol_name, self.configuration.dell_sc_volume_folder, self.TST_RPLAY) mock_post.assert_called() mock_find_volume_folder.assert_called_once_with( 64702, self.configuration.dell_sc_volume_folder) self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'create_view_volume', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'create_replay', return_value=RPLAY) def test_create_cloned_volume(self, mock_create_replay, mock_create_view_volume, mock_close_connection, mock_open_connection, mock_init): vol_name = u'Test_create_clone_vol' res = self.scapi.create_cloned_volume( vol_name, self.configuration.dell_sc_volume_folder, self.VOLUME) mock_create_replay.assert_called_once_with(self.VOLUME, 'Cinder Clone Replay', 60) mock_create_view_volume.assert_called_once_with( vol_name, self.configuration.dell_sc_volume_folder, self.RPLAY) self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, 'create_replay', return_value=None) def test_create_cloned_volume_failure(self, mock_create_replay, 
mock_close_connection, mock_open_connection, mock_init): # Test case where create cloned volumes fails because create_replay # fails vol_name = u'Test_create_clone_vol' res = self.scapi.create_cloned_volume( vol_name, self.configuration.dell_sc_volume_folder, self.VOLUME) mock_create_replay.assert_called_once_with(self.VOLUME, 'Cinder Clone Replay', 60) self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.StorageCenterApi, '_get_json', return_value=VOLUME) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_expand_volume(self, mock_post, mock_get_json, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.expand_volume(self.VOLUME, 550) mock_post.assert_called() mock_get_json.assert_called() self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume') @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_expand_volume_failure(self, mock_post, mock_close_connection, mock_open_connection, mock_init): res = self.scapi.expand_volume(self.VOLUME, 550) mock_post.assert_called() self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.HttpClient, 'delete', return_value=RESPONSE_200) def test_delete_server(self, mock_delete, mock_close_connection, mock_open_connection, mock_init): res = self.scapi._delete_server(self.SCSERVER) mock_delete.assert_called() self.assertIsNone(res, 'Expected None') @mock.patch.object(dell_storagecenter_api.HttpClient, 'delete', return_value=RESPONSE_200) def test_delete_server_del_not_allowed(self, mock_delete, mock_close_connection, mock_open_connection, mock_init): # Test case where delete of ScServer not allowed res = self.scapi._delete_server(self.SCSERVER_NO_DEL) mock_delete.assert_called() self.assertIsNone(res, 'Expected None') class DellSCSanAPIConnectionTestCase(test.TestCase): '''DellSCSanAPIConnectionTestCase Class to test the Storage Center API connection using Mock. 
''' # Create a Response object that indicates OK response_ok = models.Response() response_ok.status_code = 200 response_ok.reason = u'ok' RESPONSE_200 = response_ok # Create a Response object that indicates a failure (no content) response_nc = models.Response() response_nc.status_code = 204 response_nc.reason = u'duplicate' RESPONSE_204 = response_nc def setUp(self): super(DellSCSanAPIConnectionTestCase, self).setUp() # Configuration is a mock. A mock is pretty much a blank # slate. I believe mock's done in setup are not happy time # mocks. So we just do a few things like driver config here. self.configuration = mock.Mock() self.configuration.san_is_local = False self.configuration.san_ip = "192.168.0.1" self.configuration.san_login = "admin" self.configuration.san_password = "mmm" self.configuration.dell_sc_ssn = 12345 self.configuration.dell_sc_server_folder = 'opnstktst' self.configuration.dell_sc_volume_folder = 'opnstktst' self.configuration.dell_sc_api_port = 3033 self.configuration.iscsi_ip_address = '192.168.1.1' self.configuration.iscsi_port = 3260 self._context = context.get_admin_context() # Set up the StorageCenterApi self.scapi = dell_storagecenter_api.StorageCenterApi( self.configuration.san_ip, self.configuration.dell_sc_api_port, self.configuration.san_login, self.configuration.san_password) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_200) def test_open_connection(self, mock_post): self.scapi.open_connection() mock_post.assert_called() @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_open_connection_failure(self, mock_post): self.assertRaises(exception.VolumeBackendAPIException, self.scapi.open_connection) @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', return_value=RESPONSE_204) def test_close_connection(self, mock_post): self.scapi.close_connection() mock_post.assert_called() @mock.patch.object(dell_storagecenter_api.HttpClient, 'post', 
return_value=RESPONSE_200) def test_close_connection_failure(self, mock_post): self.scapi.close_connection() mock_post.assert_called()<|fim▁end|>
# Test case where ScVolume has no mappings res = self.scapi._find_mappings(self.VOLUME) mock_get.assert_called()
<|file_name|>theme.js<|end_file_name|><|fim▁begin|>$(function () { $(window).scroll(function() { if ($(".navbar").offset().top>30) { $(".navbar-fixed-top").addClass("sticky"); } else { $(".navbar-fixed-top").removeClass("sticky"); } }); // Flex if ($(".flexslider").length) { $('.flexslider').flexslider(); } servicesOptions.initialize(); staticHeader.initialize(); portfolioItem.initialize(); // segun esto corrige el pedo del dropdown en tablets and such // hay que testearlo! $('.dropdown-toggle').click(function(e) { e.preventDefault(); setTimeout($.proxy(function() { if ('ontouchstart' in document.documentElement) { $(this).siblings('.dropdown-backdrop').off().remove(); } }, this), 0); }); }); var portfolioItem = { initialize: function () { var $container = $("#portfolio_tem .left_box"); var $bigPics = $container.find(".big img"); var $thumbs = $container.find(".thumbs .thumb"); <|fim▁hole|> $thumbs.click(function (e) { e.preventDefault(); var index = $thumbs.index(this); $bigPics.fadeOut(); $bigPics.eq(index).fadeIn(); }); } } var staticHeader = { initialize: function () { if ($(".navbar-static-top").length) { $("body").css("padding-top", 0); } } } var servicesOptions = { initialize: function () { var $container = $(".services_circles"); var $texts = $container.find(".description .text"); var $circles = $container.find(".areas .circle"); $circles.click(function () { var index = $circles.index(this); $texts.fadeOut(); $texts.eq(index).fadeIn(); $circles.removeClass("active"); $(this).addClass("active"); }); } } $(document).ready(function(){ $("#menuContent div").hide(); $("#menuContent div:first").show(); $("#subMenu li:first").addClass("active"); $("#subMenu li a").click(function(){ $('#subMenu li').removeClass("active"); $(this).parent().addClass("active"); var current = $(this).attr("href"); $("#menuContent div:visible").fadeOut("fast"); $("#menuContent").animate({"height":$(current).height()},function(){ $(current).fadeIn("fast"); }); return false; }); 
});<|fim▁end|>
$bigPics.hide().eq(0).show();
<|file_name|>AllTests.py<|end_file_name|><|fim▁begin|># ********************************************************************** # # Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved. # # This copy of Ice is licensed to you under the terms described in the # ICE_LICENSE file included in this distribution.<|fim▁hole|> def test(b): if not b: raise RuntimeError('test assertion failed') def allTests(communicator): sys.stdout.write("testing stringToProxy... ") sys.stdout.flush() base = communicator.stringToProxy("test:default -p 12010") test(base) print("ok") sys.stdout.write("testing checked cast... ") sys.stdout.flush() obj = Test.TestIntfPrx.checkedCast(base) test(obj) test(obj == base) print("ok") sys.stdout.write("creating/destroying/recreating object adapter... ") sys.stdout.flush() adapter = communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default") try: communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default") test(False) except Ice.LocalException: pass adapter.destroy() adapter = communicator.createObjectAdapterWithEndpoints("TransientTestAdapter", "default") adapter.destroy() print("ok") sys.stdout.write("creating/activating/deactivating object adapter in one operation... ") sys.stdout.flush() obj.transient() print("ok") sys.stdout.write("deactivating object adapter in the server... ") sys.stdout.flush() obj.deactivate() print("ok") sys.stdout.write("testing connection closure... "); sys.stdout.flush(); for x in range(10): initData = Ice.InitializationData(); initData.properties = communicator.getProperties().clone(); comm = Ice.initialize(initData); comm.stringToProxy("test:default -p 12010").ice_pingAsync(); comm.destroy(); print("ok"); sys.stdout.write("testing whether server is gone... ") sys.stdout.flush() try: obj.ice_timeout(100).ice_ping() # Use timeout to speed up testing on Windows test(False) except Ice.LocalException: print("ok") return obj<|fim▁end|>
# # ********************************************************************** import sys, Ice, Test
<|file_name|>preferences.entry.ts<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2021 the Octant contributors. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 */ import { BehaviorSubject, Subscription } from 'rxjs'; import { PreferencesService } from './preferences.service'; import { Preferences } from '../../models/preference'; export class PreferencesEntry<T> { private subscription: Subscription; public subject: BehaviorSubject<T>; constructor( private preferencesService: PreferencesService, public id: string, private defaultValue: T, private defaultText: string, public updatesElectron: boolean = false ) { if (typeof this.defaultValue !== 'string') { this.subject = new BehaviorSubject<T>( JSON.parse( preferencesService.getStoredValue(this.id, this.defaultValue) ) ); } else { this.subject = new BehaviorSubject<T>( preferencesService.getStoredValue(this.id, this.defaultValue) ); } this.subscription = this.subject.subscribe(val => { preferencesService.setStoredValue(this.id, val); }); } public preferencesChanged(update: Preferences) { switch (typeof this.defaultValue) { case 'boolean': const val = (update[this.id] === this.defaultText) as unknown; if (this.subject.value !== (val as T)) { this.subject.next(val as T); return true; } break; default:<|fim▁hole|> const newValue = update[this.id]; if (newValue && this.subject.value !== newValue) { this.subject.next(newValue); return true; } break; } return false; } public setDefaultValue() { this.subject.next(this.defaultValue); } public destroy() { this.subscription?.unsubscribe(); } }<|fim▁end|>
<|file_name|>Result.java<|end_file_name|><|fim▁begin|>/* * Copyright 2000-2016 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.data; import java.io.Serializable; import java.util.Objects; import java.util.Optional; import com.vaadin.server.SerializableConsumer; import com.vaadin.server.SerializableFunction; import com.vaadin.server.SerializableSupplier; /** * Represents the result of an operation that might fail, such as type * conversion. A result may contain either a value, signifying a successful * operation, or an error message in case of a failure. * <p> * Result instances are created using the factory methods {@link #ok(R)} and * {@link #error(String)}, denoting success and failure respectively. * <p> * Unless otherwise specified, {@code Result} method arguments cannot be null. * * @param <R> * the result value type */ public interface Result<R> extends Serializable { /** * Returns a successful result wrapping the given value. * * @param <R> * the result value type * @param value * the result value, can be null * @return a successful result */ public static <R> Result<R> ok(R value) { return new SimpleResult<>(value, null); } /** * Returns a failure result wrapping the given error message. 
* * @param <R> * the result value type * @param message * the error message * @return a failure result */ public static <R> Result<R> error(String message) { Objects.requireNonNull(message, "message cannot be null"); return new SimpleResult<>(null, message); } /** * Returns a Result representing the result of invoking the given supplier. * If the supplier returns a value, returns a {@code Result.ok} of the * value; if an exception is thrown, returns the message in a * {@code Result.error}. * * @param <R> * the result value type * @param supplier * the supplier to run * @param onError * the function to provide the error message * @return the result of invoking the supplier */ public static <R> Result<R> of(SerializableSupplier<R> supplier, SerializableFunction<Exception, String> onError) { Objects.requireNonNull(supplier, "supplier cannot be null"); Objects.requireNonNull(onError, "onError cannot be null"); try { return ok(supplier.get()); } catch (Exception e) { return error(onError.apply(e)); } } /** * If this Result has a value, returns a Result of applying the given * function to the value. Otherwise, returns a Result bearing the same error * as this one. Note that any exceptions thrown by the mapping function are * not wrapped but allowed to propagate. * * @param <S> * the type of the mapped value * @param mapper * the mapping function * @return the mapped result */ public default <S> Result<S> map(SerializableFunction<R, S> mapper) { return flatMap(value -> ok(mapper.apply(value))); } /** * If this Result has a value, applies the given Result-returning function * to the value. Otherwise, returns a Result bearing the same error as this * one. Note that any exceptions thrown by the mapping function are not * wrapped but allowed to propagate. 
* * @param <S> * the type of the mapped value * @param mapper * the mapping function * @return the mapped result */ public <S> Result<S> flatMap(SerializableFunction<R, Result<S>> mapper); /** * Invokes either the first callback or the second one, depending on whether * this Result denotes a success or a failure, respectively. * * @param ifOk * the function to call if success * @param ifError * the function to call if failure */ public void handle(SerializableConsumer<R> ifOk, SerializableConsumer<String> ifError); <|fim▁hole|> * consumer to apply in case it's not an error */ public default void ifOk(SerializableConsumer<R> consumer) { handle(consumer, error -> { }); } /** * Applies the {@code consumer} if result is an error. * * @param consumer * consumer to apply in case it's an error */ public default void ifError(SerializableConsumer<String> consumer) { handle(value -> { }, consumer); } /** * Checks if the result denotes an error. * * @return <code>true</code> if the result denotes an error, * <code>false</code> otherwise */ public boolean isError(); /** * Returns an Optional of the result message, or an empty Optional if none. * * @return the optional message */ public Optional<String> getMessage(); /** * Return the value, if the result denotes success, otherwise throw an * exception to be created by the provided supplier. * * @param <X> * Type of the exception to be thrown * @param exceptionProvider * The provider which will return the exception to be thrown * based on the given error message * @return the value * @throws X * if this result denotes an error */ public <X extends Throwable> R getOrThrow( SerializableFunction<String, ? extends X> exceptionProvider) throws X; }<|fim▁end|>
/** * Applies the {@code consumer} if result is not an error. * * @param consumer
<|file_name|>paginated-scroll-box.js<|end_file_name|><|fim▁begin|><|fim▁hole|>import Ember from 'ember'; import PaginatedScrollViewMixin from 'kowa/mixins/paginated-scroll-view'; var PaginatedScrollBox = Ember.View.extend(PaginatedScrollViewMixin); export default PaginatedScrollBox;<|fim▁end|>
<|file_name|>AutoDark.tsx<|end_file_name|><|fim▁begin|>import { Box } from '@fower/react'; import { useState } from 'react'; import { Button } from './Button'; export default () => { const [mode, setMode] = useState('Light'); function toggleMode() { if (mode === 'Dark') { setMode('Light'); document.documentElement.classList.remove('dark'); } else { setMode('Dark'); document.documentElement.classList.add('dark'); } }<|fim▁hole|> return ( <Box h-400 roundedLarge bgGray200 p4> <Box red600>40 Lorem ipsum dolor sit amet</Box> <Box column toCenter spaceY3 gray800 textCenter> <Box as="button" onClick={toggleMode} p3 cursorPointer outlineNone white bgBlack bgGreen200--hover // bgGreen600--dark--hover // bg--hover bgGreen300--dark > Toggle {mode} </Box> <Box gray700 text3XL bgBlue300 p4 roundedGigantic rounded--dark> Powerful Theme Mode </Box> <Box textSM> Not only color mode, theme mode any thing: size, padding, margin, rounded... </Box> </Box> </Box> ); };<|fim▁end|>
<|file_name|>publish_to_beta.py<|end_file_name|><|fim▁begin|>"""Uploads apk to rollout track with user fraction.""" import sys import socket from apiclient.discovery import build from oauth2client.service_account import ServiceAccountCredentials import subprocess import xml.etree.ElementTree as ET import os from pathlib import Path TRACK = 'beta' USER_FRACTION = 1 APK_FILE = '../platforms/android/build/outputs/apk/release/android-release.apk' CREDENTIALS_JSON = 'playstore-service-account.json' def main(argv): package_name = os.environ.get('PACKAGE_NAME') if package_name: print('using provided package name', package_name) else: # get package name from somewhere print('finding package name') package_name = ET.parse('../platforms/android/res/xml/config.xml').getroot().attrib['id'] print('found package name', package_name) print() apk_file = os.environ.get('APK_FILE') if apk_file is None: print('using default apk file path', APK_FILE) apk_file = APK_FILE print('Retrieving release notes from CHANGELOG.md...') releaseText = subprocess.run('../../scripts/get_newest_release.js', stdout=subprocess.PIPE).stdout.decode() if len(releaseText) > 500: releaseText = releaseText[:495] + '\n...' 
print() print(releaseText) print() <|fim▁hole|> print('Found credentials, trying to connect...') socket.setdefaulttimeout(900) service = build('androidpublisher', 'v3', credentials=credentials) edit_response = service.edits().insert(body={}, packageName=package_name).execute() edit_id = edit_response['id'] print('Inserted edit with ID', edit_id) print('Uploading APK...') apk_response = service.edits().apks().upload( editId=edit_id, packageName=package_name, media_body=apk_file ).execute() print('Version code %d has been uploaded' % apk_response['versionCode']) track_response = service.edits().tracks().patch( editId=edit_id, track=TRACK, packageName=package_name, body={ 'releases': [{ 'releaseNotes': [{ 'text': releaseText, 'language': 'en-US' }], 'versionCodes': [apk_response['versionCode']], 'userFraction': USER_FRACTION, 'status': 'inProgress', }] } ).execute() print('Track %s is set with releases: %s' % (track_response['track'], str(track_response['releases']))) if package_name == 'world.karrot': assets = Path('../playstoreAssets') language = 'en-US' listing = assets / language / 'listing' with (listing / 'shortDescription.txt').open() as shortDescription, \ (listing / 'fullDescription.txt').open() as fullDescription: service.edits().listings().update( editId=edit_id, packageName=package_name, language=language, body={ 'title': 'Karrot', 'language': language, 'shortDescription': shortDescription.read(), 'fullDescription': fullDescription.read(), 'video': '', } ).execute() print('Listing of %s has been updated' % package_name) images_path = assets / language / 'images' imageTypes = ( 'featureGraphic', 'icon', 'phoneScreenshots', 'promoGraphic', 'sevenInchScreenshots', 'tenInchScreenshots', 'tvBanner', 'tvScreenshots', 'wearScreenshots', ) images = [str(p) for p in images_path.iterdir()] sha1 = subprocess.run(['sha1sum', *images], stdout=subprocess.PIPE).stdout.decode() sha1_images = {sha1: path for (sha1, path) in [i.split() for i in sha1.splitlines()]} for 
imageType in imageTypes: our_images = { sha1: path for (sha1, path) in sha1_images.items() if path.split('/')[-1].startswith(imageType) } images_response = service.edits().images().list( editId=edit_id, packageName=package_name, language=language, imageType=imageType, ).execute() their_images = images_response.get('images') or [] their_images = {i['sha1']: i['id'] for i in their_images} to_upload = [our_images.get(k) for k in (our_images.keys() - their_images.keys())] to_delete = [their_images.get(k) for k in (their_images.keys() - our_images.keys())] for image_id in to_delete: service.edits().images().delete( editId=edit_id, packageName=package_name, language=language, imageType=imageType, imageId=image_id, ).execute() print('Deleted', image_id) for path in to_upload: service.edits().images().upload( editId=edit_id, packageName=package_name, language=language, imageType=imageType, media_body=path, ).execute() print('Uploaded', path) commit_request = service.edits().commit(editId=edit_id, packageName=package_name).execute() print('Edit "%s" has been committed' % (commit_request['id'])) if __name__ == '__main__': main(sys.argv)<|fim▁end|>
credentials = ServiceAccountCredentials.from_json_keyfile_name( CREDENTIALS_JSON, scopes=['https://www.googleapis.com/auth/androidpublisher'] )
<|file_name|>TablePage.java<|end_file_name|><|fim▁begin|>package org.openjava.upay.web.domain; import java.util.List; public class TablePage<T> { private long start; private int length; private long recordsTotal; private long recordsFiltered; private List<T> data; public TablePage() { } public long getStart() { return start; } public void setStart(long start) { this.start = start; } public int getLength() { return length; } public void setLength(int length) { this.length = length; } public long getRecordsTotal() { return recordsTotal; } public void setRecordsTotal(long recordsTotal) { this.recordsTotal = recordsTotal; } public long getRecordsFiltered() { return recordsFiltered; } public void setRecordsFiltered(long recordsFiltered) { this.recordsFiltered = recordsFiltered; } public List<T> getData() { return data; } public void setData(List<T> data) {<|fim▁hole|> this.data = data; } public TablePage wrapData(long total, List<T> data) { this.recordsTotal = total; this.recordsFiltered = total; this.data = data; return this; } }<|fim▁end|>
<|file_name|>strings.js<|end_file_name|><|fim▁begin|>define( ({ _widgetLabel: "속성 테이블", ok: "확인", cancel: "취소", unsupportQueryWarning: "속성 테이블 위젯에 표시하려면 레이어가 쿼리 작업을 지원해야 합니다. 서비스에 쿼리 기능이 켜져 있는지 확인하세요.", exportMessage: "데이터를 CSV 파일로 내보내시겠습니까?",<|fim▁hole|> exportFiles: "CSV로 내보내기", exportSelected: "선택한 항목을 CSV로 내보내기", exportAll: "모든 항목을 CSV로 내보내기", options: "옵션", zoomto: "확대", highlight: "그래픽 강조", selectAll: "모든 페이지의 레코드 선택", selectPage: "현재 페이지의 레코드 선택", clearSelection: "선택 항목 지우기", filter: "필터", setFilterTip: "필터를 올바르게 설정하세요.", noFilterTip: "필터 식이 정의되지 않으면 이 쿼리 작업은 지정된 데이터 원본의 모든 피처를 나열합니다.", filterByExtent: "맵 범위로 필터링", showSelectedRecords: "선택한 레코드 표시", showAllRecords: "모든 레코드 보기", showRelatedRecords: "릴레이트된 레코드 표시", noRelatedRecords: "릴레이트된 레코드 없음", refresh: "새로고침", features: "피처", selected: "선택함", transparent: "투명 모드", indicate: "선택 항목 찾기", columns: "열 표시/숨기기", selectionSymbol: "선택 심볼", closeMessage: "테이블 숨기기(하단에서 다시 확장)", dataNotAvailable: "데이터를 사용할 수 없습니다.<br>[새로 고침] 버튼을 클릭하여 다시 시도하세요.", openTableTip: "속성 테이블 열기", closeTableTip: "속성 테이블 숨기기" }) );<|fim▁end|>
<|file_name|>lockfile-compat.rs<|end_file_name|><|fim▁begin|>extern crate cargotest; extern crate hamcrest; use cargotest::support::git; use cargotest::support::registry::Package; use cargotest::support::{execs, project, lines_match}; use hamcrest::assert_that; #[test] fn oldest_lockfile_still_works() { let cargo_commands = vec![ "build", "update" ]; for cargo_command in cargo_commands { oldest_lockfile_still_works_with_command(cargo_command); } } fn oldest_lockfile_still_works_with_command(cargo_command: &str) { Package::new("foo", "0.1.0").publish(); let expected_lockfile = r#"[[package]] name = "foo" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "zzz" version = "0.0.1" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [metadata] "checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "[..]" "#; let old_lockfile = r#"[root] name = "zzz" version = "0.0.1" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "foo" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" "#; let p = project("bar") .file("Cargo.toml", r#" [project] name = "zzz" version = "0.0.1" authors = [] [dependencies] foo = "0.1.0" "#) .file("src/lib.rs", "") .file("Cargo.lock", old_lockfile) .build(); assert_that(p.cargo(cargo_command), execs().with_status(0)); let lock = p.read_lockfile(); for (l, r) in expected_lockfile.lines().zip(lock.lines()) { assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); } assert_eq!(lock.lines().count(), expected_lockfile.lines().count()); } #[test] fn frozen_flag_preserves_old_lockfile() { Package::new("foo", "0.1.0").publish(); let old_lockfile = r#"[root] name = "zzz" version = "0.0.1" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "foo" version = "0.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f9e0a16bdf5c05435698fa27192d89e331b22a26a972c34984f560662544453b" "#; let p = project("bar") .file("Cargo.toml", r#" [project] name = "zzz" version = "0.0.1" authors = [] [dependencies] foo = "0.1.0" "#) .file("src/lib.rs", "") .file("Cargo.lock", old_lockfile) .build(); assert_that(p.cargo("build").arg("--locked"), execs().with_status(0)); let lock = p.read_lockfile(); for (l, r) in old_lockfile.lines().zip(lock.lines()) { assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); } assert_eq!(lock.lines().count(), old_lockfile.lines().count()); } #[test] fn totally_wild_checksums_works() { Package::new("foo", "0.1.0").publish(); let p = project("bar") .file("Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [dependencies] foo = "0.1.0" "#) .file("src/lib.rs", "") .file("Cargo.lock", r#" [[package]] name = "bar" version = "0.0.1" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "foo" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" "checksum foo 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" "#); let p = p.build(); assert_that(p.cargo("build"), execs().with_status(0)); let lock = p.read_lockfile(); assert!(lock.starts_with(r#" [[package]] name = "bar" version = "0.0.1" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "foo" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "#.trim())); } #[test] fn wrong_checksum_is_an_error() { Package::new("foo", "0.1.0").publish(); let p = project("bar") .file("Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] 
[dependencies] foo = "0.1.0" "#)<|fim▁hole|>[[package]] name = "bar" version = "0.0.1" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "foo" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" "#); let p = p.build(); assert_that(p.cargo("build"), execs().with_status(101).with_stderr("\ [UPDATING] registry `[..]` error: checksum for `foo v0.1.0` changed between lock files this could be indicative of a few possible errors: * the lock file is corrupt * a replacement source in use (e.g. a mirror) returned a different checksum * the source itself may be corrupt in one way or another unable to verify that `foo v0.1.0` is the same as when the lockfile was generated ")); } // If the checksum is unlisted in the lockfile (e.g. <none>) yet we can // calculate it (e.g. it's a registry dep), then we should in theory just fill // it in. 
#[test] fn unlisted_checksum_is_bad_if_we_calculate() { Package::new("foo", "0.1.0").publish(); let p = project("bar") .file("Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [dependencies] foo = "0.1.0" "#) .file("src/lib.rs", "") .file("Cargo.lock", r#" [[package]] name = "bar" version = "0.0.1" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "foo" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>" "#); let p = p.build(); assert_that(p.cargo("fetch"), execs().with_status(101).with_stderr("\ [UPDATING] registry `[..]` error: checksum for `foo v0.1.0` was not previously calculated, but a checksum \ could now be calculated this could be indicative of a few possible situations: * the source `[..]` did not previously support checksums, but was replaced with one that does * newer Cargo implementations know how to checksum this source, but this older implementation does not * the lock file is corrupt ")); } // If the checksum is listed in the lockfile yet we cannot calculate it (e.g. // git dependencies as of today), then make sure we choke. 
#[test] fn listed_checksum_bad_if_we_cannot_compute() { let git = git::new("foo", |p| { p.file("Cargo.toml", r#" [project] name = "foo" version = "0.1.0" authors = [] "#) .file("src/lib.rs", "") }).unwrap(); let p = project("bar") .file("Cargo.toml", &format!(r#" [project] name = "bar" version = "0.0.1" authors = [] [dependencies] foo = {{ git = '{}' }} "#, git.url())) .file("src/lib.rs", "") .file("Cargo.lock", &format!(r#" [[package]] name = "bar" version = "0.0.1" dependencies = [ "foo 0.1.0 (git+{0})" ] [[package]] name = "foo" version = "0.1.0" source = "git+{0}" [metadata] "checksum foo 0.1.0 (git+{0})" = "checksum" "#, git.url())); let p = p.build(); assert_that(p.cargo("fetch"), execs().with_status(101).with_stderr("\ [UPDATING] git repository `[..]` error: checksum for `foo v0.1.0 ([..])` could not be calculated, but a \ checksum is listed in the existing lock file[..] this could be indicative of a few possible situations: * the source `[..]` supports checksums, but was replaced with one that doesn't * the lock file is corrupt unable to verify that `foo v0.1.0 ([..])` is the same as when the lockfile was generated ")); } #[test] fn current_lockfile_format() { Package::new("foo", "0.1.0").publish(); let p = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] foo = "0.1.0" "#) .file("src/lib.rs", ""); let p = p.build(); assert_that(p.cargo("build"), execs().with_status(0)); let actual = p.read_lockfile(); let expected = "\ [[package]] name = \"bar\" version = \"0.0.1\" dependencies = [ \"foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\", ] [[package]] name = \"foo\" version = \"0.1.0\" source = \"registry+https://github.com/rust-lang/crates.io-index\" [metadata] \"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\" = \"[..]\""; for (l, r) in expected.lines().zip(actual.lines()) { assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); } 
assert_eq!(actual.lines().count(), expected.lines().count()); } #[test] fn lockfile_without_root() { Package::new("foo", "0.1.0").publish(); let lockfile = r#"[[package]] name = "bar" version = "0.0.1" dependencies = [ "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "foo" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" "#; let p = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [dependencies] foo = "0.1.0" "#) .file("src/lib.rs", "") .file("Cargo.lock", lockfile); let p = p.build(); assert_that(p.cargo("build"), execs().with_status(0)); let lock = p.read_lockfile(); assert!(lock.starts_with(lockfile.trim())); } #[test] fn locked_correct_error() { Package::new("foo", "0.1.0").publish(); let p = project("bar") .file("Cargo.toml", r#" [project] name = "bar" version = "0.0.1" authors = [] [dependencies] foo = "0.1.0" "#) .file("src/lib.rs", ""); let p = p.build(); assert_that(p.cargo("build").arg("--locked"), execs().with_status(101).with_stderr("\ [UPDATING] registry `[..]` error: the lock file needs to be updated but --locked was passed to prevent this ")); }<|fim▁end|>
.file("src/lib.rs", "") .file("Cargo.lock", r#"