// -*- mode: C++; c-file-style: "cc-mode" -*-
//*************************************************************************
// DESCRIPTION: Verilator: Replicate modules for parameterization
//
// Code available from: https://verilator.org
//
//*************************************************************************
//
// Copyright 2003-2021 by Wilson Snyder. This program is free software; you
// can redistribute it and/or modify it under the terms of either the GNU
// Lesser General Public License Version 3 or the Perl Artistic License
// Version 2.0.
// SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0
//
//*************************************************************************
// PARAM TRANSFORMATIONS:
// Top down traversal:
// For each cell:
// If parameterized,
// Determine all parameter widths, constant values.
// (Interfaces also matter, as if an interface is parameterized
// this effectively changes the width behavior of all that
// reference the iface.)
// Clone module cell calls, renaming with __{par1}_{par2}_...
// Substitute constants for cell's module's parameters.
// Relink pins and cell and ifacerefdtype to point to new module.
//
// For interface Parent's we have the AstIfaceRefDType::cellp()
// pointing to this module. If that parent cell's interface
// module gets parameterized, AstIfaceRefDType::cloneRelink
// will update AstIfaceRefDType::cellp(), and V3LinkDot will
// see the new interface.
//
// However if a submodule's AstIfaceRefDType::ifacep() points
// to the old (unparameterized) interface and needs correction.
// To detect this we must walk all pins looking for interfaces
// that the parent has changed and propagate down.
//
// Then process all modules called by that cell.
// (Cells never referenced after parameters expanded must be ignored.)
//
// After we complete parameters, the varp's will be wrong (point to old module)
// and must be relinked.
//
//*************************************************************************
#include "config_build.h"
#include "verilatedos.h"
#include "V3Global.h"
#include "V3Param.h"
#include "V3Ast.h"
#include "V3Case.h"
#include "V3Const.h"
#include "V3Os.h"
#include "V3Parse.h"
#include "V3Width.h"
#include "V3Unroll.h"
#include "V3Hasher.h"
#include <deque>
#include <map>
#include <memory>
#include <vector>
//######################################################################
// Hierarchical block and parameter db (modules without parameter is also handled)
// Database of hierarchical blocks (--hierarchical-block) and their parameter
// overrides. Given a cell's parameter pins, finds the pre-compiled
// protect-lib wrapper module whose overridden parameter values match.
// Modules without parameters are also handled.
class ParameterizedHierBlocks final {
    using HierBlockOptsByOrigName = std::multimap<std::string, const V3HierarchicalBlockOption*>;
    using HierMapIt = HierBlockOptsByOrigName::const_iterator;
    using HierBlockModMap = std::map<const std::string, AstNodeModule*>;
    using ParamConstMap = std::map<const std::string, std::unique_ptr<AstConst>>;
    using GParamsMap = std::map<const std::string, AstVar*>;  // key:parameter name value:parameter

    // MEMBERS
    // key:Original module name, value:V3HierarchicalBlockOption*
    // If a module is parameterized, the module is uniquified per overridden
    // parameter set. This is why HierBlockOptsByOrigName is a multimap.
    HierBlockOptsByOrigName m_hierBlockOptsByOrigName;
    // key:mangled module name, value:AstNodeModule*
    HierBlockModMap m_hierBlockMod;
    // Overridden parameters of the hierarchical block
    std::map<const V3HierarchicalBlockOption*, ParamConstMap> m_hierParams;
    std::map<const std::string, GParamsMap>
        m_modParams;  // Parameter variables of hierarchical blocks

    // METHODS
    VL_DEBUG_FUNC;  // Declare debug()

public:
    // Build the database from the command-line hierarchical block options and
    // the modules present in the netlist.
    ParameterizedHierBlocks(const V3HierBlockOptSet& hierOpts, AstNetlist* nodep) {
        for (const auto& hierOpt : hierOpts) {
            m_hierBlockOptsByOrigName.insert(
                std::make_pair(hierOpt.second.origName(), &hierOpt.second));
            const V3HierarchicalBlockOption::ParamStrMap& params = hierOpt.second.params();
            ParamConstMap& consts = m_hierParams[&hierOpt.second];
            // Parse each overridden parameter's literal string into an AstConst
            for (V3HierarchicalBlockOption::ParamStrMap::const_iterator pIt = params.begin();
                 pIt != params.end(); ++pIt) {
                std::unique_ptr<AstConst> constp{AstConst::parseParamLiteral(
                    new FileLine(FileLine::EmptySecret()), pIt->second)};
                UASSERT(constp, pIt->second << " is not a valid parameter literal");
                const bool inserted = consts.emplace(pIt->first, std::move(constp)).second;
                UASSERT(inserted, pIt->first << " is already added");
            }
            // origName may be already registered, but it's fine.
            m_modParams.insert({hierOpt.second.origName(), {}});
        }
        // Collect the wrapper modules and the GParams of each original module
        for (AstNodeModule* modp = nodep->modulesp(); modp;
             modp = VN_CAST(modp->nextp(), NodeModule)) {
            if (hierOpts.find(modp->prettyName()) != hierOpts.end()) {
                m_hierBlockMod.emplace(modp->name(), modp);
            }
            const auto defParamIt = m_modParams.find(modp->name());
            if (defParamIt != m_modParams.end()) {
                // modp is the original of parameterized hierarchical block
                for (AstNode* stmtp = modp->stmtsp(); stmtp; stmtp = stmtp->nextp()) {
                    if (AstVar* varp = VN_CAST(stmtp, Var)) {
                        if (varp->isGParam()) defParamIt->second.emplace(varp->name(), varp);
                    }
                }
            }
        }
    }
    // Return the protect-lib wrapper module whose parameter overrides match
    // the parameter pins of an instantiation, or nullptr if origName is not a
    // hierarchical block at all. Asserts if it is a hierarchical block but no
    // candidate matches.
    AstNodeModule* findByParams(const string& origName, AstPin* firstPinp,
                                const AstNodeModule* modp) {
        if (m_hierBlockOptsByOrigName.find(origName) == m_hierBlockOptsByOrigName.end()) {
            return nullptr;
        }
        // This module is a hierarchical block. Need to replace it by the protect-lib wrapper.
        const std::pair<HierMapIt, HierMapIt> candidates
            = m_hierBlockOptsByOrigName.equal_range(origName);
        const auto paramsIt = m_modParams.find(origName);
        UASSERT_OBJ(paramsIt != m_modParams.end(), modp, origName << " must be registered");
        HierMapIt hierIt;
        for (hierIt = candidates.first; hierIt != candidates.second; ++hierIt) {
            bool found = true;
            size_t paramIdx = 0;
            const ParamConstMap& params = m_hierParams[hierIt->second];
            UASSERT(params.size() == hierIt->second->params().size(), "not match");
            for (AstPin* pinp = firstPinp; pinp; pinp = VN_CAST(pinp->nextp(), Pin)) {
                if (!pinp->exprp()) continue;
                UASSERT_OBJ(!pinp->modPTypep(), pinp,
                            "module with type parameter must not be a hierarchical block");
                if (AstVar* modvarp = pinp->modVarp()) {
                    AstConst* constp = VN_CAST(pinp->exprp(), Const);
                    UASSERT_OBJ(constp, pinp,
                                "parameter for a hierarchical block must have been constified");
                    const auto paramIt = paramsIt->second.find(modvarp->name());
                    UASSERT_OBJ(paramIt != paramsIt->second.end(), modvarp, "must be registered");
                    AstConst* defValuep = VN_CAST(paramIt->second->valuep(), Const);
                    if (defValuep && areSame(constp, defValuep)) {
                        UINFO(5, "Setting default value of " << constp << " to " << modvarp
                                                             << std::endl);
                        continue;  // Skip this parameter because setting the same value
                    }
                    const auto pIt = vlstd::as_const(params).find(modvarp->name());
                    UINFO(5, "Comparing " << modvarp->name() << " " << constp << std::endl);
                    if (pIt == params.end() || paramIdx >= params.size()
                        || !areSame(constp, pIt->second.get())) {
                        found = false;
                        break;
                    }
                    UINFO(5, "Matched " << modvarp->name() << " " << constp << " and "
                                        << pIt->second.get() << std::endl);
                    ++paramIdx;
                }
            }
            // Every non-default override must have been matched
            if (found && paramIdx == hierIt->second->params().size()) break;
        }
        UASSERT_OBJ(hierIt != candidates.second, firstPinp, "No protect-lib wrapper found");
        // parameter settings will be removed in the bottom of caller visitCell().
        const HierBlockModMap::const_iterator modIt
            = m_hierBlockMod.find(hierIt->second->mangledName());
        UASSERT_OBJ(modIt != m_hierBlockMod.end(), firstPinp,
                    hierIt->second->mangledName() << " is not found");
        // Note: previously the same key was looked up a second time with a
        // dead nullptr path; the assert above already guarantees presence.
        return modIt->second;
    }
    // Compare a pin's constant value against a hierarchical block parameter,
    // casting the pin value to the parameter's width/realness first.
    static bool areSame(AstConst* pinValuep, AstConst* hierOptParamp) {
        if (pinValuep->isString()) {
            return pinValuep->num().toString() == hierOptParamp->num().toString();
        }
        // Bitwidth of hierOptParamp is accurate because V3Width already calculated it in the
        // previous run. Bitwidth of pinValuep is before width analysis, so pinValuep is cast
        // to hierOptParamp width.
        V3Number varNum(pinValuep, hierOptParamp->num().width());
        if (hierOptParamp->isDouble()) {
            varNum.isDouble(true);
            if (pinValuep->isDouble()) {
                varNum.opAssign(pinValuep->num());
            } else {  // Cast from integer to real
                varNum.opIToRD(pinValuep->num());
            }
            return v3EpsilonEqual(varNum.toDouble(), hierOptParamp->num().toDouble());
        } else {  // Now integer type is assumed
            if (pinValuep->isDouble()) {  // Need to cast to int
                // Parameter is actually an integral type, but passed value is floating point.
                // Conversion from real to integer uses rounding in V3Width.cpp
                varNum.opRToIRoundS(pinValuep->num());
            } else if (pinValuep->isSigned()) {
                varNum.opExtendS(pinValuep->num(), pinValuep->num().width());
            } else {
                varNum.opAssign(pinValuep->num());
            }
            V3Number isEq(pinValuep, 1);
            isEq.opEq(varNum, hierOptParamp->num());
            return isEq.isNeqZero();  // opEq yields 1 when equal
        }
    }
};
//######################################################################
// Remove parameters from cells and build new modules
class ParamProcessor final {
// NODE STATE - Local
// AstVar::user4() // int Global parameter number (for naming new module)
// // (0=not processed, 1=iterated, but no number,
// // 65+ parameter numbered)
// NODE STATE - Shared with ParamVisitor
// AstNodeModule::user5() // bool True if processed
// AstGenFor::user5() // bool True if processed
// AstVar::user5() // bool True if constant propagated
// AstCell::user5p() // string* Generate portion of hierarchical name
AstUser4InUse m_inuser4;
AstUser5InUse m_inuser5;
// User1/2/3 used by constant function simulations
// TYPES
// Note may have duplicate entries
using IfaceRefRefs = std::deque<std::pair<AstIfaceRefDType*, AstIfaceRefDType*>>;
// STATE
using CloneMap = std::unordered_map<const AstNode*, AstNode*>;
struct ModInfo {
AstNodeModule* m_modp; // Module with specified name
CloneMap m_cloneMap; // Map of old-varp -> new cloned varp
explicit ModInfo(AstNodeModule* modp)
: m_modp{modp} {}
};
std::map<const std::string, ModInfo> m_modNameMap; // Hash of created module flavors by name
std::map<const std::string, std::string>
m_longMap; // Hash of very long names to unique identity number
int m_longId = 0;
// All module names that are loaded from source code
// Generated modules by this visitor is not included
V3StringSet m_allModuleNames;
using ValueMapValue = std::pair<int, std::string>;
std::map<const V3Hash, ValueMapValue> m_valueMap; // Hash of node hash to (param value, name)
int m_nextValue = 1; // Next value to use in m_valueMap
AstNodeModule* m_modp = nullptr; // Current module being processed
// Database to get protect-lib wrapper that matches parameters in hierarchical Verilation
ParameterizedHierBlocks m_hierBlocks;
// Default parameter values key:parameter name, value:default value (can be nullptr)
using DefaultValueMap = std::map<std::string, AstConst*>;
// Default parameter values of hierarchical blocks
std::map<AstNodeModule*, DefaultValueMap> m_defaultParameterValues;
// METHODS
VL_DEBUG_FUNC; // Declare debug()
static void makeSmallNames(AstNodeModule* modp) {
std::vector<int> usedLetter;
usedLetter.resize(256);
// Pass 1, assign first letter to each gparam's name
for (AstNode* stmtp = modp->stmtsp(); stmtp; stmtp = stmtp->nextp()) {
if (AstVar* varp = VN_CAST(stmtp, Var)) {
if (varp->isGParam() || varp->isIfaceRef()) {
char ch = varp->name()[0];
ch = std::toupper(ch);
if (ch < 'A' || ch > 'Z') ch = 'Z';
varp->user4(usedLetter[static_cast<int>(ch)] * 256 + ch);
usedLetter[static_cast<int>(ch)]++;
}
} else if (AstParamTypeDType* typep = VN_CAST(stmtp, ParamTypeDType)) {
const char ch = 'T';
typep->user4(usedLetter[static_cast<int>(ch)] * 256 + ch);
usedLetter[static_cast<int>(ch)]++;
}
}
}
string paramSmallName(AstNodeModule* modp, AstNode* varp) {
if (varp->user4() <= 1) makeSmallNames(modp);
int index = varp->user4() / 256;
const char ch = varp->user4() & 255;
string st = cvtToStr(ch);
while (index) {
st += cvtToStr(char((index % 25) + 'A'));
index /= 26;
}
return st;
}
static string paramValueKey(const AstNode* nodep) {
if (const AstRefDType* const refp = VN_CAST_CONST(nodep, RefDType)) {
nodep = refp->skipRefp();
}
string key = nodep->name();
if (const AstIfaceRefDType* const ifrtp = VN_CAST_CONST(nodep, IfaceRefDType)) {
if (ifrtp->cellp() && ifrtp->cellp()->modp()) {
key = ifrtp->cellp()->modp()->name();
} else if (ifrtp->ifacep()) {
key = ifrtp->ifacep()->name();
} else {
nodep->v3fatalSrc("Can't parameterize interface without module name");
}
} else if (const AstNodeUOrStructDType* const dtypep
= VN_CAST_CONST(nodep, NodeUOrStructDType)) {
key += " ";
key += dtypep->verilogKwd();
key += " {";
for (const AstNode* memberp = dtypep->membersp(); memberp;
memberp = memberp->nextp()) {
key += paramValueKey(memberp);
key += ";";
}
key += "}";
} else if (const AstMemberDType* const dtypep = VN_CAST_CONST(nodep, MemberDType)) {
key += " ";
key += paramValueKey(dtypep->subDTypep());
} else if (const AstBasicDType* const dtypep = VN_CAST_CONST(nodep, BasicDType)) {
if (dtypep->isRanged()) {
key += "[" + cvtToStr(dtypep->left()) + ":" + cvtToStr(dtypep->right()) + "]";
}
}
return key;
}
string paramValueNumber(AstNode* nodep) {
// TODO: This parameter value number lookup via a constructed key string is not
// particularly robust for type parameters. We should really have a type
// equivalence predicate function.
const string key = paramValueKey(nodep);
V3Hash hash = V3Hasher::uncachedHash(nodep);
// Force hash collisions -- for testing only
if (VL_UNLIKELY(v3Global.opt.debugCollision())) hash = V3Hash();
int num;
const auto it = m_valueMap.find(hash);
if (it != m_valueMap.end() && it->second.second == key) {
num = it->second.first;
} else {
num = m_nextValue++;
m_valueMap[hash] = std::make_pair(num, key);
}
return string("z") + cvtToStr(num);
}
string moduleCalcName(AstNodeModule* srcModp, const string& longname) {
string newname = longname;
if (longname.length() > 30) {
const auto iter = m_longMap.find(longname);
if (iter != m_longMap.end()) {
newname = iter->second;
} else {
newname = srcModp->name();
// We use all upper case above, so lower here can't conflict
newname += "__pi" + cvtToStr(++m_longId);
m_longMap.emplace(longname, newname);
}
}
UINFO(4, "Name: " << srcModp->name() << "->" << longname << "->" << newname << endl);
return newname;
}
AstNodeDType* arraySubDTypep(AstNodeDType* nodep) {
// If an unpacked array, return the subDTypep under it
if (AstUnpackArrayDType* adtypep = VN_CAST(nodep, UnpackArrayDType)) {
return adtypep->subDTypep();
}
// We have not resolved parameter of the child yet, so still
// have BracketArrayDType's. We'll presume it'll end up as assignment
// compatible (or V3Width will complain).
if (AstBracketArrayDType* adtypep = VN_CAST(nodep, BracketArrayDType)) {
return adtypep->subDTypep();
}
return nullptr;
}
void collectPins(CloneMap* clonemapp, AstNodeModule* modp) {
// Grab all I/O so we can remap our pins later
for (AstNode* stmtp = modp->stmtsp(); stmtp; stmtp = stmtp->nextp()) {
if (AstVar* varp = VN_CAST(stmtp, Var)) {
if (varp->isIO() || varp->isGParam() || varp->isIfaceRef()) {
// Cloning saved a pointer to the new node for us, so just follow that link.
AstVar* oldvarp = varp->clonep();
// UINFO(8,"Clone list 0x"<<hex<<(uint32_t)oldvarp
// <<" -> 0x"<<(uint32_t)varp<<endl);
clonemapp->emplace(oldvarp, varp);
}
} else if (AstParamTypeDType* ptp = VN_CAST(stmtp, ParamTypeDType)) {
if (ptp->isGParam()) {
AstParamTypeDType* oldptp = ptp->clonep();
clonemapp->emplace(oldptp, ptp);
}
}
}
}
void relinkPins(const CloneMap* clonemapp, AstPin* startpinp) {
for (AstPin* pinp = startpinp; pinp; pinp = VN_CAST(pinp->nextp(), Pin)) {
if (pinp->modVarp()) {
// Find it in the clone structure
// UINFO(8,"Clone find 0x"<<hex<<(uint32_t)pinp->modVarp()<<endl);
const auto cloneiter = clonemapp->find(pinp->modVarp());
UASSERT_OBJ(cloneiter != clonemapp->end(), pinp,
"Couldn't find pin in clone list");
pinp->modVarp(VN_CAST(cloneiter->second, Var));
} else if (pinp->modPTypep()) {
const auto cloneiter = clonemapp->find(pinp->modPTypep());
UASSERT_OBJ(cloneiter != clonemapp->end(), pinp,
"Couldn't find pin in clone list");
pinp->modPTypep(VN_CAST(cloneiter->second, ParamTypeDType));
} else {
pinp->v3fatalSrc("Not linked?");
}
}
}
void relinkPinsByName(AstPin* startpinp, AstNodeModule* modp) {
std::map<const string, AstVar*> nameToPin;
for (AstNode* stmtp = modp->stmtsp(); stmtp; stmtp = stmtp->nextp()) {
if (AstVar* varp = VN_CAST(stmtp, Var)) {
if (varp->isIO() || varp->isGParam() || varp->isIfaceRef()) {
nameToPin.emplace(varp->name(), varp);
}
}
}
for (AstPin* pinp = startpinp; pinp; pinp = VN_CAST(pinp->nextp(), Pin)) {
if (AstVar* varp = pinp->modVarp()) {
const auto varIt = vlstd::as_const(nameToPin).find(varp->name());
UASSERT_OBJ(varIt != nameToPin.end(), varp,
"Not found in " << modp->prettyNameQ());
pinp->modVarp(varIt->second);
}
}
}
// Check if parameter setting during instantiation is simple enough for hierarchical verilation
void checkSupportedParam(AstNodeModule* modp, AstPin* pinp) const {
// InitArray and AstParamTypeDType are not supported because that can not be set via -G
// option.
if (pinp->modVarp()) {
bool supported = false;
if (AstConst* constp = VN_CAST(pinp->exprp(), Const)) {
supported = !constp->isOpaque();
}
if (!supported) {
pinp->v3error(AstNode::prettyNameQ(modp->origName())
<< " has hier_block metacomment, hierarchical verilation"
<< " supports only integer/floating point/string parameters");
}
} else if (VN_IS(pinp->modPTypep(), ParamTypeDType)) {
pinp->v3error(AstNode::prettyNameQ(modp->origName())
<< " has hier_block metacomment, but 'parameter type' is not supported");
}
}
bool moduleExists(const string& modName) const {
if (m_allModuleNames.find(modName) != m_allModuleNames.end()) return true;
if (m_modNameMap.find(modName) != m_modNameMap.end()) return true;
return false;
}
string parameterizedHierBlockName(AstNodeModule* modp, AstPin* paramPinsp) {
// Create a unique name in the following steps
// - Make a long name that includes all parameters, that appear
// in the alphabetical order.
// - Hash the long name to get valid Verilog symbol
UASSERT_OBJ(modp->hierBlock(), modp, "should be used for hierarchical block");
std::map<string, AstConst*> pins;
for (AstPin* pinp = paramPinsp; pinp; pinp = VN_CAST(pinp->nextp(), Pin)) {
checkSupportedParam(modp, pinp);
if (AstVar* varp = pinp->modVarp()) {
if (!pinp->exprp()) continue;
if (varp->isGParam()) {
AstConst* constp = VN_CAST(pinp->exprp(), Const);
pins.emplace(varp->name(), constp);
}
}
}
auto paramsIt = m_defaultParameterValues.find(modp);
if (paramsIt == m_defaultParameterValues.end()) { // Not cached yet, so check parameters
// Using map with key=string so that we can scan it in deterministic order
DefaultValueMap params;
for (AstNode* stmtp = modp->stmtsp(); stmtp; stmtp = stmtp->nextp()) {
if (AstVar* varp = VN_CAST(stmtp, Var)) {
if (varp->isGParam()) {
AstConst* constp = VN_CAST(varp->valuep(), Const);
// constp can be nullptr if the parameter is not used to instantiate sub
// module. varp->valuep() is not contified yet in the case.
// nullptr means that the parameter is using some default value.
params.emplace(varp->name(), constp);
}
}
}
paramsIt = m_defaultParameterValues.emplace(modp, std::move(params)).first;
}
if (paramsIt->second.empty()) return modp->name(); // modp has no parameter
string longname = modp->name();
for (auto&& defaultValue : paramsIt->second) {
const auto pinIt = pins.find(defaultValue.first);
AstConst* constp = pinIt == pins.end() ? defaultValue.second : pinIt->second;
// This longname is not valid as verilog symbol, but ok, because it will be hashed
longname += "_" + defaultValue.first + "=";
// constp can be nullptr
if (constp) longname += constp->num().ascii(false);
}
const auto iter = m_longMap.find(longname);
if (iter != m_longMap.end()) return iter->second; // Already calculated
VHashSha256 hash;
// Calculate hash using longname
// The hash is used as the module suffix to find a module name that is unique in the design
hash.insert(longname);
while (true) {
// Copy VHashSha256 just in case of hash collision
VHashSha256 hashStrGen = hash;
// Hex string must be a safe suffix for any symbol
const string hashStr = hashStrGen.digestHex();
for (string::size_type i = 1; i < hashStr.size(); ++i) {
string newName = modp->name();
// Don't use '__' not to be encoded when this module is loaded later by Verilator
if (newName.at(newName.size() - 1) != '_') newName += '_';
newName += hashStr.substr(0, i);
if (!moduleExists(newName)) {
m_longMap.emplace(longname, newName);
return newName;
}
}
// Hash collision. maybe just v3error is practically enough
hash.insert(V3Os::trueRandom(64));
}
}
void deepCloneModule(AstNodeModule* srcModp, AstNode* cellp, AstPin* paramsp,
const string& newname, const IfaceRefRefs& ifaceRefRefs) {
// Deep clone of new module
// Note all module internal variables will be re-linked to the new modules by clone
// However links outside the module (like on the upper cells) will not.
AstNodeModule* newmodp = srcModp->cloneTree(false);
newmodp->name(newname);
newmodp->user5(false); // We need to re-recurse this module once changed
newmodp->recursive(false);
newmodp->recursiveClone(false);
// Only the first generation of clone holds this property
newmodp->hierBlock(srcModp->hierBlock() && !srcModp->recursiveClone());
// Recursion may need level cleanups
if (newmodp->level() <= m_modp->level()) newmodp->level(m_modp->level() + 1);
if ((newmodp->level() - srcModp->level()) >= (v3Global.opt.moduleRecursionDepth() - 2)) {
cellp->v3error("Exceeded maximum --module-recursion-depth of "
<< v3Global.opt.moduleRecursionDepth());
}
// Keep tree sorted by level
AstNodeModule* insertp = srcModp;
while (VN_IS(insertp->nextp(), NodeModule)
&& VN_CAST(insertp->nextp(), NodeModule)->level() < newmodp->level()) {
insertp = VN_CAST(insertp->nextp(), NodeModule);
}
insertp->addNextHere(newmodp);
m_modNameMap.emplace(newmodp->name(), ModInfo(newmodp));
const auto iter = m_modNameMap.find(newname);
CloneMap* clonemapp = &(iter->second.m_cloneMap);
UINFO(4, " De-parameterize to new: " << newmodp << endl);
// Grab all I/O so we can remap our pins later
// Note we allow multiple users of a parameterized model,
// thus we need to stash this info.
collectPins(clonemapp, newmodp);
// Relink parameter vars to the new module
relinkPins(clonemapp, paramsp);
// Fix any interface references
for (auto it = ifaceRefRefs.cbegin(); it != ifaceRefRefs.cend(); ++it) {
AstIfaceRefDType* portIrefp = it->first;
AstIfaceRefDType* pinIrefp = it->second;
AstIfaceRefDType* cloneIrefp = portIrefp->clonep();
UINFO(8, " IfaceOld " << portIrefp << endl);
UINFO(8, " IfaceTo " << pinIrefp << endl);
UASSERT_OBJ(cloneIrefp, portIrefp, "parameter clone didn't hit AstIfaceRefDType");
UINFO(8, " IfaceClo " << cloneIrefp << endl);
cloneIrefp->ifacep(pinIrefp->ifaceViaCellp());
UINFO(8, " IfaceNew " << cloneIrefp << endl);
}
// Assign parameters to the constants specified
// DOES clone() so must be finished with module clonep() before here
for (AstPin* pinp = paramsp; pinp; pinp = VN_CAST(pinp->nextp(), Pin)) {
if (pinp->exprp()) {
if (AstVar* modvarp = pinp->modVarp()) {
AstNode* newp = pinp->exprp(); // Const or InitArray
AstConst* exprp = VN_CAST(newp, Const);
AstConst* origp = VN_CAST(modvarp->valuep(), Const);
const bool overridden
= !(origp && ParameterizedHierBlocks::areSame(exprp, origp));
// Remove any existing parameter
if (modvarp->valuep()) modvarp->valuep()->unlinkFrBack()->deleteTree();
// Set this parameter to value requested by cell
UINFO(9, " set param " << modvarp << " = " << newp << endl);
modvarp->valuep(newp->cloneTree(false));
modvarp->overriddenParam(overridden);
} else if (AstParamTypeDType* modptp = pinp->modPTypep()) {
AstNodeDType* dtypep = VN_CAST(pinp->exprp(), NodeDType);
UASSERT_OBJ(dtypep, pinp, "unlinked param dtype");
if (modptp->childDTypep()) modptp->childDTypep()->unlinkFrBack()->deleteTree();
// Set this parameter to value requested by cell
modptp->childDTypep(dtypep->cloneTree(false));
// Later V3LinkDot will convert the ParamDType to a Typedef
// Not done here as may be localparams, etc, that also need conversion
}
}
}
}
const ModInfo* moduleFindOrClone(AstNodeModule* srcModp, AstNode* cellp, AstPin* paramsp,
const string& newname, const IfaceRefRefs& ifaceRefRefs) {
// Already made this flavor?
auto it = m_modNameMap.find(newname);
if (it != m_modNameMap.end()) {
UINFO(4, " De-parameterize to old: " << it->second.m_modp << endl);
} else {
deepCloneModule(srcModp, cellp, paramsp, newname, ifaceRefRefs);
it = m_modNameMap.find(newname);
UASSERT(it != m_modNameMap.end(), "should find just-made module");
}
const ModInfo* modInfop = &(it->second);
return modInfop;
}
void cellPinCleanup(AstNode* nodep, AstPin* pinp, AstNodeModule* srcModp, string& longnamer,
bool& any_overridesr) {
if (!pinp->exprp()) return; // No-connect
if (AstVar* modvarp = pinp->modVarp()) {
if (!modvarp->isGParam()) {
pinp->v3error("Attempted parameter setting of non-parameter: Param "
<< pinp->prettyNameQ() << " of " << nodep->prettyNameQ());
} else if (VN_IS(pinp->exprp(), InitArray) && arraySubDTypep(modvarp->subDTypep())) {
// Array assigned to array
AstNode* exprp = pinp->exprp();
longnamer += "_" + paramSmallName(srcModp, modvarp) + paramValueNumber(exprp);
any_overridesr = true;
} else {
AstConst* exprp = VN_CAST(pinp->exprp(), Const);
AstConst* origp = VN_CAST(modvarp->valuep(), Const);
if (!exprp) {
// if (debug()) pinp->dumpTree(cout, "error:");
pinp->v3error("Can't convert defparam value to constant: Param "
<< pinp->prettyNameQ() << " of " << nodep->prettyNameQ());
pinp->exprp()->replaceWith(new AstConst(
pinp->fileline(), AstConst::WidthedValue(), modvarp->width(), 0));
} else if (origp && exprp->sameTree(origp)) {
// Setting parameter to its default value. Just ignore it.
// This prevents making additional modules, and makes coverage more
// obvious as it won't show up under a unique module page name.
} else if (exprp->num().isDouble() || exprp->num().isString()
|| exprp->num().isFourState() || exprp->num().width() != 32) {
longnamer
+= ("_" + paramSmallName(srcModp, modvarp) + paramValueNumber(exprp));
any_overridesr = true;
} else {
longnamer
+= ("_" + paramSmallName(srcModp, modvarp) + exprp->num().ascii(false));
any_overridesr = true;
}
}
} else if (AstParamTypeDType* modvarp = pinp->modPTypep()) {
AstNodeDType* exprp = VN_CAST(pinp->exprp(), NodeDType);
AstNodeDType* origp = modvarp->subDTypep();
if (!exprp) {
pinp->v3error("Parameter type pin value isn't a type: Param "
<< pinp->prettyNameQ() << " of " << nodep->prettyNameQ());
} else if (!origp) {
pinp->v3error("Parameter type variable isn't a type: Param "
<< modvarp->prettyNameQ());
} else {
UINFO(9, "Parameter type assignment expr=" << exprp << " to " << origp << endl);
if (exprp->sameTree(origp)) {
// Setting parameter to its default value. Just ignore it.
// This prevents making additional modules, and makes coverage more
// obvious as it won't show up under a unique module page name.
} else {
V3Const::constifyParamsEdit(exprp);
longnamer += "_" + paramSmallName(srcModp, modvarp) + paramValueNumber(exprp);
any_overridesr = true;
}
}
} else {
pinp->v3error("Parameter not found in sub-module: Param "
<< pinp->prettyNameQ() << " of " << nodep->prettyNameQ());
}
}
void cellInterfaceCleanup(AstCell* nodep, AstNodeModule* srcModp, string& longnamer,
bool& any_overridesr, IfaceRefRefs& ifaceRefRefs) {
for (AstPin* pinp = nodep->pinsp(); pinp; pinp = VN_CAST(pinp->nextp(), Pin)) {
AstVar* modvarp = pinp->modVarp();
if (modvarp->isIfaceRef()) {
AstIfaceRefDType* portIrefp = VN_CAST(modvarp->subDTypep(), IfaceRefDType);
if (!portIrefp && arraySubDTypep(modvarp->subDTypep())) {
portIrefp = VN_CAST(arraySubDTypep(modvarp->subDTypep()), IfaceRefDType);
}
AstIfaceRefDType* pinIrefp = nullptr;
AstNode* exprp = pinp->exprp();
AstVar* varp
= (exprp && VN_IS(exprp, VarRef)) ? VN_CAST(exprp, VarRef)->varp() : nullptr;
if (varp && varp->subDTypep() && VN_IS(varp->subDTypep(), IfaceRefDType)) {
pinIrefp = VN_CAST(varp->subDTypep(), IfaceRefDType);
} else if (varp && varp->subDTypep() && arraySubDTypep(varp->subDTypep())
&& VN_CAST(arraySubDTypep(varp->subDTypep()), IfaceRefDType)) {
pinIrefp = VN_CAST(arraySubDTypep(varp->subDTypep()), IfaceRefDType);
} else if (exprp && exprp->op1p() && VN_IS(exprp->op1p(), VarRef)
&& VN_CAST(exprp->op1p(), VarRef)->varp()
&& VN_CAST(exprp->op1p(), VarRef)->varp()->subDTypep()
&& arraySubDTypep(VN_CAST(exprp->op1p(), VarRef)->varp()->subDTypep())
&& VN_CAST(
arraySubDTypep(VN_CAST(exprp->op1p(), VarRef)->varp()->subDTypep()),
IfaceRefDType)) {
pinIrefp = VN_CAST(
arraySubDTypep(VN_CAST(exprp->op1p(), VarRef)->varp()->subDTypep()),
IfaceRefDType);
}
UINFO(9, " portIfaceRef " << portIrefp << endl);
if (!portIrefp) {
pinp->v3error("Interface port " << modvarp->prettyNameQ()
<< " is not an interface " << modvarp);
} else if (!pinIrefp) {
pinp->v3error("Interface port "
<< modvarp->prettyNameQ()
<< " is not connected to interface/modport pin expression");
} else {
UINFO(9, " pinIfaceRef " << pinIrefp << endl);
if (portIrefp->ifaceViaCellp() != pinIrefp->ifaceViaCellp()) {
UINFO(9, " IfaceRefDType needs reconnect " << pinIrefp << endl);
longnamer += ("_" + paramSmallName(srcModp, pinp->modVarp())
+ paramValueNumber(pinIrefp));
any_overridesr = true;
ifaceRefRefs.push_back(std::make_pair(portIrefp, pinIrefp));
if (portIrefp->ifacep() != pinIrefp->ifacep()
// Might be different only due to param cloning, so check names too
&& portIrefp->ifaceName() != pinIrefp->ifaceName()) {
pinp->v3error("Port " << pinp->prettyNameQ() << " expects "
<< AstNode::prettyNameQ(portIrefp->ifaceName())
<< " interface but pin connects "
<< AstNode::prettyNameQ(pinIrefp->ifaceName())
<< " interface");
}
}
}
}
}
}
public:
void cellDeparam(AstCell* nodep, AstNodeModule* modp, const string& hierName) {
m_modp = modp;
// Cell: Check for parameters in the instantiation.
// We always run this, even if no parameters, as need to look for interfaces,
// and remove any recursive references
UINFO(4, "De-parameterize: " << nodep << endl);
// Create new module name with _'s between the constants
if (debug() >= 10) nodep->dumpTree(cout, "-cell: ");
// Evaluate all module constants
V3Const::constifyParamsEdit(nodep);
AstNodeModule* srcModp = nodep->modp();
srcModp->hierName(hierName + "." + nodep->name());
// Make sure constification worked
// Must be a separate loop, as constant conversion may have changed some pointers.
// if (debug()) nodep->dumpTree(cout, "-cel2: ");
string longname = srcModp->name() + "_";
bool any_overrides = false;
// Must always clone __Vrcm (recursive modules)
if (nodep->recursive()) any_overrides = true;
if (debug() > 8) nodep->paramsp()->dumpTreeAndNext(cout, "-cellparams: ");
if (srcModp->hierBlock()) {
longname = parameterizedHierBlockName(srcModp, nodep->paramsp());
any_overrides = longname != srcModp->name();
} else {
for (AstPin* pinp = nodep->paramsp(); pinp; pinp = VN_CAST(pinp->nextp(), Pin)) {
cellPinCleanup(nodep, pinp, srcModp, longname /*ref*/, any_overrides /*ref*/);
}
}
IfaceRefRefs ifaceRefRefs;
cellInterfaceCleanup(nodep, srcModp, longname /*ref*/, any_overrides /*ref*/,
ifaceRefRefs /*ref*/);
if (!any_overrides) {
UINFO(8, "Cell parameters all match original values, skipping expansion.\n");
} else if (AstNodeModule* paramedModp
= m_hierBlocks.findByParams(srcModp->name(), nodep->paramsp(), m_modp)) {
nodep->modp(paramedModp);
nodep->modName(paramedModp->name());
paramedModp->dead(false);
// We need to relink the pins to the new module
relinkPinsByName(nodep->pinsp(), paramedModp);
} else {
const string newname
= srcModp->hierBlock() ? longname : moduleCalcName(srcModp, longname);
const ModInfo* modInfop
= moduleFindOrClone(srcModp, nodep, nodep->paramsp(), newname, ifaceRefRefs);
// Have child use this module instead.
nodep->modp(modInfop->m_modp);
nodep->modName(newname);
// We need to relink the pins to the new module
relinkPinsByName(nodep->pinsp(), modInfop->m_modp);
UINFO(8, " Done with " << modInfop->m_modp << endl);
}
nodep->recursive(false);
// Delete the parameters from the cell; they're not relevant any longer.
if (nodep->paramsp()) nodep->paramsp()->unlinkFrBackWithNext()->deleteTree();
UINFO(8, " Done with " << nodep << endl);
// if (debug() >= 10)
// v3Global.rootp()->dumpTreeFile(v3Global.debugFilename("param-out.tree"));
}
// CONSTRUCTORS
    // Construct: record hier-block options and pre-collect every existing
    // module name (presumably so newly generated de-parameterized module
    // names can avoid collisions -- confirm against moduleCalcName()).
    explicit ParamProcessor(AstNetlist* nodep)
        : m_hierBlocks{v3Global.opt.hierBlocks(), nodep} {
        for (AstNodeModule* modp = nodep->modulesp(); modp;
             modp = VN_CAST(modp->nextp(), NodeModule)) {
            m_allModuleNames.insert(modp->name());
        }
    }
~ParamProcessor() = default;
VL_UNCOPYABLE(ParamProcessor);
};
//######################################################################
// Process parameter visitor
class ParamVisitor final : public AstNVisitor {
// STATE
ParamProcessor m_processor; // De-parameterize a cell, build modules
UnrollStateful m_unroller; // Loop unroller
AstNodeModule* m_modp = nullptr; // Current module being processed
string m_generateHierName; // Generate portion of hierarchy name
string m_unlinkedTxt; // Text for AstUnlinkedRef
std::deque<AstCell*> m_cellps; // Cells left to process (in this module)
std::multimap<int, AstNodeModule*> m_todoModps; // Modules left to process
// METHODS
VL_DEBUG_FUNC; // Declare debug()
    // De-parameterize one cell (after visiting its children), then queue the
    // cell's (possibly cloned) module so visitModules() processes it later.
    void visitCellDeparam(AstCell* nodep, const string& hierName) {
        // Cell: Check for parameters in the instantiation.
        iterateChildren(nodep);
        UASSERT_OBJ(nodep->modp(), nodep, "Not linked?");
        m_processor.cellDeparam(nodep, m_modp, hierName);
        // Remember to process the child module at the end of the module
        m_todoModps.emplace(nodep->modp()->level(), nodep->modp());
    }
    // Drain the level-sorted work list of modules, visiting each once and then
    // de-parameterizing its queued cells (interfaces first).
    void visitModules() {
        // Loop on all modules left to process
        // Hitting a cell adds to the appropriate level of this level-sorted list,
        // so since cells originally exist top->bottom we process in top->bottom order too.
        while (!m_todoModps.empty()) {
            const auto itm = m_todoModps.cbegin();
            AstNodeModule* nodep = itm->second;
            m_todoModps.erase(itm);
            if (!nodep->user5SetOnce()) {  // Process once; note clone() must clear so we do it
                // again
                m_modp = nodep;
                UINFO(4, " MOD " << nodep << endl);
                if (m_modp->hierName().empty()) m_modp->hierName(m_modp->origName());
                iterateChildren(nodep);
                // Note above iterate may add to m_todoModps
                //
                // Process interface cells, then non-interface which may ref an interface cell
                for (int nonIf = 0; nonIf < 2; ++nonIf) {
                    for (AstCell* cellp : m_cellps) {
                        if ((nonIf == 0 && VN_IS(cellp->modp(), Iface))
                            || (nonIf == 1 && !VN_IS(cellp->modp(), Iface))) {
                            // Recover the generate-scope suffix stashed by visit(AstCell)
                            string fullName(m_modp->hierName());
                            if (const string* genHierNamep = (string*)cellp->user5p()) {
                                fullName += *genHierNamep;
                                cellp->user5p(nullptr);
                                VL_DO_DANGLING(delete genHierNamep, genHierNamep);
                            }
                            VL_DO_DANGLING(visitCellDeparam(cellp, fullName), cellp);
                        }
                    }
                }
                m_cellps.clear();
                m_modp = nullptr;
                UINFO(4, " MOD-done\n");
            }
        }
    }
// VISITORS
    // Module: skip dead/recursive-clone modules, reject class parameters, and
    // either recurse (when under a module) or seed the top-down work list.
    virtual void visit(AstNodeModule* nodep) override {
        if (nodep->dead()) {
            UINFO(4, " MOD-dead. " << nodep << endl);  // Marked by LinkDot
            return;
        } else if (nodep->recursiveClone()) {
            // Fake, made for recursive elimination
            UINFO(4, " MOD-recursive-dead. " << nodep << endl);
            nodep->dead(true);  // So Dead checks won't count references to it
            return;
        }
        //
        // Class parameters are not supported yet; warn on each one found.
        if (!nodep->dead() && VN_IS(nodep, Class)) {
            for (AstNode* stmtp = nodep->stmtsp(); stmtp; stmtp = stmtp->nextp()) {
                if (AstVar* varp = VN_CAST(stmtp, Var)) {
                    if (varp->isParam()) {
                        varp->v3warn(E_UNSUPPORTED, "Unsupported: class parameters");
                    }
                }
            }
        }
        //
        if (m_modp) {
            UINFO(4, " MOD-under-MOD. " << nodep << endl);
            iterateChildren(nodep);
        } else if (nodep->level() <= 2  // Haven't added top yet, so level 2 is the top
                   || VN_IS(nodep, Class)  // Nor moved classes
                   || VN_IS(nodep, Package)) {  // Likewise haven't done wrapTopPackages yet
            // Add request to END of modules left to process
            m_todoModps.emplace(nodep->level(), nodep);
            m_generateHierName = "";
            visitModules();
        } else if (nodep->user5()) {
            UINFO(4, " MOD-done " << nodep << endl);  // Already did it
        } else {
            // Should have been done by now, if not dead
            UINFO(4, " MOD-dead? " << nodep << endl);
        }
    }
virtual void visit(AstCell* nodep) override {
// Must do ifaces first, so push to list and do in proper order
string* genHierNamep = new string(m_generateHierName);
nodep->user5p(genHierNamep);
m_cellps.push_back(nodep);
}
    // Parameterized classes are unsupported: warn once and discard the
    // parameter list before recursing.
    virtual void visit(AstClassRefDType* nodep) override {
        if (nodep->paramsp()) {
            nodep->paramsp()->v3warn(E_UNSUPPORTED, "Unsupported: parameterized classes");
            pushDeletep(nodep->paramsp()->unlinkFrBackWithNext());
        }
        iterateChildren(nodep);
    }
// Make sure all parameters are constantified
    // Constify each parameter variable (once) so later de-parameterization
    // sees constants; a parameter with no default value is an error.
    virtual void visit(AstVar* nodep) override {
        if (nodep->user5SetOnce()) return;  // Process once
        iterateChildren(nodep);
        if (nodep->isParam()) {
            if (!nodep->valuep()) {
                nodep->v3error("Parameter without initial value is never given value"
                               << " (IEEE 1800-2017 6.20.1): " << nodep->prettyNameQ());
            } else {
                V3Const::constifyParamsEdit(nodep);  // The variable, not just the var->init()
            }
        }
    }
// Make sure varrefs cause vars to constify before things above
    // Referencing a var forces it through visit(AstVar) first, so the target
    // is constified before anything above relies on it.
    virtual void visit(AstVarRef* nodep) override {
        // Might jump across functions, so beware if ever add a m_funcp
        if (nodep->varp()) iterate(nodep->varp());
    }
    // Scan the candidate list for a node matching nodep's name. A Var match
    // relinks the reference; a Pin match replaces the reference with a clone
    // of the pin's expression (nodep is dangling after that). Returns true on
    // success, false if no candidate matched.
    bool ifaceParamReplace(AstVarXRef* nodep, AstNode* candp) {
        for (; candp; candp = candp->nextp()) {
            if (nodep->name() == candp->name()) {
                if (AstVar* varp = VN_CAST(candp, Var)) {
                    UINFO(9, "Found interface parameter: " << varp << endl);
                    nodep->varp(varp);
                    return true;
                } else if (AstPin* pinp = VN_CAST(candp, Pin)) {
                    UINFO(9, "Found interface parameter: " << pinp << endl);
                    UASSERT_OBJ(pinp->exprp(), pinp, "Interface parameter pin missing expression");
                    VL_DO_DANGLING(nodep->replaceWith(pinp->exprp()->cloneTree(false)), nodep);
                    return true;
                }
            }
        }
        return false;
    }
virtual void visit(AstVarXRef* nodep) override {
// Check to see if the scope is just an interface because interfaces are special
const string dotted = nodep->dotted();
if (!dotted.empty() && nodep->varp() && nodep->varp()->isParam()) {
AstNode* backp = nodep;
while ((backp = backp->backp())) {
if (VN_IS(backp, NodeModule)) {
UINFO(9, "Hit module boundary, done looking for interface" << endl);
break;
}
if (VN_IS(backp, Var) && VN_CAST(backp, Var)->isIfaceRef()
&& VN_CAST(backp, Var)->childDTypep()
&& (VN_CAST(VN_CAST(backp, Var)->childDTypep(), IfaceRefDType)
|| (VN_CAST(VN_CAST(backp, Var)->childDTypep(), UnpackArrayDType)
&& VN_CAST(VN_CAST(backp, Var)->childDTypep()->getChildDTypep(),
IfaceRefDType)))) {
AstIfaceRefDType* ifacerefp
= VN_CAST(VN_CAST(backp, Var)->childDTypep(), IfaceRefDType);
if (!ifacerefp) {
ifacerefp = VN_CAST(VN_CAST(backp, Var)->childDTypep()->getChildDTypep(),
IfaceRefDType);
}
// Interfaces passed in on the port map have ifaces
if (AstIface* ifacep = ifacerefp->ifacep()) {
if (dotted == backp->name()) {
UINFO(9, "Iface matching scope: " << ifacep << endl);
if (ifaceParamReplace(nodep, ifacep->stmtsp())) { //
return;
}<|fim▁hole|> }
}
// Interfaces declared in this module have cells
else if (AstCell* cellp = ifacerefp->cellp()) {
if (dotted == cellp->name()) {
UINFO(9, "Iface matching scope: " << cellp << endl);
if (ifaceParamReplace(nodep, cellp->paramsp())) { //
return;
}
}
}
}
}
}
nodep->varp(nullptr); // Needs relink, as may remove pointed-to var
}
    // Unlinked dotted reference: capture its dotted text, iterate the cell
    // ref (visit(AstCellArrayRef) rewrites m_unlinkedTxt with the resolved
    // array index), write the text back, then replace the wrapper with the
    // inner reference.
    virtual void visit(AstUnlinkedRef* nodep) override {
        AstVarXRef* varxrefp = VN_CAST(nodep->op1p(), VarXRef);
        AstNodeFTaskRef* taskrefp = VN_CAST(nodep->op1p(), NodeFTaskRef);
        if (varxrefp) {
            m_unlinkedTxt = varxrefp->dotted();
        } else if (taskrefp) {
            m_unlinkedTxt = taskrefp->dotted();
        } else {
            nodep->v3fatalSrc("Unexpected AstUnlinkedRef node");
            return;
        }
        iterate(nodep->cellrefp());
        if (varxrefp) {
            varxrefp->dotted(m_unlinkedTxt);
        } else {
            taskrefp->dotted(m_unlinkedTxt);
        }
        nodep->replaceWith(nodep->op1p()->unlinkFrBack());
        VL_DO_DANGLING(pushDeletep(nodep), nodep);
    }
    // Cell array select: constify the selection expression and substitute the
    // resolved index for the "__BRA__??__KET__" placeholder in m_unlinkedTxt
    // (set up by visit(AstUnlinkedRef)).
    virtual void visit(AstCellArrayRef* nodep) override {
        V3Const::constifyParamsEdit(nodep->selp());
        if (const AstConst* constp = VN_CAST(nodep->selp(), Const)) {
            const string index = AstNode::encodeNumber(constp->toSInt());
            const string replacestr = nodep->name() + "__BRA__??__KET__";
            const size_t pos = m_unlinkedTxt.find(replacestr);
            UASSERT_OBJ(pos != string::npos, nodep,
                        "Could not find array index in unlinked text: '"
                            << m_unlinkedTxt << "' for node: " << nodep);
            m_unlinkedTxt.replace(pos, replacestr.length(),
                                  nodep->name() + "__BRA__" + index + "__KET__");
        } else {
            nodep->v3error("Could not expand constant selection inside dotted reference: "
                           << nodep->selp()->prettyNameQ());
            return;
        }
    }
// Generate Statements
    // Generate-if: evaluate the condition at parameter time and splice the
    // taken branch in place of the GENIF (or delete it when the branch is empty).
    virtual void visit(AstGenIf* nodep) override {
        UINFO(9, " GENIF " << nodep << endl);
        iterateAndNextNull(nodep->condp());
        // We suppress errors when widthing params since short-circuiting in
        // the conditional evaluation may mean these error can never occur. We
        // then make sure that short-circuiting is used by constifyParamsEdit.
        V3Width::widthGenerateParamsEdit(nodep);  // Param typed widthing will
        // NOT recurse the body.
        V3Const::constifyGenerateParamsEdit(nodep->condp());  // condp may change
        if (const AstConst* constp = VN_CAST(nodep->condp(), Const)) {
            AstNode* keepp = (constp->isZero() ? nodep->elsesp() : nodep->ifsp());
            if (keepp) {
                keepp->unlinkFrBackWithNext();
                nodep->replaceWith(keepp);
            } else {
                nodep->unlinkFrBack();
            }
            VL_DO_DANGLING(nodep->deleteTree(), nodep);
            // Normal edit rules will now recurse the replacement
        } else {
            nodep->condp()->v3error("Generate If condition must evaluate to constant");
        }
    }
//! Parameter substitution for generated for loops.
//! @todo Unlike generated IF, we don't have to worry about short-circuiting the conditional
//! expression, since this is currently restricted to simple comparisons. If we ever do
//! move to more generic constant expressions, such code will be needed here.
    // Begin block: if it wraps a generate-for, unroll the GENFOR here (the
    // begin itself survives as the genvar container); otherwise just extend
    // the generate hierarchy name and recurse.
    virtual void visit(AstBegin* nodep) override {
        if (nodep->genforp()) {
            AstGenFor* forp = VN_CAST(nodep->genforp(), GenFor);
            UASSERT_OBJ(forp, nodep, "Non-GENFOR under generate-for BEGIN");
            // We should have a GENFOR under here. We will be replacing the begin,
            // so process here rather than at the generate to avoid iteration problems
            UINFO(9, " BEGIN " << nodep << endl);
            UINFO(9, " GENFOR " << forp << endl);
            V3Width::widthParamsEdit(forp);  // Param typed widthing will NOT recurse the body
            // Outer wrapper around generate used to hold genvar, and to ensure genvar
            // doesn't conflict in V3LinkDot resolution with other genvars
            // Now though we need to change BEGIN("zzz", GENFOR(...)) to
            // a BEGIN("zzz__BRA__{loop#}__KET__")
            const string beginName = nodep->name();
            // Leave the original Begin, as need a container for the (possible) GENVAR
            // Note V3Unroll will replace some AstVarRef's to the loop variable with constants
            // Don't remove any deleted nodes in m_unroller until whole process finishes,
            // (are held in m_unroller), as some AstXRefs may still point to old nodes.
            VL_DO_DANGLING(m_unroller.unrollGen(forp, beginName), forp);
            // Blocks were constructed under the special begin, move them up
            // Note forp is null, so grab statements again
            if (AstNode* stmtsp = nodep->genforp()) {
                stmtsp->unlinkFrBackWithNext();
                nodep->addNextHere(stmtsp);
                // Note this clears nodep->genforp(), so begin is no longer special
            }
        } else {
            VL_RESTORER(m_generateHierName);
            m_generateHierName += "." + nodep->prettyName();
            iterateChildren(nodep);
        }
    }
    // GENFORs are only legal under a wrapping BEGIN (handled in visit(AstBegin));
    // hitting one directly is an internal error.
    virtual void visit(AstGenFor* nodep) override {  // LCOV_EXCL_LINE
        nodep->v3fatalSrc("GENFOR should have been wrapped in BEGIN");
    }
    // Generate-case: constify the selector and every item condition, find the
    // first matching item (falling back to the default), and splice its body
    // in place of the GENCASE.
    virtual void visit(AstGenCase* nodep) override {
        UINFO(9, " GENCASE " << nodep << endl);
        AstNode* keepp = nullptr;
        iterateAndNextNull(nodep->exprp());
        V3Case::caseLint(nodep);
        V3Width::widthParamsEdit(nodep);  // Param typed widthing will NOT recurse the body,
        // don't trigger errors yet.
        V3Const::constifyParamsEdit(nodep->exprp());  // exprp may change
        AstConst* exprp = VN_CAST(nodep->exprp(), Const);
        // Constify
        for (AstCaseItem* itemp = nodep->itemsp(); itemp;
             itemp = VN_CAST(itemp->nextp(), CaseItem)) {
            for (AstNode* ep = itemp->condsp(); ep;) {
                AstNode* nextp = ep->nextp();  // May edit list
                iterateAndNextNull(ep);
                VL_DO_DANGLING(V3Const::constifyParamsEdit(ep), ep);  // ep may change
                ep = nextp;
            }
        }
        // Item match
        for (AstCaseItem* itemp = nodep->itemsp(); itemp;
             itemp = VN_CAST(itemp->nextp(), CaseItem)) {
            if (!itemp->isDefault()) {
                for (AstNode* ep = itemp->condsp(); ep; ep = ep->nextp()) {
                    if (const AstConst* ccondp = VN_CAST(ep, Const)) {
                        V3Number match(nodep, 1);
                        // NOTE(review): exprp is dereferenced assuming the
                        // selector constified to an AstConst -- confirm a
                        // non-constant selector cannot reach here.
                        match.opEq(ccondp->num(), exprp->num());
                        if (!keepp && match.isNeqZero()) keepp = itemp->bodysp();
                    } else {
                        itemp->v3error("Generate Case item does not evaluate to constant");
                    }
                }
            }
        }
        // Else default match
        for (AstCaseItem* itemp = nodep->itemsp(); itemp;
             itemp = VN_CAST(itemp->nextp(), CaseItem)) {
            if (itemp->isDefault()) {
                if (!keepp) keepp = itemp->bodysp();
            }
        }
        // Replace
        if (keepp) {
            keepp->unlinkFrBackWithNext();
            nodep->replaceWith(keepp);
        } else {
            nodep->unlinkFrBack();
        }
        VL_DO_DANGLING(nodep->deleteTree(), nodep);
    }
virtual void visit(AstNode* nodep) override { iterateChildren(nodep); }
public:
// CONSTRUCTORS
    // Construct and immediately process the whole netlist.
    explicit ParamVisitor(AstNetlist* nodep)
        : m_processor{nodep} {
        // Relies on modules already being in top-down-order
        iterate(nodep);
    }
virtual ~ParamVisitor() override = default;
VL_UNCOPYABLE(ParamVisitor);
};
//######################################################################
// Param class functions
// Entry point: run the parameterization pass over the whole netlist, then dump
// the tree for debug checking. (Fix: stripped a trailing FIM artifact token
// that had been appended to the closing brace.)
void V3Param::param(AstNetlist* rootp) {
    UINFO(2, __FUNCTION__ << ": " << endl);
    { ParamVisitor visitor{rootp}; }  // Destruct before checking
    V3Global::dumpCheckGlobalTree("param", 0, v3Global.opt.dumpTreeLevel(__FILE__) >= 6);
}
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import Permission
from django.contrib.auth.forms import PasswordResetForm
from .models import MyUser
from forms import CustomUserRegistrationForm
class PermissionAdmin(admin.ModelAdmin):
    """Admin page for auth Permissions, searchable by name or codename."""
    search_fields = ['name', 'codename']
class CustomUserAdmin(UserAdmin):
add_form = CustomUserRegistrationForm
list_display = ['email', 'first_name', 'last_name', 'is_active', 'confirmed', 'osf_id']
fieldsets = (
(None, {'fields': ('email', 'password',)}),
('Personal info', {'fields': ('first_name', 'last_name', 'email', 'date_joined', 'last_login', 'osf_id')}),
('Permissions', {'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions',)}),
)
add_fieldsets = (
(None, {'fields':<|fim▁hole|> actions = ['send_email_invitation']
    # TODO - include alternative messages for warning/failure
    def send_email_invitation(self, request, queryset):
        """Admin action: email a password-reset-style invitation to each
        selected user, then flash a success message.

        NOTE(review): PasswordResetForm's second positional argument is the
        form's `files` mapping; passing request.POST there looks unintended --
        confirm before relying on it.
        """
        for user in queryset:
            reset_form = PasswordResetForm({'email': user.email}, request.POST)
            # Invalid email data is a programming error here, not user error.
            assert reset_form.is_valid()
            reset_form.save(
                #subject_template_name='templates/emails/account_creation_subject.txt',
                #email_template_name='templates/emails/invitation_email.html',
                request=request
            )
        self.message_user(request, 'Email invitation successfully sent')
    send_email_invitation.short_description = 'Send email invitation to selected users'
def save_model(self, request, obj, form, change):
if change:
pass
else:
obj.is_active = False
obj.save()
admin.site.register(MyUser, CustomUserAdmin)
admin.site.register(Permission, PermissionAdmin)<|fim▁end|> | ('email', 'first_name', 'last_name', 'password1', 'password2'),
}),)
search_fields = ('email', 'first_name', 'last_name',)
ordering = ('last_name', 'first_name',) |
<|file_name|>orchid.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Data preprocessing for ORCHID corpus
"""
from typing import List, Tuple
# defined strings for special characters,
# from Table 4 in ORCHID paper
CHAR_TO_ESCAPE = {
" ": "<space>",
"+": "<plus>",
"-": "<minus>",
"=": "<equal>",
",": "<comma>",
"$": "<dollar>",
".": "<full_stop>",
"(": "<left_parenthesis>",
")": "<right_parenthesis>",
'"': "<quotation>",
"@": "<at_mark>",
"&": "<ampersand>",
"{": "<left_curly_bracket>",
"^": "<circumflex_accent>",
"?": "<question_mark>",
"<": "<less_than>",
">": "<greater_than>",
"!": "<exclamation>",
"’": "<apostrophe>",
":": "<colon>",
"*": "<asterisk>",
";": "<semi_colon>",
"/": "<slash>",
}
ESCAPE_TO_CHAR = dict((v, k) for k, v in CHAR_TO_ESCAPE.items())
# map from ORCHID POS tag to Universal POS tag
# from Korakot Chaovavanich
TO_UD = {
"": "",
# NOUN
"NOUN": "NOUN",
"NCMN": "NOUN",
"NTTL": "NOUN",
"CNIT": "NOUN",
"CLTV": "NOUN",
"CMTR": "NOUN",
"CFQC": "NOUN",
"CVBL": "NOUN",
# VERB
"VACT": "VERB",
"VSTA": "VERB",
# PROPN
"PROPN": "PROPN",
"NPRP": "PROPN",
# ADJ
"ADJ": "ADJ",
"NONM": "ADJ",
"VATT": "ADJ",
"DONM": "ADJ",
# ADV
"ADV": "ADV",
"ADVN": "ADV",
"ADVI": "ADV",
"ADVP": "ADV",
"ADVS": "ADV",
# INT
"INT": "INTJ",
# PRON
"PRON": "PRON",
"PPRS": "PRON",
"PDMN": "PRON",
"PNTR": "PRON",
# DET
"DET": "DET",
"DDAN": "DET",
"DDAC": "DET",
"DDBQ": "DET",
"DDAQ": "DET",
"DIAC": "DET",
"DIBQ": "DET",
"DIAQ": "DET",
# NUM
"NUM": "NUM",
"NCNM": "NUM",
"NLBL": "NUM",
"DCNM": "NUM",
# AUX
"AUX": "AUX",
"XVBM": "AUX",
"XVAM": "AUX",
"XVMM": "AUX",
"XVBB": "AUX",
"XVAE": "AUX",
# ADP
"ADP": "ADP",
"RPRE": "ADP",
# CCONJ
"CCONJ": "CCONJ",
"JCRG": "CCONJ",
# SCONJ
"SCONJ": "SCONJ",
"PREL": "SCONJ",
"JSBR": "SCONJ",
"JCMP": "SCONJ",
# PART
"PART": "PART",
"FIXN": "PART",
"FIXV": "PART",
"EAFF": "PART",
"EITT": "PART",
"AITT": "PART",
"NEG": "PART",
# PUNCT
"PUNCT": "PUNCT",
"PUNC": "PUNCT",
}
def ud_exception(w: str, tag: str) -> str:
    """Return the UD tag for *w*, overriding to NOUN for the Thai
    nominalizing words "การ" and "ความ".

    :param w: the (pre-escape-replacement) word
    :param tag: the tag mapped from the ORCHID tagset
    :return: "NOUN" for the two exception words, otherwise *tag* unchanged
    """
    if w in ("การ", "ความ"):
        return "NOUN"
    return tag
def pre_process(words: List[str]) -> List[str]:
    """
    Convert signs and symbols with their defined strings.

    This function is to be used as a preprocessing step,
    before the actual POS tagging.

    :param words: tokenized words
    :return: new list with each special character replaced by its escape string
    """
    # dict.get() performs the lookup once per word instead of a separate
    # membership test followed by an indexing.
    return [CHAR_TO_ESCAPE.get(word, word) for word in words]
def post_process(
    word_tags: List[Tuple[str, str]], to_ud: bool = False
) -> List[Tuple[str, str]]:
    """
    Convert defined strings back to corresponding signs and symbols.

    This function is to be used as a post-processing step,
    after the actual POS tagging.

    :param word_tags: list of (word, tag) pairs from the tagger
    :param to_ud: if True, also map ORCHID tags to Universal POS tags
    :return: pairs with escape strings replaced and (optionally) UD tags

    (Fix: removed an embedded FIM artifact token from the conditional
    expression; logic is otherwise unchanged.)
    """
    if to_ud:
        # ud_exception() intentionally sees the pre-replacement word, matching
        # the original behavior; escape tokens never equal the Thai exceptions.
        return [
            (ESCAPE_TO_CHAR.get(word, word), ud_exception(word, TO_UD[tag]))
            for word, tag in word_tags
        ]
    return [(ESCAPE_TO_CHAR.get(word, word), tag) for word, tag in word_tags]
<|file_name|>test_pd_client.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use grpcio::EnvBuilder;
use kvproto::metapb::*;
use pd_client::{PdClient, RegionInfo, RegionStat, RpcClient};
use security::{SecurityConfig, SecurityManager};
use test_pd::{mocker::*, util::*, Server as MockServer};
use tikv_util::config::ReadableDuration;
use std::sync::{mpsc, Arc};
use std::thread;
use std::time::Duration;
/// Spin up a one-member mock PD server and a client configured with the given
/// leader-update interval. The returned server must outlive the client.
fn new_test_server_and_client(
    update_interval: ReadableDuration,
) -> (MockServer<Service>, RpcClient) {
    let server = MockServer::new(1);
    let eps = server.bind_addrs();
    let client = new_client_with_update_interval(eps, None, update_interval);
    (server, client)
}
/// Expands to a `(name, boxed closure)` pair invoking one PdClient method with
/// the given arguments; the client is cloned so the closure can move to a
/// thread. The `block_on(...)` arm drives async methods synchronously.
macro_rules! request {
    ($client: ident => block_on($func: tt($($arg: expr),*))) => {
        (stringify!($func), {
            let client = $client.clone();
            Box::new(move || {
                let _ = futures::executor::block_on(client.$func($($arg),*));
            })
        })
    };
    ($client: ident => $func: tt($($arg: expr),*)) => {
        (stringify!($func), {
            let client = $client.clone();
            Box::new(move || {
                let _ = client.$func($($arg),*);
            })
        })
    };
}
// Every PdClient interface is exercised while the reconnect path is paused on
// a fail point; a watchdog channel with a timeout flags any call that
// deadlocks against reconnect().
#[test]
fn test_pd_client_deadlock() {
    let (_server, client) = new_test_server_and_client(ReadableDuration::millis(100));
    let client = Arc::new(client);
    let pd_client_reconnect_fp = "pd_client_reconnect";
    // It contains all interfaces of PdClient.
    let test_funcs: Vec<(_, Box<dyn FnOnce() + Send>)> = vec![
        request!(client => reconnect()),
        request!(client => get_cluster_id()),
        request!(client => bootstrap_cluster(Store::default(), Region::default())),
        request!(client => is_cluster_bootstrapped()),
        request!(client => alloc_id()),
        request!(client => put_store(Store::default())),
        request!(client => get_store(0)),
        request!(client => get_all_stores(false)),
        request!(client => get_cluster_config()),
        request!(client => get_region(b"")),
        request!(client => get_region_info(b"")),
        request!(client => block_on(get_region_async(b""))),
        request!(client => block_on(get_region_info_async(b""))),
        request!(client => block_on(get_region_by_id(0))),
        request!(client => block_on(region_heartbeat(0, Region::default(), Peer::default(), RegionStat::default(), None))),
        request!(client => block_on(ask_split(Region::default()))),
        request!(client => block_on(ask_batch_split(Region::default(), 1))),
        request!(client => block_on(store_heartbeat(Default::default()))),
        request!(client => block_on(report_batch_split(vec![]))),
        request!(client => scatter_region(RegionInfo::new(Region::default(), None))),
        request!(client => block_on(get_gc_safe_point())),
        request!(client => block_on(get_store_stats_async(0))),
        request!(client => get_operator(0)),
        request!(client => block_on(get_tso())),
    ];
    for (name, func) in test_funcs {
        fail::cfg(pd_client_reconnect_fp, "pause").unwrap();
        // Wait for the PD client thread blocking on the fail point.
        // The GLOBAL_RECONNECT_INTERVAL is 0.1s so sleeps 0.2s here.
        thread::sleep(Duration::from_millis(200));
        let (tx, rx) = mpsc::channel();
        let handle = thread::spawn(move || {
            func();
            tx.send(()).unwrap();
        });
        // Only allow to reconnect once for a func.
        client.handle_reconnect(move || {
            fail::cfg(pd_client_reconnect_fp, "return").unwrap();
        });
        // Remove the fail point to let the PD client thread go on.
        fail::remove(pd_client_reconnect_fp);
        let timeout = Duration::from_millis(500);
        if rx.recv_timeout(timeout).is_err() {
            panic!("PdClient::{}() hangs", name);
        }
        handle.join().unwrap();
    }
    drop(client);
    fail::remove(pd_client_reconnect_fp);
}
// Updating pd leader may be slow, we need to make sure it does not block other
// RPC in the same gRPC Environment.
// A slow leader-updating client (client1, 100ms interval) must not block an
// unrelated request from client2 sharing the same gRPC environment.
// (Fix: restored the `from_secs(100)` interval assignment for client2 that was
// dropped by a FIM artifact -- without it client2 would reuse client1's 100ms
// interval, contradicting the comment and the test's intent.)
#[test]
fn test_slow_periodical_update() {
    let pd_client_reconnect_fp = "pd_client_reconnect";
    let server = MockServer::new(1);
    let eps = server.bind_addrs();
    let mut cfg = new_config(eps);
    let env = Arc::new(EnvBuilder::new().cq_count(1).build());
    let mgr = Arc::new(SecurityManager::new(&SecurityConfig::default()).unwrap());
    // client1 updates leader frequently (100ms).
    cfg.update_interval = ReadableDuration(Duration::from_millis(100));
    let _client1 = RpcClient::new(&cfg, Some(env.clone()), mgr.clone()).unwrap();
    // client2 never updates leader in the test.
    cfg.update_interval = ReadableDuration(Duration::from_secs(100));
    let client2 = RpcClient::new(&cfg, Some(env), mgr).unwrap();
    fail::cfg(pd_client_reconnect_fp, "pause").unwrap();
    // Wait for the PD client thread blocking on the fail point.
    // The GLOBAL_RECONNECT_INTERVAL is 0.1s so sleeps 0.2s here.
    thread::sleep(Duration::from_millis(200));
    let (tx, rx) = mpsc::channel();
    let handle = thread::spawn(move || {
        client2.alloc_id().unwrap();
        tx.send(()).unwrap();
    });
    let timeout = Duration::from_millis(500);
    if rx.recv_timeout(timeout).is_err() {
        panic!("pd client2 is blocked");
    }
    // Clean up the fail point.
    fail::remove(pd_client_reconnect_fp);
    handle.join().unwrap();
}
// Reconnection will be speed limited.
// Reconnection is rate-limited: after one successful (faked) reconnect,
// further attempts within the interval are cancelled.
// (Fix: stripped a trailing FIM artifact token from the closing brace.)
#[test]
fn test_reconnect_limit() {
    let pd_client_reconnect_fp = "pd_client_reconnect";
    let (_server, client) = new_test_server_and_client(ReadableDuration::secs(100));
    // The GLOBAL_RECONNECT_INTERVAL is 0.1s so sleeps 0.2s here.
    thread::sleep(Duration::from_millis(200));
    // The first reconnection will succeed, and the last_update will not be updated.
    fail::cfg(pd_client_reconnect_fp, "return").unwrap();
    client.reconnect().unwrap();
    // The subsequent reconnection will be cancelled.
    for _ in 0..10 {
        let ret = client.reconnect();
        assert!(format!("{:?}", ret.unwrap_err()).contains("cancel reconnection"));
    }
    fail::remove(pd_client_reconnect_fp);
}
<|file_name|>icon.py<|end_file_name|><|fim▁begin|>import cStringIO
import zlib
import wx
#----------------------------------------------------------------------
def getMailData():
return zlib.decompress(
"x\xda\x01M\x01\xb2\xfe\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\
\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\
\x08d\x88\x00\x00\x01\x04IDATX\x85\xed\x941\x0e\x82@\x10E\x9f\xc6`,\x88\xad\
\x8d\x8d\x89r\x02B\xc1\t\xbc\x94\x857\xf04\x9e\xc0C\x00\x95\xb1\xb1\xa52\xda\
h\xc1N\xe1\xc8f5j\x9cD^Ev\x98\x81\xffv\x01::\xfe\x9d^\x91e\xd7\xb6\xc2d\xb9\
\x04`\xb8X\xbc\xf5\x80sY\x02p\xdcn[\xeb\xfd\xb7\xa6\x7f\x80\x81\xaf o<O\xd3f\
\xc1\x19y\x1a\xd7\xbf\xf7$\x17\xec\x19\x90\xbd?\x15\x05\x00\xd5z\r\xc0\\n\
\x08\x99p\x89\xa5o<\x9b\x010J\x12\xe0\xf1,\xd83\x10\xafV\xcd\x85K \x04M\x04\
\x92\xcb\\\xfb\x06\x84\xa7M\xa8u_r\x1fv\r\x08\xb1\xfc\x07\x14\x952\xf3\x90\
\xdc\xd3\xa71l\xe0p\x00\xe0R\xd7@8\x91N.}\x91\x9b\xc3t\xda\xdag\xd0\x80$\xdf\
\xed\x00\x88\xf2\xbcYw\tb\xf9\xfe\xd5\x19\xd0\xa7=\xf2\xcdQ\xd83\xe0K\xae\t}\
\xdf\xd2'sd\xae\xc6\x9e\x81P\xf2\x97Q&\xd8l\xee\xca\xf6\x0c\xf8\xf6\xea[\xfc\
\xdc@G\xc7\rv\x18V\xd3#+\xef\x8c\x00\x00\x00\x00IEND\xaeB`\x82\xb38\x8e\xb0"\
)
def getMailBitmap():
    """Return the 'mail' image as a wx.Bitmap.

    (Fix: removed an embedded FIM artifact token that prefixed the def line.)
    """
    return wx.BitmapFromImage(getMailImage())
def getMailImage():
    """Decompress the embedded PNG payload and wrap it in a wx.Image."""
    return wx.ImageFromStream(cStringIO.StringIO(getMailData()))
def getMailIcon():
    """Return the 'mail' image as a wx.Icon (for tray/frame use)."""
    icon = wx.EmptyIcon()
    icon.CopyFromBitmap(getMailBitmap())
    return icon
#----------------------------------------------------------------------
def getNoMailData():
return zlib.decompress(
'x\xda\x01G\x04\xb8\xfb\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\
\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\
\x08d\x88\x00\x00\x03\xfeIDATX\x85\xed\x97[o\xdb8\x10F\x0fu\xa1$\xeb\x96(A\
\x92\x1a}\xe8\xcf\xdc\xdd?\xeb\xa0h\x12\'\xa9#;\xba\x8b\x12\xb5\x0f\x81\x88\
\xba\xb6w\xb37\xf4a;\x80!\x98\xb09gf8\xdfPBX6?\xd2\xac\x1f\xea\xfd\'\xc0O\
\x00\xc0\xf9\xed\xd7_\xa6i\x9a\xf6\x16\xb3,\xe3\xea\xea\x8a8\x8eY,\x16X\xd6\
\xdf\xe3\x1c\xc7\x91\xba\xae\xa9\xaa\x8a\xa7\xa7\'6\x9b\xcd!@\x92$\x07\x8b\
\xbe\xef\x9b\xe7\xe5\xe5%a\x18"\xa5\xc4\xb6\xdf\xd7\xb2\xe38\xd2\xf7=UU\xf1\
\xf8\xf8HUUx\x9eG\x9a\xa6\x87\x00\xc76\xa8\xeb\x9a\xae\xeb\xf0}\x9f\xeb\xebk\
\xc20$MS\\\xd7}\x17\x80R\x8a\xddnG]\xd7\x94e\xc9\xd3\xd3\x13\xe38\x1e\xfd\
\xed\x1e\x80\x94\x12\xdf\xf7\xd1Z3\x0c\x03M\xd3\xb0^\xaf\x11B\xe0\xba.q\x1c#\
\xa5\xc4q\x8er3\x0c\x03}\xdfS\xd75_\xbf~e\xbd^\xd34\r\x8e\xe3\xe0\xfb>\xb6m\
\xd3\xb6-]\xd7\x1d\x07\x08\xc3\x90\x8b\x8b\x0b\x94R4MC\xd7u\xacV+\xba\xae\
\xc3q\x1c\x84\x10\xa4iz\x12`\x1cG\xca\xb2\xe4\xf9\xf9\x99\xdb\xdb[\xee\xef\
\xef\rx\x10\x04x\x9e\xc7f\xb39\r\x90$\t\x1f?~\xa4\xaek6\x9b\rEQ\xd0u\x1d\xbb\
\xdd\x8e\xbb\xbb;\xc6qd\x9a\xa6\x83L\xcc\x91\x17E\xc1z\xbdf\xbd^\xb3\xdb\xed\
\xd0Z\x1b\x80,\xcb\x88\xa2\x08\xa5\x14///\xc7\x01\xd24\xe5\xd3\xa7O\xbc\xbc\
\xbc\xd0\xf7=sw\xf4}\xcf\xed\xed-M\xd3`Y\x16B\x08\x92$\xd9\x03\x98k\xbdZ\xad\
x||\xc4\xb2,\xa2("\x0cC\x92$\xe1\xc3\x87\x0fdY\xb6\xe7\xfc\x00\xc0\xf3<\xe28\
6N]\xd7\xc5\xb2,^__)\xcb\x92\xedv\xcb\xfd\xfd=Zk\xa6ib\x18\x06\x00\xaa\xaa2\
\x91o\xb7[\xfa\xbe\'\x8a"\x13\xf9\xe5\xe5%Y\x96\x99\xcc\x9d\x04\xf8\xb6\x14R\
J\xa4\x94\x0c\xc3\x80\xd6\xdaD\xfa\xf9\xf3g\x9a\xa6A\x08\xc1\xf9\xf99\x00y\
\x9e\xb3Z\xadx~~F\x08A\x14EDQD\x9a\xa6,\x97Knnn\xf0<\x8f\xef\xf5\xe6$\x80\
\xef\xfb\xf8\xbeO\xd34\xa6\x96\x00eYR\x96%y\x9e\xf3\xf0\xf0@Q\x14f=\xcfs\xba\
\xae\xdbK{\x92$\xa4ij\xfa\xbfi\x9a\xf7\x01\xcc&\xa5$I\x12\x93\xf2\xd9\x94R|\
\xf9\xf2\x05!\x04\x00\xd34\xa1\xb5&\x0cC\xe3<MS\xe28\xfeS\xed8\n0\x9f\xf6\
\xb9\xff\x83 `\x1cG\xe3\xb0(\n\xaa\xaa\xa2\xef{\x03\x1a\x86!q\x1c\x13\xc71Q\
\x14\xe1\xfb>\xae\xeb"\x84`\x18\x06\xf3\xdfw\x01h\xad\xe9\xfb\x9e\xae\xebPJa\
Y\x16q\x1cc\xdb\xb6\xc9\x84\x10\xe2(@\x9a\xa6\x04A\x80\x10\x02\xa5\x14]\xd7\
\xd1u\xdd\xc9L\xec\x01h\xad\x19\xc7\x11\xad5u]\x1b\xe7s4\xf3SJ\x89eY\xb4m\
\x0b\xbcu\xcf\xd9\xd9\x19gggDQ\x84\x94\x12\xa5\x14\xd34\xa1\x94\xa2\xaek\x82\
0>N\x02\xccCd\x18\x06^__\xb1m\x9b0\x0c\xf1<\x0f\xd7u\x99\xa6\x89\xf3\xf3s\
\xf2<\x07\xde\x0e\x1f@\x14E,\x97K...L\xa4s\xf4\xf3\\\x98\xa6\t\xc7q\x0ef\xc2\
\x1e\xc0L\xab\xb5F)\x85\xeb\xba,\x16\x0b\x82 \xc0u]#<\x8e\xe3\xd0\xb6-\x9e\
\xe7\x01\x10\xc71WWWdY\x06\xbc\xb5\xabR\n\xdb\xb6)\x8a\x82\xb6mi\xdb\x16\xcb\
\xb2PJ\x9d\x06\x98ew\xb1X\x18\xfd\x0e\x82\xc0\xcc\x81\xd9\x82 `\xb9\\\x9a\
\xcd\xa4\x94&\xc5\xf0v>\x1c\xc7!\x08\x02\xa6i\xc2\xb6m\x94RF\xdaO\x02\xcc\
\x9a>\x0b\x89\xe7yx\x9ewp!\x99\xc1N\x99m\xdb\xe63\x7f\xdf\xedv\xf4}\xff\xc7%\
\xf0}\x9f4MM\xddOM\xbd\xbfb\xf3\x1eQ\x141\x8e\xa3)\xdbQ\x80yn\xcf\xa7\xfc[\
\xbd\xff\'fY\x96\xb9k|\x1f\xd4\xd130\xcf\xff\x7f\xd3\xc6q4w\x8c=\x80\xa6i\
\x8c\xb8\xe4yn.\x11\xff\x85)\xa5\xd8n\xb7\xd4um\xd6\xc4\xcfw\xc3\xff=\xc0\
\xefa\x89?u1\xd3\xf5 \x00\x00\x00\x00IEND\xaeB`\x82\xc4\x1f\x08\x9f' )
def getNoMailBitmap():
    """Return the 'no mail' image as a wx.Bitmap."""
    return wx.BitmapFromImage(getNoMailImage())
def getNoMailImage():
    """Decompress the embedded 'no mail' PNG and build a wx.Image from it."""
    stream = cStringIO.StringIO(getNoMailData())
    return wx.ImageFromStream(stream)
def getNoMailIcon():
    """Return the 'no mail' image as a wx.Icon."""
    icon = wx.EmptyIcon()
    icon.CopyFromBitmap(getNoMailBitmap())
    return icon
#----------------------------------------------------------------------
def getErrMailData():
return zlib.decompress(
'x\xda\x01W\x05\xa8\xfa\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\
\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\
\x08d\x88\x00\x00\x05\x0eIDATX\x85\xcd\x97\xcf\x8f\xdb\xd4\x16\xc7?v\xae\x7f\
\xc5N&\x8e\xd3L\x92\xceL%T\x15\rbQQ!\xe8\x0e\xc4\x92\xff\x80%H\xac\xdeC\xf0\
\xfe\x94\x07\xdb\xf7\x96\xac\xfa\x1f TT\t\x06\x90\xa0,*UB#\x90f:i"\'\x99L\
\xec\xd8\xf1\xaf\x98\xc5LLC\x92\x8aH\xa0r$/|t\xef9\x1f\xdf\xfb\xbd\xe7\\K\
\x92\\\xe2E\x9a\xfcB\xb3\x03b\xdb\t\x9f}\xfa\xdf\xfc\xf5\xd1\x88\x83\xcf?\
\xa7\xf2\xf81\x00\xde\xe1!\xa7\xef\xbd\xc7\xf7\xf5:\xff\xfa\xf7G\xd2\xdf\n\
\xb0w\xff>\xd7\x83\x80\xeah\x84q\xe5\x93F#:GG\xec\x95\xcb\xdb\x86C\xdaV\x03\
\xdfjj\xfeZ\x9e#\xc71\xf2|\x0e\xc0\\\x96\x99\xab*?J\x12oF\xf1V+\xb0\xb5\x06\
\x1cUE\xccfEr\x00y>G\xccf8\xaa\xbam8\xc4\x7f>\xf98\xcf\xf3|\xc9\xd9n\xb7\xd9\
\xdb\xdbCQ\x94%\xff\xf5\xef\xbe\xa3~\xef\x1e\\\\\xac\rV\xaf\xd7\xf9\xe6\xc3\
\x0f\xf3\xb37\xdeX\xf2\'I\xc2\x93\'Ox\xfa\xf4\xe9*@\xa5RYu\nA\x92$\xe8\xba\
\x8eeY\xc5cw\xbb\xe8\xba\xbe\xf1kt]g\x7f\x7f\x1f\xeb\xe5\x97\xf1}\xbfx\x82 @\
\x08A\xb5Z]\xcd\xb5.\x90\xe7y\x84a\xc8\xee\xee.\x86a`\x9a&\xedv\x1b\xab\xd1@\
<g\x99UU\xa5\xd1h\xa0\xb7\xdbt\xbb]...\x18\x8dF\xf4\xfb}\xd24];g\t`\x91L\x92\
.u\x94\xe79\xc3\xe1\x10UU)\x97\xcb\x94\xc2\x90r\x96\xb1I\xb6Y\x96\x11\x86!\
\xe3\xf1\x98\xc1`\xc0p8$\xcfsvvv\x8ax\xd3\xe9\x940\x0c\xd7\x03T\xabU:\x9d\
\x0e\xa5\xd2e\x8a\xf3\xf3s\xfa\xfd>I\x92\x000w]\xdaq\xcc\xa65\x88\xe3\x18\
\xd7uyrr\xc2\xc9\xc9\t\xa3\xd1\x88k\xd7\xae\xd1j\xb5\n\xc0n\xb7\xfb|\x80\xfd\
\xfd}\xd24%\x08\x02\xe28&\x08\x02\x92$\xa1\xd7\xeb\xa1\xb9.N\x1coH\xff;@\xaf\
\xd7#I\x12L\xd3\xc44M,\xcb\xa2\\.#\x84\xc0\xf7}\xfa\xfd\xfef\x80\xbd\xbd=&\
\x93\tQ\x14aY\x16\xaa\xaa2\x1e\x8fq]\x97\xb2\xeb\xf2\xd2\x9f\x00p]\x17\xc7q\
\xa8\xd5j\xa8\xaaJ\xa9T\xa2^\xafS\xadV9;;[\x9a\xb3\x04\xa0\xaa*\x96e!I\x12Q\
\x14\x15\xfb\x15\xc71\xbe\xef#\x84(\xf4\xb1\xce$IB\x08\x81\xa6i\x94\xcbe*\
\x95J\xa1\xabj\xb5Z|\xd0F\x80\x85U*\x15TUe0\x18\xd0\xeb\xf50M\x93N\xa7C\xb3\
\xd9D\xd3\xb4\x8d\x00\x9a\xa6\xd1l6\x99w:h\x9a\x86\x10\x02\xc7qh4\x1a\xa8\
\xaa\xca\x1f\xeb\xcdF\x00M\xd3\xd04\x8d\xe9t\x8a,\xcb\xc5\xbbh\xb7\x99\xbe\
\xf2\n%IB\xef\xf5P\xa6S\x00\x12\xd3d\xd6j1=<D\xb4\xdb\xc5y\x97e\x19\xc30\x8a\
\xf7g\xc5\xf7\\\x80M\x16\x1c\x1c\xd0{\xf7]f\xad\x16\xbb_|Q\x00D\x8d\x06\xee\
\xdbos~\xe7\x0e\xb3+\xc5\xffY\xdb\n \xb5m|\xdbF\xb9\xb8 ;:*\xfc\x99e1\xbdy\
\x13\xff\xf0p\xab\xe4\xf0O\xbd\x90DQD\x1c\xc7dY\x86a\x18\x08\xb1<Lq\x1c\xa2\
\x1b7\x98\\\x1d\xc9\xe8\xc6\r\x84\xe3`\x9a\xe6\xf28E!\xcb2<\xcf[Q\xffs\x01|\
\xdf\xc7u]\x84\x104\x9b\xcd\xa22.,\x06\xce\xb3\x8c\xe4\xaa\xa0(\xbb\xbbX\xb7\
o\xe3\x1c\x1c,\x8d\xcb\xb2\x8c\xe9t\x8a\xef\xfb4\x1a\x8d\x15\xc0\x15\x80$I\
\x08\x82\xa0xj\xb5\x1a\xb6m\xaft\xc0sE\xe1\xc20\x08\xaeDh\x9a&V\xa7\x83m\xdb\
K\xe3f\xb3\x19a\x18\x16\xf1$I*\xca\xfaZ\x80\xc9d\xc2\xe9\xe9)\x95J\x85V\xab\
\x85i\x9a+\xcb\x0f\x97M\xab\xd5j\x15\xc1\x14E\xc10\x8c\x95q\x8b:\xa0\xeb:\
\xb3\xd9\x8c\xd3\xd3S&\x93\xc9f\x80(\x8a\xf0<\x8fj\xb5\x8a\xe38+E\'MS\xd24E\
\nCjA\x80\xbchR\x8aB*\xcb\xcc\xae\x92.\xa0\x85\x10\xec\xec\xec\xa0\xeb:\xddn\
\x17\xcf\xf3\x88\xa2h3\xc0\xa2\x19\xd5j\xb5\x95}\x07\x08\x82\x80\xe1p\x88x\
\xfc\x18\xe7\xe8\x08\xa3\xdb\xbd\x04\xeft\x18\xdd\xbdKrx\x88\xe38+\x17\x8fE/\
\x90$\t\xd7u7\x03\x18\x86\x81m\xdbh\x9aV|\xed\xb36\x1d\x8d\x18\x1f\x1f\xa3}\
\xfd5;\xf7\xee!\xfd\xfc\xf3\xe5\xca\xdc\xba\x857\x9f3S\x14tIZ\xabxM\xd3\xb0m\
{e\xab\xd6j`\xd3\x91)=z\x84\xf3\xe5\x97\x94\x1f>D\x1b\x0c~\x0f>\x18p\xed\xfe\
}\x82\xf1\x98\xe0\x9dw\xf0^}u\xed\xfc8\x8eW5\x10\x86a\xd1$\xfa\xfd>\xaa\xaa\
\xae\x15\x1e@\xeb\xa7\x9fx\xe9\xc1\x03v\x8e\x8f\x91\x9fi\xcb\xcaxL\xed\xe1C$\
\xcf\xe3\x17\xc7\xa1\xf7\x87\xcb\xec\xc2\xd24\xa5\xdf\xef\x13\x04A\xe1\xdb\
\xfa\xbf\xe0\xab\x0f\xde\xcfo\x9e\x9da\xff\xf0\x03\xc6U\x1d\x08ww9\xbfs\x87\
\xe3\xeb\xd7y\xeb\x7f\xff\xff{\xff\x8c\x1e\xdd\xbe\x8dqp@\xe9\xd7_\xc9\xaf\
\x00\xbcz\x9d\xee\xdd\xbb<\xaa\xd7\xb7\r\xb7\xfd\n\xfc\xd5\xf6\xc2\x9b\xd1o\
\xd1r.\xaf\xfe\x90\x016\x00\x00\x00\x00IEND\xaeB`\x82\x8a\x1a\x9f\x99' )
def getErrMailBitmap():
    """Return the embedded error-mail picture as a wx.Bitmap."""
    image = getErrMailImage()
    return wx.BitmapFromImage(image)
def getErrMailImage():
    """Decode the embedded error-mail PNG bytes into a wx.Image."""
    data = getErrMailData()
    return wx.ImageFromStream(cStringIO.StringIO(data))
def getErrMailIcon():
    """Return the error-mail picture wrapped in a wx.Icon."""
    bitmap = getErrMailBitmap()
    icon = wx.EmptyIcon()
    icon.CopyFromBitmap(bitmap)
    return icon
<|file_name|>test_configexc.py<|end_file_name|><|fim▁begin|># vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2017 Florian Bruhin (The Compiler) <[email protected]>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for qutebrowser.config.configexc."""
import textwrap
import pytest
from qutebrowser.config import configexc
from qutebrowser.utils import usertypes
def test_validation_error():
    """ValidationError formats value and message; no option is attached."""
    exc = configexc.ValidationError('val', 'msg')
    assert str(exc) == "Invalid value 'val' - msg"
    assert exc.option is None
@pytest.mark.parametrize('deleted, renamed, expected', [
    (False, None, "No option 'opt'"),
    (True, None, "No option 'opt' (this option was removed from qutebrowser)"),
    (False, 'new', "No option 'opt' (this option was renamed to 'new')"),
])
def test_no_option_error(deleted, renamed, expected):
    """NoOptionError records the option and explains deletion/rename."""
    err = configexc.NoOptionError('opt', deleted=deleted, renamed=renamed)
    assert err.option == 'opt'
    assert str(err) == expected
def test_no_option_error_clash():
    """Passing both deleted and renamed is a programming error."""
    kwargs = dict(deleted=True, renamed='foo')
    with pytest.raises(AssertionError):
        configexc.NoOptionError('opt', **kwargs)
def test_backend_error():
    """BackendError's message names the unavailable backend."""
    exc = configexc.BackendError(usertypes.Backend.QtWebKit)
    expected = "This setting is not available with the QtWebKit backend!"
    assert str(exc) == expected
def test_desc_with_text():
    """ConfigErrorDesc.with_text appends extra context to the text."""
    base = configexc.ConfigErrorDesc("Error text", Exception("Exception text"))
    extended = base.with_text("additional text")
    assert str(extended) == 'Error text (additional text): Exception text'
@pytest.fixture
def errors():
    """A ConfigFileErrors with one plain and one traceback-carrying error."""
    plain = configexc.ConfigErrorDesc("Error text 1", Exception("Exception 1"))
    with_tb = configexc.ConfigErrorDesc("Error text 2", Exception("Exception 2"),
                                        "Fake traceback")
    return configexc.ConfigFileErrors("config.py", [plain, with_tb])
def test_config_file_errors_str(errors):
    """Plain-text rendering lists the file, then each error indented."""
    expected = [
        'Errors occurred while reading config.py:',
        '  Error text 1: Exception 1',
        '  Error text 2: Exception 2',
    ]
    assert str(errors).splitlines() == expected
def test_config_file_errors_html(errors):
    """HTML rendering wraps errors in <li> items; tracebacks go in <pre>.

    NOTE(review): reconstructed — in the original chunk the
    <pre>/Fake traceback/</pre> lines had been displaced after the closing
    assertions, leaving the expected-HTML literal truncated.
    """
    html = errors.to_html()
    assert textwrap.dedent(html) == textwrap.dedent("""
        Errors occurred while reading config.py:
        <ul>
            <li>
                <b>Error text 1</b>: Exception 1
            </li>
            <li>
                <b>Error text 2</b>: Exception 2
                <pre>
Fake traceback
                </pre>
            </li>
        </ul>
    """)
    # Make sure the traceback is not indented
    assert '<pre>\nFake traceback\n' in html
<|file_name|>upnp_listener.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! UPnP listener for IP camera.
//!
extern crate url;
use std::sync::Arc;
use foxbox_taxonomy::manager::*;
use config_store::ConfigService;
use super::IPCameraAdapter;
use super::IpCameraServiceMap;
use upnp::{UpnpListener, UpnpService};
/// Listens for UPnP discovery advertisements and registers matching
/// D-Link IP cameras with the adapter framework.
pub struct IpCameraUpnpListener {
    manager: Arc<AdapterManager>,    // shared taxonomy adapter manager
    services: IpCameraServiceMap,    // registry of known camera services
    config: Arc<ConfigService>,      // persistent configuration store
}
impl IpCameraUpnpListener {
    /// Builds a boxed listener, cloning the shared manager/config handles
    /// and taking ownership of the camera service registry.
    pub fn new(manager: &Arc<AdapterManager>, services: IpCameraServiceMap, config: &Arc<ConfigService>) -> Box<Self> {
        let listener = IpCameraUpnpListener {
            config: config.clone(),
            manager: manager.clone(),
            services,
        };
        Box::new(listener)
    }
}
impl UpnpListener for IpCameraUpnpListener {<|fim▁hole|> // The D-Link cameras post an advertisement once when we do our search
// (when the adapter is started) and 4 times in a row about once every
// 3 minutes when they're running.
fn upnp_discover(&self, service: &UpnpService) -> bool {
macro_rules! try_get {
($hash:expr, $key:expr) => (match $hash.get($key) {
Some(val) => val,
None => return false
})
}
let model_name = try_get!(service.description, "/root/device/modelName");
let known_models = ["DCS-5010L", "DCS-5020L", "DCS-5025L", "Link-IpCamera"];
let model_name_str: &str = &model_name;
if !known_models.contains(&model_name_str) {
return false;
}
let url = try_get!(service.description, "/root/device/presentationURL");
let mut udn = try_get!(service.description, "/root/device/UDN").clone();
// The UDN is typically of the for uuid:SOME-UID-HERE, but some devices
// response with just a UUID. We strip off the uuid: prefix, if it exists
// and use the resulting UUID as the service id.
if udn.starts_with("uuid:") {
udn = String::from(&udn[5..]);
}
// TODO: We really need to update the IP/camera name in the event that
// it changed. I'll add this once we start persisting the camera
// information in a database.
let name = try_get!(service.description, "/root/device/friendlyName").clone();
let manufacturer = try_get!(service.description, "/root/device/manufacturer");
IPCameraAdapter::init_service(&self.manager, self.services.clone(), &self.config,
&udn, &url, &name, &manufacturer, &model_name).unwrap();
true
}
}<|fim▁end|> | // This will called each time that the device advertises itself using UPNP. |
<|file_name|>guiappwizarddialog.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms and
** conditions see http://www.qt.io/terms-conditions. For further information
** use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
****************************************************************************/
#include "guiappwizarddialog.h"
#include "filespage.h"
#include <projectexplorer/projectexplorerconstants.h>
namespace QmakeProjectManager {
namespace Internal {
// Defaults: generate a Designer .ui form, target desktop (not mobile).
GuiAppParameters::GuiAppParameters()
{
    designerForm = true;
    isMobileApplication = false;
}
// Wizard dialog for the "Qt Widgets Application" template.
// Assembles the wizard pages in order: intro, modules, (optional) kit
// setup, file naming, then any contributed extension pages.
GuiAppWizardDialog::GuiAppWizardDialog(const Core::BaseFileWizardFactory *factory,
                                       const QString &templateName,
                                       const QIcon &icon, QWidget *parent,
                                       const Core::WizardDialogParameters &parameters) :
    BaseQmakeProjectWizardDialog(factory, false, parent, parameters),
    m_filesPage(new FilesPage)
{
    setWindowIcon(icon);
    setWindowTitle(templateName);
    // Pre-select the core and gui modules for the new project.
    setSelectedModules(QLatin1String("core gui"), true);
    setIntroDescription(tr("This wizard generates a Qt Widgets Application "
                           "project. The application derives by default from QApplication "
                           "and includes an empty widget."));
    addModulesPage();
    // Skip the kit-selection page when the caller already fixed the kits.
    if (!parameters.extraValues().contains(QLatin1String(ProjectExplorer::Constants::PROJECT_KIT_IDS)))
        addTargetSetupPage();
    m_filesPage->setFormInputCheckable(true);
    m_filesPage->setClassTypeComboVisible(false);
    addPage(m_filesPage);
    addExtensionPages(extensionPages());
}
// Populate the base-class choices; the first entry becomes the preselection.
void GuiAppWizardDialog::setBaseClasses(const QStringList &baseClasses)
{
    m_filesPage->setBaseClassChoices(baseClasses);
    if (baseClasses.empty())
        return;
    m_filesPage->setBaseClassName(baseClasses.front());
}
// Forward the header/source/form filename suffixes to the files page.
void GuiAppWizardDialog::setSuffixes(const QString &header, const QString &source, const QString &form)
{
    m_filesPage->setSuffixes(header, source, form);
}

// Propagate the lower-case-file-names preference to the files page.
// NOTE(review): reconstructed — in the original chunk this definition was
// mangled: its body line referenced the undefined variable 'l' inside
// setSuffixes() while its signature was stranded at the end of the file.
void GuiAppWizardDialog::setLowerCaseFiles(bool l)
{
    m_filesPage->setLowerCaseFiles(l);
}
// Collect the .pro-level parameters (type, name, path, module selection).
QtProjectParameters GuiAppWizardDialog::projectParameters() const
{
    QtProjectParameters params;
    params.type = QtProjectParameters::GuiApp;
    params.flags |= QtProjectParameters::WidgetsRequiredFlag;
    params.fileName = projectName();
    params.path = path();
    params.selectedModules = selectedModulesList();
    params.deselectedModules = deselectedModulesList();
    return params;
}
// Collect the class/file naming choices from the files page and pick
// default widget dimensions depending on the target form factor.
GuiAppParameters GuiAppWizardDialog::parameters() const
{
    GuiAppParameters result;
    result.className = m_filesPage->className();
    result.baseClassName = m_filesPage->baseClassName();
    result.sourceFileName = m_filesPage->sourceFileName();
    result.headerFileName = m_filesPage->headerFileName();
    result.formFileName = m_filesPage->formFileName();
    result.designerForm = m_filesPage->formInputChecked();
    // FIXME: Is this really necessary?
    const bool isAndroid = isQtPlatformSelected("Android.Device.Type");
    result.isMobileApplication = isAndroid;
    result.widgetWidth = isAndroid ? 800 : 400;
    result.widgetHeight = isAndroid ? 480 : 300;
    return result;
}
} // namespace Internal
} // namespace QmakeProjectManager<|fim▁end|> | }
void GuiAppWizardDialog::setLowerCaseFiles(bool l)
{ |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# OrderedDict lives in the stdlib from Python 2.7; fall back to the copy
# bundled with gluon's simplejson on Python 2.6.
try:
    # Python 2.7
    from collections import OrderedDict
except ImportError:  # was a bare except: only the import failure should be caught
    # Python 2.6
    from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import current
from gluon.storage import Storage
from gluon.validators import IS_NOT_EMPTY, IS_EMPTY_OR, IS_IN_SET
from s3 import s3_date, S3Represent
T = current.T
settings = current.deployment_settings
"""
Template settings
All settings which are to configure a specific template are located here
Deployers should ideally not need to edit any other files outside of their template folder
"""
# Pre-Populate
# http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/PrePopulate
# Configure/disable pre-population of the database.
# To pre-populate the database On 1st run should specify directory(s) in
# /private/templates/
# eg:
# ["default"] (1 is a shortcut for this)
# ["Standard"]
# ["IFRC_Train"]
# ["roles", "user"]
# Unless doing a manual DB migration, where prepopulate = 0
# In Production, prepopulate = 0 (to save 1x DAL hit every page)
# Pre-populate the database from private/templates/EVASS on first run.
settings.base.prepopulate = ["EVASS"]
# Theme (folder to use for views/layout.html)
settings.base.theme = "EVASS"
# Zurb Foundation form styles for regular and filter forms.
settings.ui.formstyle = "foundation"
settings.ui.filter_formstyle = "foundation_inline"
# Enable Guided Tours
#settings.base.guided_tour = True
# Authentication settings
# These settings should be changed _after_ the 1st (admin) user is
# registered in order to secure the deployment
# Should users be allowed to register themselves?
#settings.security.self_registration = False
# Do new users need to verify their email address?
#settings.auth.registration_requires_verification = True
# Do new users need to be approved by an administrator prior to being able to login?
#settings.auth.registration_requires_approval = True
# Allow a new user to be linked to a record (and a new record will be created if it doesn't already exist)
#settings.auth.registration_link_user_to = {"staff":T("Staff"),
# "volunteer":T("Volunteer"),
# "member":T("Member")}
# Always notify the approver of a new (verified) user, even if the user is automatically approved
settings.auth.always_notify_approver = False
# The name of the teams that users are added to when they opt-in to receive alerts
#settings.auth.opt_in_team_list = ["Updates"]
# Uncomment this to set the opt in default to True
#settings.auth.opt_in_default = True
# Uncomment this to request the Mobile Phone when a user registers
#settings.auth.registration_requests_mobile_phone = True
# Uncomment this to have the Mobile Phone selection during registration be mandatory
#settings.auth.registration_mobile_phone_mandatory = True
# Uncomment this to request the Organisation when a user registers
#settings.auth.registration_requests_organisation = True
# Uncomment this to have the Organisation selection during registration be mandatory
#settings.auth.registration_organisation_required = True
# Uncomment this to have the Organisation input hidden unless the user enters a non-whitelisted domain
#settings.auth.registration_organisation_hidden = True
# Uncomment this to default the Organisation during registration
#settings.auth.registration_organisation_default = "My Organisation"
# Uncomment this to request the Organisation Group when a user registers
#settings.auth.registration_requests_organisation_group = True
# Uncomment this to have the Organisation Group selection during registration be mandatory
#settings.auth.registration_organisation_group_required = True
# Uncomment this to request the Site when a user registers
#settings.auth.registration_requests_site = True
# Uncomment this to allow Admin to see Organisations in user Admin even if the Registration doesn't request this
#settings.auth.admin_sees_organisation = True
# Uncomment to set the default role UUIDs assigned to newly-registered users
# This is a dictionary of lists, where the key is the realm that the list of roles applies to
# The key 0 implies not realm restricted
# The keys "organisation_id" and "site_id" can be used to indicate the user's "organisation_id" and "site_id"
#settings.auth.registration_roles = { 0: ["STAFF", "PROJECT_EDIT"]}
# Uncomment this to enable record approval
#settings.auth.record_approval = True
# Uncomment this and specify a list of tablenames for which record approval is required
#settings.auth.record_approval_required_for = ["project_project"]
# Uncomment this to request an image when users register
#settings.auth.registration_requests_image = True
# Uncomment this to direct newly-registered users to their volunteer page to be able to add extra details
# NB This requires Verification/Approval to be Off
# @ToDo: Extend to all optional Profile settings: Homepage, Twitter, Facebook, Mobile Phone, Image
#settings.auth.registration_volunteer = True
# Terms of Service to be able to Register on the system
# uses <template>/views/tos.html
#settings.auth.terms_of_service = True
# Uncomment this to allow users to Login using Gmail's SMTP
#settings.auth.gmail_domains = ["gmail.com"]
# Uncomment this to allow users to Login using OpenID
#settings.auth.openid = True
# Uncomment this to enable presence records on login based on HTML5 geolocations
#settings.auth.set_presence_on_login = True
# Uncomment this and specify a list of location levels to be ignored by presence records
#settings.auth.ignore_levels_for_presence = ["L0", "L1", "L2", "L3"]
# Uncomment this to enable the creation of new locations if a user logs in from an unknown location. Warning: This may lead to many useless location entries
#settings.auth.create_unknown_locations = True
# L10n settings
# Default timezone for users
#settings.L10n.utc_offset = "UTC +0000"
# Uncomment these to use US-style dates in English (localisations can still convert to local format)
#settings.L10n.time_format = T("%H:%M:%S")
# Italian-style dates: DD/MM/YYYY.
settings.L10n.date_format = T("%d/%m/%Y")
# Start week on Sunday
#settings.L10n.firstDOW = 0
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
settings.L10n.decimal_separator = ","
# Thousands separator for numbers (defaults to space)
settings.L10n.thousands_separator = "."
# Default Country Code for telephone numbers
# NOTE: the unary plus is a no-op -- this is simply the integer 39 (Italy).
settings.L10n.default_country_code = +39
# Make last name in person/user records mandatory
settings.L10n.mandatory_lastname = True
# Configure the list of Religions
# Maps stored key -> translated label; order is preserved for the UI.
settings.L10n.religions = OrderedDict([("unknown", T("Unknown")),
                                       ("bahai", T("Bahai")),
                                       ("buddhist", T("Buddhist")),
                                       ("christian", T("Christian")),
                                       ("hindu", T("Hindu")),
                                       ("jewish", T("Jewish")),
                                       ("muslim", T("Muslim")),
                                       ("other", T("other"))
                                       ])
# Configure the list of measurement units
# Edit the options order to change the measurement unit.
# option "1" will be used within EDEN.
# NOTE(review): "lenght" is a misspelling of "length", but the attribute
# names are presumably read elsewhere under this spelling -- do not rename
# here alone; confirm against consumers before fixing.
settings.L10n.measurement_lenght_m = {1: "m",
                                      2: "yd",
                                      }
settings.L10n.measurement_lenght_cm = {1: "cm",
                                       2: "in",
                                       3: "ft",
                                       }
settings.L10n.measurement_lenght_km = {1: "Km",
                                       2: "mi",
                                       3: "naut mi",
                                       }
settings.L10n.measurement_area_m = {1: "m²",
                                    2: "yd²",
                                    3: "ac",
                                    }
settings.L10n.measurement_area_km = {1: "Km²",
                                     2: "mi²",
                                     3: "ac",
                                     }
settings.L10n.measurement_vol_l = {1: "l",
                                   2: "US_gal",
                                   3: "Uk_gal",
                                   }
settings.L10n.measurement_weight_g = {1: "g",
                                      2: "oz",
                                      3: "ozt",
                                      }
settings.L10n.measurement_weight_kg = {1: "kg",
                                       2: "lb",
                                       }
# Configure marital status options
# Integer keys are stored in the DB; labels are translated for display.
# Default is 1 ("Unknown").
settings.L10n.maritalStatus_default = 1
settings.L10n.maritalStatus = {1: T("Unknown"),
                               2: T("Single"),
                               3: T("Cohabit"),
                               4: T("Married"),
                               5: T("Divorced"),
                               6: T("Separated"),
                               7: T("Widowed"),
                               8: T("Other"),
                               }
# Uncomment this to Translate CMS Series Names
#settings.L10n.translate_cms_series = True
# Uncomment this to Translate Layer Names
#settings.L10n.translate_gis_layer = True
# Uncomment this to Translate Location Names
#settings.L10n.translate_gis_location = True
# Finance settings
# Currencies offered in currency selectors; EUR is pre-selected.
settings.fin.currency_default = "EUR"
settings.fin.currencies = {
    "EUR": T("Euros"),
    "GBP": T("Great British Pounds"),
    "USD": T("United States Dollars"),
}
#settings.fin.currency_writable = False # False currently breaks things
# PDF settings
# Default page size for reports (defaults to A4)
#settings.base.paper_size = T("Letter")
# Location of Logo used in pdfs headers
#settings.ui.pdf_logo = "static/img/mylogo.png"
# GIS (Map) settings
# Size of the Embedded Map
# Change this if-required for your theme
# NB API can override this in specific modules
#settings.gis.map_height = 600
#settings.gis.map_width = 1000
# Restrict the Location Selector to just certain countries
# NB This can also be over-ridden for specific contexts later
# e.g. Activities filtered to those of parent Project
# Italy-only deployment.
settings.gis.countries = ["IT"]
# Uncomment to pass Addresses imported from CSV to a Geocoder to try and automate Lat/Lon
#settings.gis.geocode_imported_addresses = "google"
# Hide the Map-based selection tool in the Location Selector
#settings.gis.map_selector = False
# Hide LatLon boxes in the Location Selector
#settings.gis.latlon_selector = False
# Use Building Names as a separate field in Street Addresses?
settings.gis.building_name = False
# Use a non-default fillColor for Clustered points
#settings.gis.cluster_fill = "8087ff"
# Use a non-default strokeColor for Clustered points
#settings.gis.cluster_stroke = "2b2f76"
# Use a non-default fillColor for Selected points
#settings.gis.select_fill = "ffdc33"
# Use a non-default strokeColor for Selected points
#settings.gis.select_stroke = "ff9933"
# Display Resources recorded to Admin-Level Locations on the map
# @ToDo: Move into gis_config?
# Uncomment to fall back to country LatLon to show resources, if nothing better available
#settings.gis.display_L0 = True
# Currently unused
#settings.gis.display_L1 = False
# Uncomment this to do deduplicate lookups on Imports via PCode (as alternative to Name)
#settings.gis.lookup_pcode = True
# Set this if there will be multiple areas in which work is being done,
# and a menu to select among them is wanted.
#settings.gis.menu = "Maps"
# Maximum Marker Size
# (takes effect only on display)
#settings.gis.marker_max_height = 35
#settings.gis.marker_max_width = 30
# Duplicate Features so that they show wrapped across the Date Line?
# Points only for now
# lon<0 have a duplicate at lon+360
# lon>0 have a duplicate at lon-360
#settings.gis.duplicate_features = True
# Uncomment to use CMS to provide Metadata on Map Layers
#settings.gis.layer_metadata = True
# Uncomment to hide Layer Properties tool
#settings.gis.layer_properties = False
# Uncomment to hide the Base Layers folder in the LayerTree
#settings.gis.layer_tree_base = False
# Uncomment to hide the Overlays folder in the LayerTree
#settings.gis.layer_tree_overlays = False
# Uncomment to not expand the folders in the LayerTree by default
#settings.gis.layer_tree_expanded = False
# Uncomment to have custom folders in the LayerTree use Radio Buttons
#settings.gis.layer_tree_radio = True
# Uncomment to display the Map Legend as a floating DIV
#settings.gis.legend = "float"
# Mouse Position: 'normal', 'mgrs' or None
#settings.gis.mouse_position = "mgrs"
# Uncomment to hide the Overview map
#settings.gis.overview = False
# Uncomment to hide the permalink control
#settings.gis.permalink = False
# Uncomment to disable the ability to add PoIs to the main map
#settings.gis.pois = False
# PoIs to export in KML/OSM feeds from Admin locations
#settings.gis.poi_resources = ["cr_shelter", "hms_hospital", "org_office"]
# Uncomment to hide the ScaleLine control
#settings.gis.scaleline = False
# Uncomment to modify the Simplify Tolerance
#settings.gis.simplify_tolerance = 0.001
# Uncomment to hide the Zoom control
#settings.gis.zoomcontrol = False
# Messaging Settings
# If you wish to use a parser.py in another folder than "default"
#settings.msg.parser = "mytemplatefolder"
# Use 'soft' deletes
#settings.security.archive_not_delete = False
# AAA Settings
# Security Policy
# http://eden.sahanafoundation.org/wiki/S3AAA#System-widePolicy
# 1: Simple (default): Global as Reader, Authenticated as Editor
# 2: Editor role required for Update/Delete, unless record owned by session
# 3: Apply Controller ACLs
# 4: Apply both Controller & Function ACLs
# 5: Apply Controller, Function & Table ACLs
# 6: Apply Controller, Function, Table ACLs and Entity Realm
# 7: Apply Controller, Function, Table ACLs and Entity Realm + Hierarchy
# 8: Apply Controller, Function, Table ACLs, Entity Realm + Hierarchy and Delegations
#
settings.security.policy = 5
# Ownership-rule for records without owner:
# True = not owned by any user (strict ownership, default)
# False = owned by any authenticated user
#settings.security.strict_ownership = False
# Audit
# - can be a callable for custom hooks (return True to also perform normal logging, or False otherwise)
# NB Auditing (especially Reads) slows system down & consumes diskspace
#settings.security.audit_read = True
#settings.security.audit_write = True
# Lock-down access to Map Editing
#settings.security.map = True
# Allow non-MapAdmins to edit hierarchy locations? Defaults to True if not set.
# (Permissions can be set per-country within a gis_config)
#settings.gis.edit_Lx = False
# Allow non-MapAdmins to edit group locations? Defaults to False if not set.
#settings.gis.edit_GR = True
# Note that editing of locations used as regions for the Regions menu is always
# restricted to MapAdmins.
# Uncomment to disable that LatLons are within boundaries of their parent
#settings.gis.check_within_parent_boundaries = False
# Enable this for a UN-style deployment
#settings.ui.cluster = True
# Enable Social Media share buttons
#settings.ui.social_buttons = True
# Enable this to show pivot table options form by default
#settings.ui.hide_report_options = False
# Uncomment to show created_by/modified_by using Names not Emails
#settings.ui.auth_user_represent = "name"
# Uncomment to restrict the export formats available
#settings.ui.export_formats = ["kml", "pdf", "rss", "xls", "xml"]
# Uncomment to include an Interim Save button on CRUD forms
#settings.ui.interim_save = True
# -----------------------------------------------------------------------------
# CMS
# Uncomment to use Bookmarks in Newsfeed
#settings.cms.bookmarks = True
# Uncomment to use Rich Text editor in Newsfeed
#settings.cms.richtext = True
# Uncomment to show tags in Newsfeed
#settings.cms.show_tags = True
# -----------------------------------------------------------------------------
# Organisations
# Uncomment to use an Autocomplete for Organisation lookup fields
#settings.org.autocomplete = True
# Enable the use of Organisation Branches
settings.org.branches = True
# Enable the use of Organisation Groups & what their name is
#settings.org.groups = "Coalition"
#settings.org.groups = "Network"
# Enable the use of Organisation Regions
settings.org.regions = True
# Set the length of the auto-generated org/site code the default is 10
#settings.org.site_code_len = 3
# Set the label for Sites
#settings.org.site_label = "Facility"
# Uncomment to show the date when a Site (Facilities-only for now) was last contacted
#settings.org.site_last_contacted = True
# Uncomment to use an Autocomplete for Site lookup fields
#settings.org.site_autocomplete = True
# Extra fields to show in Autocomplete Representations
#settings.org.site_autocomplete_fields = ["instance_type", "location_id$L1", "organisation_id$name"]
# Uncomment to have Site Autocompletes search within Address fields
#settings.org.site_address_autocomplete = True
# Uncomment to hide inv & req tabs from Sites
#settings.org.site_inv_req_tabs = False
# Uncomment to add summary fields for Organisations/Offices for # National/International staff
#settings.org.summary = True
# Enable certain fields just for specific Organisations
# Requires a call to settings.set_org_dependent_field(field)
# empty list => disabled for all (including Admin)
#settings.org.dependent_fields = \
# {#"<table name>.<field name>" : ["<Organisation Name>"],
# "pr_person_details.mother_name" : [],
# "pr_person_details.father_name" : [],
# "pr_person_details.company" : [],
# "pr_person_details.affiliations" : [],
# "vol_volunteer.active" : [],
# "vol_volunteer_cluster.vol_cluster_type_id" : [],
# "vol_volunteer_cluster.vol_cluster_id" : [],
# "vol_volunteer_cluster.vol_cluster_position_id" : [],
# }
# -----------------------------------------------------------------------------
# Human Resource Management
# Uncomment to change the label for 'Staff'
#settings.hrm.staff_label = "Contacts"
# Uncomment to allow Staff & Volunteers to be registered without an email address
settings.hrm.email_required = False
# Uncomment to allow Staff & Volunteers to be registered without an Organisation
settings.hrm.org_required = False
# Uncomment to allow HR records to be deletable rather than just marking them as obsolete
settings.hrm.deletable = True
# Uncomment to filter certificates by (root) Organisation & hence not allow Certificates from other orgs to be added to a profile (except by Admin)
#settings.hrm.filter_certificates = True
# Uncomment to allow HRs to have multiple Job Titles
settings.hrm.multiple_job_titles = True
# Uncomment to hide the Staff resource
settings.hrm.show_staff = False
# Uncomment to allow hierarchical categories of Skills, which each need their own set of competency levels.
#settings.hrm.skill_types = True
# Uncomment to disable Staff experience
settings.hrm.staff_experience = False
# Uncomment to disable Volunteer experience
settings.hrm.vol_experience = False
# Uncomment to show the Organisation name in HR represents
settings.hrm.show_organisation = True
# Uncomment to consolidate tabs into a single CV
#settings.hrm.cv_tab = True
# Uncomment to consolidate tabs into Staff Record
#settings.hrm.record_tab = True
# Uncomment to disable the use of Volunteer Awards
#settings.hrm.use_awards = False
# Uncomment to disable the use of HR Certificates
#settings.hrm.use_certificates = False
# Uncomment to disable the use of HR Credentials
#settings.hrm.use_credentials = False
# Uncomment to disable the use of HR Description
#settings.hrm.use_description = False
# Uncomment to enable the use of HR Education
#settings.hrm.use_education = True
# Uncomment to disable the use of HR ID
#settings.hrm.use_id = False
# Uncomment to disable the use of HR Skills
#settings.hrm.use_skills = False
# Uncomment to disable the use of HR Teams
#settings.hrm.teams = False
# Uncomment to disable the use of HR Trainings
#settings.hrm.use_trainings = False
# -----------------------------------------------------------------------------
# Inventory Management
#settings.inv.collapse_tabs = False
# Uncomment to customise the label for Facilities in Inventory Management
#settings.inv.facility_label = "Facility"
# Uncomment if you need a simpler (but less accountable) process for managing stock levels
#settings.inv.direct_stock_edits = True
# Uncomment to call Stock Adjustments, 'Stock Counts'
#settings.inv.stock_count = True
# Use the term 'Order' instead of 'Shipment'
#settings.inv.shipment_name = "order"
# Uncomment to not track pack values
#settings.inv.track_pack_values = False
#settings.inv.show_mode_of_transport = True
#settings.inv.send_show_org = False
#settings.inv.send_show_time_in = True
#settings.inv.send_form_name = "Tally Out Sheet"
#settings.inv.send_short_name = "TO"
#settings.inv.send_ref_field_name = "Tally Out Number"
#settings.inv.recv_form_name = "Acknowledgement Receipt for Donations Received Form"
#settings.inv.recv_shortname = "ARDR"
# Types common to both Send and Receive
#settings.inv.shipment_types = {
# 0: T("-"),
# 1: T("Other Warehouse"),
# 2: T("Donation"),
# 3: T("Foreign Donation"),
# 4: T("Local Purchases"),
# 5: T("Confiscated Goods from Bureau Of Customs")
# }
#settings.inv.send_types = {
# 21: T("Distribution")
# }
#settings.inv.send_type_default = 1
#settings.inv.recv_types = {
# 32: T("Donation"),
# 34: T("Purchase"),
# }
#settings.inv.item_status = {
# 0: current.messages["NONE"],
# 1: T("Dump"),
# 2: T("Sale"),
# 3: T("Reject"),
# 4: T("Surplus")
# }
# -----------------------------------------------------------------------------
# Requests Management
# Uncomment to disable Inline Forms in Requests module
#settings.req.inline_forms = False
# Label for Inventory Requests
#settings.req.type_inv_label = "Donations"
# Label for People Requests
#settings.req.type_hrm_label = "Volunteers"
# Label for Requester
#settings.req.requester_label = "Site Contact"
#settings.req.requester_optional = True
# Uncomment if the User Account logging the Request is NOT normally the Requester
#settings.req.requester_is_author = False
# Filter Requester as being from the Site
#settings.req.requester_from_site = True
# Set the Requester as being an HR for the Site if no HR record yet & as Site contact if none yet exists
#settings.req.requester_to_site = True
#settings.req.date_writable = False
# Allow the status for requests to be set manually,
# rather than just automatically from commitments and shipments
#settings.req.status_writable = False
#settings.req.item_quantities_writable = True
#settings.req.skill_quantities_writable = True
#settings.req.show_quantity_transit = False
#settings.req.multiple_req_items = False
#settings.req.prompt_match = False
#settings.req.items_ask_purpose = False
# Uncomment to disable the Commit step in the workflow & simply move direct to Ship
#settings.req.use_commit = False
# Uncomment to have Donations include a 'Value' field
#settings.req.commit_value = True
# Uncomment to allow Donations to be made without a matching Request
#settings.req.commit_without_request = True
# Uncomment if the User Account logging the Commitment is NOT normally the Committer
#settings.req.committer_is_author = False
# Should Requests ask whether Security is required?
#settings.req.ask_security = True
# Should Requests ask whether Transportation is required?
#settings.req.ask_transport = True
#settings.req.use_req_number = False
#settings.req.generate_req_number = False
#settings.req.req_form_name = "Request Issue Form"
#settings.req.req_shortname = "RIS"
# Restrict the type of requests that can be made, valid values in the
# list are ["Stock", "People", "Other"]. If this is commented out then
# all types will be valid.
#settings.req.req_type = ["Stock"]
# Uncomment to enable Summary 'Site Needs' tab for Offices/Facilities
#settings.req.summary = True
# Uncomment to restrict adding new commits to Completed commits
#settings.req.req_restrict_on_complete = True
# Custom Crud Strings for specific req_req types
#settings.req.req_crud_strings = dict()
#ADD_ITEM_REQUEST = T("Make a Request for Donations")
# req_req Crud Strings for Item Request (type=1)
#settings.req.req_crud_strings[1] = Storage(
# title_create = ADD_ITEM_REQUEST,
# title_display = T("Request for Donations Details"),
# title_list = T("Requests for Donations"),
# title_update = T("Edit Request for Donations"),
# title_search = T("Search Requests for Donations"),
# subtitle_create = ADD_ITEM_REQUEST,
# label_list_button = T("List Requests for Donations"),
# label_create_button = ADD_ITEM_REQUEST,
# label_delete_button = T("Delete Request for Donations"),
# msg_record_created = T("Request for Donations Added"),
# msg_record_modified = T("Request for Donations Updated"),
# msg_record_deleted = T("Request for Donations Canceled"),
# msg_list_empty = T("No Requests for Donations"))
#ADD_PEOPLE_REQUEST = T("Make a Request for Volunteers")
# req_req Crud Strings for People Request (type=3)
#settings.req.req_crud_strings[3] = Storage(
# title_create = ADD_PEOPLE_REQUEST,
# title_display = T("Request for Volunteers Details"),
# title_list = T("Requests for Volunteers"),
# title_update = T("Edit Request for Volunteers"),
# title_search = T("Search Requests for Volunteers"),
# subtitle_create = ADD_PEOPLE_REQUEST,
# label_list_button = T("List Requests for Volunteers"),
# label_create_button = ADD_PEOPLE_REQUEST,
# label_delete_button = T("Delete Request for Volunteers"),
# msg_record_created = T("Request for Volunteers Added"),
# msg_record_modified = T("Request for Volunteers Updated"),
# msg_record_deleted = T("Request for Volunteers Canceled"),
# msg_list_empty = T("No Requests for Volunteers"))
# -----------------------------------------------------------------------------
# Supply
#settings.supply.use_alt_name = False
# Do not edit after deployment
#settings.supply.catalog_default = T("Default")
# -----------------------------------------------------------------------------
# Projects
# Uncomment this to use settings suitable for a global/regional organisation (e.g. DRR)
settings.project.mode_3w = True
# Uncomment this to use DRR (Disaster Risk Reduction) extensions
settings.project.mode_drr = True
# Uncomment this to use settings suitable for detailed Task management
settings.project.mode_task = True
# Uncomment this to use Activities for projects
settings.project.activities = True
# Uncomment this to use Activity Types for Activities/Projects
settings.project.activity_types = True
# Uncomment this to use Codes for projects
settings.project.codes = True
# Uncomment this to call project locations 'Communities'
settings.project.community = True
# Uncomment this to enable Hazards in 3W projects
settings.project.hazards = True
# Uncomment this to enable Milestones in projects
settings.project.milestones = True
# Uncomment this to link Activities to Projects
settings.project.projects = True
# Uncomment this to disable Sectors in projects
#settings.project.sectors = False
# Uncomment this to enable Themes in 3W projects
#settings.project.themes = True
# Uncomment this to use Theme Percentages for projects
settings.project.theme_percentages = True
# Uncomment this to use multiple Budgets per project
settings.project.multiple_budgets = True
# Uncomment this to use multiple Organisations per project
settings.project.multiple_organisations = True
# Uncomment this to customise
# Links to Filtered Components for Donors & Partners
#settings.project.organisation_roles = {
# 1: T("Lead Implementer"), # T("Host National Society")
# 2: T("Partner"), # T("Partner National Society")
# 3: T("Donor"),
# 4: T("Customer"), # T("Beneficiary")?
# 5: T("Super"), # T("Beneficiary")?
#}
#settings.project.organisation_lead_role = 1
# -----------------------------------------------------------------------------
# Filter Manager
#settings.search.filter_manager = False
# if you want to have videos appearing in /default/video
#settings.base.youtube_id = [dict(id = "introduction",
# title = T("Introduction"),
# video_id = "HR-FtR2XkBU"),]
# -----------------------------------------------------------------------------
def customise_pr_person_resource(r, tablename):
""" Customise pr_person resource """
T = current.T
s3db = current.s3db
table = r.resource.table
# Disallow "unknown" gender and defaults to "male"
evr_gender_opts = dict((k, v) for k, v in s3db.pr_gender_opts.items()
if k in (2, 3))
gender = table.gender
gender.requires = IS_IN_SET(evr_gender_opts, zero=None)
gender.default = 3
if r.controller == "evr":
# Last name and date of birth mandatory in EVR module
table.last_name.requires = IS_NOT_EMPTY(
error_message = T("Please enter a last name"))
dob_requires = s3_date("dob",
future = 0,
past = 1320,
empty = False).requires
dob_requires.error_message = T("Please enter a date of birth")
table.date_of_birth.requires = dob_requires
# Disable unneeded physical details
pdtable = s3db.pr_physical_description
hide_fields = [
"race",
"complexion",
"height",
"hair_length",
"hair_style",
"hair_baldness",
"facial_hair_type",
"facial_hair_length",
"facial_hair_color",
"facial_hair_comment",
"body_hair",
"skin_marks",
"medical_conditions"
]
for fname in hide_fields:
field = pdtable[fname]
field.readable = field.writable = False
# This set is suitable for Italy
ethnicity_opts = ("Italian",
"Chinese",
"Albanese",
"Philippine",
"Pakistani",
"English",
"African",
"Other",
"Unknown",
)
ethnicity_opts = dict((v, T(v)) for v in ethnicity_opts)
ethnicity = pdtable.ethnicity
ethnicity.readable = ethnicity.writable = True
ethnicity.requires = IS_EMPTY_OR(IS_IN_SET(ethnicity_opts,
sort=True))
ethnicity.represent = S3Represent(options=ethnicity_opts,
translate=True)
# Enable place of birth
place_of_birth = s3db.pr_person_details.place_of_birth
place_of_birth.readable = place_of_birth.writable = True
settings.customise_pr_person_resource = customise_pr_person_resource
# -----------------------------------------------------------------------------
# Comment/uncomment modules here to disable/enable them
# @ToDo: Have the system automatically enable migrate if a module is enabled
# Modules menu is defined in modules/eden/menu.py
settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = T("Home"),
restricted = False, # Use ACLs to control access to this module
access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
module_type = None # No Menu
)),
("errors", Storage(
name_nice = T("Ticket Viewer"),
#description = "Needed for Breadcrumbs",
restricted = False,
module_type = None # No Menu
)),
("sync", Storage(
name_nice = T("Synchronization"),
#description = "Synchronization",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("tour", Storage(
name_nice = T("Guided Tour Functionality"),
module_type = None,
)),
("translate", Storage(
name_nice = T("Translation Functionality"),
#description = "Selective translation of strings based on module.",
module_type = None,
)),
# Uncomment to enable internal support requests
#("support", Storage(
# name_nice = T("Support"),
# #description = "Support Requests",
# restricted = True,
# module_type = None # This item is handled separately for the menu
# )),
("gis", Storage(
name_nice = T("Map"),
#description = "Situation Awareness & Geospatial Analysis",
restricted = True,
module_type = 6, # 6th item in the menu
)),
("pr", Storage(
name_nice = T("Person Registry"),
#description = "Central point to record details on People",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
module_type = 10
)),
("org", Storage(
name_nice = T("Organizations"),
#description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
restricted = True,
module_type = 2
)),
# All modules below here should be possible to disable safely
("hrm", Storage(
name_nice = T("Staff"),
#description = "Human Resources Management",
restricted = True,
module_type = 3,
)),
("vol", Storage(
name_nice = T("Volunteers"),
#description = "Human Resources Management",
restricted = True,
module_type = 4,
)),
("cms", Storage(
name_nice = T("Content Management"),
#description = "Content Management System",
restricted = True,
module_type = 9,
)),
("doc", Storage(
name_nice = T("Documents"),
#description = "A library of digital resources, such as photos, documents and reports",
restricted = True,
module_type = None,
)),
("msg", Storage(
name_nice = T("Messaging"),
#description = "Sends & Receives Alerts via Email & SMS",
restricted = True,
# The user-visible functionality of this module isn't normally required. Rather it's main purpose is to be accessed from other modules.
module_type = 1,
)),
("supply", Storage(
name_nice = T("Supply Chain Management"),
#description = "Used within Inventory Management, Request Management and Asset Management",
restricted = True,
module_type = None, # Not displayed
)),
("inv", Storage(
name_nice = T("Warehouses"),
#description = "Receiving and Sending Items",
restricted = True,
module_type = 4
)),
#("asset", Storage(
#name_nice = T("Assets"),
##description = "Recording and Assigning Assets",
#restricted = True,
#module_type = 5,
#)),
#("req", Storage(
# name_nice = T("Requests"),
# #description = "Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.",
# restricted = True,
# module_type = 10,
#)),
("cr", Storage(
name_nice = T("Shelters"),
#description = "Tracks the location, capacity and breakdown of victims in Shelters",
restricted = True,
module_type = 5<|fim▁hole|> #description = "Helps to monitor status of hospitals",
restricted = True,
module_type = 10
)),
("irs", Storage(
name_nice = T("Incidents"),
#description = "Incident Reporting System",
restricted = True,
module_type = 6
)),
#("dvi", Storage(
#name_nice = T("Disaster Victim Identification"),
##description = "Disaster Victim Identification",
#restricted = True,
#module_type = 10,
##access = "|DVI|", # Only users with the DVI role can see this module in the default menu & access the controller
#)),
#("dvr", Storage(
#name_nice = T("Disaster Victim Registry"),
##description = "Allow affected individuals & households to register to receive compensation and distributions",
#restricted = True,
#module_type = 10,
#)),
# @todo: implement evr module
# ("evr", Storage(
# name_nice = T("Evacuees"),
# #description = "Evacuees Registry",
# restricted = True, # use Access Control Lists to see this module
# module_type = 7
# )),
("event", Storage(
name_nice = T("Events"),
#description = "Activate Events (e.g. from Scenario templates) for allocation of appropriate Resources (Human, Assets & Facilities).",
restricted = True,
module_type = 8,
)),
("stats", Storage(
name_nice = T("Statistics"),
#description = "Manages statistics",
restricted = True,
module_type = None,
)),
# @ToDo: Rewrite in a modern style
#("budget", Storage(
# name_nice = T("Budgeting Module"),
# #description = "Allows a Budget to be drawn up",
# restricted = True,
# module_type = 10
# )),
])<|fim▁end|> | )),
("hms", Storage(
name_nice = T("Hospitals"), |
<|file_name|>cloned_instead_of_copied.rs<|end_file_name|><|fim▁begin|>// run-rustfix
#![warn(clippy::cloned_instead_of_copied)]
fn main() {
// yay
let _ = [1].iter().cloned();
let _ = vec!["hi"].iter().cloned();
let _ = Some(&1).cloned();
let _ = Box::new([1].iter()).cloned();<|fim▁hole|> // nay
let _ = [String::new()].iter().cloned();
let _ = Some(&String::new()).cloned();
}<|fim▁end|> | let _ = Box::new(Some(&1)).cloned();
|
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
#encoding=UTF-8
'''
Created on 2014-5-15
@author: XIAO Zhen
'''
'''哈哈'''
import Tkinter as tk
import time
import random
class Application(tk.Frame):
def __init__(self, master=None):
tk.Frame.__init__(self, master)
self.winfo_toplevel().rowconfigure(0,minsize = 1)
self.winfo_toplevel().columnconfigure(0,minsize = 1)
self.grid()
self.createWidgets()
self.random()
self.random()
self.focus_set()
self.bind("<Up>", self.callback)
self.bind("<Down>", self.callback)
self.bind("<Left>", self.callback)
self.bind("<Right>", self.callback)
self.pack()
def createWidgets(self):
#direction buttons, up down left and right
self.direction = {}
self.direction['up'] = tk.Button(self, text = '⇩', height = 2)
self.direction['up'].configure(command = (lambda dir = 'to_down': self.todirection(dir)))
self.direction['up'].grid(row = 0,column = 1, columnspan = 4, sticky = tk.W + tk.E)
self.direction['down'] = tk.Button(self, text = '⇧', height = 2)
self.direction['down'].configure(command = (lambda dir = 'to_up': self.todirection(dir)))
self.direction['down'].grid(row = 5,column = 1, columnspan = 4, sticky = tk.W + tk.E)
self.direction['left'] = tk.Button(self, text = '⇨', width = 3)
self.direction['left'].configure(command = (lambda dir = 'to_right': self.todirection(dir)))
self.direction['left'].grid(row = 1,column = 0, rowspan = 4, sticky = tk.N + tk.S)
self.direction['right'] = tk.Button(self, text = '⇦', width = 3)
self.direction['right'].configure(command = (lambda dir = 'to_left': self.todirection(dir)))
self.direction['right'].grid(row = 1,column = 5, rowspan = 4, sticky = tk.N + tk.S)
self.buttons = []
for i in range(0,16):
self.buttons.append(tk.Button(self, text = '0', height = 2, width = 5, background = "#FFFFFF", fg = '#FFFFFF'))
self.buttons[i].configure(command = (lambda b = self.buttons[i]: self.setNumber(b)))
self.buttons[i].grid(row = i/4 + 1,column=i%4 + 1)
#self.triggerButton = tk.Button(self, text = 'Print')
#self.triggerButton.grid(row = 0, column=1,ipadx = 100)
#control buttons, including mainly start and mode selections
self.controls = {}
self.controls['startgame'] = tk.Button(self, text = 'Start', height = 2, width = 5, command=self.startgame)
self.controls['startgame'].grid(row = 6, column = 4)
self.controls['test1'] = tk.Button(self, text = 'Test1', height = 2, width = 5, command=self.random)
self.controls['test1'].grid(row = 6,column = 1)
self.controls['test2'] = tk.Button(self, text = 'Test2', height = 2, width = 5, command=self.test2)
self.controls['test2'].grid(row = 6,column = 2)
self.controls['test3'] = tk.Button(self, text = 'Test3', height = 2, width = 5, command=self.test3)
self.controls['test3'].grid(row = 6,column = 3)
def setNumber(self,button):
pass
def startgame(self):
print('start game!')
def random(self):
empty = []
rand = -1
for i in range(0,16):
if self.buttons[i]['text'] == '0':
empty.append(i)
if len(empty) != 0:
rand = random.randrange(0,len(empty))
self.buttons[empty[rand]]['text'] = str(random.randrange(1,3) * 2)
self.setColors()
else:
print("no more fields")
if rand != -1:
self.buttons[empty[rand]].configure(background = '#0404B4', fg = '#000000')
def test2(self):
print('test2')
self.buttons[0]['text'] = '2'
self.buttons[1]['text'] = '2'
self.buttons[2]['text'] = '4'
self.buttons[3]['text'] = '8'
self.buttons[4]['text'] = '4'
self.buttons[5]['text'] = '2'
self.buttons[6]['text'] = '2'
self.buttons[7]['text'] = '8'
self.buttons[8]['text'] = '4'
self.buttons[9]['text'] = '2'
self.buttons[10]['text'] = '2'
self.buttons[11]['text'] = '8'
self.buttons[12]['text'] = '8'
self.buttons[13]['text'] = '8'
self.buttons[14]['text'] = '8'
self.buttons[15]['text'] = '8'
self.setColors()
def test3(self):
print('test3')
def callback(self,event):
if event.keysym == 'Up':
self.todirection('to_up')
elif event.keysym == 'Down':
self.todirection('to_down')
elif event.keysym == 'Left':
self.todirection('to_left')
elif event.keysym == 'Right':
self.todirection('to_right')
def sum(self,list):
for i in range (len(list),5):
list.append(0)
for i in range(0,3):
if list[i] == list[i+1] and list[i] != 0:
list[i] += list[i+1]
list[i+1] = 0
re = []
for i in range(0,4):
if list[i] != 0:<|fim▁hole|> re.append(0)
return re
def todirection(self, direction):
flag = 0
if direction == 'to_right':
#rows
for i in range(0, 4):
#columns:
list = []
for j in range(3, -1, -1):
if self.buttons[i*4 + j] != '0':
list.append(int(self.buttons[i*4 + j]['text']))
re = self.sum(list)
k = 0
for j in range(3, -1, -1):
if self.buttons[i*4 + j]['text'] != str(re[k]):
flag = 1
self.buttons[i*4 + j]['text'] = str(re[k])
k += 1
elif direction == 'to_left':
#rows
for i in range(0, 4):
#columns:
list = []
for j in range(0, 4):
if self.buttons[i*4 + j] != '0':
list.append(int(self.buttons[i*4 + j]['text']))
re = self.sum(list)
k = 0
for j in range(0, 4):
if self.buttons[i*4 + j]['text'] != str(re[k]):
flag = 1
self.buttons[i*4 + j]['text'] = str(re[k])
k += 1
elif direction == 'to_up':
#column
for i in range(0, 4):
#row:
list = []
for j in range(0, 4):
if self.buttons[i + j*4] != '0':
list.append(int(self.buttons[i + j*4]['text']))
re = self.sum(list)
k = 0
for j in range(0, 4):
if self.buttons[i + j*4]['text'] != str(re[k]):
flag = 1
self.buttons[i + j*4]['text'] = str(re[k])
k += 1
elif direction == 'to_down':
#column
for i in range(0, 4):
#rows:
list = []
for j in range(3, -1, -1):
if self.buttons[i + j*4] != '0':
list.append(int(self.buttons[i + j*4]['text']))
re = self.sum(list)
k = 0
for j in range(3, -1, -1):
if self.buttons[i + j*4]['text'] != str(re[k]):
flag = 1
self.buttons[i + j*4]['text'] = str(re[k])
k += 1
if flag != 0:
self.random()
def setColors(self):
for i in range(0,16):
self.setColor(self.buttons[i])
def setColor(self,button):
tmp = button['text']
if tmp == '0':
button.configure(background = '#FFFFFF', fg = '#FFFFFF')
elif tmp == '2':
button.configure(background = '#F7F2E0', fg = '#000000')
elif tmp == '4':
button.configure(background = '#F3E2A9', fg = '#000000')
elif tmp == '8':
button.configure(background = '#F7BE81', fg = '#000000')
elif tmp == '16':
button.configure(background = '#FF8000', fg = '#000000')
elif tmp == '32':
button.configure(background = '#FF4000', fg = '#000000')
elif tmp == '64':
button.configure(background = '#FF0000', fg = '#000000')
elif tmp == '128':
button.configure(background = '#B18904', fg = '#000000')
elif tmp == '256':
button.configure(background = '#8A4B08', fg = '#000000')
elif tmp == '512':
button.configure(background = '#8A0808', fg = '#000000')
elif tmp == '1024':
button.configure(background = '#00FFFF', fg = '#000000')
elif tmp == '2048':
button.configure(background = '#00FFFF', fg = '#000000')
elif tmp == '4096':
button.configure(background = '#01DFD7', fg = '#000000')
else:
button.configure(background = '#0404B4', fg = '#000000')
if __name__ == '__main__':
print("Hello World!")
app = Application()
app.master.title('Sample application')
app.mainloop()
pass<|fim▁end|> | re.append(list[i])
for i in range (len(re),5): |
<|file_name|>window-header-menu.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'window-header-menu',
templateUrl: './window-header-menu.component.html',
styleUrls: ['./window-header-menu.component.scss']
})
export class WindowHeaderMenuComponent implements OnInit {
constructor() {
}
ngOnInit() {
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>mock.py<|end_file_name|><|fim▁begin|># mock.py
# Test tools for mocking and patching.
# Copyright (C) 2007-2012 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# mock 0.8.0
# http://www.voidspace.org.uk/python/mock/
# Released subject to the BSD License
# Please see http://www.voidspace.org.uk/python/license.shtml
# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
# Comments, suggestions and bug reports welcome.
__all__ = (
'Mock',
'MagicMock',
'mocksignature',
'patch',
'sentinel',
'DEFAULT',
'ANY',
'call',
'create_autospec',
'FILTER_DIR',
'NonCallableMock',
'NonCallableMagicMock',
)
__version__ = '0.8.0'
import pprint
import sys
try:
import inspect
except ImportError:
# for alternative platforms that
# may not have inspect
inspect = None
try:
from functools import wraps
except ImportError:
# Python 2.4 compatibility
def wraps(original):
def inner(f):
f.__name__ = original.__name__
f.__doc__ = original.__doc__
f.__module__ = original.__module__
return f
return inner
try:
unicode
except NameError:
# Python 3
basestring = unicode = str
try:
long
except NameError:
# Python 3
long = int
try:
BaseException
except NameError:
# Python 2.4 compatibility
BaseException = Exception
try:
next
except NameError:
def next(obj):
return obj.next()
BaseExceptions = (BaseException,)
if 'java' in sys.platform:
# jython
import java
BaseExceptions = (BaseException, java.lang.Throwable)
try:
_isidentifier = str.isidentifier
except AttributeError:
# Python 2.X
import keyword
import re
regex = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
def _isidentifier(string):
if string in keyword.kwlist:
return False
return regex.match(string)
inPy3k = sys.version_info[0] == 3
# Needed to work around Python 3 bug where use of "super" interferes with
# defining __class__ as a descriptor
_super = super
self = 'im_self'
builtin = '__builtin__'
if inPy3k:
self = '__self__'
builtin = 'builtins'
FILTER_DIR = True
def _is_instance_mock(obj):
# can't use isinstance on Mock objects because they override __class__
# The base class for all mocks is NonCallableMock
return issubclass(type(obj), NonCallableMock)
def _is_exception(obj):
return (
isinstance(obj, BaseExceptions) or
isinstance(obj, ClassTypes) and issubclass(obj, BaseExceptions)
)
class _slotted(object):
__slots__ = ['a']
DescriptorTypes = (
type(_slotted.a),
property,
)
# getsignature and mocksignature heavily "inspired" by
# the decorator module: http://pypi.python.org/pypi/decorator/
# by Michele Simionato
def _getsignature(func, skipfirst):
if inspect is None:
raise ImportError('inspect module not available')
if inspect.isclass(func):
func = func.__init__
# will have a self arg
skipfirst = True
elif not (inspect.ismethod(func) or inspect.isfunction(func)):
func = func.__call__
regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
# instance methods need to lose the self argument
if getattr(func, self, None) is not None:
regargs = regargs[1:]
_msg = ("_mock_ is a reserved argument name, can't mock signatures using "
"_mock_")
assert '_mock_' not in regargs, _msg
if varargs is not None:
assert '_mock_' not in varargs, _msg
if varkwargs is not None:
assert '_mock_' not in varkwargs, _msg
if skipfirst:
regargs = regargs[1:]
signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults,
formatvalue=lambda value: "")
return signature[1:-1], func
def _getsignature2(func, skipfirst, instance=False):
if inspect is None:
raise ImportError('inspect module not available')
if isinstance(func, ClassTypes) and not instance:
try:
func = func.__init__
except AttributeError:
return
skipfirst = True
elif not isinstance(func, FunctionTypes):
# for classes where instance is True we end up here too
try:
func = func.__call__
except AttributeError:
return
try:
regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
except TypeError:
# C function / method, possibly inherited object().__init__
return
# instance methods and classmethods need to lose the self argument
if getattr(func, self, None) is not None:
regargs = regargs[1:]
if skipfirst:
# this condition and the above one are never both True - why?
regargs = regargs[1:]
signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults,
formatvalue=lambda value: "")
return signature[1:-1], func
def _check_signature(func, mock, skipfirst, instance=False):
if not _callable(func):
return
result = _getsignature2(func, skipfirst, instance)
if result is None:
return
signature, func = result
# can't use self because "self" is common as an argument name
# unfortunately even not in the first place
src = "lambda _mock_self, %s: None" % signature
checksig = eval(src, {})
_copy_func_details(func, checksig)
type(mock)._mock_check_sig = checksig
def _copy_func_details(func, funcopy):
funcopy.__name__ = func.__name__
funcopy.__doc__ = func.__doc__
#funcopy.__dict__.update(func.__dict__)
funcopy.__module__ = func.__module__
if not inPy3k:
funcopy.func_defaults = func.func_defaults
return
funcopy.__defaults__ = func.__defaults__
funcopy.__kwdefaults__ = func.__kwdefaults__
def _callable(obj):
if isinstance(obj, ClassTypes):
return True
if getattr(obj, '__call__', None) is not None:
return True
return False
def _is_list(obj):
# checks for list or tuples
# XXXX badly named!
return type(obj) in (list, tuple)
def _instance_callable(obj):
"""Given an object, return True if the object is callable.
For classes, return True if instances would be callable."""
if not isinstance(obj, ClassTypes):
# already an instance
return getattr(obj, '__call__', None) is not None
klass = obj
# uses __bases__ instead of __mro__ so that we work with old style classes
if klass.__dict__.get('__call__') is not None:
return True
for base in klass.__bases__:
if _instance_callable(base):
return True
return False
def _set_signature(mock, original, instance=False):
# creates a function with signature (*args, **kwargs) that delegates to a
# mock. It still does signature checking by calling a lambda with the same
# signature as the original. This is effectively mocksignature2.
if not _callable(original):
return
skipfirst = isinstance(original, ClassTypes)
result = _getsignature2(original, skipfirst, instance)
if result is None:
# was a C function (e.g. object().__init__ ) that can't be mocked
return
signature, func = result
src = "lambda %s: None" % signature
context = {'_mock_': mock}
checksig = eval(src, context)
_copy_func_details(func, checksig)
name = original.__name__
if not _isidentifier(name):
name = 'funcopy'
context = {'checksig': checksig, 'mock': mock}
src = """def %s(*args, **kwargs):
checksig(*args, **kwargs)
return mock(*args, **kwargs)""" % name
exec (src, context)
funcopy = context[name]
_setup_func(funcopy, mock)
return funcopy
def mocksignature(func, mock=None, skipfirst=False):
"""
mocksignature(func, mock=None, skipfirst=False)
Create a new function with the same signature as `func` that delegates
to `mock`. If `skipfirst` is True the first argument is skipped, useful
for methods where `self` needs to be omitted from the new function.
If you don't pass in a `mock` then one will be created for you.
The mock is set as the `mock` attribute of the returned function for easy
access.
Functions returned by `mocksignature` have many of the same attributes
and assert methods as a mock object.
`mocksignature` can also be used with classes. It copies the signature of
the `__init__` method.
When used with callable objects (instances) it copies the signature of the
`__call__` method.
"""
if mock is None:
mock = Mock()
signature, func = _getsignature(func, skipfirst)
src = "lambda %(signature)s: _mock_(%(signature)s)" % {
'signature': signature
}
funcopy = eval(src, dict(_mock_=mock))
_copy_func_details(func, funcopy)
_setup_func(funcopy, mock)
return funcopy
def _setup_func(funcopy, mock):
funcopy.mock = mock
# can't use isinstance with mocks
if not _is_instance_mock(mock):
return
def assert_called_with(*args, **kwargs):
return mock.assert_called_with(*args, **kwargs)
def assert_called_once_with(*args, **kwargs):
return mock.assert_called_once_with(*args, **kwargs)
def assert_has_calls(*args, **kwargs):
return mock.assert_has_calls(*args, **kwargs)
def assert_any_call(*args, **kwargs):
return mock.assert_any_call(*args, **kwargs)
def reset_mock():
funcopy.method_calls = _CallList()
funcopy.mock_calls = _CallList()
mock.reset_mock()
ret = funcopy.return_value
if _is_instance_mock(ret) and not ret is mock:
ret.reset_mock()
funcopy.called = False
funcopy.call_count = 0
funcopy.call_args = None
funcopy.call_args_list = _CallList()
funcopy.method_calls = _CallList()
funcopy.mock_calls = _CallList()
funcopy.return_value = mock.return_value
funcopy.side_effect = mock.side_effect
funcopy._mock_children = mock._mock_children
funcopy.assert_called_with = assert_called_with
funcopy.assert_called_once_with = assert_called_once_with
funcopy.assert_has_calls = assert_has_calls
funcopy.assert_any_call = assert_any_call
funcopy.reset_mock = reset_mock
mock._mock_signature = funcopy
def _is_magic(name):
return '__%s__' % name[2:-2] == name
class _SentinelObject(object):
"A unique, named, sentinel object."
def __init__(self, name):
self.name = name
def __repr__(self):
return 'sentinel.%s' % self.name
class _Sentinel(object):
"""Access attributes to return a named object, usable as a sentinel."""
def __init__(self):
self._sentinels = {}
def __getattr__(self, name):
if name == '__bases__':
# Without this help(mock) raises an exception
raise AttributeError
return self._sentinels.setdefault(name, _SentinelObject(name))
sentinel = _Sentinel()
DEFAULT = sentinel.DEFAULT
class OldStyleClass:
pass
ClassType = type(OldStyleClass)
def _copy(value):
if type(value) in (dict, list, tuple, set):
return type(value)(value)
return value
ClassTypes = (type,)
if not inPy3k:
ClassTypes = (type, ClassType)
_allowed_names = set(
[
'return_value', '_mock_return_value', 'side_effect',
'_mock_side_effect', '_mock_parent', '_mock_new_parent',
'_mock_name', '_mock_new_name'
]
)
def _mock_signature_property(name):
_allowed_names.add(name)
_the_name = '_mock_' + name
def _get(self, name=name, _the_name=_the_name):
sig = self._mock_signature
if sig is None:
return getattr(self, _the_name)
return getattr(sig, name)
def _set(self, value, name=name, _the_name=_the_name):
sig = self._mock_signature
if sig is None:
self.__dict__[_the_name] = value
else:
setattr(sig, name, value)
return property(_get, _set)
class _CallList(list):
def __contains__(self, value):
if not isinstance(value, list):
return list.__contains__(self, value)
len_value = len(value)
len_self = len(self)
if len_value > len_self:
return False
for i in range(0, len_self - len_value + 1):
sub_list = self[i:i+len_value]
if sub_list == value:
return True
return False
def __repr__(self):
return pprint.pformat(list(self))
def _check_and_set_parent(parent, value, name, new_name):
if not _is_instance_mock(value):
return False
if ((value._mock_name or value._mock_new_name) or
(value._mock_parent is not None) or
(value._mock_new_parent is not None)):
return False
_parent = parent
while _parent is not None:
# setting a mock (value) as a child or return value of itself
# should not modify the mock
if _parent is value:
return False
_parent = _parent._mock_new_parent
if new_name:
value._mock_new_parent = parent
value._mock_new_name = new_name
if name:
value._mock_parent = parent
value._mock_name = name
return True
class Base(object):
_mock_return_value = DEFAULT
_mock_side_effect = None
def __init__(self, *args, **kwargs):
pass
class NonCallableMock(Base):
    """A non-callable version of `Mock`"""
    def __new__(cls, *args, **kw):
        # every instance has its own class
        # so we can create magic methods on the
        # class without stomping on other mocks
        new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})
        instance = object.__new__(new)
        return instance
    def __init__(
        self, spec=None, wraps=None, name=None, spec_set=None,
        parent=None, _spec_state=None, _new_name='', _new_parent=None,
        **kwargs
    ):
        # _new_name/_new_parent record how this mock was reached (attribute
        # access or call) and are used when building dotted names for
        # __repr__ and mock_calls
        if _new_parent is None:
            _new_parent = parent
        # write initial state straight into __dict__ so our own __setattr__
        # (which enforces spec_set and magic-method rules) is bypassed
        __dict__ = self.__dict__
        __dict__['_mock_parent'] = parent
        __dict__['_mock_name'] = name
        __dict__['_mock_new_name'] = _new_name
        __dict__['_mock_new_parent'] = _new_parent
        if spec_set is not None:
            # spec_set is a stricter spec: use it as the spec and remember
            # that setting unknown attributes must fail
            spec = spec_set
            spec_set = True
        self._mock_add_spec(spec, spec_set)
        __dict__['_mock_children'] = {}
        __dict__['_mock_wraps'] = wraps
        __dict__['_mock_signature'] = None
        __dict__['_mock_called'] = False
        __dict__['_mock_call_args'] = None
        __dict__['_mock_call_count'] = 0
        __dict__['_mock_call_args_list'] = _CallList()
        __dict__['_mock_mock_calls'] = _CallList()
        __dict__['method_calls'] = _CallList()
        if kwargs:
            # arbitrary keyword arguments configure attributes on the mock
            self.configure_mock(**kwargs)
        _super(NonCallableMock, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state
        )
    def attach_mock(self, mock, attribute):
        """
        Attach a mock as an attribute of this one, replacing its name and
        parent. Calls to the attached mock will be recorded in the
        `method_calls` and `mock_calls` attributes of this one."""
        # clear any existing naming/parenting first so that setattr (via
        # _check_and_set_parent) will adopt the mock as our child
        mock._mock_parent = None
        mock._mock_new_parent = None
        mock._mock_name = ''
        mock._mock_new_name = None
        setattr(self, attribute, mock)
    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to a mock. `spec` can either be an object or a
        list of strings. Only attributes on the `spec` can be fetched as
        attributes from the mock.
        If `spec_set` is True then only attributes on the spec can be set."""
        self._mock_add_spec(spec, spec_set)
    def _mock_add_spec(self, spec, spec_set):
        # Record the spec's class (for __class__ spoofing) and reduce a
        # non-list spec object to the list of its attribute names.
        _spec_class = None
        if spec is not None and not _is_list(spec):
            if isinstance(spec, ClassTypes):
                _spec_class = spec
            else:
                _spec_class = _get_class(spec)
            spec = dir(spec)
        __dict__ = self.__dict__
        __dict__['_spec_class'] = _spec_class
        __dict__['_spec_set'] = spec_set
        __dict__['_mock_methods'] = spec
    def __get_return_value(self):
        # lazily create a child mock the first time return_value is read
        ret = self._mock_return_value
        if self._mock_signature is not None:
            ret = self._mock_signature.return_value
        if ret is DEFAULT:
            ret = self._get_child_mock(
                _new_parent=self, _new_name='()'
            )
            self.return_value = ret
        return ret
    def __set_return_value(self, value):
        if self._mock_signature is not None:
            self._mock_signature.return_value = value
        else:
            self._mock_return_value = value
            # name the child '()' so call chains render correctly
            _check_and_set_parent(self, value, None, '()')
    __return_value_doc = "The value to be returned when the mock is called."
    return_value = property(__get_return_value, __set_return_value,
                            __return_value_doc)
    @property
    def __class__(self):
        # reporting the spec's class lets mocks pass isinstance checks
        if self._spec_class is None:
            return type(self)
        return self._spec_class
    # these attributes delegate to the mocksignature mock when one is set
    called = _mock_signature_property('called')
    call_count = _mock_signature_property('call_count')
    call_args = _mock_signature_property('call_args')
    call_args_list = _mock_signature_property('call_args_list')
    mock_calls = _mock_signature_property('mock_calls')
    def __get_side_effect(self):
        sig = self._mock_signature
        if sig is None:
            return self._mock_side_effect
        return sig.side_effect
    def __set_side_effect(self, value):
        # iterables are converted so each call consumes the next value
        value = _try_iter(value)
        sig = self._mock_signature
        if sig is None:
            self._mock_side_effect = value
        else:
            sig.side_effect = value
    side_effect = property(__get_side_effect, __set_side_effect)
    def reset_mock(self):
        "Restore the mock object to its initial state."
        self.called = False
        self.call_args = None
        self.call_count = 0
        self.mock_calls = _CallList()
        self.call_args_list = _CallList()
        self.method_calls = _CallList()
        for child in self._mock_children.values():
            child.reset_mock()
        ret = self._mock_return_value
        # guard against infinite recursion when return_value is self
        if _is_instance_mock(ret) and ret is not self:
            ret.reset_mock()
    def configure_mock(self, **kwargs):
        """Set attributes on the mock through keyword arguments.
        Attributes plus return values and side effects can be set on child
        mocks using standard dot notation and unpacking a dictionary in the
        method call:
        >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
        >>> mock.configure_mock(**attrs)"""
        for arg, val in sorted(kwargs.items(),
                               # we sort on the number of dots so that
                               # attributes are set before we set attributes on
                               # attributes
                               key=lambda entry: entry[0].count('.')):
            args = arg.split('.')
            final = args.pop()
            obj = self
            for entry in args:
                obj = getattr(obj, entry)
            setattr(obj, final, val)
    def __getattr__(self, name):
        if name == '_mock_methods':
            raise AttributeError(name)
        elif self._mock_methods is not None:
            # a spec restricts which attributes may be fetched
            if name not in self._mock_methods or name in _all_magics:
                raise AttributeError("Mock object has no attribute %r" % name)
        elif _is_magic(name):
            raise AttributeError(name)
        # child mocks are created lazily and cached in _mock_children
        result = self._mock_children.get(name)
        if result is None:
            wraps = None
            if self._mock_wraps is not None:
                # XXXX should we get the attribute without triggering code
                # execution?
                wraps = getattr(self._mock_wraps, name)
            result = self._get_child_mock(
                parent=self, name=name, wraps=wraps, _new_name=name,
                _new_parent=self
            )
            self._mock_children[name] = result
        elif isinstance(result, _SpecState):
            # autospec'ed attributes are realised on first access
            result = create_autospec(
                result.spec, result.spec_set, result.instance,
                result.parent, result.name
            )
            self._mock_children[name] = result
        return result
    def __repr__(self):
        # build a dotted access path by walking the _mock_new_parent chain;
        # '()' entries (calls) contribute no trailing dot
        _name_list = [self._mock_new_name]
        _parent = self._mock_new_parent
        last = self
        dot = '.'
        if _name_list == ['()']:
            dot = ''
        seen = set()
        while _parent is not None:
            last = _parent
            _name_list.append(_parent._mock_new_name + dot)
            dot = '.'
            if _parent._mock_new_name == '()':
                dot = ''
            _parent = _parent._mock_new_parent
            # use ids here so as not to call __hash__ on the mocks
            if id(_parent) in seen:
                break
            seen.add(id(_parent))
        _name_list = list(reversed(_name_list))
        _first = last._mock_name or 'mock'
        if len(_name_list) > 1:
            if _name_list[1] not in ('()', '().'):
                _first += '.'
        _name_list[0] = _first
        name = ''.join(_name_list)
        name_string = ''
        if name not in ('mock', 'mock.'):
            name_string = ' name=%r' % name
        spec_string = ''
        if self._spec_class is not None:
            spec_string = ' spec=%r'
            if self._spec_set:
                spec_string = ' spec_set=%r'
            spec_string = spec_string % self._spec_class.__name__
        return "<%s%s%s id='%s'>" % (
            type(self).__name__,
            name_string,
            spec_string,
            id(self)
        )
    def __dir__(self):
        """Filter the output of `dir(mock)` to only useful members.
        XXXX
        """
        extras = self._mock_methods or []
        from_type = dir(type(self))
        from_dict = list(self.__dict__)
        if FILTER_DIR:
            # hide private machinery, but keep magic methods visible
            from_type = [e for e in from_type if not e.startswith('_')]
            from_dict = [e for e in from_dict if not e.startswith('_') or
                         _is_magic(e)]
        return sorted(set(extras + from_type + from_dict +
                          list(self._mock_children)))
    def __setattr__(self, name, value):
        if name in _allowed_names:
            # property setters go through here
            return object.__setattr__(self, name, value)
        elif (self._spec_set and self._mock_methods is not None and
            name not in self._mock_methods and
            name not in self.__dict__):
            raise AttributeError("Mock object has no attribute '%s'" % name)
        elif name in _unsupported_magics:
            msg = 'Attempting to set unsupported magic method %r.' % name
            raise AttributeError(msg)
        elif name in _all_magics:
            if self._mock_methods is not None and name not in self._mock_methods:
                raise AttributeError("Mock object has no attribute '%s'" % name)
            if not _is_instance_mock(value):
                # plain callables become real functions on the per-instance
                # class, wrapped with a mocksignature for call recording
                setattr(type(self), name, _get_method(name, value))
                original = value
                real = lambda *args, **kw: original(self, *args, **kw)
                value = mocksignature(value, real, skipfirst=True)
            else:
                # only set _new_name and not name so that mock_calls is tracked
                # but not method calls
                _check_and_set_parent(self, value, None, name)
                setattr(type(self), name, value)
        else:
            if _check_and_set_parent(self, value, name, name):
                self._mock_children[name] = value
        return object.__setattr__(self, name, value)
    def __delattr__(self, name):
        if name in _all_magics and name in type(self).__dict__:
            # magic methods live on the per-instance class, not the instance
            delattr(type(self), name)
            if name not in self.__dict__:
                # for magic methods that are still MagicProxy objects and
                # not set on the instance itself
                return
        return object.__delattr__(self, name)
    def _format_mock_call_signature(self, args, kwargs):
        name = self._mock_name or 'mock'
        return _format_call_signature(name, args, kwargs)
    def _format_mock_failure_message(self, args, kwargs):
        message = 'Expected call: %s\nActual call: %s'
        expected_string = self._format_mock_call_signature(args, kwargs)
        call_args = self.call_args
        if len(call_args) == 3:
            # drop the name from a (name, args, kwargs) triple
            call_args = call_args[1:]
        actual_string = self._format_mock_call_signature(*call_args)
        return message % (expected_string, actual_string)
    def assert_called_with(_mock_self, *args, **kwargs):
        """assert that the mock was called with the specified arguments.
        Raises an AssertionError if the args and keyword args passed in are
        different to the last call to the mock."""
        self = _mock_self
        if self.call_args is None:
            expected = self._format_mock_call_signature(args, kwargs)
            raise AssertionError('Expected call: %s\nNot called' % (expected,))
        if self.call_args != (args, kwargs):
            msg = self._format_mock_failure_message(args, kwargs)
            raise AssertionError(msg)
    def assert_called_once_with(_mock_self, *args, **kwargs):
        """assert that the mock was called exactly once and with the specified
        arguments."""
        self = _mock_self
        if not self.call_count == 1:
            msg = ("Expected to be called once. Called %s times." %
                   self.call_count)
            raise AssertionError(msg)
        return self.assert_called_with(*args, **kwargs)
    def assert_has_calls(self, calls, any_order=False):
        """assert the mock has been called with the specified calls.
        The `mock_calls` list is checked for the calls.
        If `any_order` is False (the default) then the calls must be
        sequential. There can be extra calls before or after the
        specified calls.
        If `any_order` is True then the calls can be in any order, but
        they must all appear in `mock_calls`."""
        if not any_order:
            # _CallList.__contains__ performs the contiguous sub-list match
            if calls not in self.mock_calls:
                raise AssertionError(
                    'Calls not found.\nExpected: %r\n'
                    'Actual: %r' % (calls, self.mock_calls)
                )
            return
        all_calls = list(self.mock_calls)
        not_found = []
        for kall in calls:
            try:
                all_calls.remove(kall)
            except ValueError:
                not_found.append(kall)
        if not_found:
            raise AssertionError(
                '%r not all found in call list' % (tuple(not_found),)
            )
    def assert_any_call(self, *args, **kwargs):
        """assert the mock has been called with the specified arguments.
        The assert passes if the mock has *ever* been called, unlike
        `assert_called_with` and `assert_called_once_with` that only pass if
        the call is the most recent one."""
        kall = call(*args, **kwargs)
        if kall not in self.call_args_list:
            expected_string = self._format_mock_call_signature(args, kwargs)
            raise AssertionError(
                '%s call not found' % expected_string
            )
    def _get_child_mock(self, **kw):
        """Create the child mocks for attributes and return value.
        By default child mocks will be the same type as the parent.
        Subclasses of Mock may want to override this to customize the way
        child mocks are made.
        For non-callable mocks the callable variant will be used (rather than
        any custom subclass)."""
        _type = type(self)
        if not issubclass(_type, CallableMixin):
            if issubclass(_type, NonCallableMagicMock):
                klass = MagicMock
            elif issubclass(_type, NonCallableMock) :
                klass = Mock
        else:
            # callable subclass: instantiate its first base class
            klass = _type.__mro__[1]
        return klass(**kw)
def _try_iter(obj):
    """Prepare `obj` for use as a side_effect.

    None, exceptions and callables pass through unchanged; anything
    iterable is replaced by an iterator over it so each call consumes the
    next value.  Non-iterables are also returned as-is for backwards
    compatibility.
    """
    if obj is None or _is_exception(obj) or _callable(obj):
        return obj
    try:
        return iter(obj)
    except TypeError:
        # XXXX backwards compatibility
        # but this will blow up on first call - so maybe we should fail early?
        return obj
class CallableMixin(Base):
    # Adds call recording to NonCallableMock; combined with it in `Mock`.
    def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
                 wraps=None, name=None, spec_set=None, parent=None,
                 _spec_state=None, _new_name='', _new_parent=None, **kwargs):
        # bypass __setattr__ for the raw return value slot
        self.__dict__['_mock_return_value'] = return_value
        _super(CallableMixin, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state, _new_name, _new_parent, **kwargs
        )
        self.side_effect = side_effect
    def _mock_check_sig(self, *args, **kwargs):
        # stub method that can be replaced with one with a specific signature
        pass
    def __call__(_mock_self, *args, **kwargs):
        # can't use self in-case a function / method we are mocking uses self
        # in the signature
        _mock_self._mock_check_sig(*args, **kwargs)
        return _mock_self._mock_call(*args, **kwargs)
    def _mock_call(_mock_self, *args, **kwargs):
        # Record the call on self and on every ancestor, building dotted
        # call names as we climb the _mock_new_parent chain, then work out
        # the return value (side_effect first, then return_value or wraps).
        self = _mock_self
        self.called = True
        self.call_count += 1
        self.call_args = _Call((args, kwargs), two=True)
        self.call_args_list.append(_Call((args, kwargs), two=True))
        _new_name = self._mock_new_name
        _new_parent = self._mock_new_parent
        self.mock_calls.append(_Call(('', args, kwargs)))
        seen = set()
        # a '()' component means the next join adds no dot: "a()" not "a.()"
        skip_next_dot = _new_name == '()'
        do_method_calls = self._mock_parent is not None
        name = self._mock_name
        while _new_parent is not None:
            this_mock_call = _Call((_new_name, args, kwargs))
            if _new_parent._mock_new_name:
                dot = '.'
                if skip_next_dot:
                    dot = ''
                skip_next_dot = False
                if _new_parent._mock_new_name == '()':
                    skip_next_dot = True
                _new_name = _new_parent._mock_new_name + dot + _new_name
            if do_method_calls:
                if _new_name == name:
                    this_method_call = this_mock_call
                else:
                    this_method_call = _Call((name, args, kwargs))
                _new_parent.method_calls.append(this_method_call)
                do_method_calls = _new_parent._mock_parent is not None
                if do_method_calls:
                    name = _new_parent._mock_name + '.' + name
            _new_parent.mock_calls.append(this_mock_call)
            _new_parent = _new_parent._mock_new_parent
            # use ids here so as not to call __hash__ on the mocks
            _new_parent_id = id(_new_parent)
            if _new_parent_id in seen:
                break
            seen.add(_new_parent_id)
        ret_val = DEFAULT
        effect = self.side_effect
        if effect is not None:
            if _is_exception(effect):
                raise effect
            # non-callable side_effect is an iterator: yield its next value
            if not _callable(effect):
                return next(effect)
            ret_val = effect(*args, **kwargs)
        if ret_val is DEFAULT:
            ret_val = self.return_value
        # a wrapped object only answers calls while return_value is unset
        if (self._mock_wraps is not None and
             self._mock_return_value is DEFAULT):
            return self._mock_wraps(*args, **kwargs)
        if ret_val is DEFAULT:
            ret_val = self.return_value
        return ret_val
class Mock(CallableMixin, NonCallableMock):
    """
    Create a new `Mock` object. `Mock` takes several optional arguments
    that specify the behaviour of the Mock object:
    * `spec`: This can be either a list of strings or an existing object (a
      class or instance) that acts as the specification for the mock object. If
      you pass in an object then a list of strings is formed by calling dir on
      the object (excluding unsupported magic attributes and methods). Accessing
      any attribute not in this list will raise an `AttributeError`.
      If `spec` is an object (rather than a list of strings) then
      `mock.__class__` returns the class of the spec object. This allows mocks
      to pass `isinstance` tests.
    * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
      or get an attribute on the mock that isn't on the object passed as
      `spec_set` will raise an `AttributeError`.
    * `side_effect`: A function to be called whenever the Mock is called. See
      the `side_effect` attribute. Useful for raising exceptions or
      dynamically changing return values. The function is called with the same
      arguments as the mock, and unless it returns `DEFAULT`, the return
      value of this function is used as the return value.
      Alternatively `side_effect` can be an exception class or instance. In
      this case the exception will be raised when the mock is called.
      If `side_effect` is an iterable then each call to the mock will return
      the next value from the iterable.
    * `return_value`: The value returned when the mock is called. By default
      this is a new Mock (created on first access). See the
      `return_value` attribute.
    * `wraps`: Item for the mock object to wrap. If `wraps` is not None
      then calling the Mock will pass the call through to the wrapped object
      (returning the real result and ignoring `return_value`). Attribute
      access on the mock will return a Mock object that wraps the corresponding
      attribute of the wrapped object (so attempting to access an attribute that
      doesn't exist will raise an `AttributeError`).
      If the mock has an explicit `return_value` set then calls are not passed
      to the wrapped object and the `return_value` is returned instead.
    * `name`: If the mock has a name then it will be used in the repr of the
      mock. This can be useful for debugging. The name is propagated to child
      mocks.
    Mocks can also be called with arbitrary keyword arguments. These will be
    used to set attributes on the mock after it is created.
    """
    # No body needed: CallableMixin supplies __call__/_mock_call and
    # NonCallableMock supplies the attribute, spec and assertion machinery.
def _dot_lookup(thing, comp, import_path):
try:
return getattr(thing, comp)
except AttributeError:
__import__(import_path)
return getattr(thing, comp)
def _importer(target):
    """Resolve a dotted path like 'package.module.attr' to an object."""
    parts = target.split('.')
    head, rest = parts[0], parts[1:]
    obj = __import__(head)
    path_so_far = head
    for part in rest:
        path_so_far += ".%s" % part
        obj = _dot_lookup(obj, part, path_so_far)
    return obj
def _is_started(patcher):
# XXXX horrible
return hasattr(patcher, 'is_local')
class _patch(object):
    # Implements patch(): usable as decorator, class decorator and context
    # manager.  __enter__ performs the patch, __exit__ undoes it.
    attribute_name = None
    def __init__(
            self, getter, attribute, new, spec, create,
            mocksignature, spec_set, autospec, new_callable, kwargs
        ):
        # 'new' and 'new_callable'/'autospec' are mutually exclusive ways
        # of producing the replacement object
        if new_callable is not None:
            if new is not DEFAULT:
                raise ValueError(
                    "Cannot use 'new' and 'new_callable' together"
                )
            if autospec is not False:
                raise ValueError(
                    "Cannot use 'autospec' and 'new_callable' together"
                )
        self.getter = getter
        self.attribute = attribute
        self.new = new
        self.new_callable = new_callable
        self.spec = spec
        self.create = create
        self.has_local = False
        self.mocksignature = mocksignature
        self.spec_set = spec_set
        self.autospec = autospec
        self.kwargs = kwargs
        self.additional_patchers = []
    def copy(self):
        # used by class decoration: each test method gets its own patcher
        patcher = _patch(
            self.getter, self.attribute, self.new, self.spec,
            self.create, self.mocksignature, self.spec_set,
            self.autospec, self.new_callable, self.kwargs
        )
        patcher.attribute_name = self.attribute_name
        patcher.additional_patchers = [
            p.copy() for p in self.additional_patchers
        ]
        return patcher
    def __call__(self, func):
        if isinstance(func, ClassTypes):
            return self.decorate_class(func)
        return self.decorate_callable(func)
    def decorate_class(self, klass):
        # decorate every callable attribute matching patch.TEST_PREFIX
        for attr in dir(klass):
            if not attr.startswith(patch.TEST_PREFIX):
                continue
            attr_value = getattr(klass, attr)
            if not hasattr(attr_value, "__call__"):
                continue
            patcher = self.copy()
            setattr(klass, attr, patcher(attr_value))
        return klass
    def decorate_callable(self, func):
        if hasattr(func, 'patchings'):
            # already wrapped: stack this patcher on the existing wrapper
            func.patchings.append(self)
            return func
        @wraps(func)
        def patched(*args, **keywargs):
            # don't use a with here (backwards compatability with Python 2.4)
            extra_args = []
            entered_patchers = []
            # can't use try...except...finally because of Python 2.4
            # compatibility
            try:
                try:
                    for patching in patched.patchings:
                        arg = patching.__enter__()
                        entered_patchers.append(patching)
                        if patching.attribute_name is not None:
                            keywargs.update(arg)
                        elif patching.new is DEFAULT:
                            extra_args.append(arg)
                    args += tuple(extra_args)
                    return func(*args, **keywargs)
                except:
                    if (patching not in entered_patchers and
                        _is_started(patching)):
                        # the patcher may have been started, but an exception
                        # raised whilst entering one of its additional_patchers
                        entered_patchers.append(patching)
                    # re-raise the exception
                    raise
            finally:
                for patching in reversed(entered_patchers):
                    patching.__exit__()
        patched.patchings = [self]
        if hasattr(func, 'func_code'):
            # not in Python 3
            patched.compat_co_firstlineno = getattr(
                func, "compat_co_firstlineno",
                func.func_code.co_firstlineno
            )
        return patched
    def get_original(self):
        # returns (original value, whether it lived in target's own __dict__)
        target = self.getter()
        name = self.attribute
        original = DEFAULT
        local = False
        try:
            original = target.__dict__[name]
        except (AttributeError, KeyError):
            original = getattr(target, name, DEFAULT)
        else:
            local = True
        if not self.create and original is DEFAULT:
            raise AttributeError(
                "%s does not have the attribute %r" % (target, name)
            )
        return original, local
    def __enter__(self):
        """Perform the patch."""
        new, spec, spec_set = self.new, self.spec, self.spec_set
        autospec, kwargs = self.autospec, self.kwargs
        new_callable = self.new_callable
        self.target = self.getter()
        original, local = self.get_original()
        # no explicit replacement and no autospec: build a mock ourselves
        if new is DEFAULT and autospec is False:
            inherit = False
            if spec_set == True:
                spec_set = original
            elif spec == True:
                # set spec to the object we are replacing
                spec = original
            if (spec or spec_set) is not None:
                if isinstance(original, ClassTypes):
                    # If we're patching out a class and there is a spec
                    inherit = True
            Klass = MagicMock
            _kwargs = {}
            if new_callable is not None:
                Klass = new_callable
            elif (spec or spec_set) is not None:
                if not _callable(spec or spec_set):
                    Klass = NonCallableMagicMock
            if spec is not None:
                _kwargs['spec'] = spec
            if spec_set is not None:
                _kwargs['spec_set'] = spec_set
            # add a name to mocks
            if (isinstance(Klass, type) and
                issubclass(Klass, NonCallableMock) and self.attribute):
                _kwargs['name'] = self.attribute
            _kwargs.update(kwargs)
            new = Klass(**_kwargs)
            if inherit and _is_instance_mock(new):
                # we can only tell if the instance should be callable if the
                # spec is not a list
                if (not _is_list(spec or spec_set) and not
                    _instance_callable(spec or spec_set)):
                    Klass = NonCallableMagicMock
                    _kwargs.pop('name')
                new.return_value = Klass(_new_parent=new, _new_name='()',
                                         **_kwargs)
        elif autospec is not False:
            # spec is ignored, new *must* be default, spec_set is treated
            # as a boolean. Should we check spec is not None and that spec_set
            # is a bool? mocksignature should also not be used. Should we
            # check this?
            if new is not DEFAULT:
                raise TypeError(
                    "autospec creates the mock for you. Can't specify "
                    "autospec and new."
                )
            spec_set = bool(spec_set)
            if autospec is True:
                autospec = original
            new = create_autospec(autospec, spec_set=spec_set,
                                  _name=self.attribute, **kwargs)
        elif kwargs:
            # can't set keyword args when we aren't creating the mock
            # XXXX If new is a Mock we could call new.configure_mock(**kwargs)
            raise TypeError("Can't pass kwargs to a mock we aren't creating")
        new_attr = new
        if self.mocksignature:
            new_attr = mocksignature(original, new)
        self.temp_original = original
        self.is_local = local
        setattr(self.target, self.attribute, new_attr)
        if self.attribute_name is not None:
            # patch.multiple: enter the grouped patchers and collect the
            # created mocks keyed by attribute name
            extra_args = {}
            if self.new is DEFAULT:
                extra_args[self.attribute_name] = new
            for patching in self.additional_patchers:
                arg = patching.__enter__()
                if patching.new is DEFAULT:
                    extra_args.update(arg)
            return extra_args
        return new
    def __exit__(self, *_):
        """Undo the patch."""
        if not _is_started(self):
            raise RuntimeError('stop called on unstarted patcher')
        if self.is_local and self.temp_original is not DEFAULT:
            setattr(self.target, self.attribute, self.temp_original)
        else:
            delattr(self.target, self.attribute)
            if not self.create and not hasattr(self.target, self.attribute):
                # needed for proxy objects like django settings
                setattr(self.target, self.attribute, self.temp_original)
        del self.temp_original
        del self.is_local
        del self.target
        for patcher in reversed(self.additional_patchers):
            if _is_started(patcher):
                patcher.__exit__()
    start = __enter__
    stop = __exit__
def _get_target(target):
try:
target, attribute = target.rsplit('.', 1)
except (TypeError, ValueError):
raise TypeError("Need a valid target to patch. You supplied: %r" %
(target,))
getter = lambda: _importer(target)
return getter, attribute
def _patch_object(
        target, attribute, new=DEFAULT, spec=None,
        create=False, mocksignature=False, spec_set=None, autospec=False,
        new_callable=None, **kwargs
    ):
    """
    patch.object(target, attribute, new=DEFAULT, spec=None, create=False,
                 mocksignature=False, spec_set=None, autospec=False,
                 new_callable=None, **kwargs)
    patch the named member (`attribute`) on an object (`target`) with a mock
    object.
    `patch.object` can be used as a decorator, class decorator or a context
    manager. Arguments `new`, `spec`, `create`, `mocksignature`, `spec_set`,
    `autospec` and `new_callable` have the same meaning as for `patch`. Like
    `patch`, `patch.object` takes arbitrary keyword arguments for configuring
    the mock object it creates.
    When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
    for choosing which methods to wrap.
    """
    # the target is already in hand, so the getter simply closes over it
    return _patch(
        lambda: target, attribute, new, spec, create, mocksignature,
        spec_set, autospec, new_callable, kwargs
    )
def _patch_multiple(target, spec=None, create=False,
        mocksignature=False, spec_set=None, autospec=False,
        new_callable=None, **kwargs
    ):
    """Perform multiple patches in a single call. It takes the object to be
    patched (either as an object or a string to fetch the object by importing)
    and keyword arguments for the patches::
        with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
            ...
    Use `DEFAULT` as the value if you want `patch.multiple` to create
    mocks for you. In this case the created mocks are passed into a decorated
    function by keyword, and a dictionary is returned when `patch.multiple` is
    used as a context manager.
    `patch.multiple` can be used as a decorator, class decorator or a context
    manager. The arguments `spec`, `spec_set`, `create`, `mocksignature`,
    `autospec` and `new_callable` have the same meaning as for `patch`. These
    arguments will be applied to *all* patches done by `patch.multiple`.
    When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
    for choosing which methods to wrap.
    """
    if type(target) in (unicode, str):
        # string targets are imported lazily, when the patch is entered
        getter = lambda: _importer(target)
    else:
        getter = lambda: target
    if not kwargs:
        raise ValueError(
            'Must supply at least one keyword argument with patch.multiple'
        )
    # need to wrap in a list for python 3, where items is a view
    items = list(kwargs.items())
    def make_patcher(attribute, new):
        # every patcher in the group shares the getter and the options
        p = _patch(
            getter, attribute, new, spec, create, mocksignature, spec_set,
            autospec, new_callable, {}
        )
        p.attribute_name = attribute
        return p
    # the first patcher owns the rest via additional_patchers
    patcher = make_patcher(*items[0])
    for extra in items[1:]:
        patcher.additional_patchers.append(make_patcher(*extra))
    return patcher
def patch(
        target, new=DEFAULT, spec=None, create=False,
        mocksignature=False, spec_set=None, autospec=False,
        new_callable=None, **kwargs
    ):
    """
    `patch` acts as a function decorator, class decorator or a context
    manager. Inside the body of the function or with statement, the `target`
    (specified in the form `'package.module.ClassName'`) is patched
    with a `new` object. When the function/with statement exits the patch is
    undone.
    The `target` is imported and the specified attribute patched with the new
    object, so it must be importable from the environment you are calling the
    decorator from. The target is imported when the decorated function is
    executed, not at decoration time.
    If `new` is omitted, then a new `MagicMock` is created and passed in as an
    extra argument to the decorated function.
    The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
    if patch is creating one for you.
    In addition you can pass `spec=True` or `spec_set=True`, which causes
    patch to pass in the object being mocked as the spec/spec_set object.
    `new_callable` allows you to specify a different class, or callable object,
    that will be called to create the `new` object. By default `MagicMock` is
    used.
    A more powerful form of `spec` is `autospec`. If you set `autospec=True`
    then the mock with be created with a spec from the object being replaced.
    All attributes of the mock will also have the spec of the corresponding
    attribute of the object being replaced. Methods and functions being mocked
    will have their arguments checked and will raise a `TypeError` if they are
    called with the wrong signature (similar to `mocksignature`). For mocks
    replacing a class, their return value (the 'instance') will have the same
    spec as the class.
    Instead of `autospec=True` you can pass `autospec=some_object` to use an
    arbitrary object as the spec instead of the one being replaced.
    If `mocksignature` is True then the patch will be done with a function
    created by mocking the one being replaced. If the object being replaced is
    a class then the signature of `__init__` will be copied. If the object
    being replaced is a callable object then the signature of `__call__` will
    be copied.
    By default `patch` will fail to replace attributes that don't exist. If
    you pass in `create=True`, and the attribute doesn't exist, patch will
    create the attribute for you when the patched function is called, and
    delete it again afterwards. This is useful for writing tests against
    attributes that your production code creates at runtime. It is off by by
    default because it can be dangerous. With it switched on you can write
    passing tests against APIs that don't actually exist!
    Patch can be used as a `TestCase` class decorator. It works by
    decorating each test method in the class. This reduces the boilerplate
    code when your test methods share a common patchings set. `patch` finds
    tests by looking for method names that start with `patch.TEST_PREFIX`.
    By default this is `test`, which matches the way `unittest` finds tests.
    You can specify an alternative prefix by setting `patch.TEST_PREFIX`.
    Patch can be used as a context manager, with the with statement. Here the
    patching applies to the indented block after the with statement. If you
    use "as" then the patched object will be bound to the name after the
    "as"; very useful if `patch` is creating a mock object for you.
    `patch` takes arbitrary keyword arguments. These will be passed to
    the `Mock` (or `new_callable`) on construction.
    `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
    available for alternate use-cases.
    """
    # split the dotted target into a lazy importer and the attribute name
    getter, attribute = _get_target(target)
    patcher = _patch(
        getter, attribute, new, spec, create, mocksignature,
        spec_set, autospec, new_callable, kwargs
    )
    return patcher
class _patch_dict(object):
    """
    Patch a dictionary, or dictionary like object, and restore the dictionary
    to its original state after the test.
    `in_dict` can be a dictionary or a mapping like container. If it is a
    mapping then it must at least support getting, setting and deleting items
    plus iterating over keys.
    `in_dict` can also be a string specifying the name of the dictionary, which
    will then be fetched by importing it.
    `values` can be a dictionary of values to set in the dictionary. `values`
    can also be an iterable of `(key, value)` pairs.
    If `clear` is True then the dictionary will be cleared before the new
    values are set.
    `patch.dict` can also be called with arbitrary keyword arguments to set
    values in the dictionary::
        with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()):
            ...
    `patch.dict` can be used as a context manager, decorator or class
    decorator. When used as a class decorator `patch.dict` honours
    `patch.TEST_PREFIX` for choosing which methods to wrap.
    """
    def __init__(self, in_dict, values=(), clear=False, **kwargs):
        if isinstance(in_dict, basestring):
            # a string is the importable name of the dict to patch
            in_dict = _importer(in_dict)
        self.in_dict = in_dict
        # support any argument supported by dict(...) constructor
        self.values = dict(values)
        self.values.update(kwargs)
        self.clear = clear
        self._original = None
    def __call__(self, f):
        if isinstance(f, ClassTypes):
            return self.decorate_class(f)
        @wraps(f)
        def _inner(*args, **kw):
            self._patch_dict()
            try:
                return f(*args, **kw)
            finally:
                self._unpatch_dict()
        return _inner
    def decorate_class(self, klass):
        # wrap every matching test method with a fresh _patch_dict decorator
        for attr in dir(klass):
            attr_value = getattr(klass, attr)
            if (attr.startswith(patch.TEST_PREFIX) and
                 hasattr(attr_value, "__call__")):
                decorator = _patch_dict(self.in_dict, self.values, self.clear)
                decorated = decorator(attr_value)
                setattr(klass, attr, decorated)
        return klass
    def __enter__(self):
        """Patch the dict."""
        self._patch_dict()
    def _patch_dict(self):
        # snapshot the current contents, then optionally clear and update
        values = self.values
        in_dict = self.in_dict
        clear = self.clear
        try:
            original = in_dict.copy()
        except AttributeError:
            # dict like object with no copy method
            # must support iteration over keys
            original = {}
            for key in in_dict:
                original[key] = in_dict[key]
        self._original = original
        if clear:
            _clear_dict(in_dict)
        try:
            in_dict.update(values)
        except AttributeError:
            # dict like object with no update method
            for key in values:
                in_dict[key] = values[key]
    def _unpatch_dict(self):
        # restore the snapshot taken by _patch_dict
        in_dict = self.in_dict
        original = self._original
        _clear_dict(in_dict)
        try:
            in_dict.update(original)
        except AttributeError:
            for key in original:
                in_dict[key] = original[key]
    def __exit__(self, *args):
        """Unpatch the dict."""
        self._unpatch_dict()
        return False
    start = __enter__
    stop = __exit__
def _clear_dict(in_dict):
try:
in_dict.clear()
except AttributeError:
keys = list(in_dict)
for key in keys:
del in_dict[key]
patch.object = _patch_object
patch.dict = _patch_dict
patch.multiple = _patch_multiple
patch.TEST_PREFIX = 'test'
magic_methods = (
"lt le gt ge eq ne "
"getitem setitem delitem "
"len contains iter "
"hash str sizeof "
"enter exit "
"divmod neg pos abs invert "
"complex int float index "
"trunc floor ceil "
)
numerics = "add sub mul div floordiv mod lshift rshift and xor or pow "
inplace = ' '.join('i%s' % n for n in numerics.split())
right = ' '.join('r%s' % n for n in numerics.split())
extra = ''
if inPy3k:
extra = 'bool next '
else:
extra = 'unicode long nonzero oct hex truediv rtruediv '
# not including __prepare__, __instancecheck__, __subclasscheck__
# (as they are metaclass methods)
# __del__ is not supported at all as it causes problems if it exists
_non_defaults = set('__%s__' % method for method in [
'cmp', 'getslice', 'setslice', 'coerce', 'subclasses',
'format', 'get', 'set', 'delete', 'reversed',
'missing', 'reduce', 'reduce_ex', 'getinitargs',
'getnewargs', 'getstate', 'setstate', 'getformat',
'setformat', 'repr', 'dir'
])
def _get_method(name, func):
"Turns a callable object (like a mock) into a real function"
def method(self, *args, **kw):
return func(self, *args, **kw)
method.__name__ = name
return method
_magics = set(
'__%s__' % method for method in
' '.join([magic_methods, numerics, inplace, right, extra]).split()
)
_all_magics = _magics | _non_defaults
_unsupported_magics = set([
'__getattr__', '__setattr__',
'__init__', '__new__', '__prepare__'
'__instancecheck__', '__subclasscheck__',
'__del__'
])
_calculate_return_value = {
'__hash__': lambda self: object.__hash__(self),
'__str__': lambda self: object.__str__(self),
'__sizeof__': lambda self: object.__sizeof__(self),
'__unicode__': lambda self: unicode(object.__str__(self)),
}
_return_values = {
'__int__': 1,
'__contains__': False,
'__len__': 0,
'__exit__': False,
'__complex__': 1j,
'__float__': 1.0,
'__bool__': True,
'__nonzero__': True,
'__oct__': '1',
'__hex__': '0x1',
'__long__': long(1),
'__index__': 1,
}
def _get_eq(self):
def __eq__(other):
ret_val = self.__eq__._mock_return_value
if ret_val is not DEFAULT:
return ret_val
return self is other
return __eq__
def _get_ne(self):
def __ne__(other):
if self.__ne__._mock_return_value is not DEFAULT:
return DEFAULT
return self is not other
return __ne__
def _get_iter(self):
def __iter__():
ret_val = self.__iter__._mock_return_value
if ret_val is DEFAULT:
return iter([])
# if ret_val was already an iterator, then calling iter on it should
# return the iterator unchanged
return iter(ret_val)
return __iter__
_side_effect_methods = {
'__eq__': _get_eq,
'__ne__': _get_ne,
'__iter__': _get_iter,
}
def _set_return_value(mock, method, name):
fixed = _return_values.get(name, DEFAULT)
if fixed is not DEFAULT:
method.return_value = fixed
return
return_calulator = _calculate_return_value.get(name)
if return_calulator is not None:
try:
return_value = return_calulator(mock)
except AttributeError:
# XXXX why do we return AttributeError here?
# set it as a side_effect instead?
return_value = AttributeError(name)
method.return_value = return_value
return
side_effector = _side_effect_methods.get(name)
if side_effector is not None:
method.side_effect = side_effector(mock)
class MagicMixin(object):
def __init__(self, *args, **kw):
_super(MagicMixin, self).__init__(*args, **kw)
self._mock_set_magics()
def _mock_set_magics(self):
these_magics = _magics
if self._mock_methods is not None:
these_magics = _magics.intersection(self._mock_methods)
remove_magics = set()
remove_magics = _magics - these_magics
for entry in remove_magics:
if entry in type(self).__dict__:
# remove unneeded magic methods
delattr(self, entry)
# don't overwrite existing attributes if called a second time
these_magics = these_magics - set(type(self).__dict__)
_type = type(self)
for entry in these_magics:
setattr(_type, entry, MagicProxy(entry, self))
class NonCallableMagicMock(MagicMixin, NonCallableMock):
"""A version of `MagicMock` that isn't callable."""
def mock_add_spec(self, spec, spec_set=False):
"""Add a spec to a mock. `spec` can either be an object or a
list of strings. Only attributes on the `spec` can be fetched as
attributes from the mock.
If `spec_set` is True then only attributes on the spec can be set."""
self._mock_add_spec(spec, spec_set)
self._mock_set_magics()
class MagicMock(MagicMixin, Mock):
"""
MagicMock is a subclass of Mock with default implementations
of most of the magic methods. You can use MagicMock without having to
configure the magic methods yourself.
If you use the `spec` or `spec_set` arguments then *only* magic
methods that exist in the spec will be created.
Attributes and the return value of a `MagicMock` will also be `MagicMocks`.
"""
def mock_add_spec(self, spec, spec_set=False):
"""Add a spec to a mock. `spec` can either be an object or a
list of strings. Only attributes on the `spec` can be fetched as
attributes from the mock.
If `spec_set` is True then only attributes on the spec can be set."""
self._mock_add_spec(spec, spec_set)
self._mock_set_magics()
class MagicProxy(object):
def __init__(self, name, parent):
self.name = name
self.parent = parent
def __call__(self, *args, **kwargs):
m = self.create_mock()
return m(*args, **kwargs)
def create_mock(self):
entry = self.name
parent = self.parent
m = parent._get_child_mock(name=entry, _new_name=entry,
_new_parent=parent)
setattr(parent, entry, m)
_set_return_value(parent, m, entry)
return m
def __get__(self, obj, _type=None):
return self.create_mock()
class _ANY(object):
"A helper object that compares equal to everything."
def __eq__(self, other):
return True
def __ne__(self, other):
return False
def __repr__(self):
return '<ANY>'
ANY = _ANY()
def _format_call_signature(name, args, kwargs):
message = '%s(%%s)' % name
formatted_args = ''
args_string = ', '.join([repr(arg) for arg in args])
kwargs_string = ', '.join([
'%s=%r' % (key, value) for key, value in kwargs.items()
])
if args_string:
formatted_args = args_string
if kwargs_string:
if formatted_args:
formatted_args += ', '
formatted_args += kwargs_string
return message % formatted_args
class _Call(tuple):
"""
A tuple for holding the results of a call to a mock, either in the form
`(args, kwargs)` or `(name, args, kwargs)`.
If args or kwargs are empty then a call tuple will compare equal to
a tuple without those values. This makes comparisons less verbose::
_Call(('name', (), {})) == ('name',)
_Call(('name', (1,), {})) == ('name', (1,))
_Call(((), {'a': 'b'})) == ({'a': 'b'},)
The `_Call` object provides a useful shortcut for comparing with call::
_Call(((1, 2), {'a': 3})) == call(1, 2, a=3)
_Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3)
If the _Call has no name then it will match any name.
"""
def __new__(cls, value=(), name=None, parent=None, two=False,
from_kall=True):
name = ''
args = ()
kwargs = {}
_len = len(value)
if _len == 3:
name, args, kwargs = value
elif _len == 2:
first, second = value
if isinstance(first, basestring):
name = first
if isinstance(second, tuple):
args = second
else:
kwargs = second
else:
args, kwargs = first, second
elif _len == 1:
value, = value
if isinstance(value, basestring):
name = value
elif isinstance(value, tuple):
args = value
else:
kwargs = value
if two:
return tuple.__new__(cls, (args, kwargs))
return tuple.__new__(cls, (name, args, kwargs))
def __init__(self, value=(), name=None, parent=None, two=False,
from_kall=True):
self.name = name
self.parent = parent
self.from_kall = from_kall
def __eq__(self, other):
if other is ANY:
return True
try:
len_other = len(other)
except TypeError:
return False
self_name = ''
if len(self) == 2:
self_args, self_kwargs = self
else:
self_name, self_args, self_kwargs = self
other_name = ''
if len_other == 0:
other_args, other_kwargs = (), {}
elif len_other == 3:
other_name, other_args, other_kwargs = other
elif len_other == 1:
value, = other
if isinstance(value, tuple):
other_args = value
other_kwargs = {}
elif isinstance(value, basestring):
other_name = value
other_args, other_kwargs = (), {}
else:
other_args = ()
other_kwargs = value
else:
# len 2
# could be (name, args) or (name, kwargs) or (args, kwargs)
first, second = other
if isinstance(first, basestring):
other_name = first
if isinstance(second, tuple):
other_args, other_kwargs = second, {}
else:
other_args, other_kwargs = (), second
else:
other_args, other_kwargs = first, second
if self_name and other_name != self_name:
return False
# this order is important for ANY to work!
return (other_args, other_kwargs) == (self_args, self_kwargs)
def __ne__(self, other):
return not self.__eq__(other)
def __call__(self, *args, **kwargs):
if self.name is None:
return _Call(('', args, kwargs), name='()')
name = self.name + '()'
return _Call((self.name, args, kwargs), name=name, parent=self)
def __getattr__(self, attr):
if self.name is None:
return _Call(name=attr, from_kall=False)
name = '%s.%s' % (self.name, attr)
return _Call(name=name, parent=self, from_kall=False)
def __repr__(self):
if not self.from_kall:
name = self.name or 'call'
if name.startswith('()'):
name = 'call%s' % name
return name
if len(self) == 2:
name = 'call'
args, kwargs = self
else:
name, args, kwargs = self
if not name:
name = 'call'
elif not name.startswith('()'):
name = 'call.%s' % name
else:
name = 'call%s' % name
return _format_call_signature(name, args, kwargs)
def call_list(self):
"""For a call object that represents multiple calls, `call_list`
returns a list of all the intermediate calls as well as the
final call."""
vals = []
thing = self
while thing is not None:
if thing.from_kall:
vals.append(thing)
thing = thing.parent
return _CallList(reversed(vals))
call = _Call(from_kall=False)
def create_autospec(spec, spec_set=False, instance=False, _parent=None,
_name=None, **kwargs):
"""Create a mock object using another object as a spec. Attributes on the
mock will use the corresponding attribute on the `spec` object as their
spec.
Functions or methods being mocked will have their arguments checked in a
similar way to `mocksignature` to check that they are called with the
correct signature.
If `spec_set` is True then attempting to set attributes that don't exist
on the spec object will raise an `AttributeError`.
If a class is used as a spec then the return value of the mock (the
instance of the class) will have the same spec. You can use a class as the
spec for an instance object by passing `instance=True`. The returned mock
will only be callable if instances of the mock are callable.
`create_autospec` also takes arbitrary keyword arguments that are passed to
the constructor of the created mock."""
if _is_list(spec):
# can't pass a list instance to the mock constructor as it will be
# interpreted as a list of strings
spec = type(spec)
is_type = isinstance(spec, ClassTypes)
_kwargs = {'spec': spec}
if spec_set:
_kwargs = {'spec_set': spec}
elif spec is None:
# None we mock with a normal mock without a spec
_kwargs = {}
_kwargs.update(kwargs)
Klass = MagicMock
if type(spec) in DescriptorTypes:
# descriptors don't have a spec
# because we don't know what type they return
_kwargs = {}
elif not _callable(spec):
Klass = NonCallableMagicMock
elif is_type and instance and not _instance_callable(spec):
Klass = NonCallableMagicMock
_new_name = _name
if _parent is None:
# for a top level object no _new_name should be set
_new_name = ''
mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
name=_name, **_kwargs)
if isinstance(spec, FunctionTypes):
# should only happen at the top level because we don't
# recurse for functions
mock = _set_signature(mock, spec)
else:
_check_signature(spec, mock, is_type, instance)
if _parent is not None and not instance:
_parent._mock_children[_name] = mock
if is_type and not instance and 'return_value' not in kwargs:
# XXXX could give a name to the return_value mock?
mock.return_value = create_autospec(spec, spec_set, instance=True,
_name='()', _parent=mock)
for entry in dir(spec):
if _is_magic(entry):
# MagicMock already does the useful magic methods for us
continue
if isinstance(spec, FunctionTypes) and entry in FunctionAttributes:
# allow a mock to actually be a function from mocksignature
continue
# XXXX do we need a better way of getting attributes without
# triggering code execution (?) Probably not - we need the actual
# object to mock it so we would rather trigger a property than mock
# the property descriptor. Likewise we want to mock out dynamically
# provided attributes.
# XXXX what about attributes that raise exceptions on being fetched
# we could be resilient against it, or catch and propagate the
# exception when the attribute is fetched from the mock
original = getattr(spec, entry)
kwargs = {'spec': original}
if spec_set:
kwargs = {'spec_set': original}
if not isinstance(original, FunctionTypes):
new = _SpecState(original, spec_set, mock, entry, instance)
mock._mock_children[entry] = new
else:
parent = mock
if isinstance(spec, FunctionTypes):
parent = mock.mock
new = MagicMock(parent=parent, name=entry, _new_name=entry,
_new_parent=parent, **kwargs)
mock._mock_children[entry] = new
skipfirst = _must_skip(spec, entry, is_type)
_check_signature(original, new, skipfirst=skipfirst)
# so functions created with mocksignature become instance attributes,
# *plus* their underlying mock exists in _mock_children of the parent
# mock. Adding to _mock_children may be unnecessary where we are also
# setting as an instance attribute?
if isinstance(new, FunctionTypes):
setattr(mock, entry, new)
return mock
def _must_skip(spec, entry, is_type):
if not isinstance(spec, ClassTypes):
if entry in getattr(spec, '__dict__', {}):
# instance attribute - shouldn't skip
return False
# can't use type because of old style classes
spec = spec.__class__
if not hasattr(spec, '__mro__'):
# old style class: can't have descriptors anyway
return is_type
for klass in spec.__mro__:
result = klass.__dict__.get(entry, DEFAULT)
if result is DEFAULT:
continue
if isinstance(result, (staticmethod, classmethod)):
return False
return is_type
# shouldn't get here unless function is a dynamically provided attribute
# XXXX untested behaviour
return is_type
def _get_class(obj):
try:
return obj.__class__
except AttributeError:
# in Python 2, _sre.SRE_Pattern objects have no __class__
return type(obj)
class _SpecState(object):
def __init__(self, spec, spec_set=False, parent=None,
name=None, ids=None, instance=False):
self.spec = spec
self.ids = ids
self.spec_set = spec_set
self.parent = parent
self.instance = instance
self.name = name
FunctionTypes = (
# python function
type(create_autospec),
# instance method
type(ANY.__eq__),
# unbound method
type(_ANY.__eq__),
)
FunctionAttributes = set([
'func_closure',
'func_code',
'func_defaults',
'func_dict',
'func_doc',
'func_globals',
'func_name',
])<|fim▁end|> | |
<|file_name|>table_test.py<|end_file_name|><|fim▁begin|>import unittest
import os
from sqltxt.table import Table
from sqltxt.column import Column, ColumnName, AmbiguousColumnNameError
from sqltxt.expression import Expression
class TableTest(unittest.TestCase):
def setUp(self):
self.data_path = os.path.join(os.path.dirname(__file__), '../data')
table_header = ["col_a", "col_b"]
table_contents = """1,1
2,3
3,2"""
self.table_a = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e "{0}"'.format(table_contents),
columns = table_header
)
table_header = ["col_a", "col_b"]
table_contents = """1,w
2,x
2,y
5,z"""
self.table_b = Table.from_cmd(
name = 'table_b',
cmd = 'echo -e "{0}"'.format(table_contents),
columns = table_header
)
def test_subset_rows(self):
conditions = [
[Expression('col_b', '==', '1'), 'or', Expression('col_a', '==', '2')]
]
self.table_a.subset_rows(conditions)
cmds_actual = self.table_a.cmds
cmds_expected = [
'echo -e "1,1\n2,3\n3,2"',
"awk -F',' 'OFS=\",\" { if (($2 == 1 || $1 == 2)) { print $1,$2 } }'"]
self.assertEqual(cmds_actual, cmds_expected)
def test_order_columns(self):
col_name_order = [ColumnName('col_b'), ColumnName('col_a')]
self.table_a.order_columns(col_name_order)
cmds_actual = self.table_a.cmds
cmds_expected = ['echo -e "1,1\n2,3\n3,2"', "awk -F',' 'OFS=\",\" { print $2,$1 }'"]
self.assertEqual(cmds_actual, cmds_expected)
def test_sort(self):
sort_by_col_names = [ColumnName('col_a'), ColumnName('col_b')]
self.table_a.sort(sort_by_col_names)
cmds_actual = self.table_a.cmds
cmds_expected = ['echo -e "1,1\n2,3\n3,2"', "sort -t, -k 1,1 -k 2,2"]
self.assertEqual(cmds_actual, cmds_expected)
sort_by_cols = [self.table_a.get_column_for_name(cn) for cn in sort_by_col_names]
self.assertEqual(self.table_a.sorted_by, sort_by_cols)
def test_is_sorted_by(self):
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['col_a', 'col_b'])
table_from_cmd.sorted_by = [Column('table_a.col_a'), Column('table_a.col_b')]
self.assertTrue(table_from_cmd.is_sorted_by([0]))
self.assertFalse(table_from_cmd.is_sorted_by([1]))
self.assertTrue(table_from_cmd.is_sorted_by([0,1]))
def test_get_column_for_name_raises_on_ambiguity(self):
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['col_a', 'col_a'])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['ta.col_a', 'tb.col_a'])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
first_column = Column('ta.col_a')
first_column.add_name('col_alpha')
second_column = Column('tb.col_a')
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = [first_column, second_column])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
def test_sample_rows(self):
self.table_a.sample_rows(1)
cmds_actual = self.table_a.cmds
cmds_expected = ['echo -e "1,1\n2,3\n3,2"',
"""awk -v seed=$RANDOM -v n={0} '
BEGIN {{ srand(seed) }}
NR <= n {{ reservoir[NR] = $0 }}
NR > n {{ M = int(rand() * NR) + 1; if (M <= n) {{ reservoir[M] = $0 }}}}
END {{ for (key in reservoir) {{ print reservoir[key] }}}}'""".format(1)
]
self.assertEqual(cmds_actual, cmds_expected)
def test_get_cmd_str(self):
table_from_file = Table.from_file_path(os.path.join(self.data_path, 'table_a.txt'))
# output from a file-backed Table to STDOUT
cmd_actual = table_from_file.get_cmd_str()
cmd_expected = 'tail -n+2 {}/table_a.txt'.format(self.data_path)
self.assertEqual(cmd_actual, cmd_expected)
table_from_cmd = Table.from_cmd(
'table_a',
cmd = 'echo -e "1,2,3,4"',
columns = ['col_a', 'col_b', 'col_c', 'col_d'])
# output from a command-backed Table to STDOUT
cmd_actual = table_from_cmd.get_cmd_str()
cmd_expected = 'echo -e "1,2,3,4"'
self.assertEqual(cmd_actual, cmd_expected)
<|fim▁hole|> table_from_cmd.cmds += ['sort']
# to STDOUT
cmd_actual = table_from_cmd.get_cmd_str()
cmd_expected = 'echo -e "1,2,3,4" | sort'
self.assertEqual(cmd_actual, cmd_expected)<|fim▁end|> | # add a command, then output |
<|file_name|>cache.go<|end_file_name|><|fim▁begin|>package api
import (
"github.com/johnny-morrice/godless/crdt"
)
type CacheCloser interface {
CloseCache() error
}
type Cache interface {
HeadCache
IndexCache
NamespaceCache
CacheCloser
}
type HeadCache interface {
SetHead(head crdt.IPFSPath) error
GetHead() (crdt.IPFSPath, error)
}
type RequestPriorityQueue interface {<|fim▁hole|> Close() error
}
type IndexCache interface {
GetIndex(indexAddr crdt.IPFSPath) (crdt.Index, error)
SetIndex(indexAddr crdt.IPFSPath, index crdt.Index) error
}
type NamespaceCache interface {
GetNamespace(namespaceAddr crdt.IPFSPath) (crdt.Namespace, error)
SetNamespace(namespaceAddr crdt.IPFSPath, namespace crdt.Namespace) error
}<|fim▁end|> | Len() int
Enqueue(request Request, data interface{}) error
Drain() <-chan interface{} |
<|file_name|>workerglobalscope.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools_traits::{DevtoolScriptControlMsg, ScriptToDevtoolsControlMsg, WorkerId};
use dom::bindings::codegen::Bindings::FunctionBinding::Function;
use dom::bindings::codegen::Bindings::WorkerGlobalScopeBinding::WorkerGlobalScopeMethods;
use dom::bindings::error::{Error, ErrorResult, Fallible, report_pending_exception};
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableHeap, Root};
use dom::bindings::reflector::Reflectable;
use dom::console::Console;
use dom::crypto::Crypto;
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScope;
use dom::eventtarget::EventTarget;
use dom::window::{base64_atob, base64_btoa};
use dom::workerlocation::WorkerLocation;
use dom::workernavigator::WorkerNavigator;
use ipc_channel::ipc::IpcSender;
use js::jsapi::{HandleValue, JSAutoRequest, JSContext};
use js::rust::Runtime;
use msg::constellation_msg::{ConstellationChan, PipelineId};
use net_traits::{LoadContext, ResourceThread, load_whole_resource};
use profile_traits::mem;
use script_thread::{CommonScriptMsg, ScriptChan, ScriptPort};
use script_traits::ScriptMsg as ConstellationMsg;
use script_traits::{MsDuration, TimerEvent, TimerEventId, TimerEventRequest, TimerSource};
use std::cell::Cell;
use std::default::Default;
use std::rc::Rc;
use std::sync::mpsc::Receiver;
use timers::{IsInterval, OneshotTimerCallback, OneshotTimerHandle, OneshotTimers, TimerCallback};
use url::Url;
use util::str::DOMString;
#[derive(Copy, Clone, PartialEq)]
pub enum WorkerGlobalScopeTypeId {
DedicatedWorkerGlobalScope,
}
pub struct WorkerGlobalScopeInit {
pub resource_thread: ResourceThread,
pub mem_profiler_chan: mem::ProfilerChan,
pub to_devtools_sender: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
pub from_devtools_sender: Option<IpcSender<DevtoolScriptControlMsg>>,
pub constellation_chan: ConstellationChan<ConstellationMsg>,
pub scheduler_chan: IpcSender<TimerEventRequest>,
pub worker_id: WorkerId,
}
// https://html.spec.whatwg.org/multipage/#the-workerglobalscope-common-interface
#[dom_struct]
pub struct WorkerGlobalScope {
eventtarget: EventTarget,
worker_id: WorkerId,
worker_url: Url,
#[ignore_heap_size_of = "Defined in js"]
runtime: Runtime,
next_worker_id: Cell<WorkerId>,
#[ignore_heap_size_of = "Defined in std"]
resource_thread: ResourceThread,
location: MutNullableHeap<JS<WorkerLocation>>,
navigator: MutNullableHeap<JS<WorkerNavigator>>,
console: MutNullableHeap<JS<Console>>,
crypto: MutNullableHeap<JS<Crypto>>,
timers: OneshotTimers,
#[ignore_heap_size_of = "Defined in std"]
mem_profiler_chan: mem::ProfilerChan,
#[ignore_heap_size_of = "Defined in ipc-channel"]
to_devtools_sender: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
#[ignore_heap_size_of = "Defined in ipc-channel"]
/// Optional `IpcSender` for sending the `DevtoolScriptControlMsg`
/// to the server from within the worker
from_devtools_sender: Option<IpcSender<DevtoolScriptControlMsg>>,
#[ignore_heap_size_of = "Defined in std"]
/// This `Receiver` will be ignored later if the corresponding
/// `IpcSender` doesn't exist
from_devtools_receiver: Receiver<DevtoolScriptControlMsg>,
/// A flag to indicate whether the developer tools has requested live updates
/// from the worker
devtools_wants_updates: Cell<bool>,
#[ignore_heap_size_of = "Defined in std"]
constellation_chan: ConstellationChan<ConstellationMsg>,
#[ignore_heap_size_of = "Defined in std"]
scheduler_chan: IpcSender<TimerEventRequest>,
}
impl WorkerGlobalScope {
pub fn new_inherited(init: WorkerGlobalScopeInit,
worker_url: Url,
runtime: Runtime,
from_devtools_receiver: Receiver<DevtoolScriptControlMsg>,
timer_event_chan: IpcSender<TimerEvent>)
-> WorkerGlobalScope {
WorkerGlobalScope {
eventtarget: EventTarget::new_inherited(),
next_worker_id: Cell::new(WorkerId(0)),
worker_id: init.worker_id,
worker_url: worker_url,
runtime: runtime,
resource_thread: init.resource_thread,
location: Default::default(),
navigator: Default::default(),
console: Default::default(),
crypto: Default::default(),
timers: OneshotTimers::new(timer_event_chan, init.scheduler_chan.clone()),
mem_profiler_chan: init.mem_profiler_chan,
to_devtools_sender: init.to_devtools_sender,
from_devtools_sender: init.from_devtools_sender,
from_devtools_receiver: from_devtools_receiver,
devtools_wants_updates: Cell::new(false),
constellation_chan: init.constellation_chan,
scheduler_chan: init.scheduler_chan,
}
}
pub fn mem_profiler_chan(&self) -> mem::ProfilerChan {
self.mem_profiler_chan.clone()
}
pub fn devtools_chan(&self) -> Option<IpcSender<ScriptToDevtoolsControlMsg>> {
self.to_devtools_sender.clone()
}
pub fn from_devtools_sender(&self) -> Option<IpcSender<DevtoolScriptControlMsg>> {
self.from_devtools_sender.clone()
}
pub fn from_devtools_receiver(&self) -> &Receiver<DevtoolScriptControlMsg> {
&self.from_devtools_receiver
}
pub fn constellation_chan(&self) -> ConstellationChan<ConstellationMsg> {
self.constellation_chan.clone()
}
pub fn scheduler_chan(&self) -> IpcSender<TimerEventRequest> {
self.scheduler_chan.clone()
}
pub fn schedule_callback(&self, callback: OneshotTimerCallback, duration: MsDuration) -> OneshotTimerHandle {
self.timers.schedule_callback(callback,
duration,
TimerSource::FromWorker)
}
pub fn unschedule_callback(&self, handle: OneshotTimerHandle) {
self.timers.unschedule_callback(handle);
}
pub fn get_cx(&self) -> *mut JSContext {
self.runtime.cx()
}
pub fn resource_thread(&self) -> &ResourceThread {
&self.resource_thread
}
pub fn get_url(&self) -> &Url {
&self.worker_url
}
pub fn get_worker_id(&self) -> WorkerId {
self.worker_id.clone()
}
pub fn get_next_worker_id(&self) -> WorkerId {
let worker_id = self.next_worker_id.get();
let WorkerId(id_num) = worker_id;
self.next_worker_id.set(WorkerId(id_num + 1));
worker_id
}
}
impl WorkerGlobalScopeMethods for WorkerGlobalScope {
// https://html.spec.whatwg.org/multipage/#dom-workerglobalscope-self
fn Self_(&self) -> Root<WorkerGlobalScope> {
Root::from_ref(self)
}
// https://html.spec.whatwg.org/multipage/#dom-workerglobalscope-location
fn Location(&self) -> Root<WorkerLocation> {
self.location.or_init(|| {
WorkerLocation::new(self, self.worker_url.clone())
})
}
// https://html.spec.whatwg.org/multipage/#dom-workerglobalscope-importscripts
fn ImportScripts(&self, url_strings: Vec<DOMString>) -> ErrorResult {
let mut urls = Vec::with_capacity(url_strings.len());
for url in url_strings {
let url = self.worker_url.join(&url);
match url {
Ok(url) => urls.push(url),
Err(_) => return Err(Error::Syntax),
};
}
for url in urls {
let (url, source) = match load_whole_resource(LoadContext::Script, &self.resource_thread, url, None) {
Err(_) => return Err(Error::Network),
Ok((metadata, bytes)) => {
(metadata.final_url, String::from_utf8(bytes).unwrap())
}
};
match self.runtime.evaluate_script(
self.reflector().get_jsobject(), source, url.serialize(), 1) {
Ok(_) => (),
Err(_) => {
println!("evaluate_script failed");
return Err(Error::JSFailed);
}
}
}
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-worker-navigator
fn Navigator(&self) -> Root<WorkerNavigator> {
self.navigator.or_init(|| WorkerNavigator::new(self))
}
// https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/console
fn Console(&self) -> Root<Console> {
self.console.or_init(|| Console::new(GlobalRef::Worker(self)))
}
// https://html.spec.whatwg.org/multipage/#dfn-Crypto
fn Crypto(&self) -> Root<Crypto> {
self.crypto.or_init(|| Crypto::new(GlobalRef::Worker(self)))
}
// https://html.spec.whatwg.org/multipage/#dom-windowbase64-btoa
fn Btoa(&self, btoa: DOMString) -> Fallible<DOMString> {
base64_btoa(btoa)
}
// https://html.spec.whatwg.org/multipage/#dom-windowbase64-atob
fn Atob(&self, atob: DOMString) -> Fallible<DOMString> {
base64_atob(atob)
}
// https://html.spec.whatwg.org/multipage/#dom-windowtimers-setinterval
fn SetTimeout(&self, _cx: *mut JSContext, callback: Rc<Function>, timeout: i32, args: Vec<HandleValue>) -> i32 {
self.timers.set_timeout_or_interval(GlobalRef::Worker(self),
TimerCallback::FunctionTimerCallback(callback),
args,
timeout,
IsInterval::NonInterval,
TimerSource::FromWorker)
}
// https://html.spec.whatwg.org/multipage/#dom-windowtimers-setinterval
fn SetTimeout_(&self, _cx: *mut JSContext, callback: DOMString, timeout: i32, args: Vec<HandleValue>) -> i32 {
self.timers.set_timeout_or_interval(GlobalRef::Worker(self),
TimerCallback::StringTimerCallback(callback),
args,
timeout,
IsInterval::NonInterval,
TimerSource::FromWorker)
}
// https://html.spec.whatwg.org/multipage/#dom-windowtimers-clearinterval
fn ClearTimeout(&self, handle: i32) {
self.timers.clear_timeout_or_interval(GlobalRef::Worker(self), handle);
}
// https://html.spec.whatwg.org/multipage/#dom-windowtimers-setinterval
fn SetInterval(&self, _cx: *mut JSContext, callback: Rc<Function>, timeout: i32, args: Vec<HandleValue>) -> i32 {
self.timers.set_timeout_or_interval(GlobalRef::Worker(self),
TimerCallback::FunctionTimerCallback(callback),
args,
timeout,
IsInterval::Interval,
TimerSource::FromWorker)
}
// https://html.spec.whatwg.org/multipage/#dom-windowtimers-setinterval
fn SetInterval_(&self, _cx: *mut JSContext, callback: DOMString, timeout: i32, args: Vec<HandleValue>) -> i32 {
self.timers.set_timeout_or_interval(GlobalRef::Worker(self),
TimerCallback::StringTimerCallback(callback),
args,
timeout,
IsInterval::Interval,
TimerSource::FromWorker)
}
// https://html.spec.whatwg.org/multipage/#dom-windowtimers-clearinterval
fn ClearInterval(&self, handle: i32) {
self.ClearTimeout(handle);
}
}
impl WorkerGlobalScope {
pub fn execute_script(&self, source: DOMString) {
match self.runtime.evaluate_script(
self.reflector().get_jsobject(), String::from(source), self.worker_url.serialize(), 1) {
Ok(_) => (),
Err(_) => {
// TODO: An error needs to be dispatched to the parent.
// https://github.com/servo/servo/issues/6422
println!("evaluate_script failed");
let _ar = JSAutoRequest::new(self.runtime.cx());
report_pending_exception(self.runtime.cx(), self.reflector().get_jsobject().get());
}
}
}
pub fn script_chan(&self) -> Box<ScriptChan + Send> {
let dedicated =
self.downcast::<DedicatedWorkerGlobalScope>();
match dedicated {
Some(dedicated) => dedicated.script_chan(),
None => panic!("need to implement a sender for SharedWorker"),
}
}
pub fn pipeline(&self) -> PipelineId {
let dedicated =
self.downcast::<DedicatedWorkerGlobalScope>();
match dedicated {
Some(dedicated) => dedicated.pipeline(),
None => panic!("need to add a pipeline for SharedWorker"),
}
}
pub fn new_script_pair(&self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>) {
let dedicated =
self.downcast::<DedicatedWorkerGlobalScope>();
match dedicated {
Some(dedicated) => dedicated.new_script_pair(),
None => panic!("need to implement creating isolated event loops for SharedWorker"),
}
}
pub fn process_event(&self, msg: CommonScriptMsg) {
let dedicated =
self.downcast::<DedicatedWorkerGlobalScope>();
match dedicated {
Some(dedicated) => dedicated.process_event(msg),
None => panic!("need to implement processing single events for SharedWorker"),
}<|fim▁hole|> self.timers.fire_timer(timer_id, self);
}
pub fn set_devtools_wants_updates(&self, value: bool) {
self.devtools_wants_updates.set(value);
}
}<|fim▁end|> | }
pub fn handle_fire_timer(&self, timer_id: TimerEventId) { |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>const {
QUERY_PROFILE_PROJECT,
QUERY_PROFILE_ENVIRONMENT,
QUERY_PROFILE_SETTINGS,
MUTATION_LOG,
MUTATION_SESSION,
} = require('constants/permissions/values');
const {
PRIVATE,
} = require('constants/permissions/entries');
const app = require('./app');
const user = require('./user');
const { APP, USER } = require('constants/profiles/types');
const values = [
{
value: QUERY_PROFILE_PROJECT,
entries: [PRIVATE]<|fim▁hole|> },
{
value: QUERY_PROFILE_SETTINGS,
entries: [PRIVATE]
},
{
value: MUTATION_LOG,
entries: [PRIVATE]
},
{
value: MUTATION_SESSION,
entries: [PRIVATE]
},
];
exports.values = values;
exports[APP] = [...values, ...app];
exports[USER] = [...values, ...user];<|fim▁end|> | },
{
value: QUERY_PROFILE_ENVIRONMENT,
entries: [PRIVATE] |
<|file_name|>imagecollection.py<|end_file_name|><|fim▁begin|># coding=utf-8
""" Module holding tools for ee.ImageCollections """
import ee
import ee.data
import pandas as pd
import math
from . import date, ee_list
from . import image as image_module
from . import collection as eecollection
from ..utils import castImage
from .. import composite
def add(collection, image):
    """ Append an Image to the given ImageCollection

    **SERVER SIDE**
    """
    # TODO: handle a list of images
    as_list = collection.toList(collection.size())
    extended = as_list.add(image)
    return ee.ImageCollection.fromImages(extended)
def allMasked(collection):
    """ Get a mask which indicates pixels that are masked in all images (0) and
    pixels that have a valid pixel in at least one image (1) """
    # per-pixel count of valid observations across the whole collection
    valid_count = ee.Image(collection.map(lambda img: img.mask()).sum())
    return ee.Image(valid_count.gt(0))
def containsAllBands(collection, bands):
    """ Filter a collection with images containing all bands specified in
    parameter `bands`

    :param collection: the collection to filter
    :type collection: ee.ImageCollection
    :param bands: band names that every kept image must contain
    :type bands: list or ee.List
    :rtype: ee.ImageCollection
    """
    bands = ee.List(bands)
    # add bands as metadata so a server-side list filter can inspect them
    collection = collection.map(
        lambda i: ee.Image(i).set('_BANDS_', ee.Image(i).bandNames()))
    band0 = ee.String(bands.get(0))
    rest = ee.List(bands.slice(1))
    # filter matching the first requested band
    filt0 = ee.Filter.listContains(leftField='_BANDS_', rightValue=band0)
    # Get filter
    def wrap(band, filt):
        band = ee.String(band)
        filt = ee.Filter(filt)
        newfilt = ee.Filter.listContains(leftField='_BANDS_', rightValue=band)
        # AND-combine so an image must contain every requested band
        return ee.Filter.And(filt, newfilt)
    filt = ee.Filter(rest.iterate(wrap, filt0))
    return collection.filter(filt)
def containsAnyBand(collection, bands):
    """ Filter a collection with images cotaining any of the bands specified in
    parameter `bands`

    :param collection: the collection to filter
    :type collection: ee.ImageCollection
    :param bands: band names; an image is kept if it has at least one of them
    :type bands: list or ee.List
    :rtype: ee.ImageCollection
    """
    bands = ee.List(bands)
    # add bands as metadata so a server-side list filter can inspect them
    collection = collection.map(
        lambda i: ee.Image(i).set('_BANDS_', ee.Image(i).bandNames()))
    band0 = ee.String(bands.get(0))
    rest = ee.List(bands.slice(1))
    # filter matching the first requested band
    filt0 = ee.Filter.listContains(leftField='_BANDS_', rightValue=band0)
    # Get filter
    def wrap(band, filt):
        band = ee.String(band)
        filt = ee.Filter(filt)
        newfilt = ee.Filter.listContains(leftField='_BANDS_', rightValue=band)
        # OR-combine so one matching band is enough to keep the image
        return ee.Filter.Or(filt, newfilt)
    filt = ee.Filter(rest.iterate(wrap, filt0))
    return collection.filter(filt)
def getId(collection):
    """ Get the ImageCollection id.

    **CLIENT SIDE**

    :type collection: ee.ImageCollection
    :return: the collection's id
    :rtype: str
    """
    # limit(0) avoids downloading any image metadata, only collection info
    info = collection.limit(0).getInfo()
    return info['id']
def getImage(collection, index):
    """ Get an Image using its collection index """
    as_list = collection.toList(collection.size())
    return ee.Image(as_list.get(index))
def wrapper(f, *arg, **kwargs):
    """ Wrap a function and its arguments into a mapping function for
    ImageCollections. The first parameter of the functions must be an Image,
    and it must return an Image.

    :param f: the function to be wrapped
    :type f: function
    :return: a function to use in ee.ImageCollection.map
    :rtype: function
    """
    def mapped(img):
        # the mapped image is always passed first, then the bound arguments
        return f(img, *arg, **kwargs)
    return mapped
def enumerateProperty(collection, name='enumeration'):
    """ Set the position of each image inside the collection as a property.

    :param collection: the collection to enumerate
    :type collection: ee.ImageCollection
    :param name: name of the property that will hold the position
    :type name: str
    :return: the collection in which every image carries its index
    :rtype: ee.ImageCollection
    """
    # eecollection.enumerate returns a list of [index, element] pairs
    enumerated = eecollection.enumerate(collection)

    def over_list(l):
        l = ee.List(l)
        index = ee.Number(l.get(0))
        element = l.get(1)
        return ee.Image(element).set(name, index)

    imlist = enumerated.map(over_list)
    return ee.ImageCollection(imlist)
def enumerateSimple(collection, name='ENUM'):
    """ Simple enumeration of features inside a collection. Each feature stores
    its enumeration, so if the order of features changes over time, the numbers
    will not be in order """
    size = collection.size()
    collist = collection.toList(size)
    # one sequential number per image: 0 .. size-1
    seq = ee.List.sequence(0, size.subtract(1))

    def wrap(n):
        n = ee.Number(n).toInt()
        feat = collist.get(n)
        return ee.Image(feat).set(name, n)

    fc = ee.ImageCollection.fromImages(seq.map(wrap))

    # keep the collection-level properties of the source
    return ee.ImageCollection(fc.copyProperties(source=collection))
def fillWithLast(collection, reverse=False, proxy=-999):
    """ Fill each masked pixels with the last available not masked pixel. If reverse, it goes backwards.
    Images must contain a valid date (system:time_start property by default)

    :param reverse: if True, fill with the next (instead of previous) valid pixel
    :param proxy: sentinel value used internally to represent masked pixels;
        pixels that legitimately equal this value will end up masked — TODO confirm
    """
    # axis 0 of the array stacks the images (time axis)
    axis = 0

    def shift(array):
        # shift the time axis by one position, duplicating the edge slice so
        # the array keeps its original length
        if reverse:
            rigth = array.arraySlice(axis, 1)
            last = array.arraySlice(axis, -1)
            return rigth.arrayCat(last, axis)
        else:
            left = array.arraySlice(axis, 0, -1)
            first = array.arraySlice(axis, 0, 1)
            return first.arrayCat(left, axis)

    def move(array):
        # replace proxy (masked) values with the value of the shifted neighbor
        shifted = shift(array)
        masked = array.neq(proxy)
        maskednot = array.eq(proxy)
        t1 = array.multiply(masked)
        t2 = shifted.multiply(maskednot)
        final = t1.add(t2)
        return final

    def fill(array, size):
        # apply `move` size times so values propagate across the whole series
        size = ee.Number(size)
        indices = ee.List.sequence(0, size.subtract(1))
        def wrap(i, a):
            a = ee.Image(a)
            return move(a)
        return ee.Image(indices.iterate(wrap, array))

    # unmask every image with the proxy value so masked pixels are detectable
    # inside the array; keep the original properties
    collection = collection.map(
        lambda i: image_module.emptyBackground(i, proxy).copyProperties(
            source=i, properties=i.propertyNames()))
    bands = ee.Image(collection.first()).bandNames()
    size = collection.size()
    array = collection.toArray()
    fill_array = fill(array, size)

    # per-image property dicts, to restore metadata after slicing
    props = aggregate_array_all(collection)
    indices = ee.List.sequence(0, size.subtract(1))

    def wrap(index):
        # slice one image back out of the filled array and restore properties
        index = ee.Number(index).toInt()
        sliced = fill_array.arraySlice(axis, index, index.add(1))
        im = sliced.arrayProject([1]).arrayFlatten([bands])
        prop = ee.Dictionary(props.get(index))
        im = ee.Image(im.setMulti(prop))
        # re-mask pixels that could not be filled (still hold the proxy value)
        return im.updateMask(im.neq(proxy))

    return ee.ImageCollection.fromImages(indices.map(wrap))
def mergeGeometries(collection):
    """ Merge the geometries of many images. Return ee.Geometry """
    imlist = collection.toList(collection.size())

    first = ee.Image(imlist.get(0))
    rest = imlist.slice(1)

    def wrap(img, ini):
        # accumulate the union of every image footprint
        ini = ee.Geometry(ini)
        img = ee.Image(img)
        geom = img.geometry()
        union = geom.union(ini)
        # dissolve to keep a single geometry without internal boundaries
        return union.dissolve()

    return ee.Geometry(rest.iterate(wrap, first.geometry()))
def mosaicSameDay(collection, qualityBand=None):
    """ Return a collection where images from the same day are mosaicked

    :param qualityBand: the band that holds the quality score for mosaiking.
        If None it will use the simplier mosaic() function
    :type qualityBand: str
    :return: a new image collection with 1 image per day. The only property
        kept is `system:time_start`
    :rtype: ee.ImageCollection
    """
    all_dates = collection.aggregate_array('system:time_start')

    def overdates(d, l):
        # collect the distinct calendar days present in the collection
        l = ee.List(l)
        date = ee.Date(d)
        day = date.get('day')
        month = date.get('month')
        year = date.get('year')
        clean_date = ee.Date.fromYMD(year, month, day)
        condition = l.contains(clean_date)
        return ee.Algorithms.If(condition, l, l.add(clean_date))

    date_list = ee.List(all_dates.iterate(overdates, ee.List([])))
    first_img = ee.Image(collection.first())
    bands = first_img.bandNames()

    def make_col(date):
        # build one mosaic for every distinct day
        date = ee.Date(date)
        filtered = collection.filterDate(date, date.advance(1, 'day'))

        if qualityBand:
            mosaic = filtered.qualityMosaic(qualityBand)
        else:
            mosaic = filtered.mosaic()

        mosaic = mosaic.set('system:time_start', date.millis(),
                            'system:footprint', mergeGeometries(filtered))

        # mosaic = mosaic.rename(bands)
        mosaic = mosaic.select(bands)

        def reproject(bname, mos):
            # mosaic() loses the projection; restore each band's projection
            # from the first image of the collection
            mos = ee.Image(mos)
            mos_bnames = mos.bandNames()
            bname = ee.String(bname)
            proj = first_img.select(bname).projection()

            newmos = ee.Image(ee.Algorithms.If(
                mos_bnames.contains(bname),
                image_module.replace(mos, bname, mos.select(bname).setDefaultProjection(proj)),
                mos))

            return newmos

        mosaic = ee.Image(bands.iterate(reproject, mosaic))
        return mosaic

    new_col = ee.ImageCollection.fromImages(date_list.map(make_col))
    return new_col
def reduceEqualInterval(collection, interval=30, unit='day', reducer=None,
                        start_date=None, end_date=None):
    """ Reduce an ImageCollection into a new one that has one image per
    reduced interval, for example, one image per month.

    :param collection: the collection
    :type collection: ee.ImageCollection
    :param interval: the interval to reduce
    :type interval: int
    :param unit: unit of the interval. Can be 'day', 'month', 'year'
    :param reducer: the reducer to apply where images overlap. If None, uses
        a median reducer
    :type reducer: ee.Reducer
    :param start_date: fix the start date. If None, uses the date of the first
        image in the collection
    :type start_date: ee.Date
    :param end_date: fix the end date. If None, uses the date of the last image
        in the collection
    :type end_date: ee.Date
    :return:
    """
    interval = int(interval)  # force to int
    first = ee.Image(collection.sort('system:time_start').first())
    bands = first.bandNames()

    if not start_date:
        start_date = first.date()
    if not end_date:
        last = ee.Image(collection.sort('system:time_start', False).first())
        end_date = last.date()
    if not reducer:
        reducer = ee.Reducer.median()

    def apply_reducer(red, col):
        return ee.Image(col.reduce(red))

    ranges = date.daterangeList(start_date, end_date, interval, unit)

    def over_ranges(drange, ini):
        # reduce every date range that contains at least one image; empty
        # ranges are skipped so the result has no fully-masked images
        ini = ee.List(ini)
        drange = ee.DateRange(drange)
        start = drange.start()
        end = drange.end()
        filtered = collection.filterDate(start, end)
        condition = ee.Number(filtered.size()).gt(0)
        def true():
            image = apply_reducer(reducer, filtered)\
                    .set('system:time_start', end.millis())\
                    .set('reduced_from', start.format())\
                    .set('reduced_to', end.format())
            # rename to original names
            image = image.select(image.bandNames(), bands)
            result = ini.add(image)
            return result
        return ee.List(ee.Algorithms.If(condition, true(), ini))

    imlist = ee.List(ranges.iterate(over_ranges, ee.List([])))

    return ee.ImageCollection.fromImages(imlist)
def makeEqualInterval(collection, interval=1, unit='month'):
    """ Make a list of image collections filtered by the given interval,
    for example, one month. Starts from the end of the parsed collection

    :param collection: the collection
    :type collection: ee.ImageCollection
    :param interval: the interval
    :type interval: int
    :param unit: unit of the interval. Can be 'day', 'month', 'year'
    :rtype: ee.List
    """
    interval = int(interval)  # force to int
    collist = collection.sort('system:time_start').toList(collection.size())
    # first and last image dates define the total span to split
    start_date = ee.Image(collist.get(0)).date()
    end_date = ee.Image(collist.get(-1)).date()
    ranges = date.daterangeList(start_date, end_date, interval, unit)

    def over_ranges(drange, ini):
        # keep only non-empty sub-collections
        ini = ee.List(ini)
        drange = ee.DateRange(drange)
        start = drange.start()
        end = drange.end()
        filtered = collection.filterDate(start, end)
        condition = ee.Number(filtered.size()).gt(0)
        return ee.List(ee.Algorithms.If(condition, ini.add(filtered), ini))

    imlist = ee.List(ranges.iterate(over_ranges, ee.List([])))

    return imlist
def makeDayIntervals(collection, interval=30, reverse=False, buffer='second'):
    """ Split a collection into a list of sub-collections of `interval` days.

    :param interval: number of days per interval
    :type interval: int
    :param reverse: if True, build the intervals backwards from the end
    :param buffer: unit used to separate consecutive intervals
    :rtype: ee.List
    """
    interval = int(interval)
    collection = collection.sort('system:time_start', True)
    start = collection.first().date()
    end = collection.sort('system:time_start', False).first().date()
    ranges = date.dayRangeIntervals(start, end, interval, reverse, buffer)

    def over_ranges(drange, ini):
        # keep only non-empty sub-collections
        ini = ee.List(ini)
        drange = ee.DateRange(drange)
        start = drange.start()
        end = drange.end()
        filtered = collection.filterDate(start, end)
        condition = ee.Number(filtered.size()).gt(0)
        return ee.List(ee.Algorithms.If(condition, ini.add(filtered), ini))

    imlist = ee.List(ranges.iterate(over_ranges, ee.List([])))

    return imlist
def reduceDayIntervals(collection, reducer, interval=30, reverse=False,
                       buffer='second'):
    """ Reduce Day Intervals

    :param reducer: a function that takes as only argument a collection
        and returns an image
    :type reducer: function
    :return: an image collection
    :rtype: ee.ImageCollection
    """
    # split into day intervals, reduce each sub-collection to one image
    per_interval = makeDayIntervals(collection, interval, reverse, buffer)
    return ee.ImageCollection.fromImages(per_interval.map(reducer))
def getValues(collection, geometry, scale=None, reducer=None,
              id='system:index', properties=None, side='server',
              maxPixels=1e7, bestEffort=False, tileScale=1):
    """ Return all values of all bands of an image collection in the
    specified geometry

    :param geometry: Point from where to get the info
    :type geometry: ee.Geometry
    :param scale: The scale to use in the reducer. If None it defaults to 1
    :type scale: int
    :param reducer: the reducer to apply per image. Defaults to ee.Reducer.mean()
    :param id: image property that will be the key in the result dict
    :type id: str
    :param properties: image properties that will be added to the resulting
        dict
    :type properties: list
    :param side: 'server' or 'client' side
    :type side: str
    :return: Values of all bands in the ponit
    :rtype: dict
    """
    if reducer is None:
        reducer = ee.Reducer.mean()

    if not scale:
        scale = 1
    else:
        scale = int(scale)

    if not properties:
        properties = []
    properties = ee.List(properties)

    def listval(img, it):
        # accumulate {image id: {band: value, property: value}} per image
        theid = ee.Algorithms.String(img.get(id))
        values = img.reduceRegion(
            reducer, geometry, scale, maxPixels=maxPixels,
            bestEffort=bestEffort, tileScale=tileScale
        )
        values = ee.Dictionary(values)
        img_props = img.propertyNames()

        def add_properties(prop, ini):
            # only copy properties that actually exist on the image
            ini = ee.Dictionary(ini)
            condition = img_props.contains(prop)
            def true():
                value = img.get(prop)
                return ini.set(prop, value)
            return ee.Algorithms.If(condition, true(), ini)

        with_prop = ee.Dictionary(properties.iterate(add_properties, values))
        return ee.Dictionary(it).set(theid, with_prop)

    result = collection.iterate(listval, ee.Dictionary({}))
    # an empty collection yields an empty dict instead of failing
    result = ee.Dictionary(ee.Algorithms.If(collection.size().neq(0),
                                            result, {}))

    if side == 'server':
        return result
    elif side == 'client':
        return result.getInfo()
    else:
        raise ValueError("side parameter must be 'server' or 'client'")
def outliers(collection, bands, sigma=2, updateMask=False):
    """ Compute outliers by:

    outlier = value > mean+(sigma*stddev)
    outlier = value < mean-(sigma*stddev)

    Example (sigma = 1):
        - values = [1, 5, 6, 4, 7, 10]
        - mean = 5.5
        - std dev = 3
        - mean + (sigma*stddev) = 8.5
        - mean - (sigma*stddev) = 2.5
        - outliers = values between 2.5 and 8.5 = [1, 10]

    if `updateMask` is False return the passed collection in which each image
    have new bands (a mask) corresponding to the passed dict and a suffix '_outlier'
    else return the passed collection with the passed bands masked if are
    outliers (the outlier band is not returned).

    idea from: https://www.kdnuggets.com/2017/02/removing-outliers-standard-deviation-python.html
    """
    # if no bands given, use every band of the first image
    bands = bands or ee.Image(collection.first()).bandNames()
    bands = ee.List(bands)
    forstats = collection.select(bands)
    # per-pixel statistics over the whole time series
    mean = forstats.mean()
    stddev = forstats.reduce(ee.Reducer.stdDev())
    imin = mean.subtract(stddev.multiply(sigma))
    imax = mean.add(stddev.multiply(sigma))

    def getOutlier(im, imin, imax):
        # 1 where the pixel falls outside [imin, imax]
        ismin = im.lt(imin)
        ismax = im.gt(imax)
        outlier = ismin.Or(ismax)
        return outlier

    def overcol(im):
        outs = getOutlier(im.select(bands), imin, imax)
        if updateMask:
            ibands = im.select(bands)
            ibands = ibands.updateMask(outs.Not())
        else:
            ibands = image_module.addSuffix(outs, '_outlier')

        return im.addBands(ibands, overwrite=True)

    return collection.map(overcol)
def data2pandas(data):
    """
    Convert data coming from tools.imagecollection.get_values to a
    pandas DataFrame

    Images that lack a band present in other images yield a missing value
    (NaN) for that column instead of raising a KeyError.

    :param data: dict of {image id: {band/property name: value}}
    :type data: dict
    :rtype: pandas.DataFrame
    """
    # build the column header preserving first-seen order across all images
    header = []
    for val in data.values():
        for band in val.keys():
            if band not in header:
                header.append(band)

    # row index: one row per image id (dict order is preserved in py3.7+)
    indices = list(data.keys())

    # use .get so images missing a band produce None -> NaN in the DataFrame
    data_dict = {head: [val.get(head) for val in data.values()]
                 for head in header}

    return pd.DataFrame(data=data_dict, index=indices)
def parametrizeProperty(collection, property, range_from, range_to,
                        pattern='{property}_PARAMETRIZED'):
    """ Parametrize a property: linearly rescale its value from one range
    to another.

    :param collection: the ImageCollection
    :param range_from: the original property range
    :param range_to: the desired property range
    :param property: the name of the property
    :param pattern: the name of the resulting property. Wherever it says
        'property' will be replaced with the passed property.
    :return: the parsed collection in which every image has a new
        parametrized property
    """
    name = pattern.replace('{property}', property)

    original_range = range_from if isinstance(range_from, ee.List) \
        else ee.List(range_from)

    final_range = range_to if isinstance(range_to, ee.List) \
        else ee.List(range_to)

    # original min and max
    min0 = ee.Number(original_range.get(0))
    max0 = ee.Number(original_range.get(1))

    # range from min to max
    rango0 = max0.subtract(min0)

    # final min max images
    min1 = ee.Number(final_range.get(0))
    max1 = ee.Number(final_range.get(1))

    rango1 = max1.subtract(min1)

    def wrap(img):
        # linear mapping: value -> fraction of original range -> target range
        value = ee.Number(img.get(property))
        percent = value.subtract(min0).divide(rango0)
        final = percent.multiply(rango1).add(min1)
        return img.set(name, final)

    return collection.map(wrap)
def linearFunctionBand(collection, band, range_min=None, range_max=None,
                       mean=None, output_min=None, output_max=None,
                       name='linear_function'):
    """ Apply a linear function over the bands across every image of the
    ImageCollection using the following formula:

    - a = abs(val-mean)
    - b = output_max-output_min
    - c = abs(range_max-mean)
    - d = abs(range_min-mean)
    - e = max(c, d)

    f(x) = a*(-1)*(b/e)+output_max

    :param band: the band to process
    :param range_min: the minimum pixel value in the parsed band. If None, it
        will be computed reducing the collection
    :param range_max: the maximum pixel value in the parsed band. If None, it
        will be computed reducing the collection
    :param output_min: the minimum value that will take the resulting band.
    :param output_max: the minimum value that will take the resulting band.
    :param mean: the value on the given range that will take the `output_max`
        value
    :param name: the name of the resulting band
    :return: the parsed collection in which every image will have an extra band
        that results of applying the linear function over every pixel in the
        image
    :rtype: ee.ImageCollection
    """
    # resolve missing range limits by reducing the whole collection
    if range_min is None:
        range_min = ee.Image(collection.select(band).min()).rename('imin')
    else:
        range_min = castImage(range_min)

    if range_max is None:
        range_max = ee.Image(collection.select(band).max()).rename('imax')
    else:
        range_max = castImage(range_max)

    def to_map(img):
        # the actual per-pixel math is delegated to image_module.linearFunction
        result = image_module.linearFunction(img, band, range_min, range_max,
                                             mean, output_min, output_max,
                                             name)
        return img.addBands(result.rename(name))

    collection = collection.map(to_map)
    return collection
def linearFunctionProperty(collection, property, range_min=None,
                           range_max=None, mean=None, output_min=None,
                           output_max=None, name='LINEAR_FUNCTION'):
    """ Apply a linear function over the properties across every image of the
    ImageCollection using the following formula:

    - a = abs(val-mean)
    - b = output_max-output_min
    - c = abs(range_max-mean)
    - d = abs(range_min-mean)
    - e = max(c, d)

    f(x) = a*(-1)*(b/e)+output_max

    :param property: the property to process
    :param range_min: the minimum pixel value in the parsed band. If None, it
        will be computed reducing the collection
    :param range_max: the maximum pixel value in the parsed band. If None, it
        will be computed reducing the collection
    :param output_min: the minimum value that will take the resulting band.
    :param output_max: the minimum value that will take the resulting band.
    :param mean: the value on the given range that will take the `output_max`
        value
    :param name: the name of the resulting band
    :return: the parsed collection in which every image will have an extra
        property that results of applying the linear function over every pixel
        in the image
    :rtype: ee.ImageCollection
    """
    # resolve missing limits by aggregating the property over the collection
    if range_min is None:
        imin = ee.Number(collection.aggregate_min(property))
    else:
        imin = ee.Number(range_min)

    if range_max is None:
        imax = ee.Number(collection.aggregate_max(property))
    else:
        imax = ee.Number(range_max)

    # defaults: peak at the max of the range, outputs mirror the input range
    if mean is None:
        imean = imax
    else:
        imean = ee.Number(mean)

    if output_max is None:
        output_max = imax
    else:
        output_max = ee.Number(output_max)

    if output_min is None:
        output_min = imin
    else:
        output_min = ee.Number(output_min)

    # e = max(|range_max-mean|, |range_min-mean|) from the formula above
    a = imax.subtract(imean).abs()
    b = imin.subtract(imean).abs()
    t = a.max(b)

    def to_map(img):
        val = ee.Number(img.get(property))

        a = val.subtract(imean).abs().multiply(-1)
        b = output_max.subtract(output_min)
        c = b.divide(t)
        d = a.multiply(c)
        result = d.add(output_max)

        return img.set(name, result)

    collection = collection.map(to_map)
    return collection
def linearInterpolation(collection, date_property='system:time_start'):
def _addTime(collection):
def wrap(i):
sec = ee.Number(i.get(date_property))
isec = image_module.empty(sec, i.bandNames())
isec_suffix = image_module.addSuffix(isec, '_tmpTime')
m = i.mask()
isec_masked = isec.updateMask(m)
isec_masked_suffix = image_module.addSuffix(isec_masked,
'_maskedTime')
return i.addBands(isec_suffix).addBands(isec_masked_suffix)
return collection.map(wrap)
# get the mask for the final result
finalmask = allMasked(collection)
if date_property != 'system:time_start':
collection = collection.sort(date_property)
# add time bands
collection = _addTime(collection)
filled = fillWithLast(collection, False)
filled_back = fillWithLast(collection, True)
condition = ee.Filter.equals(leftField='system:index',
rightField='system:index')
match1 = ee.Join.saveFirst('filled').apply(
primary=collection,
secondary=filled,
condition=condition
)
match2 = ee.Join.saveFirst('filled_back').apply(
primary=match1,
secondary=filled_back,
condition=condition
)
def wrap(image):
o = ee.Image(image)
bands = o.bandNames()
masked = o.mask().Not()
f = ee.Image(image.get('filled')).unmask()
fb = ee.Image(image.get('filled_back')).unmask()
# filters
filter0 = ee.Filter.stringContains('item', 'maskedTime')
filter1 = ee.Filter.stringContains('item', 'maskedTime').Not()
filter2 = ee.Filter.stringContains('item', 'tmpTime').Not()
# get all deltas (including delta x)
dy = ee.Image(fb.subtract(f)).unmask()
dx_bands = bands.filter(filter0)
# select only delta x for each band
dx = dy.select(dx_bands)
# get original bands
original_bands = bands.filter(filter1).filter(filter2)
# get delta for original bands
delta = dy.select(original_bands)
# now that we have delta x and delta for the original bands
# get the slope
slope = delta.divide(dx).unmask()
# filled original bands
fo = f.select(original_bands)
# filled back original bands
fob = fb.select(original_bands)
# original bands
oo = o.select(original_bands)
# masked original bands
mo = masked.select(original_bands)
t = o.select('.+_tmpTime').subtract(f.select('.+_maskedTime'))
fill = fo.add(slope.multiply(t)).unmask()<|fim▁hole|> final = oo.unmask().where(mo, fill3)
final = image_module.deleteProperties(final)
final = final.select(original_bands) \
.copyProperties(o, exclude=['filled', 'filled_back']) \
.set(date_property, o.get(date_property)) \
.set('system:index', o.get('system:index'))
return ee.Image(final).updateMask(finalmask)
return ee.ImageCollection(match2.map(wrap))
def gaussFunctionBand(collection, band, range_min=None, range_max=None,
                      mean=0, output_min=None, output_max=1, std=None,
                      stretch=1, name='gauss'):
    """ Compute a Guass function using a specified band over an
        ImageCollection. See: https://en.wikipedia.org/wiki/Gaussian_function

    :param band: the name of the band to use
    :type band: str
    :param range_min: the minimum pixel value in the parsed band. If None, it
        will be computed
    :param range_max: the maximum pixel value in the parsed band. If None, it
        will be computed
    :param mean: the position of the center of the peak. Defaults to 0
    :type mean: int or float
    :param std: the standard deviation value. Defaults to range/4
    :type std: int or float
    :param output_max: height of the curve's peak
    :type output_max: int or float
    :param output_min: the desired minimum of the curve
    :type output_min: int or float
    :param stretch: a stretching value. As bigger as stretch
    :type stretch: int or float
    :param name: the name of the resulting band
    :return: the parsed collection in which every image will have an extra band
        that results of applying the gauss function over every pixel in the
        image
    :rtype: ee.ImageCollection
    """
    # resolve missing range limits by reducing the whole collection
    if range_min is None:
        range_min = ee.Image(collection.min())
    else:
        range_min = castImage(range_min)

    if range_max is None:
        range_max = ee.Image(collection.max())
    else:
        range_max = castImage(range_max)

    def to_map(img):
        # the per-pixel gaussian is delegated to image_module.gaussFunction
        result = image_module.gaussFunction(img, band,
                                            range_min=range_min,
                                            range_max=range_max,
                                            mean=mean, std=std,
                                            output_min=output_min,
                                            output_max=output_max,
                                            stretch=stretch,
                                            name=name)
        return img.addBands(result)

    collection = collection.map(to_map)
    return collection
def gaussFunctionProperty(collection, property, range_min=None,
                          range_max=None, mean=0, output_min=None,
                          output_max=1, std=None, stretch=1,
                          name='GAUSS'):
    """ Compute a Guass function using a specified property over an
        ImageCollection. See: https://en.wikipedia.org/wiki/Gaussian_function

    :param collection:
    :type collection: ee.ImageCollection
    :param property: the name of the property to use
    :type property: str
    :param range_min: the minimum pixel value in the parsed band. If None, it
        will be computed
    :param range_max: the maximum pixel value in the parsed band. If None, it
        will be computed
    :param mean: the position of the center of the peak. Defaults to 0
    :type mean: int or float
    :param std: the standard deviation value. Defaults to range/4
    :type std: int or float
    :param output_max: height of the curve's peak
    :type output_max: int or float
    :param output_min: the desired minimum of the curve
    :type output_min: int or float
    :param stretch: a stretching value. As bigger as stretch
    :type stretch: int or float
    :param name: the name of the resulting property
    :return: the parsed collection in which every image will have an extra
        property that results of applying the linear function over every pixel
        in the image
    :rtype: ee.ImageCollection
    """
    # resolve missing limits by aggregating the property over the collection
    if range_min is None:
        range_min = ee.Number(collection.aggregate_min(property))
    else:
        range_min = ee.Number(range_min)

    if range_max is None:
        range_max = ee.Number(collection.aggregate_max(property))
    else:
        range_max = ee.Number(range_max)

    mean = ee.Number(mean)
    output_max = ee.Number(output_max)
    # default std: a quarter of the full range
    if std is None:
        std = range_max.subtract(range_min).divide(4)
    else:
        std = ee.Number(std)
    stretch = ee.Number(stretch)

    def to_map(img):
        def compute_gauss(value):
            # classic gaussian: output_max * exp(stretch*(-(x-mean)^2/(2*std^2)))
            a = value.subtract(mean).pow(2)
            b = std.pow(2).multiply(-2)
            c = a.divide(b).multiply(stretch)
            d = c.exp()
            return d.multiply(output_max)

        no_parametrized = compute_gauss(ee.Number(img.get(property)))

        if output_min is None:
            return img.set(name, no_parametrized)
        else:
            # rescale so the curve's minimum over the range equals output_min
            min_result = compute_gauss(range_min)
            max_result = compute_gauss(range_max)
            min_result_final = min_result.min(max_result)
            e = no_parametrized.subtract(min_result_final)
            f = output_max.subtract(min_result_final)
            g = output_max.subtract(output_min)
            parametrized = e.divide(f).multiply(g).add(output_min)
            return img.set(name, parametrized)

    collection = collection.map(to_map)

    return collection
def normalDistributionProperty(collection, property, mean=None, std=None,
                               name='NORMAL_DISTRIBUTION'):
    """ Compute a normal distribution using a specified property, over an
    ImageCollection. For more see:
    https://en.wikipedia.org/wiki/Normal_distribution

    :param property: the name of the property to use
    :type property: str
    :param mean: the mean value. If None it will be computed from the source.
        defaults to None.
    :type mean: float
    :param std: the standard deviation value. If None it will be computed from
        the source. Defaults to None.
    :type std: float
    """
    if mean is None:
        imean = ee.Number(collection.aggregate_mean(property))
    else:
        imean = ee.Number(mean)

    if std is None:
        istd = ee.Number(collection.aggregate_total_sd(property))
    else:
        istd = ee.Number(std)

    # normal distribution peak: 1 / (std * sqrt(2*pi))
    imax = ee.Number(1)\
             .divide(istd.multiply(ee.Number(2).multiply(math.pi).sqrt()))

    return gaussFunctionProperty(collection, property, mean=imean,
                                 output_max=imax, std=istd, name=name)
def normalDistributionBand(collection, band, mean=None, std=None,
                           name='normal_distribution'):
    """ Compute a normal distribution using a specified band, over an
    ImageCollection. For more see:
    https://en.wikipedia.org/wiki/Normal_distribution

    :param band: the name of the property to use
    :type band: str
    :param mean: the mean value. If None it will be computed from the source.
        defaults to None.
    :type mean: float
    :param std: the standard deviation value. If None it will be computed from
        the source. Defaults to None.
    :type std: float
    """
    if mean is None:
        imean = ee.Image(collection.mean())
    else:
        imean = ee.Image.constant(mean)

    if std is None:
        istd = ee.Image(collection.reduce(ee.Reducer.stdDev()))
    else:
        istd = ee.Image.constant(std)

    # normal distribution peak: 1 / (std * sqrt(2*pi)), as per-pixel images
    ipi = ee.Image.constant(math.pi)

    imax = ee.Image(1) \
             .divide(istd.multiply(ee.Image.constant(2).multiply(ipi).sqrt()))

    return gaussFunctionBand(collection, band, mean=imean,
                             output_max=imax, std=istd, name=name)
def maskedSize(collection):
    """ return an image with the percentage of masked pixels. 100% means all
    pixels are masked """
    # 1 where the pixel is masked, 0 where it is valid
    mask = collection.map(lambda i: i.mask().Not())

    def wrap(i):
        # turn every pixel into 1 to count total observations per pixel
        onemore = i.add(1)
        return onemore.divide(onemore)

    total = mask.map(wrap)

    masksum = mask.sum()
    totalsum = total.sum()
    return masksum.divide(totalsum).multiply(100).toInt()
def area_under_curve(collection, band, x_property=None, name='area_under'):
    """ Compute the area under the curve taking the x axis from an image
    property. If not specified, it'll use `system:time_start`

    :param band: the band whose values form the curve (y axis)
    :param x_property: image property used as the x axis
    :param name: name of the per-image partial-area band
    :return: single ee.Image with the summed area
    """
    x_property = x_property or "system:time_start"
    max_x = collection.aggregate_max(x_property)
    min_x = collection.aggregate_min(x_property)
    # total x span, used to normalize each interval's width
    total_lapsed = ee.Number(max_x).subtract(ee.Number(min_x))

    def cummulative(image, cumm):
        cumm = ee.List(cumm)

        def true(i, c):
            # trapezoid between the previous image and this one
            c = ee.List(c)
            last = ee.Image(c.get(-1))
            lapsed = ee.Number(image.get(x_property)).subtract(
                ee.Number(last.get(x_property)))
            lapsed_percent = lapsed.divide(total_lapsed)
            rise = i.select(band).subtract(last.select(band)).divide(2)
            toadd = i.select(band).add(rise).multiply(lapsed_percent).rename(
                name).toFloat()
            return c.add(i.addBands(toadd))

        def false(i, c):
            # first image contributes no area
            toadd = i.addBands(ee.Image(0).rename(name).toFloat())
            return c.add(toadd)

        return ee.List(ee.Algorithms.If(cumm.size(), true(image, cumm),
                                        false(image, cumm)))

    final = ee.List(collection.iterate(cummulative, ee.List([])))
    final_ic = ee.ImageCollection.fromImages(final).select(name)
    # sum all partial areas into a single image
    return ee.Image(final_ic.reduce(ee.Reducer.sum()))
def moving_average(collection, back=5, reducer=None,
                   use_original=True):
    """ Compute the moving average over a time series

    :param back: number of images back to use for computing the stats
    :type back: int
    :param reducer: the reducer to apply. Default is ee.Reducer.mean()
    :type reducer: ee.Reducer
    :param use_original: if True, computes the stats over the last original
        values, otherwise, computes the stats over the last computed values
    :type use_original: bool
    """
    if reducer is None:
        reducer = ee.Reducer.mean()

    def wrap(i, d):
        # accumulator dict holds two parallel lists:
        #   'original': the raw images seen so far
        #   'stats': the moving-average images produced so far
        d = ee.Dictionary(d)
        i = ee.Image(i)
        original = ee.List(d.get('original'))
        stats = ee.List(d.get('stats'))

        def true(im, di):
            # at least `back` images accumulated: reduce the last `back`
            original_true = ee.List(di.get('original'))
            stats_true = ee.List(di.get('stats'))
            original_true = original_true.add(im)
            tocompute = original_true if use_original else stats_true.add(im)
            tempcol = ee.ImageCollection.fromImages(tocompute.slice(back * -1))
            stats = tempcol.reduce(reducer)
            stats = stats.rename(im.bandNames())
            stats = ee.Image(stats.copyProperties(im, properties=im.propertyNames()))
            return ee.Dictionary({
                'original': original_true,
                'stats': stats_true.add(stats)
            })

        def false(im, di):
            # fewer than `back` images accumulated so far
            original2 = ee.List(di.get('original'))
            stats2 = ee.List(di.get('stats'))
            condition2 = original2.size().gt(0)

            def true2(ima, dic):
                # reduce over whatever has been accumulated so far
                original_true2 = ee.List(dic.get('original'))
                original_true2 = original_true2.add(ima)
                stats_true2 = ee.List(dic.get('stats'))
                tocompute = original_true2 if use_original else stats_true2.add(ima)
                tempcol2 = ee.ImageCollection.fromImages(tocompute)
                stats2 = tempcol2.reduce(reducer)
                stats2 = stats2.rename(ima.bandNames())
                stats2 = ee.Image(stats2.copyProperties(ima, properties=ima.propertyNames()))
                return ee.Dictionary({
                    'original': original_true2,
                    'stats': stats_true2.add(stats2)
                })

            def false2(ima, dic):
                # first element
                original_false2 = ee.List(dic.get('original'))
                stats_false2 = ee.List(dic.get('stats'))
                return ee.Dictionary({
                    'original': original_false2.add(ima),
                    'stats': stats_false2.add(ima)
                })

            return ee.Dictionary(
                ee.Algorithms.If(condition2, true2(im, di), false2(im, di)))

        condition = original.size().gte(back)

        return ee.Dictionary(
            ee.Algorithms.If(condition, true(i, d), false(i, d)))

    final = ee.Dictionary(
        collection.iterate(wrap, ee.Dictionary({'original': [], 'stats': []})))
    return ee.ImageCollection.fromImages(ee.List(final.get('stats')))
def aggregate_array_all(collection):
    """ Aggregate array in all images and return a list of dicts

    :param collection: collection whose per-image properties are aggregated
    :type collection: ee.ImageCollection
    :rtype: ee.List
    """
    # Property names are read from the first image; assumes every image in
    # the collection carries the same properties -- TODO confirm.
    property_names = collection.first().propertyNames()
    # One list per property, each holding that property's value per image.
    values_per_property = property_names.map(
        lambda name: collection.aggregate_array(name))
    # Transpose to obtain one list of property values per image.
    values_per_image = ee_list.transpose(values_per_property)
    return values_per_image.map(
        lambda values: ee.Dictionary.fromLists(property_names, values))
fill3 = fo.where(fill2, fill2) |
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license

// Test that Cell is considered invariant with respect to its
// type.
use std::cell::Cell;
// A struct holding a `Cell` of an optional reference. Because a `Cell`'s
// contents can be both read and written, `Foo<'a>` must be invariant in
// `'a` -- that is exactly what this compile-fail test checks below.
// NOTE(review): uses the pre-1.0 `int` type; this test predates Rust 1.0.
struct Foo<'a> {
    x: Cell<Option<&'a int>>,
}
// Attempts to coerce `Foo<'short>` into `Foo<'long>`. Since `Cell` makes
// `Foo` invariant in its lifetime parameter, the compiler must reject this
// assignment; the `//~ ERROR` annotation is consumed by the compile-fail
// test harness and must match the emitted diagnostic.
fn use_<'short,'long>(c: Foo<'short>,
                      s: &'short int,
                      l: &'long int,
                      _where:Option<&'short &'long ()>) {
    let _: Foo<'long> = c; //~ ERROR mismatched types
}
// Required entry point; all interesting checks happen at compile time above.
fn main() {
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for suggestion registry classes."""
from __future__ import annotations
import datetime
import os
from core import feconf
from core import utils
from core.domain import config_services
from core.domain import exp_domain
from core.domain import exp_fetchers
from core.domain import exp_services
from core.domain import fs_domain
from core.domain import fs_services
from core.domain import html_validation_service
from core.domain import question_domain
from core.domain import question_services
from core.domain import skill_services
from core.domain import state_domain
from core.domain import suggestion_registry
from core.domain import suggestion_services
from core.platform import models
from core.tests import test_utils
(suggestion_models,) = models.Registry.import_models([models.NAMES.suggestion])
class MockInvalidSuggestion(suggestion_registry.BaseSuggestion):
    """A BaseSuggestion subclass that deliberately skips the base __init__,
    used to exercise the NotImplementedError paths of BaseSuggestion's
    abstract methods without constructing a full suggestion.
    """

    def __init__(self): # pylint: disable=super-init-not-called
        # Intentionally empty: no base-class state is needed for these tests.
        pass
class BaseSuggestionUnitTests(test_utils.GenericTestBase):
    """Tests that each abstract method on BaseSuggestion raises
    NotImplementedError when a subclass does not override it.
    """

    def setUp(self):
        super().setUp()
        # MockInvalidSuggestion skips BaseSuggestion.__init__, which is fine
        # here because only the unimplemented-method errors are exercised.
        self.base_suggestion = MockInvalidSuggestion()

    def test_base_class_accept_raises_error(self):
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement accept.'):
            self.base_suggestion.accept()

    def test_base_class_get_change_list_for_accepting_suggestion_raises_error(
            self):
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement '
            'get_change_list_for_accepting_suggestion.'):
            self.base_suggestion.get_change_list_for_accepting_suggestion()

    def test_base_class_pre_accept_validate_raises_error(self):
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement'
            ' pre_accept_validate.'):
            self.base_suggestion.pre_accept_validate()

    def test_base_class_populate_old_value_of_change_raises_error(self):
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement'
            ' populate_old_value_of_change.'):
            self.base_suggestion.populate_old_value_of_change()

    def test_base_class_pre_update_validate_raises_error(self):
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement'
            ' pre_update_validate.'):
            self.base_suggestion.pre_update_validate({})

    def test_base_class_get_all_html_content_strings(self):
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement'
            ' get_all_html_content_strings.'):
            self.base_suggestion.get_all_html_content_strings()

    def test_base_class_get_target_entity_html_strings(self):
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement'
            ' get_target_entity_html_strings.'):
            self.base_suggestion.get_target_entity_html_strings()

    def test_base_class_convert_html_in_suggestion_change(self):
        def conversion_fn():
            """Temporary function."""
            pass
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement'
            ' convert_html_in_suggestion_change.'):
            self.base_suggestion.convert_html_in_suggestion_change(
                conversion_fn)
class SuggestionEditStateContentUnitTests(test_utils.GenericTestBase):
    """Tests for the SuggestionEditStateContent class."""

    AUTHOR_EMAIL = '[email protected]'
    REVIEWER_EMAIL = '[email protected]'
    ASSIGNED_REVIEWER_EMAIL = '[email protected]'
    fake_date = datetime.datetime(2016, 4, 10, 0, 0, 0, 0)

    def setUp(self):
        super().setUp()
        self.signup(self.AUTHOR_EMAIL, 'author')
        self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
        self.signup(self.REVIEWER_EMAIL, 'reviewer')
        self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
        self.suggestion_dict = {
            'suggestion_id': 'exploration.exp1.thread1',
            'suggestion_type': (
                feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
            'target_type': feconf.ENTITY_TYPE_EXPLORATION,
            'target_id': 'exp1',
            'target_version_at_submission': 1,
            'status': suggestion_models.STATUS_ACCEPTED,
            'author_name': 'author',
            'final_reviewer_id': self.reviewer_id,
            'change': {
                'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
                'property_name': exp_domain.STATE_PROPERTY_CONTENT,
                'state_name': 'state_1',
                'new_value': 'new suggestion content',
                'old_value': None
            },
            'score_category': 'content.Algebra',
            'language_code': None,
            'last_updated': utils.get_time_in_millisecs(self.fake_date),
            'edited_by_reviewer': False
        }

    def _create_suggestion(self, change=None):
        """Builds a SuggestionEditStateContent from the fixture values.

        Args:
            change: dict|None. A change dict to use instead of the default
                one from self.suggestion_dict, if provided.

        Returns:
            SuggestionEditStateContent. A suggestion populated from
            self.suggestion_dict.
        """
        suggestion_dict = self.suggestion_dict
        return suggestion_registry.SuggestionEditStateContent(
            suggestion_dict['suggestion_id'],
            suggestion_dict['target_id'],
            suggestion_dict['target_version_at_submission'],
            suggestion_dict['status'], self.author_id,
            self.reviewer_id,
            change if change is not None else suggestion_dict['change'],
            suggestion_dict['score_category'],
            suggestion_dict['language_code'], False, self.fake_date)

    def test_create_suggestion_edit_state_content(self):
        observed_suggestion = self._create_suggestion()
        self.assertDictEqual(
            observed_suggestion.to_dict(), self.suggestion_dict)

    def test_validate_suggestion_edit_state_content(self):
        self._create_suggestion().validate()

    def test_get_score_part_helper_methods(self):
        suggestion = self._create_suggestion()
        self.assertEqual(suggestion.get_score_type(), 'content')
        self.assertEqual(suggestion.get_score_sub_type(), 'Algebra')

    def test_validate_suggestion_type(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.suggestion_type = 'invalid_suggestion_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected suggestion_type to be among allowed choices'
        ):
            suggestion.validate()

    def test_validate_target_type(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.target_type = 'invalid_target_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected target_type to be among allowed choices'
        ):
            suggestion.validate()

    def test_validate_target_id(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.target_id = 0
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected target_id to be a string'
        ):
            suggestion.validate()

    def test_validate_target_version_at_submission(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.target_version_at_submission = 'invalid_version'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected target_version_at_submission to be an int'
        ):
            suggestion.validate()

    def test_validate_status(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.status = 'invalid_status'
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected status to be among allowed choices'
        ):
            suggestion.validate()

    def test_validate_author_id(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.author_id = 0
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected author_id to be a string'
        ):
            suggestion.validate()

    def test_validate_author_id_format(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        # Pseudonymous IDs are valid author IDs.
        suggestion.author_id = self.PSEUDONYMOUS_ID
        suggestion.validate()

        suggestion.author_id = ''
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected author_id to be in a valid user ID format'
        ):
            suggestion.validate()

    def test_validate_final_reviewer_id(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.final_reviewer_id = 1
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected final_reviewer_id to be a string'
        ):
            suggestion.validate()

    def test_validate_final_reviewer_id_format(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        # Pseudonymous IDs are valid reviewer IDs.
        suggestion.final_reviewer_id = self.PSEUDONYMOUS_ID
        suggestion.validate()

        suggestion.final_reviewer_id = ''
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected final_reviewer_id to be in a valid user ID format'
        ):
            suggestion.validate()

    def test_validate_score_category(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.score_category = 0
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected score_category to be a string'
        ):
            suggestion.validate()

    def test_validate_score_category_format(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        # Too many dot-separated parts.
        suggestion.score_category = 'score.score_type.score_sub_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected score_category to be of the form'
            ' score_type.score_sub_type'
        ):
            suggestion.validate()

        # No dot separator at all.
        suggestion.score_category = 'invalid_score_category'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected score_category to be of the form'
            ' score_type.score_sub_type'
        ):
            suggestion.validate()

    def test_validate_score_type(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.score_category = 'invalid_score_type.score_sub_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the first part of score_category to be among allowed'
            ' choices'
        ):
            suggestion.validate()

    def test_validate_change(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.change = {}
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected change to be an ExplorationChange'
        ):
            suggestion.validate()

    def test_validate_score_type_content(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.score_category = 'question.score_sub_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the first part of score_category to be content'
        ):
            suggestion.validate()

    def test_validate_change_cmd(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.change.cmd = 'invalid_cmd'
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected cmd to be edit_state_property'
        ):
            suggestion.validate()

    def test_validate_change_property_name(self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        suggestion.change.property_name = 'invalid_property'
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected property_name to be content'
        ):
            suggestion.validate()

    def test_validate_language_code_fails_when_language_codes_do_not_match(
            self):
        suggestion = self._create_suggestion()
        suggestion.validate()

        # Edit-state-content suggestions must have language_code None.
        suggestion.language_code = 'wrong_language_code'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected language_code to be None, received wrong_language_code'
        ):
            suggestion.validate()

    def test_pre_accept_validate_state_name(self):
        self.save_new_default_exploration('exp1', self.author_id)
        suggestion = self._create_suggestion()
        exp_services.update_exploration(
            self.author_id, 'exp1', [
                exp_domain.ExplorationChange({
                    'cmd': exp_domain.CMD_ADD_STATE,
                    'state_name': 'State A',
                })
            ], 'Added state')
        suggestion.change.state_name = 'State A'

        suggestion.pre_accept_validate()

        suggestion.change.state_name = 'invalid_state_name'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected invalid_state_name to be a valid state name'
        ):
            suggestion.pre_accept_validate()

    def test_populate_old_value_of_change_with_invalid_state(self):
        self.save_new_default_exploration('exp1', self.author_id)
        suggestion = self._create_suggestion()
        suggestion.change.state_name = 'invalid_state_name'

        # With an invalid state name the old value must remain None.
        self.assertIsNone(suggestion.change.old_value)
        suggestion.populate_old_value_of_change()
        self.assertIsNone(suggestion.change.old_value)

    def test_pre_update_validate_change_cmd(self):
        suggestion = self._create_suggestion()
        change = {
            'cmd': exp_domain.CMD_ADD_STATE,
            'property_name': exp_domain.STATE_PROPERTY_CONTENT,
            'state_name': suggestion.change.state_name,
            'new_value': 'new suggestion content',
            'old_value': None
        }
        with self.assertRaisesRegex(
            utils.ValidationError,
            'The following extra attributes are present: new_value, '
            'old_value, property_name'
        ):
            suggestion.pre_update_validate(exp_domain.ExplorationChange(change))

    def test_pre_update_validate_change_property_name(self):
        suggestion = self._create_suggestion()
        change = {
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_PARAM_CHANGES,
            'state_name': suggestion.change.state_name,
            'new_value': 'new suggestion content',
            'old_value': None
        }
        with self.assertRaisesRegex(
            utils.ValidationError,
            'The new change property_name must be equal to content'
        ):
            suggestion.pre_update_validate(exp_domain.ExplorationChange(change))

    def test_pre_update_validate_change_state_name(self):
        suggestion = self._create_suggestion()
        change = {
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_CONTENT,
            'state_name': 'invalid_state',
            'new_value': 'new suggestion content',
            'old_value': None
        }
        with self.assertRaisesRegex(
            utils.ValidationError,
            'The new change state_name must be equal to state_1'
        ):
            suggestion.pre_update_validate(exp_domain.ExplorationChange(change))

    def test_pre_update_validate_change_new_value(self):
        suggestion = self._create_suggestion()
        new_content = state_domain.SubtitledHtml(
            'content', '<p>new suggestion html</p>').to_dict()
        suggestion.change.new_value = new_content

        change = {
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_CONTENT,
            'state_name': suggestion.change.state_name,
            'new_value': new_content,
            'old_value': None
        }
        with self.assertRaisesRegex(
            utils.ValidationError, 'The new html must not match the old html'
        ):
            suggestion.pre_update_validate(exp_domain.ExplorationChange(change))

    def test_pre_update_validate_non_equal_change_cmd(self):
        suggestion = self._create_suggestion()
        with self.assertRaisesRegex(
            utils.ValidationError,
            'The new change cmd must be equal to edit_state_property'
        ):
            suggestion.pre_update_validate(exp_domain.ExplorationChange({
                'cmd': exp_domain.CMD_EDIT_EXPLORATION_PROPERTY,
                'property_name': 'title',
                'new_value': 'Exploration 1 Albert title'
            }))

    def test_get_all_html_content_strings(self):
        change_dict = {
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_CONTENT,
            'state_name': 'state_1',
            'new_value': {
                'content_id': 'content',
                'html': 'new suggestion content'
            },
            'old_value': None
        }
        suggestion = self._create_suggestion(change_dict)

        actual_outcome_list = suggestion.get_all_html_content_strings()
        expected_outcome_list = [u'new suggestion content']
        self.assertEqual(expected_outcome_list, actual_outcome_list)

    def test_convert_html_in_suggestion_change(self):
        html_content = (
            '<p>Value</p><oppia-noninteractive-math raw_latex-with-value="&a'
            'mp;quot;+,-,-,+&quot;"></oppia-noninteractive-math>')
        expected_html_content = (
            '<p>Value</p><oppia-noninteractive-math math_content-with-value='
            '"{&quot;raw_latex&quot;: &quot;+,-,-,+&quot;, &'
            'amp;quot;svg_filename&quot;: &quot;&quot;}"></oppia'
            '-noninteractive-math>')
        change = {
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_CONTENT,
            'state_name': 'Introduction',
            'new_value': {
                'content_id': 'content',
                'html': '<p>suggestion</p>'
            },
            'old_value': {
                'content_id': 'content',
                'html': html_content
            }
        }
        suggestion = self._create_suggestion(change)

        suggestion.convert_html_in_suggestion_change(
            html_validation_service.
            add_math_content_to_math_rte_components)
        self.assertEqual(
            suggestion.change.old_value['html'], expected_html_content)

    def test_get_target_entity_html_strings_returns_expected_strings(self):
        change_dict = {
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_CONTENT,
            'state_name': 'state_1',
            'new_value': {
                'content_id': 'content',
                'html': 'new suggestion content'
            },
            'old_value': {
                'content_id': 'content',
                'html': 'Old content.'
            }
        }
        suggestion = self._create_suggestion(change_dict)

        actual_outcome_list = suggestion.get_target_entity_html_strings()
        expected_outcome_list = [u'Old content.']
        self.assertEqual(expected_outcome_list, actual_outcome_list)

    def test_get_target_entity_html_with_none_old_value(self):
        change_dict = {
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_CONTENT,
            'state_name': 'state_1',
            'new_value': {
                'content_id': 'content',
                'html': 'new suggestion content'
            },
            'old_value': None
        }
        suggestion = self._create_suggestion(change_dict)

        actual_outcome_list = suggestion.get_target_entity_html_strings()
        self.assertEqual(actual_outcome_list, [])
class SuggestionTranslateContentUnitTests(test_utils.GenericTestBase):
    """Tests for the SuggestionTranslateContent class."""

    AUTHOR_EMAIL = '[email protected]'
    REVIEWER_EMAIL = '[email protected]'
    ASSIGNED_REVIEWER_EMAIL = '[email protected]'
    # Fixed timestamp so that to_dict() comparisons are deterministic.
    fake_date = datetime.datetime(2016, 4, 10, 0, 0, 0, 0)

    def setUp(self):
        super(SuggestionTranslateContentUnitTests, self).setUp()
        self.signup(self.AUTHOR_EMAIL, 'author')
        self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
        self.signup(self.REVIEWER_EMAIL, 'reviewer')
        self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
        # Canonical fixture for a translate-content suggestion. Individual
        # tests build suggestions from it via _build_suggestion(), optionally
        # overriding the change payload or the author.
        self.suggestion_dict = {
            'suggestion_id': 'exploration.exp1.thread1',
            'suggestion_type': (
                feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
            'target_type': feconf.ENTITY_TYPE_EXPLORATION,
            'target_id': 'exp1',
            'target_version_at_submission': 1,
            'status': suggestion_models.STATUS_ACCEPTED,
            'author_name': 'author',
            'final_reviewer_id': self.reviewer_id,
            'change': {
                'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
                'state_name': 'Introduction',
                'content_id': 'content',
                'language_code': 'hi',
                'content_html': '<p>This is a content.</p>',
                'translation_html': '<p>This is translated html.</p>',
                'data_format': 'html'
            },
            'score_category': 'translation.Algebra',
            'language_code': 'hi',
            'last_updated': utils.get_time_in_millisecs(self.fake_date),
            'edited_by_reviewer': False
        }

    def _build_suggestion(self, change=None, author_id=None):
        """Builds a SuggestionTranslateContent from the fixture dict.

        Args:
            change: dict|None. Optional override for the change payload.
                Defaults to the fixture's change dict.
            author_id: str|None. Optional override for the author id.
                Defaults to self.author_id.

        Returns:
            SuggestionTranslateContent. A suggestion built from
            self.suggestion_dict with edited_by_reviewer=False and
            last_updated=self.fake_date.
        """
        return suggestion_registry.SuggestionTranslateContent(
            self.suggestion_dict['suggestion_id'],
            self.suggestion_dict['target_id'],
            self.suggestion_dict['target_version_at_submission'],
            self.suggestion_dict['status'],
            author_id if author_id is not None else self.author_id,
            self.reviewer_id,
            change if change is not None else self.suggestion_dict['change'],
            self.suggestion_dict['score_category'],
            self.suggestion_dict['language_code'], False, self.fake_date)

    def test_pre_update_validate_fails_for_invalid_change_cmd(self):
        suggestion = self._build_suggestion()
        change = {
            'cmd': exp_domain.CMD_ADD_STATE,
            'state_name': 'Introduction'
        }
        with self.assertRaisesRegex(
            utils.ValidationError,
            'The new change cmd must be equal to %s' % (
                exp_domain.CMD_ADD_WRITTEN_TRANSLATION)
        ):
            suggestion.pre_update_validate(exp_domain.ExplorationChange(change))

    def test_pre_update_validate_change_state_name(self):
        suggestion = self._build_suggestion()
        change = {
            'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
            'state_name': 'State 1',
            'content_id': 'content',
            'language_code': 'hi',
            'content_html': '<p>This is a content.</p>',
            'translation_html': '<p>This is the updated translated html.</p>',
            'data_format': 'html'
        }
        with self.assertRaisesRegex(
            utils.ValidationError,
            'The new change state_name must be equal to Introduction'
        ):
            suggestion.pre_update_validate(exp_domain.ExplorationChange(change))

    def test_pre_update_validate_change_language_code(self):
        suggestion = self._build_suggestion()
        change = {
            'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
            'state_name': 'Introduction',
            'content_id': 'content',
            'language_code': 'en',
            'content_html': '<p>This is a content.</p>',
            'translation_html': '<p>This is the updated translated html.</p>',
            'data_format': 'html'
        }
        with self.assertRaisesRegex(
            utils.ValidationError,
            'The language code must be equal to hi'
        ):
            suggestion.pre_update_validate(exp_domain.ExplorationChange(change))

    def test_pre_update_validate_change_content_html(self):
        suggestion = self._build_suggestion()
        # content_html differs from the fixture's, which must be rejected
        # before any other mismatch in this change is considered.
        change = {
            'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
            'state_name': 'Introduction',
            'content_id': 'content',
            'language_code': 'en',
            'content_html': '<p>This is the changed content.</p>',
            'translation_html': '<p>This is the updated translated html.</p>',
            'data_format': 'html'
        }
        with self.assertRaisesRegex(
            utils.ValidationError,
            'The new change content_html must be equal to <p>This is a ' +
            'content.</p>'
        ):
            suggestion.pre_update_validate(
                exp_domain.ExplorationChange(change))

    def test_create_suggestion_add_translation(self):
        # Round-trip: a suggestion built from the fixture serializes back
        # to the fixture dict.
        observed_suggestion = self._build_suggestion()
        self.assertDictEqual(
            observed_suggestion.to_dict(), self.suggestion_dict)

    def test_validate_suggestion_add_translation(self):
        # A freshly-built fixture suggestion passes validation as-is.
        self._build_suggestion().validate()

    def test_get_score_part_helper_methods(self):
        # score_category 'translation.Algebra' splits into type/sub-type.
        suggestion = self._build_suggestion()
        self.assertEqual(suggestion.get_score_type(), 'translation')
        self.assertEqual(suggestion.get_score_sub_type(), 'Algebra')

    def test_validate_suggestion_type(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.suggestion_type = 'invalid_suggestion_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected suggestion_type to be among allowed choices'
        ):
            suggestion.validate()

    def test_validate_target_type(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.target_type = 'invalid_target_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected target_type to be among allowed choices'
        ):
            suggestion.validate()

    def test_validate_target_id(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.target_id = 0
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected target_id to be a string'
        ):
            suggestion.validate()

    def test_validate_target_version_at_submission(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.target_version_at_submission = 'invalid_version'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected target_version_at_submission to be an int'
        ):
            suggestion.validate()

    def test_validate_status(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.status = 'invalid_status'
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected status to be among allowed choices'
        ):
            suggestion.validate()

    def test_validate_author_id(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.author_id = 0
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected author_id to be a string'
        ):
            suggestion.validate()

    def test_validate_author_id_format(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.author_id = ''
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected author_id to be in a valid user ID format.'
        ):
            suggestion.validate()

    def test_validate_final_reviewer_id(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.final_reviewer_id = 1
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected final_reviewer_id to be a string'
        ):
            suggestion.validate()

    def test_validate_final_reviewer_id_format(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.final_reviewer_id = ''
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected final_reviewer_id to be in a valid user ID format'
        ):
            suggestion.validate()

    def test_validate_score_category(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.score_category = 0
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected score_category to be a string'
        ):
            suggestion.validate()

    def test_validate_score_category_format(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        # Too many dot-separated parts.
        suggestion.score_category = 'score.score_type.score_sub_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected score_category to be of the form'
            ' score_type.score_sub_type'
        ):
            suggestion.validate()

        # No dot at all.
        suggestion.score_category = 'invalid_score_category'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected score_category to be of the form'
            ' score_type.score_sub_type'
        ):
            suggestion.validate()

    def test_validate_score_type(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.score_category = 'invalid_score_type.score_sub_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the first part of score_category to be among allowed'
            ' choices'
        ):
            suggestion.validate()

    def test_validate_change(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.change = {}
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected change to be an ExplorationChange'
        ):
            suggestion.validate()

    def test_validate_score_type_translation(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.score_category = 'question.score_sub_type'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the first part of score_category to be translation'
        ):
            suggestion.validate()

    def test_validate_change_cmd(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.change.cmd = 'invalid_cmd'
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected cmd to be add_written_translation'
        ):
            suggestion.validate()

    def test_validate_language_code_fails_when_language_codes_do_not_match(
            self):
        suggestion = self._build_suggestion()
        expected_language_code = (
            self.suggestion_dict['change']['language_code']
        )
        suggestion.validate()

        suggestion.language_code = 'wrong_language_code'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected language_code to be %s, '
            'received wrong_language_code' % expected_language_code
        ):
            suggestion.validate()

    def test_validate_language_code_fails_when_language_code_is_set_to_none(
            self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.language_code = None
        with self.assertRaisesRegex(
            utils.ValidationError, 'language_code cannot be None'
        ):
            suggestion.validate()

    def test_validate_change_with_invalid_language_code_fails_validation(self):
        suggestion = self._build_suggestion()
        suggestion.validate()

        suggestion.change.language_code = 'invalid_code'
        with self.assertRaisesRegex(
            utils.ValidationError, 'Invalid language_code: invalid_code'
        ):
            suggestion.validate()

    def test_pre_accept_validate_state_name(self):
        self.save_new_default_exploration('exp1', self.author_id)
        suggestion = self._build_suggestion()
        # Add a state whose content matches the fixture's content_html so
        # the suggestion can legitimately target it.
        exp_services.update_exploration(
            self.author_id, 'exp1', [
                exp_domain.ExplorationChange({
                    'cmd': exp_domain.CMD_ADD_STATE,
                    'state_name': 'State A',
                }),
                exp_domain.ExplorationChange({
                    'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
                    'property_name': exp_domain.STATE_PROPERTY_CONTENT,
                    'new_value': {
                        'content_id': 'content',
                        'html': '<p>This is a content.</p>'
                    },
                    'state_name': 'State A',
                })
            ], 'Added state')
        suggestion.change.state_name = 'State A'

        # Valid state name: no exception.
        suggestion.pre_accept_validate()

        suggestion.change.state_name = 'invalid_state_name'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected invalid_state_name to be a valid state name'
        ):
            suggestion.pre_accept_validate()

    def test_accept_suggestion_adds_translation_in_exploration(self):
        self.save_new_default_exploration('exp1', self.author_id)
        exploration = exp_fetchers.get_exploration_by_id('exp1')
        self.assertEqual(exploration.get_translation_counts(), {})

        suggestion = self._build_suggestion()
        suggestion.accept(
            'Accepted suggestion by translator: Add translation change.')

        exploration = exp_fetchers.get_exploration_by_id('exp1')
        self.assertEqual(exploration.get_translation_counts(), {
            'hi': 1
        })

    def test_accept_suggestion_with_set_of_string_adds_translation(self):
        self.save_new_default_exploration('exp1', self.author_id)
        exploration = exp_fetchers.get_exploration_by_id('exp1')
        self.assertEqual(exploration.get_translation_counts(), {})

        # Same flow as above but with a list-valued (set_of_normalized_string)
        # translation payload.
        suggestion = self._build_suggestion(change={
            'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
            'state_name': 'Introduction',
            'content_id': 'content',
            'language_code': 'hi',
            'content_html': ['text1', 'text2'],
            'translation_html': ['translated text1', 'translated text2'],
            'data_format': 'set_of_normalized_string'
        })
        suggestion.accept(
            'Accepted suggestion by translator: Add translation change.')

        exploration = exp_fetchers.get_exploration_by_id('exp1')
        self.assertEqual(exploration.get_translation_counts(), {
            'hi': 1
        })

    def test_accept_suggestion_with_psedonymous_author_adds_translation(self):
        self.save_new_default_exploration('exp1', self.author_id)
        exploration = exp_fetchers.get_exploration_by_id('exp1')
        self.assertEqual(exploration.get_translation_counts(), {})

        suggestion = self._build_suggestion(author_id=self.PSEUDONYMOUS_ID)
        suggestion.accept(
            'Accepted suggestion by translator: Add translation change.')

        exploration = exp_fetchers.get_exploration_by_id('exp1')
        self.assertEqual(exploration.get_translation_counts(), {
            'hi': 1
        })

    def test_get_all_html_content_strings(self):
        suggestion = self._build_suggestion()

        # Translation first, then the source content.
        actual_outcome_list = suggestion.get_all_html_content_strings()
        expected_outcome_list = [
            u'<p>This is translated html.</p>', u'<p>This is a content.</p>']
        self.assertEqual(expected_outcome_list, actual_outcome_list)

    def test_get_all_html_content_strings_for_content_lists(self):
        suggestion = self._build_suggestion(change={
            'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
            'state_name': 'Introduction',
            'content_id': 'content',
            'language_code': 'hi',
            'content_html': ['text1', 'text2'],
            'translation_html': ['translated text1', 'translated text2'],
            'data_format': 'set_of_normalized_string'
        })

        # List payloads are flattened: translations first, then contents.
        actual_outcome_list = suggestion.get_all_html_content_strings()
        expected_outcome_list = [
            'translated text1', 'translated text2', 'text1', 'text2']
        self.assertEqual(expected_outcome_list, actual_outcome_list)

    def test_get_target_entity_html_strings_returns_expected_strings(self):
        suggestion = self._build_suggestion()

        actual_outcome_list = suggestion.get_target_entity_html_strings()
        expected_outcome_list = [self.suggestion_dict['change']['content_html']]
        self.assertEqual(expected_outcome_list, actual_outcome_list)

    def test_convert_html_in_suggestion_change(self):
        # Legacy math RTE markup and its expected post-conversion form.
        html_content = (
            '<p>Value</p><oppia-noninteractive-math raw_latex-with-value="&a'
            'mp;quot;+,-,-,+&quot;"></oppia-noninteractive-math>')
        expected_html_content = (
            '<p>Value</p><oppia-noninteractive-math math_content-with-value='
            '"{&quot;raw_latex&quot;: &quot;+,-,-,+&quot;, &'
            'amp;quot;svg_filename&quot;: &quot;&quot;}"></oppia'
            '-noninteractive-math>')
        suggestion = self._build_suggestion(change={
            'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
            'state_name': 'Introduction',
            'content_id': 'content',
            'language_code': 'hi',
            'content_html': html_content,
            'translation_html': '<p>This is translated html.</p>',
            'data_format': 'html'
        })

        suggestion.convert_html_in_suggestion_change(
            html_validation_service.add_math_content_to_math_rte_components)

        self.assertEqual(
            suggestion.change.content_html, expected_html_content)
class SuggestionAddQuestionTest(test_utils.GenericTestBase):
"""Tests for the SuggestionAddQuestion class."""
AUTHOR_EMAIL = '[email protected]'
REVIEWER_EMAIL = '[email protected]'
ASSIGNED_REVIEWER_EMAIL = '[email protected]'
fake_date = datetime.datetime(2016, 4, 10, 0, 0, 0, 0)
    def setUp(self):
        """Signs up an author and a reviewer and builds the canonical
        add-question suggestion fixture used by the tests in this class.
        """
        super(SuggestionAddQuestionTest, self).setUp()
        self.signup(self.AUTHOR_EMAIL, 'author')
        self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
        self.signup(self.REVIEWER_EMAIL, 'reviewer')
        self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
        # Fixture matching SuggestionAddQuestion.to_dict() output; tests
        # unpack it positionally into the constructor.
        self.suggestion_dict = {
            'suggestion_id': 'skill1.thread1',
            'suggestion_type': feconf.SUGGESTION_TYPE_ADD_QUESTION,
            'target_type': feconf.ENTITY_TYPE_SKILL,
            'target_id': 'skill1',
            'target_version_at_submission': 1,
            'status': suggestion_models.STATUS_ACCEPTED,
            'author_name': 'author',
            'final_reviewer_id': self.reviewer_id,
            'change': {
                'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
                'question_dict': {
                    # Minimal valid question state, provided by the test base.
                    'question_state_data': self._create_valid_question_data(
                        'default_state').to_dict(),
                    'language_code': 'en',
                    'question_state_data_schema_version': (
                        feconf.CURRENT_STATE_SCHEMA_VERSION),
                    'linked_skill_ids': ['skill_1'],
                    'inapplicable_skill_misconception_ids': ['skillid12345-1']
                },
                'skill_id': 'skill_1',
                'skill_difficulty': 0.3,
            },
            'score_category': 'question.topic_1',
            'language_code': 'en',
            'last_updated': utils.get_time_in_millisecs(self.fake_date),
            'edited_by_reviewer': False
        }
def test_create_suggestion_add_question(self):
expected_suggestion_dict = self.suggestion_dict
observed_suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertDictEqual(
observed_suggestion.to_dict(), expected_suggestion_dict)
def test_validate_suggestion_edit_state_content(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
def test_get_score_part_helper_methods(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertEqual(suggestion.get_score_type(), 'question')
self.assertEqual(suggestion.get_score_sub_type(), 'topic_1')
def test_validate_score_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 'content.score_sub_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the first part of score_category to be "question"'
):
suggestion.validate()
def test_validate_change_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change = 'invalid_change'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected change to be an instance of QuestionSuggestionChange'
):
suggestion.validate()
def test_validate_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.cmd = None
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to contain cmd'
):
suggestion.validate()
def test_validate_change_cmd_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.cmd = 'invalid_cmd'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected cmd to be create_new_fully_specified_question'
):
suggestion.validate()
def test_validate_change_question_dict(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.question_dict = None
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to contain question_dict'
):
suggestion.validate()
def test_validate_change_question_state_data_schema_version(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
# We are not setting value in suggestion.change.question_dict
# directly since pylint produces unsupported-assignment-operation
# error. The detailed analysis for the same can be checked
# in this issue: https://github.com/oppia/oppia/issues/7008.
question_dict = suggestion.change.question_dict
question_dict['question_state_data_schema_version'] = 0
suggestion.change.question_dict = question_dict
with self.assertRaisesRegex(
utils.ValidationError,
'Expected question state schema version to be %s, '
'received 0' % feconf.CURRENT_STATE_SCHEMA_VERSION
):
suggestion.validate()
def test_validate_change_skill_difficulty_none(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.skill_difficulty = None
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to contain skill_difficulty'
):
suggestion.validate()
def test_validate_change_skill_difficulty_invalid_value(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.skill_difficulty = 0.4
with self.assertRaisesRegex(
utils.ValidationError,
'Expected change skill_difficulty to be one of '
):
suggestion.validate()
def test_pre_accept_validate_change_skill_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
skill_id = skill_services.get_new_skill_id()
self.save_new_skill(skill_id, self.author_id, description='description')
suggestion.change.skill_id = skill_id
suggestion.pre_accept_validate()
suggestion.change.skill_id = None
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to contain skill_id'
):
suggestion.pre_accept_validate()
def test_pre_accept_validate_change_invalid_skill_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
skill_id = skill_services.get_new_skill_id()
self.save_new_skill(skill_id, self.author_id, description='description')
suggestion.change.skill_id = skill_id
suggestion.pre_accept_validate()
suggestion.change.skill_id = skill_services.get_new_skill_id()
with self.assertRaisesRegex(
utils.ValidationError, 'The skill with the given id doesn\'t exist.'
):
suggestion.pre_accept_validate()
def test_get_change_list_for_accepting_suggestion(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertIsNone(suggestion.get_change_list_for_accepting_suggestion())
def test_populate_old_value_of_change(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertIsNone(suggestion.populate_old_value_of_change())
def test_cannot_accept_suggestion_with_invalid_skill_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.change.skill_id = skill_services.get_new_skill_id()
with self.assertRaisesRegex(
utils.ValidationError,
'The skill with the given id doesn\'t exist.'
):
suggestion.accept('commit message')
def test_pre_update_validate_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': question_domain.CMD_UPDATE_QUESTION_PROPERTY,
'property_name': question_domain.QUESTION_PROPERTY_LANGUAGE_CODE,
'new_value': 'bn',
'old_value': 'en'
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change cmd must be equal to '
'create_new_fully_specified_question'
):
suggestion.pre_update_validate(
question_domain.QuestionChange(change))
def test_pre_update_validate_change_skill_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION)
},
'skill_id': 'skill_2'
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change skill_id must be equal to skill_1'
):
suggestion.pre_update_validate(
question_domain.QuestionChange(change))
    def test_pre_update_validate_complains_if_nothing_changed(self):
        """Checks that pre_update_validate raises when neither the
        skill_difficulty nor the question_dict differ from the current change.
        """
        change = {
            'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
            'question_dict': {
                'question_state_data': self._create_valid_question_data(
                    'default_state').to_dict(),
                'language_code': 'en',
                'question_state_data_schema_version': (
                    feconf.CURRENT_STATE_SCHEMA_VERSION)
            },
            'skill_id': 'skill_1',
            'skill_difficulty': 0.3
        }
        # NOTE(review): this constructor call passes 10 positional arguments,
        # while most other tests in this file pass 11 (an explicit False
        # before self.fake_date) — confirm against the constructor signature.
        suggestion = suggestion_registry.SuggestionAddQuestion(
            'exploration.exp1.thread1', 'exp1', 1,
            suggestion_models.STATUS_ACCEPTED, self.author_id,
            self.reviewer_id, change,
            'question.topic_1', 'en', self.fake_date)

        # new_change is an exact copy of change, so nothing has changed and
        # validation must complain.
        new_change = {
            'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
            'question_dict': {
                'question_state_data': self._create_valid_question_data(
                    'default_state').to_dict(),
                'language_code': 'en',
                'question_state_data_schema_version': (
                    feconf.CURRENT_STATE_SCHEMA_VERSION)
            },
            'skill_id': 'skill_1',
            'skill_difficulty': 0.3
        }
        with self.assertRaisesRegex(
            utils.ValidationError,
            'At least one of the new skill_difficulty or question_dict '
            'should be changed.'):
            suggestion.pre_update_validate(
                question_domain.QuestionSuggestionChange(new_change))
    def test_pre_update_validate_accepts_a_change_in_skill_difficulty_only(
            self):
        """Checks that pre_update_validate accepts an update where only the
        skill_difficulty differs from the current change.
        """
        change = {
            'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
            'question_dict': {
                'question_state_data': self._create_valid_question_data(
                    'default_state').to_dict(),
                'language_code': 'en',
                'question_state_data_schema_version': (
                    feconf.CURRENT_STATE_SCHEMA_VERSION)
            },
            'skill_id': 'skill_1',
            'skill_difficulty': 0.3
        }
        # NOTE(review): this constructor call passes 10 positional arguments,
        # while most other tests in this file pass 11 (an explicit False
        # before self.fake_date) — confirm against the constructor signature.
        suggestion = suggestion_registry.SuggestionAddQuestion(
            'exploration.exp1.thread1', 'exp1', 1,
            suggestion_models.STATUS_ACCEPTED, self.author_id,
            self.reviewer_id, change,
            'question.topic_1', 'en', self.fake_date)

        # Identical to change except skill_difficulty (0.3 -> 0.6).
        new_change = {
            'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
            'question_dict': {
                'question_state_data': self._create_valid_question_data(
                    'default_state').to_dict(),
                'language_code': 'en',
                'question_state_data_schema_version': (
                    feconf.CURRENT_STATE_SCHEMA_VERSION)
            },
            'skill_id': 'skill_1',
            'skill_difficulty': 0.6
        }
        # pre_update_validate returns None when the update is acceptable.
        self.assertEqual(
            suggestion.pre_update_validate(
                question_domain.QuestionSuggestionChange(new_change)), None)
    def test_pre_update_validate_accepts_a_change_in_state_data_only(self):
        """Checks that pre_update_validate accepts an update where only the
        question_dict contents (here the language_code) differ.
        """
        change = {
            'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
            'question_dict': {
                'question_state_data': self._create_valid_question_data(
                    'default_state').to_dict(),
                'language_code': 'en',
                'question_state_data_schema_version': (
                    feconf.CURRENT_STATE_SCHEMA_VERSION)
            },
            'skill_id': 'skill_1',
            'skill_difficulty': 0.3
        }
        # NOTE(review): this constructor call passes 10 positional arguments,
        # while most other tests in this file pass 11 (an explicit False
        # before self.fake_date) — confirm against the constructor signature.
        suggestion = suggestion_registry.SuggestionAddQuestion(
            'exploration.exp1.thread1', 'exp1', 1,
            suggestion_models.STATUS_ACCEPTED, self.author_id,
            self.reviewer_id, change,
            'question.topic_1', 'en', self.fake_date)

        # Identical to change except the question's language_code
        # ('en' -> 'hi'); the skill_difficulty is unchanged.
        new_change = {
            'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
            'question_dict': {
                'question_state_data': self._create_valid_question_data(
                    'default_state').to_dict(),
                'language_code': 'hi',
                'question_state_data_schema_version': (
                    feconf.CURRENT_STATE_SCHEMA_VERSION)
            },
            'skill_id': 'skill_1',
            'skill_difficulty': 0.3
        }
        # pre_update_validate returns None when the update is acceptable.
        self.assertEqual(
            suggestion.pre_update_validate(
                question_domain.QuestionSuggestionChange(new_change)), None)
def test_validate_author_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.author_id = 0
with self.assertRaisesRegex(
utils.ValidationError, 'Expected author_id to be a string'):
suggestion.validate()
def test_validate_author_id_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.author_id = ''
with self.assertRaisesRegex(
utils.ValidationError,
'Expected author_id to be in a valid user ID format.'):
suggestion.validate()
def test_validate_final_reviewer_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.final_reviewer_id = 1
with self.assertRaisesRegex(
utils.ValidationError, 'Expected final_reviewer_id to be a string'):
suggestion.validate()
def test_validate_final_reviewer_id_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.final_reviewer_id = ''
with self.assertRaisesRegex(
utils.ValidationError,
'Expected final_reviewer_id to be in a valid user ID format'):
suggestion.validate()
def test_validate_language_code_fails_when_language_codes_do_not_match(
self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
expected_question_dict = (
expected_suggestion_dict['change']['question_dict']
)
suggestion.validate()
expected_question_dict['language_code'] = 'wrong_language_code'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected question language_code.wrong_language_code. to be same '
'as suggestion language_code.en.'
):
suggestion.validate()
def test_validate_language_code_fails_when_language_code_is_set_to_none(
self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.language_code = None
with self.assertRaisesRegex(
utils.ValidationError,
'Expected language_code to be en, received None'):
suggestion.validate()
def test_get_all_html_conztent_strings(self):
suggestion = suggestion_registry.SuggestionAddQuestion(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, self.suggestion_dict['change'],
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], self.fake_date)
actual_outcome_list = suggestion.get_all_html_content_strings()
expected_outcome_list = [
u'', u'<p>This is a hint.</p>', u'<p>This is a solution.</p>', u'']
self.assertEqual(expected_outcome_list, actual_outcome_list)
    def test_convert_html_in_suggestion_change(self):
        """Checks that convert_html_in_suggestion_change applies the given
        conversion function (here the math RTE migration) to the HTML inside
        the suggestion's question state data.
        """
        # Legacy math tag using raw_latex-with-value ...
        html_content = (
            '<p>Value</p><oppia-noninteractive-math raw_latex-with-value="&a'
            'mp;quot;+,-,-,+&quot;"></oppia-noninteractive-math>')
        # ... which the conversion rewrites to math_content-with-value with an
        # empty svg_filename.
        expected_html_content = (
            '<p>Value</p><oppia-noninteractive-math math_content-with-value='
            '"{&quot;raw_latex&quot;: &quot;+,-,-,+&quot;, &'
            'amp;quot;svg_filename&quot;: &quot;&quot;}"></oppia'
            '-noninteractive-math>')
        answer_group = {
            'outcome': {
                'dest': None,
                'feedback': {
                    'content_id': 'feedback_1',
                    'html': ''
                },
                'labelled_as_correct': True,
                'param_changes': [],
                'refresher_exploration_id': None,
                'missing_prerequisite_skill_id': None
            },
            'rule_specs': [{
                'inputs': {
                    'x': 0
                },
                'rule_type': 'Equals'
            }],
            'training_data': [],
            'tagged_skill_misconception_id': None
        }
        # Minimal MultipleChoiceInput state whose content holds the legacy
        # math HTML under test.
        question_state_dict = {
            'content': {
                'content_id': 'content_1',
                'html': html_content
            },
            'recorded_voiceovers': {
                'voiceovers_mapping': {
                    'content_1': {},
                    'feedback_1': {},
                    'feedback_2': {},
                    'hint_1': {},
                    'solution': {}
                }
            },
            'written_translations': {
                'translations_mapping': {
                    'content_1': {},
                    'feedback_1': {},
                    'feedback_2': {},
                    'hint_1': {},
                    'solution': {}
                }
            },
            'interaction': {
                'answer_groups': [answer_group],
                'confirmed_unclassified_answers': [],
                'customization_args': {
                    'choices': {
                        'value': [{
                            'html': 'option 1',
                            'content_id': 'ca_choices_0'
                        }]
                    },
                    'showChoicesInShuffledOrder': {
                        'value': True
                    }
                },
                'default_outcome': {
                    'dest': None,
                    'feedback': {
                        'content_id': 'feedback_2',
                        'html': 'Correct Answer'
                    },
                    'param_changes': [],
                    'refresher_exploration_id': None,
                    'labelled_as_correct': True,
                    'missing_prerequisite_skill_id': None
                },
                'hints': [{
                    'hint_content': {
                        'content_id': 'hint_1',
                        'html': 'Hint 1'
                    }
                }],
                'solution': {
                    'answer_is_exclusive': False,
                    'correct_answer': 0,
                    'explanation': {
                        'content_id': 'solution',
                        'html': '<p>This is a solution.</p>'
                    }
                },
                'id': 'MultipleChoiceInput'
            },
            'param_changes': [],
            'solicit_answer_details': False,
            'classifier_model_id': None
        }
        suggestion_dict = {
            'suggestion_id': 'skill1.thread1',
            'suggestion_type': feconf.SUGGESTION_TYPE_ADD_QUESTION,
            'target_type': feconf.ENTITY_TYPE_SKILL,
            'target_id': 'skill1',
            'target_version_at_submission': 1,
            'status': suggestion_models.STATUS_ACCEPTED,
            'author_name': 'author',
            'final_reviewer_id': self.reviewer_id,
            'change': {
                'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
                'question_dict': {
                    'question_state_data': question_state_dict,
                    'language_code': 'en',
                    'question_state_data_schema_version': (
                        feconf.CURRENT_STATE_SCHEMA_VERSION),
                    'linked_skill_ids': ['skill_1'],
                    'inapplicable_skill_misconception_ids': ['skillid12345-1']
                },
                'skill_id': 'skill_1',
                'skill_difficulty': 0.3,
            },
            'score_category': 'question.skill1',
            'language_code': 'en',
            'last_updated': utils.get_time_in_millisecs(self.fake_date)
        }
        suggestion = suggestion_registry.SuggestionAddQuestion(
            suggestion_dict['suggestion_id'], suggestion_dict['target_id'],
            suggestion_dict['target_version_at_submission'],
            suggestion_dict['status'], self.author_id, self.reviewer_id,
            suggestion_dict['change'], suggestion_dict['score_category'],
            suggestion_dict['language_code'], False, self.fake_date)

        suggestion.convert_html_in_suggestion_change(
            html_validation_service.add_math_content_to_math_rte_components)
        # The content HTML inside the change must now use the new math tag.
        self.assertEqual(
            suggestion.change.question_dict['question_state_data']['content'][
                'html'], expected_html_content)
    def test_accept_suggestion_with_images(self):
        """Checks that a question suggestion whose HTML references an SVG math
        image can be accepted after the image is saved in the question
        suggestions image context.
        """
        html_content = (
            '<p>Value</p><oppia-noninteractive-math math_content-with-value='
            '"{&quot;raw_latex&quot;: &quot;+,-,-,+&quot;, &'
            'amp;quot;svg_filename&quot;: &quot;img.svg&quot;}">'
            '</oppia-noninteractive-math>')
        question_state_dict = self._create_valid_question_data(
            'default_state').to_dict()
        question_state_dict['content']['html'] = html_content
        # Store the referenced SVG so accept() can copy it to the question.
        with utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
            'rb', encoding=None) as f:
            raw_image = f.read()
        image_context = feconf.IMAGE_CONTEXT_QUESTION_SUGGESTIONS
        fs_services.save_original_and_compressed_versions_of_image(
            'img.svg', image_context, 'skill1',
            raw_image, 'image', False)
        self.save_new_skill('skill1', self.author_id, description='description')
        suggestion_dict = {
            'suggestion_id': 'skill1.thread1',
            'suggestion_type': feconf.SUGGESTION_TYPE_ADD_QUESTION,
            'target_type': feconf.ENTITY_TYPE_SKILL,
            'target_id': 'skill1',
            'target_version_at_submission': 1,
            'status': suggestion_models.STATUS_ACCEPTED,
            'author_name': 'author',
            'final_reviewer_id': self.reviewer_id,
            'change': {
                'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
                'question_dict': {
                    'question_state_data': question_state_dict,
                    'language_code': 'en',
                    'question_state_data_schema_version': (
                        feconf.CURRENT_STATE_SCHEMA_VERSION),
                    'linked_skill_ids': ['skill_1'],
                    'inapplicable_skill_misconception_ids': []
                },
                'skill_id': 'skill1',
                'skill_difficulty': 0.3,
            },
            'score_category': 'question.skill1',
            'language_code': 'en',
            'last_updated': utils.get_time_in_millisecs(self.fake_date)
        }
        suggestion = suggestion_registry.SuggestionAddQuestion(
            suggestion_dict['suggestion_id'], suggestion_dict['target_id'],
            suggestion_dict['target_version_at_submission'],
            suggestion_dict['status'], self.author_id, self.reviewer_id,
            suggestion_dict['change'], suggestion_dict['score_category'],
            suggestion_dict['language_code'], False, self.fake_date)

        # Accepting must succeed without raising.
        suggestion.accept('commit_message')
    def test_accept_suggestion_with_image_region_interactions(self):
        """Checks that accepting a question suggestion that uses an
        ImageClickInput interaction copies the interaction's image to the new
        question's file system and marks the suggestion accepted.
        """
        # Store the image the ImageClickInput interaction refers to.
        with utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
            encoding=None) as f:
            original_image_content = f.read()
        fs_services.save_original_and_compressed_versions_of_image(
            'image.png', 'question_suggestions', 'skill1',
            original_image_content, 'image', True)
        # Full question state using an ImageClickInput with one rectangular
        # labeled region ('Region1').
        question_state_dict = {
            'content': {
                'html': '<p>Text</p>',
                'content_id': 'content'
            },
            'classifier_model_id': None,
            'linked_skill_id': None,
            'interaction': {
                'answer_groups': [
                    {
                        'rule_specs': [
                            {
                                'rule_type': 'IsInRegion',
                                'inputs': {'x': 'Region1'}
                            }
                        ],
                        'outcome': {
                            'dest': None,
                            'feedback': {
                                'html': '<p>assas</p>',
                                'content_id': 'feedback_0'
                            },
                            'labelled_as_correct': True,
                            'param_changes': [],
                            'refresher_exploration_id': None,
                            'missing_prerequisite_skill_id': None
                        },
                        'training_data': [],
                        'tagged_skill_misconception_id': None
                    }
                ],
                'confirmed_unclassified_answers': [],
                'customization_args': {
                    'imageAndRegions': {
                        'value': {
                            'imagePath': 'image.png',
                            'labeledRegions': [
                                {
                                    'label': 'Region1',
                                    'region': {
                                        'regionType': 'Rectangle',
                                        'area': [
                                            [
                                                0.2644628099173554,
                                                0.21807065217391305
                                            ],
                                            [
                                                0.9201101928374655,
                                                0.8847373188405797
                                            ]
                                        ]
                                    }
                                }
                            ]
                        }
                    },
                    'highlightRegionsOnHover': {
                        'value': False
                    }
                },
                'default_outcome': {
                    'dest': None,
                    'feedback': {
                        'html': '<p>wer</p>',
                        'content_id': 'default_outcome'
                    },
                    'labelled_as_correct': False,
                    'param_changes': [],
                    'refresher_exploration_id': None,
                    'missing_prerequisite_skill_id': None
                },
                'hints': [
                    {
                        'hint_content': {
                            'html': '<p>assaas</p>',
                            'content_id': 'hint_1'
                        }
                    }
                ],
                'id': 'ImageClickInput', 'solution': None
            },
            'param_changes': [],
            'recorded_voiceovers': {
                'voiceovers_mapping': {
                    'content': {},
                    'default_outcome': {},
                    'feedback_0': {},
                    'hint_1': {}
                }
            },
            'solicit_answer_details': False,
            'card_is_checkpoint': False,
            'written_translations': {
                'translations_mapping': {
                    'content': {},
                    'default_outcome': {},
                    'feedback_0': {},
                    'hint_1': {}
                }
            },
            'next_content_id_index': 2
        }
        suggestion_dict = {
            'suggestion_id': 'skill1.thread1',
            'suggestion_type': feconf.SUGGESTION_TYPE_ADD_QUESTION,
            'target_type': feconf.ENTITY_TYPE_SKILL,
            'target_id': 'skill1',
            'target_version_at_submission': 1,
            'status': suggestion_models.STATUS_ACCEPTED,
            'author_name': 'author',
            'final_reviewer_id': self.reviewer_id,
            'change': {
                'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
                'question_dict': {
                    'question_state_data': question_state_dict,
                    'language_code': 'en',
                    'question_state_data_schema_version': (
                        feconf.CURRENT_STATE_SCHEMA_VERSION),
                    'linked_skill_ids': ['skill1'],
                    'inapplicable_skill_misconception_ids': []
                },
                'skill_id': 'skill1',
                'skill_difficulty': 0.3,
            },
            'score_category': 'question.skill1',
            'language_code': 'en',
            'last_updated': utils.get_time_in_millisecs(self.fake_date)
        }
        self.save_new_skill(
            'skill1', self.author_id, description='description')
        suggestion = suggestion_registry.SuggestionAddQuestion(
            suggestion_dict['suggestion_id'], suggestion_dict['target_id'],
            suggestion_dict['target_version_at_submission'],
            suggestion_dict['status'], self.author_id, self.reviewer_id,
            suggestion_dict['change'], suggestion_dict['score_category'],
            suggestion_dict['language_code'], False, self.fake_date)

        suggestion.accept('commit_message')

        # The created question must now own a copy of the interaction image.
        question = question_services.get_questions_by_skill_ids(
            1, ['skill1'], False)[0]
        destination_fs = fs_domain.AbstractFileSystem(
            fs_domain.GcsFileSystem(
                feconf.ENTITY_TYPE_QUESTION, question.id))
        self.assertTrue(destination_fs.isfile('image/%s' % 'image.png'))
        self.assertEqual(
            suggestion.status,
            suggestion_models.STATUS_ACCEPTED)
def test_contructor_updates_state_shema_in_change_cmd(self):
score_category = (
suggestion_models.SCORE_TYPE_QUESTION +
suggestion_models.SCORE_CATEGORY_DELIMITER + 'skill_id')
change = {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': {
'question_state_data': self.VERSION_27_STATE_DICT,
'question_state_data_schema_version': 27,
'language_code': 'en',
'linked_skill_ids': ['skill_id'],
'inapplicable_skill_misconception_ids': []
},
'skill_id': 'skill_id',
'skill_difficulty': 0.3
}
self.assertEqual(
change['question_dict']['question_state_data_schema_version'], 27)
suggestion = suggestion_registry.SuggestionAddQuestion(
'suggestionId', 'target_id', 1, suggestion_models.STATUS_IN_REVIEW,
self.author_id, None, change, score_category, 'en', False,
self.fake_date)
self.assertEqual(
suggestion.change.question_dict[
'question_state_data_schema_version'],
feconf.CURRENT_STATE_SCHEMA_VERSION)
def test_contructor_raise_exception_for_invalid_state_shema_version(self):
score_category = (
suggestion_models.SCORE_TYPE_QUESTION +
suggestion_models.SCORE_CATEGORY_DELIMITER + 'skill_id')
change = {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': {
'question_state_data': self.VERSION_27_STATE_DICT,
'question_state_data_schema_version': 23,
'language_code': 'en',
'linked_skill_ids': ['skill_id'],
'inapplicable_skill_misconception_ids': []
},
'skill_id': 'skill_id',
'skill_difficulty': 0.3
}
self.assertEqual(
change['question_dict']['question_state_data_schema_version'], 23)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected state schema version to be in between 25'
):
suggestion_registry.SuggestionAddQuestion(
'suggestionId', 'target_id', 1,
suggestion_models.STATUS_IN_REVIEW, self.author_id, None,
change, score_category, 'en', False, self.fake_date)
class MockInvalidVoiceoverApplication(
        suggestion_registry.BaseVoiceoverApplication):
    """Mock subclass that deliberately skips BaseVoiceoverApplication's
    __init__ so tests can exercise the base class's unimplemented methods
    on an instance.
    """

    def __init__(self):  # pylint: disable=super-init-not-called
        pass
class BaseVoiceoverApplicationUnitTests(test_utils.GenericTestBase):
    """Tests for the BaseVoiceoverApplication class."""

    def setUp(self):
        """Creates a mock application instance for each test."""
        super(BaseVoiceoverApplicationUnitTests, self).setUp()
        self.base_voiceover_application = MockInvalidVoiceoverApplication()

    def test_base_class_init_raises_error(self):
        """Checks that instantiating the abstract base class raises."""
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseVoiceoverApplication should implement '
            '__init__.'):
            suggestion_registry.BaseVoiceoverApplication()

    def test_base_class_accept_raises_error(self):
        """Checks that the base accept() is abstract and raises."""
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseVoiceoverApplication should implement accept.'):
            self.base_voiceover_application.accept()

    def test_base_class_reject_raises_error(self):
        """Checks that the base reject() is abstract and raises."""
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseVoiceoverApplication should implement reject.'):
            self.base_voiceover_application.reject()
class ExplorationVoiceoverApplicationUnitTest(test_utils.GenericTestBase):
"""Tests for the ExplorationVoiceoverApplication class."""
    def setUp(self):
        """Registers an author and a reviewer and builds an in-review
        ExplorationVoiceoverApplication fixture used by every test below.
        """
        super(ExplorationVoiceoverApplicationUnitTest, self).setUp()
        self.signup('[email protected]', 'author')
        self.author_id = self.get_user_id_from_email('[email protected]')
        self.signup('[email protected]', 'reviewer')
        self.reviewer_id = self.get_user_id_from_email('[email protected]')

        # In-review application: no final reviewer and no rejection message.
        self.voiceover_application = (
            suggestion_registry.ExplorationVoiceoverApplication(
                'application_id', 'exp_id', suggestion_models.STATUS_IN_REVIEW,
                self.author_id, None, 'en', 'audio_file.mp3', '<p>Content</p>',
                None))
def test_validation_with_invalid_target_type_raise_exception(self):
self.voiceover_application.validate()
self.voiceover_application.target_type = 'invalid_target'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected target_type to be among allowed choices, '
'received invalid_target'
):
self.voiceover_application.validate()
def test_validation_with_invalid_target_id_raise_exception(self):
self.voiceover_application.validate()
self.voiceover_application.target_id = 123
with self.assertRaisesRegex(
utils.ValidationError, 'Expected target_id to be a string'
):
self.voiceover_application.validate()
def test_validation_with_invalid_status_raise_exception(self):
self.voiceover_application.validate()
self.voiceover_application.status = 'invalid_status'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected status to be among allowed choices, '
'received invalid_status'
):
self.voiceover_application.validate()
def test_validation_with_invalid_author_id_raise_exception(self):
self.voiceover_application.validate()
self.voiceover_application.author_id = 123
with self.assertRaisesRegex(
utils.ValidationError, 'Expected author_id to be a string'
):
self.voiceover_application.validate()
def test_validation_with_invalid_final_reviewer_id_raise_exception(self):
self.assertEqual(
self.voiceover_application.status,
suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(self.voiceover_application.final_reviewer_id, None)
self.voiceover_application.validate()
self.voiceover_application.final_reviewer_id = 123
with self.assertRaisesRegex(
utils.ValidationError,
'Expected final_reviewer_id to be None as the '
'voiceover application is not yet handled.'
):
self.voiceover_application.validate()
def test_validation_for_handled_application_with_invalid_final_review(self):
self.assertEqual(
self.voiceover_application.status,
suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(self.voiceover_application.final_reviewer_id, None)
self.voiceover_application.validate()
self.voiceover_application.status = suggestion_models.STATUS_ACCEPTED
with self.assertRaisesRegex(
utils.ValidationError, 'Expected final_reviewer_id to be a string'
):
self.voiceover_application.validate()
def test_validation_for_rejected_application_with_no_message(self):
self.assertEqual(
self.voiceover_application.status,
suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(self.voiceover_application.rejection_message, None)
self.voiceover_application.validate()
self.voiceover_application.final_reviewer_id = 'reviewer_id'
self.voiceover_application.status = suggestion_models.STATUS_REJECTED
with self.assertRaisesRegex(
utils.ValidationError,
'Expected rejection_message to be a string for a '
'rejected application'
):
self.voiceover_application.validate()
    def test_validation_for_accepted_application_with_message(self):
        """An accepted application must not carry a rejection_message."""
        self.assertEqual(
            self.voiceover_application.status,
            suggestion_models.STATUS_IN_REVIEW)
        self.assertEqual(self.voiceover_application.rejection_message, None)
        self.voiceover_application.validate()

        self.voiceover_application.final_reviewer_id = 'reviewer_id'
        self.voiceover_application.status = suggestion_models.STATUS_ACCEPTED
        self.voiceover_application.rejection_message = 'Invalid message'
        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected rejection_message to be None for the accepted '
            'voiceover application, received Invalid message'
        ):
            self.voiceover_application.validate()
    def test_validation_with_invalid_language_code_type_raise_exception(self):
        """Validation must fail when language_code is not a string."""
        self.assertEqual(self.voiceover_application.language_code, 'en')
        self.voiceover_application.validate()

        self.voiceover_application.language_code = 1
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected language_code to be a string'
        ):
            self.voiceover_application.validate()
    def test_validation_with_invalid_language_code_raise_exception(self):
        """Validation must fail for an unrecognised language code string."""
        self.assertEqual(self.voiceover_application.language_code, 'en')
        self.voiceover_application.validate()

        self.voiceover_application.language_code = 'invalid language'
        with self.assertRaisesRegex(
            utils.ValidationError, 'Invalid language_code: invalid language'
        ):
            self.voiceover_application.validate()
    def test_validation_with_invalid_filename_type_raise_exception(self):
        """Validation must fail when filename is not a string."""
        self.assertEqual(self.voiceover_application.filename, 'audio_file.mp3')
        self.voiceover_application.validate()

        self.voiceover_application.filename = 1
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected filename to be a string'
        ):
            self.voiceover_application.validate()
    def test_validation_with_invalid_content_type_raise_exception(self):
        """Validation must fail when content is not a string."""
        self.assertEqual(self.voiceover_application.content, '<p>Content</p>')
        self.voiceover_application.validate()

        self.voiceover_application.content = 1
        with self.assertRaisesRegex(
            utils.ValidationError, 'Expected content to be a string'
        ):
            self.voiceover_application.validate()
    def test_to_dict_returns_correct_dict(self):
        """to_dict() on an accepted application must expose every field,
        with author/reviewer ids resolved to their display names.
        """
        self.voiceover_application.accept(self.reviewer_id)
        expected_dict = {
            'voiceover_application_id': 'application_id',
            'target_type': 'exploration',
            'target_id': 'exp_id',
            'status': 'accepted',
            'author_name': 'author',
            'final_reviewer_name': 'reviewer',
            'language_code': 'en',
            'content': '<p>Content</p>',
            'filename': 'audio_file.mp3',
            'rejection_message': None
        }

        self.assertEqual(
            self.voiceover_application.to_dict(), expected_dict)
    def test_is_handled_property_returns_correct_value(self):
        """is_handled flips from False to True once the application is
        accepted.
        """
        self.assertFalse(self.voiceover_application.is_handled)

        self.voiceover_application.accept(self.reviewer_id)

        self.assertTrue(self.voiceover_application.is_handled)
    def test_accept_voiceover_application(self):
        """accept() must record the reviewer id and move the status from
        'review' to 'accepted'.
        """
        self.assertEqual(self.voiceover_application.final_reviewer_id, None)
        self.assertEqual(self.voiceover_application.status, 'review')

        self.voiceover_application.accept(self.reviewer_id)

        self.assertEqual(
            self.voiceover_application.final_reviewer_id, self.reviewer_id)
        self.assertEqual(self.voiceover_application.status, 'accepted')
    def test_reject_voiceover_application(self):
        """reject() must record the reviewer id, the rejection message, and
        move the status from 'review' to 'rejected'.
        """
        self.assertEqual(self.voiceover_application.final_reviewer_id, None)
        self.assertEqual(self.voiceover_application.status, 'review')

        self.voiceover_application.reject(self.reviewer_id, 'rejection message')

        self.assertEqual(
            self.voiceover_application.final_reviewer_id, self.reviewer_id)
        self.assertEqual(self.voiceover_application.status, 'rejected')
        self.assertEqual(
            self.voiceover_application.rejection_message, 'rejection message')
class CommunityContributionStatsUnitTests(test_utils.GenericTestBase):
    """Tests for the CommunityContributionStats class."""

    # Fixture values used to construct stats objects in the tests below.
    translation_reviewer_counts_by_lang_code = {
        'hi': 0,
        'en': 1
    }
    translation_suggestion_counts_by_lang_code = {
        'fr': 6,
        'en': 5
    }
    question_reviewer_count = 1
    question_suggestion_count = 4
    # Invalid values fed to validate() in the failure-mode tests below.
    negative_count = -1
    non_integer_count = 'non_integer_count'
    sample_language_code = 'en'
    invalid_language_code = 'invalid'

    def _assert_community_contribution_stats_is_in_default_state(self):
        """Checks if the community contribution stats is in its default
        state.
        """
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        # Default state: empty per-language dicts and zero question counts.
        self.assertEqual(
            (
                community_contribution_stats
                .translation_reviewer_counts_by_lang_code
            ), {})
        self.assertEqual(
            (
                community_contribution_stats
                .translation_suggestion_counts_by_lang_code
            ), {})
        self.assertEqual(
            community_contribution_stats.question_reviewer_count, 0)
        self.assertEqual(
            community_contribution_stats.question_suggestion_count, 0)

    def test_initial_object_with_valid_arguments_has_correct_properties(self):
        """The constructor stores each argument unchanged and the result
        passes validation.
        """
        community_contribution_stats = (
            suggestion_registry.CommunityContributionStats(
                self.translation_reviewer_counts_by_lang_code,
                self.translation_suggestion_counts_by_lang_code,
                self.question_reviewer_count,
                self.question_suggestion_count
            )
        )
        community_contribution_stats.validate()

        self.assertEqual(
            (
                community_contribution_stats
                .translation_reviewer_counts_by_lang_code
            ),
            self.translation_reviewer_counts_by_lang_code)
        self.assertEqual(
            (
                community_contribution_stats
                .translation_suggestion_counts_by_lang_code
            ),
            self.translation_suggestion_counts_by_lang_code
        )
        self.assertEqual(
            community_contribution_stats.question_reviewer_count,
            self.question_reviewer_count
        )
        self.assertEqual(
            community_contribution_stats.question_suggestion_count,
            self.question_suggestion_count
        )

    # --- Per-language count setters. ---

    def test_set_translation_reviewer_count_for_lang_code_updates_empty_dict(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        self._assert_community_contribution_stats_is_in_default_state()

        (
            community_contribution_stats
            .set_translation_reviewer_count_for_language_code(
                self.sample_language_code, 2)
        )

        self.assertDictEqual(
            (
                community_contribution_stats
                .translation_reviewer_counts_by_lang_code
            ),
            {self.sample_language_code: 2}
        )

    def test_set_translation_reviewer_count_for_lang_code_updates_count_value(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        self._assert_community_contribution_stats_is_in_default_state()
        # Seed an existing entry so the setter overwrites rather than adds.
        (
            community_contribution_stats
            .translation_reviewer_counts_by_lang_code
        ) = {self.sample_language_code: 1}

        (
            community_contribution_stats
            .set_translation_reviewer_count_for_language_code(
                self.sample_language_code, 2)
        )

        self.assertDictEqual(
            (
                community_contribution_stats
                .translation_reviewer_counts_by_lang_code
            ),
            {self.sample_language_code: 2}
        )

    def test_set_translation_reviewer_count_for_lang_code_adds_new_lang_key(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        self._assert_community_contribution_stats_is_in_default_state()
        # Seed an entry for a different language; 'hi' should be added to it.
        (
            community_contribution_stats
            .translation_reviewer_counts_by_lang_code
        ) = {'en': 1}

        (
            community_contribution_stats
            .set_translation_reviewer_count_for_language_code('hi', 2)
        )

        self.assertDictEqual(
            (
                community_contribution_stats
                .translation_reviewer_counts_by_lang_code
            ),
            {'en': 1, 'hi': 2}
        )

    def test_set_translation_suggestion_count_for_lang_code_updates_empty_dict(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        self._assert_community_contribution_stats_is_in_default_state()

        (
            community_contribution_stats
            .set_translation_suggestion_count_for_language_code(
                self.sample_language_code, 2)
        )

        self.assertDictEqual(
            (
                community_contribution_stats
                .translation_suggestion_counts_by_lang_code
            ), {self.sample_language_code: 2}
        )

    def test_set_translation_suggestion_count_for_lang_code_updates_count_value(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        self._assert_community_contribution_stats_is_in_default_state()
        # Seed an existing entry so the setter overwrites rather than adds.
        (
            community_contribution_stats
            .translation_suggestion_counts_by_lang_code
        ) = {self.sample_language_code: 1}

        (
            community_contribution_stats
            .set_translation_suggestion_count_for_language_code(
                self.sample_language_code, 2)
        )

        self.assertDictEqual(
            (
                community_contribution_stats
                .translation_suggestion_counts_by_lang_code
            ),
            {self.sample_language_code: 2}
        )

    def test_set_translation_suggestion_count_for_lang_code_adds_new_lang_key(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        self._assert_community_contribution_stats_is_in_default_state()
        # Seed an entry for a different language; 'hi' should be added to it.
        (
            community_contribution_stats
            .translation_suggestion_counts_by_lang_code
        ) = {'en': 1}

        (
            community_contribution_stats
            .set_translation_suggestion_count_for_language_code('hi', 2)
        )

        self.assertDictEqual(
            (
                community_contribution_stats
                .translation_suggestion_counts_by_lang_code
            ),
            {'en': 1, 'hi': 2}
        )

    # --- get_translation_language_codes_that_need_reviewers. ---

    def test_get_translation_language_codes_that_need_reviewers_for_one_lang(
            self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.set_translation_suggestion_count_for_language_code(
            self.sample_language_code, 1)

        language_codes_that_need_reviewers = (
            stats.get_translation_language_codes_that_need_reviewers()
        )

        self.assertEqual(
            language_codes_that_need_reviewers, {self.sample_language_code})

    def test_get_translation_language_codes_that_need_reviewers_for_multi_lang(
            self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.set_translation_suggestion_count_for_language_code('hi', 1)
        stats.set_translation_suggestion_count_for_language_code('fr', 1)

        language_codes_that_need_reviewers = (
            stats.get_translation_language_codes_that_need_reviewers()
        )

        self.assertEqual(
            language_codes_that_need_reviewers, {'hi', 'fr'})

    def test_get_translation_language_codes_that_need_reviewers_for_no_lang(
            self):
        stats = suggestion_services.get_community_contribution_stats()

        language_codes_that_need_reviewers = (
            stats.get_translation_language_codes_that_need_reviewers()
        )

        self.assertEqual(
            language_codes_that_need_reviewers, set())

    # --- are_translation_reviewers_needed_for_lang_code. ---

    def test_translation_reviewers_are_needed_if_suggestions_but_no_reviewers(
            self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.set_translation_suggestion_count_for_language_code(
            self.sample_language_code, 1)

        self.assertTrue(
            stats.are_translation_reviewers_needed_for_lang_code(
                self.sample_language_code))

    def test_translation_reviewers_are_needed_if_num_suggestions_past_max(self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.set_translation_suggestion_count_for_language_code(
            self.sample_language_code, 2)
        stats.set_translation_reviewer_count_for_language_code(
            self.sample_language_code, 1)
        # Set the per-reviewer suggestion cap to 1, so 2 suggestions with
        # 1 reviewer exceeds the cap.
        config_services.set_property(
            'committer_id', 'max_number_of_suggestions_per_reviewer', 1)

        reviewers_are_needed = (
            stats.are_translation_reviewers_needed_for_lang_code(
                self.sample_language_code))

        self.assertTrue(reviewers_are_needed)

    def test_translation_reviewers_not_needed_if_num_suggestions_eqs_max(self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.set_translation_suggestion_count_for_language_code(
            self.sample_language_code, 2)
        stats.set_translation_reviewer_count_for_language_code(
            self.sample_language_code, 2)
        # 2 suggestions / 2 reviewers exactly meets the cap of 1 each.
        config_services.set_property(
            'committer_id', 'max_number_of_suggestions_per_reviewer', 1)

        reviewers_are_needed = (
            stats.are_translation_reviewers_needed_for_lang_code(
                self.sample_language_code))

        self.assertFalse(reviewers_are_needed)

    def test_translation_reviewers_not_needed_if_num_suggestions_less_max(self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.set_translation_suggestion_count_for_language_code(
            self.sample_language_code, 1)
        stats.set_translation_reviewer_count_for_language_code(
            self.sample_language_code, 2)
        # 1 suggestion / 2 reviewers is below the cap of 1 each.
        config_services.set_property(
            'committer_id', 'max_number_of_suggestions_per_reviewer', 1)

        reviewers_are_needed = (
            stats.are_translation_reviewers_needed_for_lang_code(
                self.sample_language_code))

        self.assertFalse(reviewers_are_needed)

    def test_translation_reviewers_not_needed_if_reviewers_and_no_sugestions(
            self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.set_translation_reviewer_count_for_language_code(
            self.sample_language_code, 1)

        self.assertFalse(
            stats.are_translation_reviewers_needed_for_lang_code(
                self.sample_language_code))

    def test_translation_reviewers_not_needed_if_no_reviewers_no_sugestions(
            self):
        stats = suggestion_services.get_community_contribution_stats()
        self._assert_community_contribution_stats_is_in_default_state()

        self.assertFalse(
            stats.are_translation_reviewers_needed_for_lang_code(
                self.sample_language_code))

    # --- are_question_reviewers_needed. ---

    def test_question_reviewers_are_needed_if_suggestions_zero_reviewers(
            self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.question_suggestion_count = 1

        self.assertTrue(stats.are_question_reviewers_needed())

    def test_question_reviewers_are_needed_if_num_suggestions_past_max(self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.question_suggestion_count = 2
        stats.question_reviewer_count = 1
        # 2 suggestions with 1 reviewer exceeds the cap of 1 each.
        config_services.set_property(
            'committer_id', 'max_number_of_suggestions_per_reviewer', 1)

        reviewers_are_needed = stats.are_question_reviewers_needed()

        self.assertTrue(reviewers_are_needed)

    def test_question_reviewers_not_needed_if_num_suggestions_eqs_max(self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.question_suggestion_count = 2
        stats.question_reviewer_count = 2
        config_services.set_property(
            'committer_id', 'max_number_of_suggestions_per_reviewer', 1)

        reviewers_are_needed = stats.are_question_reviewers_needed()

        self.assertFalse(reviewers_are_needed)

    def test_question_reviewers_not_needed_if_num_suggestions_less_max(self):
        stats = suggestion_services.get_community_contribution_stats()
        stats.question_suggestion_count = 1
        stats.question_reviewer_count = 2
        config_services.set_property(
            'committer_id', 'max_number_of_suggestions_per_reviewer', 1)

        reviewers_are_needed = stats.are_question_reviewers_needed()

        self.assertFalse(reviewers_are_needed)

    def test_question_reviewers_not_needed_if_no_reviewers_no_sugestions(
            self):
        stats = suggestion_services.get_community_contribution_stats()
        self._assert_community_contribution_stats_is_in_default_state()

        self.assertFalse(stats.are_question_reviewers_needed())

    # --- validate() failure modes. ---

    def test_validate_translation_reviewer_counts_fails_for_negative_counts(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        (
            community_contribution_stats
            .set_translation_reviewer_count_for_language_code(
                self.sample_language_code, self.negative_count)
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the translation reviewer count to be non-negative for '
            '%s language code, received: %s.' % (
                self.sample_language_code, self.negative_count)
        ):
            community_contribution_stats.validate()

    def test_validate_translation_suggestion_counts_fails_for_negative_counts(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        (
            community_contribution_stats
            .set_translation_suggestion_count_for_language_code(
                self.sample_language_code, self.negative_count)
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the translation suggestion count to be non-negative for '
            '%s language code, received: %s.' % (
                self.sample_language_code, self.negative_count)
        ):
            community_contribution_stats.validate()

    def test_validate_question_reviewer_count_fails_for_negative_count(self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        community_contribution_stats.question_reviewer_count = (
            self.negative_count
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the question reviewer count to be non-negative, '
            'received: %s.' % (
                community_contribution_stats.question_reviewer_count)
        ):
            community_contribution_stats.validate()

    def test_validate_question_suggestion_count_fails_for_negative_count(self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        community_contribution_stats.question_suggestion_count = (
            self.negative_count
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the question suggestion count to be non-negative, '
            'received: %s.' % (
                community_contribution_stats.question_suggestion_count)
        ):
            community_contribution_stats.validate()

    def test_validate_translation_reviewer_counts_fails_for_non_integer_counts(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        (
            community_contribution_stats
            .set_translation_reviewer_count_for_language_code(
                self.sample_language_code, self.non_integer_count)
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the translation reviewer count to be an integer for '
            '%s language code, received: %s.' % (
                self.sample_language_code, self.non_integer_count)
        ):
            community_contribution_stats.validate()

    def test_validate_translation_suggestion_counts_fails_for_non_integer_count(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        (
            community_contribution_stats
            .set_translation_suggestion_count_for_language_code(
                self.sample_language_code, self.non_integer_count)
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the translation suggestion count to be an integer for '
            '%s language code, received: %s.' % (
                self.sample_language_code, self.non_integer_count)
        ):
            community_contribution_stats.validate()

    def test_validate_question_reviewer_count_fails_for_non_integer_count(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        community_contribution_stats.question_reviewer_count = (
            self.non_integer_count
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the question reviewer count to be an integer, '
            'received: %s.' % (
                community_contribution_stats.question_reviewer_count)
        ):
            community_contribution_stats.validate()

    def test_validate_question_suggestion_count_fails_for_non_integer_count(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        community_contribution_stats.question_suggestion_count = (
            self.non_integer_count
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Expected the question suggestion count to be an integer, '
            'received: %s.' % (
                community_contribution_stats.question_suggestion_count)
        ):
            community_contribution_stats.validate()

    def test_validate_translation_reviewer_counts_fails_for_invalid_lang_code(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        (
            community_contribution_stats
            .set_translation_reviewer_count_for_language_code(
                self.invalid_language_code, 1)
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Invalid language code for the translation reviewer counts: '
            '%s.' % self.invalid_language_code
        ):
            community_contribution_stats.validate()

    def test_validate_translation_suggestion_counts_fails_for_invalid_lang_code(
            self):
        community_contribution_stats = (
            suggestion_services.get_community_contribution_stats()
        )
        (
            community_contribution_stats
            .set_translation_suggestion_count_for_language_code(
                self.invalid_language_code, 1)
        )

        with self.assertRaisesRegex(
            utils.ValidationError,
            'Invalid language code for the translation suggestion counts: '
            '%s.' % self.invalid_language_code
        ):
            community_contribution_stats.validate()
class ReviewableSuggestionEmailInfoUnitTests(test_utils.GenericTestBase):
    """Tests for the ReviewableSuggestionEmailInfo class."""

    # Fixture values used to construct a ReviewableSuggestionEmailInfo.
    suggestion_type = feconf.SUGGESTION_TYPE_ADD_QUESTION
    language_code = 'en'
    suggestion_content = 'sample question'
    submission_datetime = datetime.datetime.utcnow()

    def test_initial_object_with_valid_arguments_has_correct_properties(self):
        """The constructor must store each argument on the matching
        attribute unchanged.
        """
        reviewable_suggestion_email_info = (
            suggestion_registry.ReviewableSuggestionEmailInfo(
                self.suggestion_type, self.language_code,
                self.suggestion_content, self.submission_datetime
            )
        )

        self.assertEqual(
            reviewable_suggestion_email_info.suggestion_type,
            self.suggestion_type)
        self.assertEqual(
            reviewable_suggestion_email_info.language_code,
            self.language_code)
        self.assertEqual(
            reviewable_suggestion_email_info.suggestion_content,
            self.suggestion_content)
        self.assertEqual(
            reviewable_suggestion_email_info.submission_datetime,
            self.submission_datetime)
#! /usr/bin/env python

# Stamps a new libtorrent version number into the various build and
# documentation files.  Usage: set_version.py MAJOR MINOR TINY TAG

import os
import sys
import glob

# All four version components are required on the command line and must
# be numeric; an IndexError/ValueError here is a usage error.
version = (int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3]), int(sys.argv[4]))
def substitute_file(name):
    """Rewrite the version identifiers embedded in the given file, in place.

    The file is read line by line; any line carrying a version string
    recognised for that file type (C++ header, autoconf, CMake,
    reST/python docs, Jamfile, setup.py) is replaced with a line built
    from the module-level `version` tuple.  All other lines are copied
    through unchanged.
    """
    subst = ''
    # Context managers guarantee both handles are closed (the original
    # wrote via a bare open() whose handle was never explicitly closed).
    with open(name) as f:
        for l in f:
            if '#define LIBTORRENT_VERSION_MAJOR' in l and name.endswith('.hpp'):
                l = '#define LIBTORRENT_VERSION_MAJOR %d\n' % version[0]
            elif '#define LIBTORRENT_VERSION_MINOR' in l and name.endswith('.hpp'):
                l = '#define LIBTORRENT_VERSION_MINOR %d\n' % version[1]
            elif '#define LIBTORRENT_VERSION_TINY' in l and name.endswith('.hpp'):
                l = '#define LIBTORRENT_VERSION_TINY %d\n' % version[2]
            elif '#define LIBTORRENT_VERSION ' in l and name.endswith('.hpp'):
                l = '#define LIBTORRENT_VERSION "%d.%d.%d.%d"\n' % (version[0], version[1], version[2], version[3])
            elif 'AC_INIT([libtorrent-rasterbar]' in l and name.endswith('.ac'):
                l = 'AC_INIT([libtorrent-rasterbar],[%d.%d.%d],[[email protected]],\n' % (version[0], version[1], version[2])
            elif 'set (VERSION ' in l and name.endswith('.txt'):
                l = 'set (VERSION "%d.%d.%d")\n' % (version[0], version[1], version[2])
            elif ':Version: ' in l and (name.endswith('.rst') or name.endswith('.py')):
                l = ':Version: %d.%d.%d\n' % (version[0], version[1], version[2])
            elif 'VERSION = ' in l and name.endswith('Jamfile'):
                l = 'VERSION = %d.%d.%d ;\n' % (version[0], version[1], version[2])
            elif 'version=' in l and name.endswith('setup.py'):
                l = "\tversion = '%d.%d.%d',\n" % (version[0], version[1], version[2])
            elif "version = '" in l and name.endswith('setup.py'):
                l = "\tversion = '%d.%d.%d',\n" % (version[0], version[1], version[2])
            subst += l

    with open(name, 'w+') as out:
        out.write(subst)
# Stamp the version into every file that embeds it, then sweep the
# reStructuredText docs, and finish with the Jamfile.
for _name in (
        'include/libtorrent/version.hpp',
        'CMakeLists.txt',
        'configure.ac',
        'bindings/python/setup.py',
        'docs/gen_reference_doc.py'):
    substitute_file(_name)
for i in glob.glob('docs/*.rst'):
    substitute_file(i)
substitute_file('Jamfile')
<|file_name|>parser.py<|end_file_name|><|fim▁begin|>from pymuco.midiio import *
from pymuco.midi import representation
class MidiParser(MidiOutStream.MidiOutStream):
    """
    This class listens to a select few midi events relevant for a simple
    midifile containing a pianomelody.

    Parsed notes and metadata are written into the `midifile` object
    passed to the constructor.
    """

    def __init__(self, midifile):
        self.midifile = midifile
        # (pitch, channel) -> Note objects that have sounded but not yet
        # received their matching note_off event.
        self.notes_on = {}
        # Index of the track currently being parsed; -1 means none active.
        self.ctrack = -1
        # Midi file format as reported by the header chunk; -1 until seen.
        self.mformat = -1

    # Event Listeners

    def channel_message(self, message_type, channel, data):
        pass

    def note_on(self, channel=0, pitch=0x40, onvel=0x40):
        """Open a new Note at the current absolute time and register it
        until the matching note_off arrives.
        """
        # First time this channel is seen on the track, default its
        # program number to 1.
        if not str(channel) in self.midifile[str(self.ctrack)].channels:
            self.midifile[str(self.ctrack)].channels[str(channel)] = 1
        program = self.midifile[str(self.ctrack)].channels[str(channel)]
        note = representation.Note(
            self.abs_time(), self.abs_time(), pitch, onvel, channel=channel,
            program=program)
        if self.ctrack == -1:
            # NOTE(review): this guard runs after the track lookups above;
            # if '-1' is not a valid track key those lookups would fail
            # first — confirm whether the guard should come before them.
            print('Midiparser: no track currently active.')
            return
        self.midifile[str(self.ctrack)].notes.append(note)
        if not (pitch, channel) in self.notes_on:
            self.notes_on[pitch, channel] = note

    def note_off(self, channel=0, pitch=0x40, offvel=0x40):
        """Close the pending Note for (pitch, channel), recording its off
        time and off velocity; silently ignores unmatched note_offs.
        """
        if (pitch, channel) not in self.notes_on:
            # print 'Midiparser: Note off before note on?'
            return
        note = self.notes_on[pitch, channel]
        note.off = self.abs_time()
        note.offvel = offvel
        #self.midifile[str(self.ctrack)].insert(Note(on, self.abs_time(), pitch, onvel, offvel))
        del self.notes_on[pitch, channel]

    def header(self, format=0, nTracks=1, division=96):
        # Record timing resolution and file format from the header chunk.
        self.midifile.division = division
        self.mformat = format
        self.midifile.format = format

    def sequence_name(self, text):
        self.midifile[str(self.ctrack)].name = text

    def tempo(self, value):
        self.midifile.tempo = value

    def smtp_offset(self, hour, minute, second, frame, framePart):
        self.midifile.smtp_offset = (hour, minute, second, frame, framePart)

    def time_signature(self, nn, dd, cc, bb):
        self.midifile.time_signature = (nn, dd, cc, bb)

    def key_signature(self, sf, mi):
        self.midifile.key_signature = (sf, mi)

    # The events below are received but deliberately ignored.
    def program_name(self, data):pass
    def sequencer_specific(self, data):pass
    def aftertouch(self, channel=0, note=0x40, velocity=0x40):pass
    def continuous_controller(self, channel, controller, value):pass

    def patch_change(self, channel, patch):
        # Remember the active program for the channel; note_on reads it.
        self.midifile[str(self.ctrack)].channels[str(channel)] = patch

    def channel_pressure(self, channel, pressure):pass
    def pitch_bend(self, channel, value):pass
    def system_exclusive(self, data):pass
    def song_position_pointer(self, value):pass
    def song_select(self, songNumber):pass
    def tuning_request(self):pass
    def midi_time_code(self, msg_type, values):pass
    def eof(self):pass

    def start_of_track(self, n_track=0):
        # Create a fresh Track and make it the current target for events.
        self.midifile[str(n_track)] = representation.Track(self.midifile, n_track)
        self.ctrack = n_track

    def end_of_track(self):
        # No track is active between end_of_track and the next start.
        self.ctrack = -1

    def sysex_event(self, data):pass
    def meta_event(self, meta_type, data):pass
    def sequence_number(self, value):pass
    def text(self, text):pass
    def copyright(self, text):pass
    def instrument_name(self, text):pass
    def lyric(self, text):pass
    def marker(self, text):pass
    def cuepoint(self, text):pass
    def midi_ch_prefix(self, channel):pass
    def midi_port(self, value):pass
'use strict'

// Aggregate the package's public sub-modules on a single export surface.
exports.Utils = require('./utils')
exports.Schemas = require('./schemas')
exports.Validator = require('./validator')
// ==========================================================================
// SeqAn - The Library for Sequence Analysis
// ==========================================================================
// Copyright (c) 2006-2015, Knut Reinert, FU Berlin
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of Knut Reinert or the FU Berlin nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
// ==========================================================================
#include <iostream>
#include <sstream>
#include <seqan/index.h>
#include <seqan/stream.h>
// Checks that writeRecords() renders the (enhanced suffix array) index of
// "banana" as the expected Graphviz DOT graph, edge by edge.
SEQAN_DEFINE_TEST(test_index_drawing_esa_dot)
{
    seqan::CharString myString = "banana";
    seqan::Index<seqan::CharString> stree(myString);

    // Render the index into a string stream.
    std::stringstream sstream;
    writeRecords(sstream, stree, seqan::DotDrawing());

    // Build the expected DOT document line by line.
    std::stringstream expected;
    expected << "digraph G {\n"
             << "\n"
             << "/* Graph Attributes */\n"
             << "graph [rankdir = LR];\n"
             << "\n"
             << "/* Node Attributes */\n"
             << "node [shape = ellipse, fillcolor = lightgrey, style = filled, fontname = \"Times-Italic\"];\n"
             << "\n"
             << "/* Edge Attributes */\n"
             << "edge [fontname = \"Times-Italic\", arrowsize = 0.75, fontsize = 16];\n"
             << "\n"
             << "/* Edges */\n"
             << "\"[0:6)\" [style = dashed];\n"
             << "\"[0:3)\";\n"
             << "\"[0:6)\" -> \"[0:3)\" [label = \"a\"];\n"
             << "\"[1:3)\";\n"
             << "\"[0:3)\" -> \"[1:3)\" [label = \"na\"];\n"
             << "\"[2:3)\";\n"
             << "\"[1:3)\" -> \"[2:3)\" [label = \"na\"];\n"
             << "\"[3:4)\";\n"
             << "\"[0:6)\" -> \"[3:4)\" [label = \"banana\"];\n"
             << "\"[4:6)\";\n"
             << "\"[0:6)\" -> \"[4:6)\" [label = \"na\"];\n"
             << "\"[5:6)\";\n"
             << "\"[4:6)\" -> \"[5:6)\" [label = \"na\"];\n"
             << "\n"
             << "}\n";

    SEQAN_ASSERT_EQ(expected.str(), sstream.str());
}
// Test suite entry point: registers and runs all index-drawing tests.
SEQAN_BEGIN_TESTSUITE(test_index_drawing)
{
    SEQAN_CALL_TEST(test_index_drawing_esa_dot);
}
SEQAN_END_TESTSUITE
<|file_name|>install_plugin_command_test.go<|end_file_name|><|fim▁begin|>package plugin
import (
"bytes"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"path/filepath"
"code.cloudfoundry.org/cli/integration/helpers"
"code.cloudfoundry.org/cli/util/generic"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
. "github.com/onsi/gomega/gbytes"
. "github.com/onsi/gomega/gexec"
. "github.com/onsi/gomega/ghttp"
)
var _ = Describe("install-plugin command", func() {
var buffer *Buffer
BeforeEach(func() {
helpers.RunIfExperimental("experimental until all install-plugin refactor stories are finished")
})
AfterEach(func() {
pluginsHomeDirContents, err := ioutil.ReadDir(filepath.Join(homeDir, ".cf", "plugins"))
if os.IsNotExist(err) {
return
}
Expect(err).ToNot(HaveOccurred())
for _, entry := range pluginsHomeDirContents {
Expect(entry.Name()).NotTo(ContainSubstring("temp"))
}
})
Describe("help", func() {
Context("when the --help flag is given", func() {
It("displays command usage to stdout", func() {
session := helpers.CF("install-plugin", "--help")
Eventually(session.Out).Should(Say("NAME:"))
Eventually(session.Out).Should(Say("install-plugin - Install CLI plugin"))
Eventually(session.Out).Should(Say("USAGE:"))
Eventually(session.Out).Should(Say("cf install-plugin \\(LOCAL-PATH/TO/PLUGIN | URL | -r REPO_NAME PLUGIN_NAME\\) \\[-f\\]"))
Eventually(session.Out).Should(Say("EXAMPLES:"))
Eventually(session.Out).Should(Say("cf install-plugin ~/Downloads/plugin-foobar"))
Eventually(session.Out).Should(Say("cf install-plugin https://example.com/plugin-foobar_linux_amd64"))
Eventually(session.Out).Should(Say("cf install-plugin -r My-Repo plugin-echo"))
Eventually(session.Out).Should(Say("OPTIONS:"))
Eventually(session.Out).Should(Say("-f Force install of plugin without confirmation"))
Eventually(session.Out).Should(Say("-r Name of a registered repository where the specified plugin is located"))
Eventually(session.Out).Should(Say("SEE ALSO:"))
Eventually(session.Out).Should(Say("add-plugin-repo, list-plugin-repos, plugins"))
Eventually(session).Should(Exit(0))
})
})
})
Context("when the user does not provide a plugin name or location", func() {
It("errors and displays usage", func() {
session := helpers.CF("install-plugin")
Eventually(session.Err).Should(Say("Incorrect Usage: the required argument `PLUGIN_NAME_OR_LOCATION` was not provided"))
Eventually(session.Out).Should(Say("USAGE:"))
Eventually(session).Should(Exit(1))
})
})
Describe("installing a plugin from a local file", func() {
var pluginPath string
Context("when the file is compiled for a different os and architecture", func() {
BeforeEach(func() {
goos := os.Getenv("GOOS")
goarch := os.Getenv("GOARCH")
os.Setenv("GOOS", "openbsd")
os.Setenv("GOARCH", "amd64")
pluginPath = helpers.BuildConfigurablePlugin("configurable_plugin", "some-plugin", "1.0.0",
[]helpers.PluginCommand{
{Name: "some-command", Help: "some-command-help"},
},
)
os.Setenv("GOOS", goos)
os.Setenv("GOARCH", goarch)
})
It("fails and reports the file is not a valid CLI plugin", func() {
				session := helpers.CF("install-plugin", pluginPath, "-f")
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("File is not a valid cf CLI plugin binary\\."))
Eventually(session).Should(Exit(1))
})
})
Context("when the file is compiled for the correct os and architecture", func() {
BeforeEach(func() {
pluginPath = helpers.BuildConfigurablePlugin("configurable_plugin", "some-plugin", "1.0.0",
[]helpers.PluginCommand{
{Name: "some-command", Help: "some-command-help"},
},
)
})
Context("when the -f flag is given", func() {
It("installs the plugin and cleans up all temp files", func() {
session := helpers.CF("install-plugin", pluginPath, "-f")
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Installing plugin some-plugin\\.\\.\\."))
Eventually(session.Out).Should(Say("OK"))
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(session).Should(Exit(0))
installedPath := generic.ExecutableFilename(filepath.Join(homeDir, ".cf", "plugins", "some-plugin"))
pluginsSession := helpers.CF("plugins", "--checksum")
expectedSha := helpers.Sha1Sum(installedPath)
Eventually(pluginsSession.Out).Should(Say("some-plugin\\s+1\\.0\\.0\\s+%s", expectedSha))
Eventually(pluginsSession).Should(Exit(0))
Eventually(helpers.CF("some-command")).Should(Exit(0))
helpSession := helpers.CF("help")
Eventually(helpSession.Out).Should(Say("some-command"))
Eventually(helpSession).Should(Exit(0))
})
Context("when the file does not have executable permissions", func() {
BeforeEach(func() {
Expect(os.Chmod(pluginPath, 0666)).ToNot(HaveOccurred())
})
It("installs the plugin", func() {
session := helpers.CF("install-plugin", pluginPath, "-f")
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(session).Should(Exit(0))
})
})
Context("when the plugin is already installed", func() {
BeforeEach(func() {
Eventually(helpers.CF("install-plugin", pluginPath, "-f")).Should(Exit(0))
})
It("uninstalls the existing plugin and installs the plugin", func() {
session := helpers.CF("install-plugin", pluginPath, "-f")
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 is already installed\\. Uninstalling existing plugin\\.\\.\\."))
Eventually(session.Out).Should(Say("CLI-MESSAGE-UNINSTALL"))
Eventually(session.Out).Should(Say("Plugin some-plugin successfully uninstalled\\."))
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(session).Should(Exit(0))
})
})
Context("when the file does not exist", func() {
It("tells the user that the file was not found and fails", func() {
session := helpers.CF("install-plugin", "some/path/that/does/not/exist", "-f")
Eventually(session.Err).Should(Say("File not found locally, make sure the file exists at given path some/path/that/does/not/exist"))
Consistently(session.Out).ShouldNot(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Consistently(session.Out).ShouldNot(Say("Install and use plugins at your own risk\\."))
Eventually(session).Should(Exit(1))
})
})
Context("when the file is not an executable", func() {
BeforeEach(func() {
badPlugin, err := ioutil.TempFile("", "")
Expect(err).ToNot(HaveOccurred())
pluginPath = badPlugin.Name()
badPlugin.Close()
})
AfterEach(func() {
err := os.Remove(pluginPath)
Expect(err).ToNot(HaveOccurred())
})
It("tells the user that the file is not a plugin and fails", func() {
session := helpers.CF("install-plugin", pluginPath, "-f")
Eventually(session.Err).Should(Say("File is not a valid cf CLI plugin binary\\."))
Eventually(session).Should(Exit(1))
})
})
Context("when the file is not a plugin", func() {
BeforeEach(func() {
var err error
pluginPath, err = Build("code.cloudfoundry.org/cli/integration/assets/non_plugin")
Expect(err).ToNot(HaveOccurred())
})
It("tells the user that the file is not a plugin and fails", func() {
session := helpers.CF("install-plugin", pluginPath, "-f")
Eventually(session.Err).Should(Say("File is not a valid cf CLI plugin binary\\."))
Eventually(session).Should(Exit(1))
})
})
Context("command conflict", func() {
Context("when the plugin has a command that is the same as a built-in command", func() {
var pluginPath string
BeforeEach(func() {
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "some-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "version"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin some-plugin v1\\.1\\.1 could not be installed as it contains commands with names that are already used: version"))
Eventually(session).Should(Exit(1))
})
})
Context("when the plugin has a command that is the same as a built-in alias", func() {
BeforeEach(func() {
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "some-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "cups"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin some-plugin v1\\.1\\.1 could not be installed as it contains commands with names that are already used: cups"))
Eventually(session).Should(Exit(1))
})
})
Context("when the plugin has a command that is the same as another plugin command", func() {
BeforeEach(func() {
helpers.InstallConfigurablePlugin("existing-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "existing-command"},
})
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "new-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "existing-command"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin new-plugin v1\\.1\\.1 could not be installed as it contains commands with names that are already used: existing-command\\."))
Eventually(session).Should(Exit(1))
})
})
Context("when the plugin has a command that is the same as another plugin alias", func() {
BeforeEach(func() {
helpers.InstallConfigurablePlugin("existing-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "existing-command"},
})
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "new-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "new-command", Alias: "existing-command"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin new-plugin v1\\.1\\.1 could not be installed as it contains commands with aliases that are already used: existing-command\\."))
Eventually(session).Should(Exit(1))
})
})
})
Context("alias conflict", func() {
Context("when the plugin has an alias that is the same as a built-in command", func() {
var pluginPath string
BeforeEach(func() {
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "some-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "some-command", Alias: "version"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin some-plugin v1\\.1\\.1 could not be installed as it contains commands with aliases that are already used: version"))
Eventually(session).Should(Exit(1))
})
})
Context("when the plugin has an alias that is the same as a built-in alias", func() {
BeforeEach(func() {
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "some-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "some-command", Alias: "cups"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin some-plugin v1\\.1\\.1 could not be installed as it contains commands with aliases that are already used: cups"))
Eventually(session).Should(Exit(1))
})
})
Context("when the plugin has an alias that is the same as another plugin command", func() {
BeforeEach(func() {
helpers.InstallConfigurablePlugin("existing-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "existing-command"},
})
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "new-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "new-command", Alias: "existing-command"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin new-plugin v1\\.1\\.1 could not be installed as it contains commands with aliases that are already used: existing-command\\."))
Eventually(session).Should(Exit(1))
})
})
Context("when the plugin has an alias that is the same as another plugin alias", func() {
BeforeEach(func() {
helpers.InstallConfigurablePlugin("existing-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "existing-command", Alias: "existing-alias"},
})
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "new-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "new-command", Alias: "existing-alias"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin new-plugin v1\\.1\\.1 could not be installed as it contains commands with aliases that are already used: existing-alias\\."))
Eventually(session).Should(Exit(1))
})
})
})
Context("alias and command conflicts", func() {
Context("when the plugin has a command and an alias that are both taken by another plugin", func() {
BeforeEach(func() {
helpers.InstallConfigurablePlugin("existing-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "existing-command", Alias: "existing-alias"},
})
pluginPath = helpers.BuildConfigurablePlugin(
"configurable_plugin", "new-plugin", "1.1.1",
[]helpers.PluginCommand{
{Name: "existing-command", Alias: "existing-alias"},
})
})
It("tells the user about the conflict and fails", func() {
session := helpers.CF("install-plugin", "-f", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin new-plugin v1\\.1\\.1 could not be installed as it contains commands with names and aliases that are already used: existing-command, existing-alias\\."))
Eventually(session).Should(Exit(1))
})
})
})
})
Context("when the -f flag is not given", func() {
Context("when the user says yes", func() {
BeforeEach(func() {
buffer = NewBuffer()
buffer.Write([]byte("y\n"))
})
It("installs the plugin", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Do you want to install the plugin %s\\? \\[yN\\]: y", helpers.ConvertPathToRegularExpression(pluginPath)))
Eventually(session.Out).Should(Say("Installing plugin some-plugin\\.\\.\\."))
Eventually(session.Out).Should(Say("OK"))
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(session).Should(Exit(0))
pluginsSession := helpers.CF("plugins", "--checksum")
expectedSha := helpers.Sha1Sum(
generic.ExecutableFilename(filepath.Join(homeDir, ".cf/plugins/some-plugin")))
Eventually(pluginsSession.Out).Should(Say("some-plugin\\s+1.0.0\\s+%s", expectedSha))
Eventually(pluginsSession).Should(Exit(0))
Eventually(helpers.CF("some-command")).Should(Exit(0))
helpSession := helpers.CF("help")
Eventually(helpSession.Out).Should(Say("some-command"))
Eventually(helpSession).Should(Exit(0))
})
Context("when the plugin is already installed", func() {
BeforeEach(func() {
Eventually(helpers.CF("install-plugin", pluginPath, "-f")).Should(Exit(0))
})
It("fails and tells the user how to force a reinstall", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", pluginPath)
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin some-plugin 1\\.0\\.0 could not be installed\\. A plugin with that name is already installed\\."))
Eventually(session.Err).Should(Say("TIP: Use 'cf install-plugin -f' to force a reinstall\\."))
Eventually(session).Should(Exit(1))
})
})
})
Context("when the user says no", func() {
BeforeEach(func() {
buffer = NewBuffer()
buffer.Write([]byte("n\n"))
})
It("does not install the plugin", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Do you want to install the plugin %s\\? \\[yN\\]: n", helpers.ConvertPathToRegularExpression(pluginPath)))
Eventually(session.Out).Should(Say("Plugin installation cancelled\\."))
Eventually(session).Should(Exit(0))
})
Context("when the plugin is already installed", func() {
BeforeEach(func() {
Eventually(helpers.CF("install-plugin", pluginPath, "-f")).Should(Exit(0))
})
It("does not uninstall the existing plugin", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", pluginPath)
Eventually(session.Out).Should(Say("Plugin installation cancelled\\."))
Consistently(session.Out).ShouldNot(Say("Plugin some-plugin 1\\.0\\.0 is already installed\\. Uninstalling existing plugin\\.\\.\\."))
Consistently(session.Out).ShouldNot(Say("CLI-MESSAGE-UNINSTALL"))
Consistently(session.Out).ShouldNot(Say("Plugin some-plugin successfully uninstalled\\."))
Eventually(session).Should(Exit(0))
})
})
})
Context("when the user interrupts with control-c", func() {
BeforeEach(func() {
buffer = NewBuffer()
buffer.Write([]byte("y")) // but not enter
})
It("does not install the plugin and does not create a bad state", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Do you want to install the plugin %s\\? \\[yN\\]:", helpers.ConvertPathToRegularExpression(pluginPath)))
session.Interrupt()
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session).Should(Exit(1))
// make sure cf plugins did not break
Eventually(helpers.CF("plugins", "--checksum")).Should(Exit(0))
// make sure a retry of the plugin install works
retrySession := helpers.CF("install-plugin", pluginPath, "-f")
Eventually(retrySession.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(retrySession).Should(Exit(0))
})
})
})
})
})
Describe("installing a plugin from a URL", func() {
var (
server *Server
pluginPath string
err error
)
BeforeEach(func() {
server = NewTLSServer()
// Suppresses ginkgo server logs
server.HTTPTestServer.Config.ErrorLog = log.New(&bytes.Buffer{}, "", 0)
})
AfterEach(func() {
server.Close()
})
Context("when a URL and the -f flag are provided", func() {
Context("when an executable is available for download at the URL", func() {
var (
pluginData []byte
)
BeforeEach(func() {
pluginPath = helpers.BuildConfigurablePlugin("configurable_plugin", "some-plugin", "1.0.0",
[]helpers.PluginCommand{
{Name: "some-command", Help: "some-command-help"},
},
)
pluginData, err = ioutil.ReadFile(pluginPath)
Expect(err).ToNot(HaveOccurred())
server.AppendHandlers(
CombineHandlers(
VerifyRequest(http.MethodGet, "/"),
RespondWith(http.StatusOK, pluginData),
),
)
})
AfterEach(func() {
err = os.Remove(pluginPath)
Expect(err).ToNot(HaveOccurred())
})
It("installs the plugin", func() {
session := helpers.CF("install-plugin", "-f", server.URL(), "-k")
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Starting download of plugin binary from URL\\.\\.\\."))
Eventually(session.Out).Should(Say("%d bytes downloaded\\.\\.\\.", len(pluginData)))
Eventually(session.Out).Should(Say("Installing plugin some-plugin\\.\\.\\."))
Eventually(session.Out).Should(Say("OK"))
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(session).Should(Exit(0))
})
Context("when the URL redirects", func() {
BeforeEach(func() {
server.Reset()
server.AppendHandlers(
CombineHandlers(
VerifyRequest(http.MethodGet, "/redirect"),
RespondWith(http.StatusMovedPermanently, nil, http.Header{"Location": []string{server.URL()}}),
),
CombineHandlers(
VerifyRequest(http.MethodGet, "/"),
RespondWith(http.StatusOK, pluginData),
))
})
It("installs the plugin", func() {
session := helpers.CF("install-plugin", "-f", fmt.Sprintf("%s/redirect", server.URL()), "-k")
Eventually(session.Out).Should(Say("Installing plugin some-plugin\\.\\.\\."))
Eventually(session.Out).Should(Say("OK"))
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(session).Should(Exit(0))
})
})
Context("when the plugin has already been installed", func() {
BeforeEach(func() {
Eventually(helpers.CF("install-plugin", pluginPath, "-f")).Should(Exit(0))
})
It("uninstalls and reinstalls the plugin", func() {
session := helpers.CF("install-plugin", "-f", server.URL(), "-k")
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Starting download of plugin binary from URL\\.\\.\\."))
Eventually(session.Out).Should(Say("%d bytes downloaded\\.\\.\\.", len(pluginData)))
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 is already installed\\. Uninstalling existing plugin\\.\\.\\."))
Eventually(session.Out).Should(Say("CLI-MESSAGE-UNINSTALL"))
Eventually(session.Out).Should(Say("Plugin some-plugin successfully uninstalled\\."))
Eventually(session.Out).Should(Say("OK"))
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(session).Should(Exit(0))
})
})
})
Context("when a 4xx or 5xx HTTP response status is encountered", func() {
BeforeEach(func() {
server.AppendHandlers(
CombineHandlers(
VerifyRequest(http.MethodGet, "/"),
RespondWith(http.StatusNotFound, nil),
),
)
})
It("displays an appropriate error", func() {
session := helpers.CF("install-plugin", "-f", server.URL(), "-k")
Eventually(session.Out).Should(Say("Starting download of plugin binary from URL\\.\\.\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Download attempt failed; server returned 404 Not Found"))
Eventually(session.Err).Should(Say("Unable to install; plugin is not available from the given URL\\."))
Eventually(session).Should(Exit(1))
})
})
Context("when the file is not a plugin", func() {
BeforeEach(func() {
var err error
pluginPath, err = Build("code.cloudfoundry.org/cli/integration/assets/non_plugin")
Expect(err).ToNot(HaveOccurred())
pluginData, err := ioutil.ReadFile(pluginPath)
Expect(err).ToNot(HaveOccurred())
server.AppendHandlers(
CombineHandlers(
VerifyRequest(http.MethodGet, "/"),
RespondWith(http.StatusOK, pluginData),
),
)
})
AfterEach(func() {
err = os.Remove(pluginPath)
Expect(err).ToNot(HaveOccurred())
})
It("tells the user that the file is not a plugin and fails", func() {
session := helpers.CF("install-plugin", "-f", server.URL(), "-k")
Eventually(session.Out).Should(Say("Starting download of plugin binary from URL\\.\\.\\."))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("File is not a valid cf CLI plugin binary\\."))
Eventually(session).Should(Exit(1))
})
})
})
Context("when the -f flag is not provided", func() {
var (
pluginData []byte
)
BeforeEach(func() {
pluginPath = helpers.BuildConfigurablePlugin("configurable_plugin", "some-plugin", "1.0.0",
[]helpers.PluginCommand{
{Name: "some-command", Help: "some-command-help"},
},
)
pluginData, err = ioutil.ReadFile(pluginPath)
Expect(err).ToNot(HaveOccurred())
server.AppendHandlers(
CombineHandlers(
VerifyRequest(http.MethodGet, "/"),
RespondWith(http.StatusOK, pluginData),
),
)
})
AfterEach(func() {
err = os.Remove(pluginPath)
Expect(err).ToNot(HaveOccurred())
})
Context("when the user says yes", func() {
BeforeEach(func() {
buffer = NewBuffer()
buffer.Write([]byte("y\n"))
})
It("installs the plugin", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", server.URL(), "-k")
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Do you want to install the plugin %s\\? \\[yN\\]: y", server.URL()))
Eventually(session.Out).Should(Say("Starting download of plugin binary from URL\\.\\.\\."))
Eventually(session.Out).Should(Say("%d bytes downloaded\\.\\.\\.", len(pluginData)))
Eventually(session.Out).Should(Say("Installing plugin some-plugin\\.\\.\\."))
Eventually(session.Out).Should(Say("OK"))
Eventually(session.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(session).Should(Exit(0))
})
Context("when the plugin is already installed", func() {
BeforeEach(func() {
Eventually(helpers.CF("install-plugin", pluginPath, "-f")).Should(Exit(0))
})
It("fails and tells the user how to force a reinstall", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", server.URL(), "-k")
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Do you want to install the plugin %s\\? \\[yN\\]: y", server.URL()))
Eventually(session.Out).Should(Say("Starting download of plugin binary from URL\\.\\.\\."))
Eventually(session.Out).Should(Say("%d bytes downloaded\\.\\.\\.", len(pluginData)))
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session.Err).Should(Say("Plugin some-plugin 1\\.0\\.0 could not be installed\\. A plugin with that name is already installed\\."))
Eventually(session.Err).Should(Say("TIP: Use 'cf install-plugin -f' to force a reinstall\\."))
Eventually(session).Should(Exit(1))
})
})
})
Context("when the user says no", func() {
BeforeEach(func() {
buffer = NewBuffer()
buffer.Write([]byte("n\n"))
})
It("does not install the plugin", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", server.URL())
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Do you want to install the plugin %s\\? \\[yN\\]: n", server.URL()))
Eventually(session.Out).Should(Say("Plugin installation cancelled\\."))
Eventually(session).Should(Exit(0))
Expect(server.ReceivedRequests()).To(HaveLen(0))
})
})
Context("when the user interrupts with control-c", func() {
BeforeEach(func() {
buffer = NewBuffer()
buffer.Write([]byte("y")) // but not enter
})
It("does not install the plugin and does not create a bad state", func() {
session := helpers.CFWithStdin(buffer, "install-plugin", pluginPath)
Eventually(session.Out).Should(Say("Attention: Plugins are binaries written by potentially untrusted authors\\."))
Eventually(session.Out).Should(Say("Install and use plugins at your own risk\\."))
Eventually(session.Out).Should(Say("Do you want to install the plugin %s\\? \\[yN\\]:", helpers.ConvertPathToRegularExpression(pluginPath)))
session.Interrupt()
Eventually(session.Out).Should(Say("FAILED"))
Eventually(session).Should(Exit(1))
Expect(server.ReceivedRequests()).To(HaveLen(0))
// make sure cf plugins did not break
Eventually(helpers.CF("plugins", "--checksum")).Should(Exit(0))
// make sure a retry of the plugin install works
retrySession := helpers.CF("install-plugin", pluginPath, "-f")
Eventually(retrySession.Out).Should(Say("Plugin some-plugin 1\\.0\\.0 successfully installed\\."))
Eventually(retrySession).Should(Exit(0))
})
})
})
})
})<|fim▁end|> | |
/**
--| ADAPTIVE RUNTIME PLATFORM |----------------------------------------------------------------------------------------
(C) Copyright 2013-2015 Carlos Lozano Diez t/a Adaptive.me <http://adaptive.me>.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 . Unless required by appli-
-cable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
Original author:
* Carlos Lozano Diez
<http://github.com/carloslozano>
<http://twitter.com/adaptivecoder>
<mailto:[email protected]>
Contributors:
* Ferran Vila Conesa
<http://github.com/fnva>
<http://twitter.com/ferran_vila>
<mailto:[email protected]>
* See source code files for contributors.
Release:
* @version v2.2.15
-------------------------------------------| aut inveniam viam aut faciam |--------------------------------------------
*/
module Adaptive {
/**
@enum {Adaptive.INetworkReachabilityCallbackError} Adaptive.INetworkReachabilityCallbackError
Enumeration INetworkReachabilityCallbackError
*/
export class INetworkReachabilityCallbackError {
constructor(public value:string){}
toString(){return this.value;}
/**
@property {Adaptive.INetworkReachabilityCallbackError} [Forbidden='Forbidden']
*/
static Forbidden = new INetworkReachabilityCallbackError("Forbidden");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [NotFound='NotFound']
*/
static NotFound = new INetworkReachabilityCallbackError("NotFound");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [MethodNotAllowed='MethodNotAllowed']
*/
static MethodNotAllowed = new INetworkReachabilityCallbackError("MethodNotAllowed");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [NotAllowed='NotAllowed']
*/
static NotAllowed = new INetworkReachabilityCallbackError("NotAllowed");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [NotAuthenticated='NotAuthenticated']
*/
static NotAuthenticated = new INetworkReachabilityCallbackError("NotAuthenticated");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [TimeOut='TimeOut']
*/
static TimeOut = new INetworkReachabilityCallbackError("TimeOut");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [NoResponse='NoResponse']
*/
static NoResponse = new INetworkReachabilityCallbackError("NoResponse");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [Unreachable='Unreachable']
*/
static Unreachable = new INetworkReachabilityCallbackError("Unreachable");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [WrongParams='WrongParams']
*/
static WrongParams = new INetworkReachabilityCallbackError("WrongParams");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [MalformedUrl='MalformedUrl']
*/
static MalformedUrl = new INetworkReachabilityCallbackError("MalformedUrl");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [DomainUnresolvable='DomainUnresolvable']
*/
static DomainUnresolvable = new INetworkReachabilityCallbackError("DomainUnresolvable");
/**
@property {Adaptive.INetworkReachabilityCallbackError} [Unknown='Unknown']
*/
static Unknown = new INetworkReachabilityCallbackError("Unknown");
/**
@method
@static
Convert JSON parsed object to enumeration.
@return {Adaptive.INetworkReachabilityCallbackError}
*/
static toObject(object : any) : INetworkReachabilityCallbackError {
var retValue : INetworkReachabilityCallbackError = INetworkReachabilityCallbackError.Unknown;
if (object != null && object.value != null && INetworkReachabilityCallbackError.hasOwnProperty(object.value)) {
retValue = INetworkReachabilityCallbackError[object.value];<|fim▁hole|> return retValue;
}
}
}<|fim▁end|> | } |
<|file_name|>attribute-form.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2008-2009 The Open Source Geospatial Foundation
*
* Published under the BSD license.
* See http://svn.geoext.org/core/trunk/geoext/license.txt for the full text
* of the license.
*/
/** api: example[attribute-form]
* Attribute Form
* --------------
* Create a form with fields from attributes read from a WFS
* DescribeFeatureType response
*/
var form;

Ext.onReady(function() {
    Ext.QuickTips.init();

    // create attributes store, fed by a WFS DescribeFeatureType response
    var attributeStore = new GeoExt.data.AttributeStore({
        url: "data/describe_feature_type.xml"
    });

    // form panel whose fields are generated from the attribute records;
    // restored FormPanel wrapper that had been lost, leaving the config
    // properties below orphaned and the braces unbalanced
    form = new Ext.form.FormPanel({
        renderTo: document.body,
        autoScroll: true,
        height: 300,
        width: 350,
        defaults: {
            width: 120,
            maxLengthText: "too long",
            minLengthText: "too short"
        },
        plugins: [
            new GeoExt.plugins.AttributeForm({
                attributeStore: attributeStore,
                recordToFieldOptions: {
                    labelTpl: new Ext.XTemplate(
                        '{name}{[this.getStar(values)]}', {
                            compiled: true,
                            disableFormats: true,
                            // mark mandatory (non-nillable) fields with " *"
                            getStar: function(v) {
                                return v.nillable ? '' : ' *';
                            }
                        }
                    )
                }
            })
        ]
    });

    // trigger the DescribeFeatureType request; fields appear when it loads
    attributeStore.load();
});
form = new Ext.form.FormPanel({
renderTo: document.body, |
<|file_name|>storageinterface.py<|end_file_name|><|fim▁begin|>from config import cloudplatform
storage_adapter = None
if cloudplatform == "google":
import googlestorage
storage_adapter = googlestorage
elif cloudplatform == "aws":
import awsstorage
storage_adapter = awsstorage
elif cloudplatform == "azure":
from FlaskWebProject import azurestorage
storage_adapter = azurestorage
def create_container(bucketID):
""" Creates Container with given bucketID
:param string bucketID: container name
:return boolean: true if succeed
"""
return storage_adapter.create_container(bucketID)
def container_exists(bucketID):
""" Check if container with ID exists
:param string bucketID: container name
:return boolean: true if exists
"""
return storage_adapter.container_exists(bucketID)
def file_exists(bucketID, filename):
""" Checks if file in container exists
:param string bucketID: container name
:param string filename: file to search
:return boolean: true if exists
"""
return storage_adapter.file_exists(bucketID, filename)
def list_files(bucketID):
""" Lists files in specified bucket
:param string bucketID: container name
:return list: list of FileIDs
"""
return storage_adapter.list_files(bucketID)
def delete_file(bucketID, filename):
""" delete file from container
:param string bucketID: container name
:param string filename: file to delete
:return boolean: true if succeed
"""
return storage_adapter.delete_file(bucketID, filename)
def delete_container(bucketID):
""" delete container
:param string bucketID: container name
:return boolean: true if succeed
"""
return storage_adapter.delete_container(bucketID)
def upload_from_path(bucketID, path):
""" Uploads a local file from client to the cloud
:param string bucketID: container name
:param string path: local filepath
:return boolean: true if succeed
"""
return storage_adapter.upload_from_path(bucketID, path)
def upload_from_text(bucketID, filename, text):
""" Uploads text to container in specified file
:param string bucketID: container name<|fim▁hole|> return storage_adapter.upload_from_text(bucketID, filename, text)
def download_file_to_path(bucketID, filename, path):
""" Downloads file from container to local path
:param string bucketID: container name
:param string filename: file to download
:param string path: destination local filepath
:return boolean: true if succeed
"""
return storage_adapter.download_file_to_path(bucketID, filename, path)
def download_file_to_text(bucketID, filename):
""" Downloads file from container to text
:param string bucketID: container name
:param string filename: file to download
:return string: text that got downloaded
"""
return storage_adapter.download_file_to_text(bucketID, filename)
def get_download_url(bucketID, filename):
""" Returns a download for specified file in container
:param string bucketID: container name
:param string filename: file to download
:return string: the url to download the file from
"""
return storage_adapter.get_download_url(bucketID, filename)<|fim▁end|> | :param string filename: destination file
:param string text: text to upload
:return boolean: true if succeed
""" |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), "README.md")) as readme:
README = readme.read()
with open(os.path.join(os.path.dirname(__file__), "requirements.in")) as requirements:
REQUIREMENTS = [
req.split("#egg=")[1] if "#egg=" in req else req
for req in requirements.readlines()
]
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="django-cineclub",
version="3.0.0",
packages=["cine"],
install_requires=REQUIREMENTS,
include_package_data=True,
license="GPL License",
description="A Django app to manage a cineclub.",
long_description=README,
url="https://saurel.me/",
author="Guilhem Saurel",
author_email="[email protected]",
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Topic :: Internet :: WWW/HTTP",<|fim▁hole|><|fim▁end|> | "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
) |
<|file_name|>post_install.js<|end_file_name|><|fim▁begin|>/* eslint-disable */<|fim▁hole|>var fs = require('fs');
// Node 0.10 check
if (!execSync) {
execSync = require('sync-exec');
}
function exec(command) {
execSync(command, {
stdio: [0, 1, 2]
});
}
fs.stat('dist', function(error, stat) {
// Skip building on Travis
if (process.env.TRAVIS) {
return;
}
if (error || !stat.isDirectory()) {
// Create a directory to avoid getting stuck
// in postinstall loop
fs.mkdirSync('dist');
exec('npm install --only=dev');
exec('npm run build');
}
});<|fim▁end|> | // adapted based on rackt/history (MIT)
// Node 0.10+
var execSync = require('child_process').execSync; |
<|file_name|>plugin-trust.e2e.js<|end_file_name|><|fim▁begin|>'use strict';
const { expect } = require('chai');
const kadence = require('..');
const network = require('./fixtures/node-generator');
const trust = require('../lib/plugin-trust');
const sinon = require('sinon');
const async = require('async');
describe('@module kadence/trust + @class UDPTransport', function() {
let clock = null;
let [node1, node2, node3, node4] = network(4, kadence.UDPTransport);
before(function(done) {
this.timeout(12000);
clock = sinon.useFakeTimers(0);
async.eachSeries([node1, node2, node3, node4], (node, next) => {
node.listen(node.contact.port, next);
}, done);
});
after(function() {
clock.restore();
process._getActiveHandles().forEach((h) => h.unref());
})
it('should allow the whitelisted contact', function(done) {
node2.trust = node2.plugin(trust([
{
identity: node1.identity,
methods: ['PING']
}
], trust.MODE_WHITELIST));
node1.trust = node1.plugin(trust([
{
identity: node2.identity,
methods: ['PING']
}
], trust.MODE_WHITELIST));
node1.send('PING', [], [
node2.identity.toString('hex'),
node2.contact
], done);
});
it('should prevent the blacklisted contact', function(done) {
node3.trust = node3.plugin(trust([
{
identity: node1.identity,
methods: ['PING']
}
], trust.MODE_BLACKLIST));
node1.trust.addTrustPolicy({
identity: node3.identity,
methods: ['*']
})
node1.send('PING', [], [
node3.identity.toString('hex'),
node3.contact
], err => {
expect(err.message.includes('Refusing')).to.equal(true);
done();
});
});<|fim▁hole|> identity: node3.identity.toString('hex'),
methods: ['PING']
})
node2.send('PING', [], [
node3.identity.toString('hex'),
node3.contact
], done);
});
it('should prevent the non-whitelisted contact', function(done) {
node4.send('PING', [], [
node2.identity.toString('hex'),
node2.contact
], err => {
expect(err.message.includes('Refusing')).to.equal(true);
done();
});
});
it('should blacklist all nodes from using PING', function(done) {
node3.trust.addTrustPolicy({
identity: '*',
methods: ['PING']
});
node2.send('PING', [], [
node3.identity.toString('hex'),
node3.contact
], err => {
expect(err.message.includes('Refusing')).to.equal(true);
node2.send('PING', [], [
node3.identity.toString('hex'),
node3.contact
], err => {
expect(err.message.includes('Refusing')).to.equal(true);
done();
});
});
});
it('should refuse send to node with missing trust policy', function(done) {
node1.trust.removeTrustPolicy(node2.identity);
node1.send('PING', [], [
node2.identity.toString('hex'),
node2.contact
], err => {
expect(err.message.includes('Refusing')).to.equal(true);
done();
});
});
it('should allow if method is not blacklisted', function(done) {
node2.trust.addTrustPolicy({
identity: node3.identity,
methods: ['PING']
});
node3.trust.addTrustPolicy({
identity: node2.identity,
methods: ['FIND_NODE']
});
node2.send('PING', [], [
node3.identity,
node3.contact
], done);
});
it('should reject if method is not whitelisted', function(done) {
node4.trust = node4.plugin(trust([
{
identity: node2.identity,
methods: ['FIND_NODE']
}
], trust.MODE_WHITELIST));
node2.trust.addTrustPolicy({
identity: node4.identity,
methods: ['PING']
});
node4.send('FIND_NODE', [], [
node2.identity.toString('hex'),
node2.contact
], err => {
expect(err.message.includes('Refusing')).to.equal(true);
done();
});
});
});<|fim▁end|> |
it('should allow the non-blacklisted contact', function(done) {
node2.trust.addTrustPolicy({ |
<|file_name|>param_header.go<|end_file_name|><|fim▁begin|>package sctp
import (
"encoding/binary"
"fmt"
)
type paramHeader struct {
typ paramType
len int
raw []byte
}
const (<|fim▁hole|> paramLengthPlusHeader := paramHeaderLength + len(p.raw)
rawParam := make([]byte, paramLengthPlusHeader)
binary.BigEndian.PutUint16(rawParam[0:], uint16(p.typ))
binary.BigEndian.PutUint16(rawParam[2:], uint16(paramLengthPlusHeader))
copy(rawParam[paramHeaderLength:], p.raw)
return rawParam, nil
}
func (p *paramHeader) unmarshal(raw []byte) {
paramLengthPlusHeader := binary.BigEndian.Uint16(raw[2:])
paramLength := paramLengthPlusHeader - initOptionalVarHeaderLength
p.typ = paramType(binary.BigEndian.Uint16(raw[0:]))
p.raw = raw[paramHeaderLength : paramHeaderLength+paramLength]
p.len = int(paramLengthPlusHeader)
}
func (p *paramHeader) length() int {
return p.len
}
// String makes paramHeader printable
func (p paramHeader) String() string {
return fmt.Sprintf("%s (%d): %s", p.typ, p.len, p.raw)
}<|fim▁end|> | paramHeaderLength = 4
)
func (p *paramHeader) marshal() ([]byte, error) { |
<|file_name|>mysql_replication.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage mysql replication
(c) 2013, Balazs Pocze <[email protected]>
Certain parts are taken from Mark Theunissen's mysqldb module
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
<|fim▁hole|>short_description: Manage MySQL replication
description:
- Manages MySQL server replication, slave, master status get and change master host.
version_added: "1.3"
author: "Balazs Pocze (@banyek)"
options:
mode:
description:
- module operating mode. Could be getslave (SHOW SLAVE STATUS), getmaster (SHOW MASTER STATUS), changemaster (CHANGE MASTER TO), startslave
(START SLAVE), stopslave (STOP SLAVE), resetslave (RESET SLAVE), resetslaveall (RESET SLAVE ALL)
required: False
choices:
- getslave
- getmaster
- changemaster
- stopslave
- startslave
- resetslave
- resetslaveall
default: getslave
master_host:
description:
- same as mysql variable
master_user:
description:
- same as mysql variable
master_password:
description:
- same as mysql variable
master_port:
description:
- same as mysql variable
master_connect_retry:
description:
- same as mysql variable
master_log_file:
description:
- same as mysql variable
master_log_pos:
description:
- same as mysql variable
relay_log_file:
description:
- same as mysql variable
relay_log_pos:
description:
- same as mysql variable
master_ssl:
description:
- same as mysql variable
choices: [ 0, 1 ]
master_ssl_ca:
description:
- same as mysql variable
master_ssl_capath:
description:
- same as mysql variable
master_ssl_cert:
description:
- same as mysql variable
master_ssl_key:
description:
- same as mysql variable
master_ssl_cipher:
description:
- same as mysql variable
master_auto_position:
description:
- does the host uses GTID based replication or not
required: false
default: null
version_added: "2.0"
extends_documentation_fragment: mysql
'''
EXAMPLES = '''
# Stop mysql slave thread
- mysql_replication:
mode: stopslave
# Get master binlog file name and binlog position
- mysql_replication:
mode: getmaster
# Change master to master server 192.0.2.1 and use binary log 'mysql-bin.000009' with position 4578
- mysql_replication:
mode: changemaster
master_host: 192.0.2.1
master_log_file: mysql-bin.000009
master_log_pos: 4578
# Check slave status using port 3308
- mysql_replication:
mode: getslave
login_host: ansible.example.com
login_port: 3308
'''
import os
import warnings
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.mysql import mysql_connect
from ansible.module_utils.pycompat24 import get_exception
def get_master_status(cursor):
cursor.execute("SHOW MASTER STATUS")
masterstatus = cursor.fetchone()
return masterstatus
def get_slave_status(cursor):
cursor.execute("SHOW SLAVE STATUS")
slavestatus = cursor.fetchone()
return slavestatus
def stop_slave(cursor):
try:
cursor.execute("STOP SLAVE")
stopped = True
except:
stopped = False
return stopped
def reset_slave(cursor):
try:
cursor.execute("RESET SLAVE")
reset = True
except:
reset = False
return reset
def reset_slave_all(cursor):
try:
cursor.execute("RESET SLAVE ALL")
reset = True
except:
reset = False
return reset
def start_slave(cursor):
try:
cursor.execute("START SLAVE")
started = True
except:
started = False
return started
def changemaster(cursor, chm, chm_params):
sql_param = ",".join(chm)
query = 'CHANGE MASTER TO %s' % sql_param
cursor.execute(query, chm_params)
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default=None),
login_password=dict(default=None, no_log=True),
login_host=dict(default="localhost"),
login_port=dict(default=3306, type='int'),
login_unix_socket=dict(default=None),
mode=dict(default="getslave", choices=["getmaster", "getslave", "changemaster", "stopslave", "startslave", "resetslave", "resetslaveall"]),
master_auto_position=dict(default=False, type='bool'),
master_host=dict(default=None),
master_user=dict(default=None),
master_password=dict(default=None, no_log=True),
master_port=dict(default=None, type='int'),
master_connect_retry=dict(default=None, type='int'),
master_log_file=dict(default=None),
master_log_pos=dict(default=None, type='int'),
relay_log_file=dict(default=None),
relay_log_pos=dict(default=None, type='int'),
master_ssl=dict(default=False, type='bool'),
master_ssl_ca=dict(default=None),
master_ssl_capath=dict(default=None),
master_ssl_cert=dict(default=None),
master_ssl_key=dict(default=None),
master_ssl_cipher=dict(default=None),
connect_timeout=dict(default=30, type='int'),
config_file=dict(default="~/.my.cnf", type='path'),
ssl_cert=dict(default=None),
ssl_key=dict(default=None),
ssl_ca=dict(default=None),
)
)
mode = module.params["mode"]
master_host = module.params["master_host"]
master_user = module.params["master_user"]
master_password = module.params["master_password"]
master_port = module.params["master_port"]
master_connect_retry = module.params["master_connect_retry"]
master_log_file = module.params["master_log_file"]
master_log_pos = module.params["master_log_pos"]
relay_log_file = module.params["relay_log_file"]
relay_log_pos = module.params["relay_log_pos"]
master_ssl = module.params["master_ssl"]
master_ssl_ca = module.params["master_ssl_ca"]
master_ssl_capath = module.params["master_ssl_capath"]
master_ssl_cert = module.params["master_ssl_cert"]
master_ssl_key = module.params["master_ssl_key"]
master_ssl_cipher = module.params["master_ssl_cipher"]
master_auto_position = module.params["master_auto_position"]
ssl_cert = module.params["ssl_cert"]
ssl_key = module.params["ssl_key"]
ssl_ca = module.params["ssl_ca"]
connect_timeout = module.params['connect_timeout']
config_file = module.params['config_file']
if not mysqldb_found:
module.fail_json(msg="the python mysqldb module is required")
else:
warnings.filterwarnings('error', category=MySQLdb.Warning)
login_password = module.params["login_password"]
login_user = module.params["login_user"]
try:
cursor = mysql_connect(module, login_user, login_password, config_file, ssl_cert, ssl_key, ssl_ca, None, 'MySQLdb.cursors.DictCursor',
connect_timeout=connect_timeout)
except Exception:
e = get_exception()
if os.path.exists(config_file):
module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or %s has the credentials. "
"Exception message: %s" % (config_file, e))
else:
module.fail_json(msg="unable to find %s. Exception message: %s" % (config_file, e))
if mode in "getmaster":
status = get_master_status(cursor)
if not isinstance(status, dict):
status = dict(Is_Master=False, msg="Server is not configured as mysql master")
else:
status['Is_Master'] = True
module.exit_json(**status)
elif mode in "getslave":
status = get_slave_status(cursor)
if not isinstance(status, dict):
status = dict(Is_Slave=False, msg="Server is not configured as mysql slave")
else:
status['Is_Slave'] = True
module.exit_json(**status)
elif mode in "changemaster":
chm = []
chm_params = {}
result = {}
if master_host:
chm.append("MASTER_HOST=%(master_host)s")
chm_params['master_host'] = master_host
if master_user:
chm.append("MASTER_USER=%(master_user)s")
chm_params['master_user'] = master_user
if master_password:
chm.append("MASTER_PASSWORD=%(master_password)s")
chm_params['master_password'] = master_password
if master_port is not None:
chm.append("MASTER_PORT=%(master_port)s")
chm_params['master_port'] = master_port
if master_connect_retry is not None:
chm.append("MASTER_CONNECT_RETRY=%(master_connect_retry)s")
chm_params['master_connect_retry'] = master_connect_retry
if master_log_file:
chm.append("MASTER_LOG_FILE=%(master_log_file)s")
chm_params['master_log_file'] = master_log_file
if master_log_pos is not None:
chm.append("MASTER_LOG_POS=%(master_log_pos)s")
chm_params['master_log_pos'] = master_log_pos
if relay_log_file:
chm.append("RELAY_LOG_FILE=%(relay_log_file)s")
chm_params['relay_log_file'] = relay_log_file
if relay_log_pos is not None:
chm.append("RELAY_LOG_POS=%(relay_log_pos)s")
chm_params['relay_log_pos'] = relay_log_pos
if master_ssl:
chm.append("MASTER_SSL=1")
if master_ssl_ca:
chm.append("MASTER_SSL_CA=%(master_ssl_ca)s")
chm_params['master_ssl_ca'] = master_ssl_ca
if master_ssl_capath:
chm.append("MASTER_SSL_CAPATH=%(master_ssl_capath)s")
chm_params['master_ssl_capath'] = master_ssl_capath
if master_ssl_cert:
chm.append("MASTER_SSL_CERT=%(master_ssl_cert)s")
chm_params['master_ssl_cert'] = master_ssl_cert
if master_ssl_key:
chm.append("MASTER_SSL_KEY=%(master_ssl_key)s")
chm_params['master_ssl_key'] = master_ssl_key
if master_ssl_cipher:
chm.append("MASTER_SSL_CIPHER=%(master_ssl_cipher)s")
chm_params['master_ssl_cipher'] = master_ssl_cipher
if master_auto_position:
chm.append("MASTER_AUTO_POSITION = 1")
try:
changemaster(cursor, chm, chm_params)
except MySQLdb.Warning:
e = get_exception()
result['warning'] = str(e)
except Exception:
e = get_exception()
module.fail_json(msg='%s. Query == CHANGE MASTER TO %s' % (e, chm))
result['changed'] = True
module.exit_json(**result)
elif mode in "startslave":
started = start_slave(cursor)
if started is True:
module.exit_json(msg="Slave started ", changed=True)
else:
module.exit_json(msg="Slave already started (Or cannot be started)", changed=False)
elif mode in "stopslave":
stopped = stop_slave(cursor)
if stopped is True:
module.exit_json(msg="Slave stopped", changed=True)
else:
module.exit_json(msg="Slave already stopped", changed=False)
elif mode in "resetslave":
reset = reset_slave(cursor)
if reset is True:
module.exit_json(msg="Slave reset", changed=True)
else:
module.exit_json(msg="Slave already reset", changed=False)
elif mode in "resetslaveall":
reset = reset_slave_all(cursor)
if reset is True:
module.exit_json(msg="Slave reset", changed=True)
else:
module.exit_json(msg="Slave already reset", changed=False)
if __name__ == '__main__':
main()
warnings.simplefilter("ignore")<|fim▁end|> | DOCUMENTATION = '''
---
module: mysql_replication
|
<|file_name|>TestImpGPSI_MNIST.py<|end_file_name|><|fim▁begin|>##################################################################
# Code for testing the variational Multi-Stage Generative Model. #
##################################################################
# basic python
import numpy as np
import numpy.random as npr
import cPickle
# theano business
import theano
import theano.tensor as T
# phil's sweetness
import utils
from NetLayers import relu_actfun, softplus_actfun, tanh_actfun
from InfNet import InfNet
from HydraNet import HydraNet
from GPSImputer import GPSImputer, load_gpsimputer_from_file
from load_data import load_udm, load_tfd, load_svhn_gray, load_binarized_mnist
from HelperFuncs import construct_masked_data, shift_and_scale_into_01, \
row_shuffle, to_fX
RESULT_PATH = "IMP_MNIST_GPSI/"
###############################
###############################
## TEST GPS IMPUTER ON MNIST ##
###############################
###############################
def test_mnist(step_type='add',
imp_steps=6,
occ_dim=15,
drop_prob=0.0):
#########################################
# Format the result tag more thoroughly #
#########################################
dp_int = int(100.0 * drop_prob)
result_tag = "{}RELU_GPSI_OD{}_DP{}_IS{}_{}_NA".format(RESULT_PATH, occ_dim, dp_int, imp_steps, step_type)
##########################
# Get some training data #
##########################
rng = np.random.RandomState(1234)
Xtr, Xva, Xte = load_binarized_mnist(data_path='./data/')
Xtr = np.vstack((Xtr, Xva))
Xva = Xte
#del Xte
tr_samples = Xtr.shape[0]
va_samples = Xva.shape[0]
<|fim▁hole|> # dataset = 'data/mnist.pkl.gz'
# datasets = load_udm(dataset, as_shared=False, zero_mean=False)
# Xtr = datasets[0][0]
# Xva = datasets[1][0]
# Xte = datasets[2][0]
# # Merge validation set and training set, and test on test set.
# #Xtr = np.concatenate((Xtr, Xva), axis=0)
# #Xva = Xte
# Xtr = to_fX(shift_and_scale_into_01(Xtr))
# Xva = to_fX(shift_and_scale_into_01(Xva))
# tr_samples = Xtr.shape[0]
# va_samples = Xva.shape[0]
batch_size = 200
batch_reps = 1
all_pix_mean = np.mean(np.mean(Xtr, axis=1))
data_mean = to_fX( all_pix_mean * np.ones((Xtr.shape[1],)) )
############################################################
# Setup some parameters for the Iterative Refinement Model #
############################################################
x_dim = Xtr.shape[1]
s_dim = x_dim
#s_dim = 300
z_dim = 100
init_scale = 0.6
x_in_sym = T.matrix('x_in_sym')
x_out_sym = T.matrix('x_out_sym')
x_mask_sym = T.matrix('x_mask_sym')
#################
# p_zi_given_xi #
#################
params = {}
shared_config = [(x_dim + x_dim), 500, 500]
top_config = [shared_config[-1], z_dim]
params['shared_config'] = shared_config
params['mu_config'] = top_config
params['sigma_config'] = top_config
params['activation'] = relu_actfun
params['init_scale'] = init_scale
params['vis_drop'] = 0.0
params['hid_drop'] = 0.0
params['bias_noise'] = 0.0
params['input_noise'] = 0.0
params['build_theano_funcs'] = False
p_zi_given_xi = InfNet(rng=rng, Xd=x_in_sym, \
params=params, shared_param_dicts=None)
p_zi_given_xi.init_biases(0.0)
###################
# p_sip1_given_zi #
###################
params = {}
shared_config = [z_dim, 500, 500]
output_config = [s_dim, s_dim, s_dim]
params['shared_config'] = shared_config
params['output_config'] = output_config
params['activation'] = relu_actfun
params['init_scale'] = init_scale
params['vis_drop'] = 0.0
params['hid_drop'] = 0.0
params['bias_noise'] = 0.0
params['input_noise'] = 0.0
params['build_theano_funcs'] = False
p_sip1_given_zi = HydraNet(rng=rng, Xd=x_in_sym, \
params=params, shared_param_dicts=None)
p_sip1_given_zi.init_biases(0.0)
################
# p_x_given_si #
################
params = {}
shared_config = [s_dim]
output_config = [x_dim, x_dim]
params['shared_config'] = shared_config
params['output_config'] = output_config
params['activation'] = relu_actfun
params['init_scale'] = init_scale
params['vis_drop'] = 0.0
params['hid_drop'] = 0.0
params['bias_noise'] = 0.0
params['input_noise'] = 0.0
params['build_theano_funcs'] = False
p_x_given_si = HydraNet(rng=rng, Xd=x_in_sym, \
params=params, shared_param_dicts=None)
p_x_given_si.init_biases(0.0)
#################
# q_zi_given_xi #
#################
params = {}
shared_config = [(x_dim + x_dim), 500, 500]
top_config = [shared_config[-1], z_dim]
params['shared_config'] = shared_config
params['mu_config'] = top_config
params['sigma_config'] = top_config
params['activation'] = relu_actfun
params['init_scale'] = init_scale
params['vis_drop'] = 0.0
params['hid_drop'] = 0.0
params['bias_noise'] = 0.0
params['input_noise'] = 0.0
params['build_theano_funcs'] = False
q_zi_given_xi = InfNet(rng=rng, Xd=x_in_sym, \
params=params, shared_param_dicts=None)
q_zi_given_xi.init_biases(0.0)
###########################################################
# Define parameters for the GPSImputer, and initialize it #
###########################################################
print("Building the GPSImputer...")
gpsi_params = {}
gpsi_params['x_dim'] = x_dim
gpsi_params['z_dim'] = z_dim
gpsi_params['s_dim'] = s_dim
# switch between direct construction and construction via p_x_given_si
gpsi_params['use_p_x_given_si'] = False
gpsi_params['imp_steps'] = imp_steps
gpsi_params['step_type'] = step_type
gpsi_params['x_type'] = 'bernoulli'
gpsi_params['obs_transform'] = 'sigmoid'
GPSI = GPSImputer(rng=rng,
x_in=x_in_sym, x_out=x_out_sym, x_mask=x_mask_sym, \
p_zi_given_xi=p_zi_given_xi, \
p_sip1_given_zi=p_sip1_given_zi, \
p_x_given_si=p_x_given_si, \
q_zi_given_xi=q_zi_given_xi, \
params=gpsi_params, \
shared_param_dicts=None)
################################################################
# Apply some updates, to check that they aren't totally broken #
################################################################
log_name = "{}_RESULTS.txt".format(result_tag)
out_file = open(log_name, 'wb')
costs = [0. for i in range(10)]
learn_rate = 0.0002
momentum = 0.5
batch_idx = np.arange(batch_size) + tr_samples
for i in range(250000):
scale = min(1.0, ((i+1) / 5000.0))
lam_scale = 1.0 - min(1.0, ((i+1) / 100000.0)) # decays from 1.0->0.0
if (((i + 1) % 15000) == 0):
learn_rate = learn_rate * 0.93
if (i > 10000):
momentum = 0.90
else:
momentum = 0.75
# get the indices of training samples for this batch update
batch_idx += batch_size
if (np.max(batch_idx) >= tr_samples):
# we finished an "epoch", so we rejumble the training set
Xtr = row_shuffle(Xtr)
batch_idx = np.arange(batch_size)
# set sgd and objective function hyperparams for this update
GPSI.set_sgd_params(lr=scale*learn_rate, \
mom_1=scale*momentum, mom_2=0.98)
GPSI.set_train_switch(1.0)
GPSI.set_lam_nll(lam_nll=1.0)
GPSI.set_lam_kld(lam_kld_p=0.05, lam_kld_q=0.95, lam_kld_g=(0.1 * lam_scale))
GPSI.set_lam_l2w(1e-5)
# perform a minibatch update and record the cost for this batch
xb = to_fX( Xtr.take(batch_idx, axis=0) )
xi, xo, xm = construct_masked_data(xb, drop_prob=drop_prob, \
occ_dim=occ_dim, data_mean=data_mean)
result = GPSI.train_joint(xi, xo, xm, batch_reps)
# do diagnostics and general training tracking
costs = [(costs[j] + result[j]) for j in range(len(result)-1)]
if ((i % 250) == 0):
costs = [(v / 250.0) for v in costs]
str1 = "-- batch {0:d} --".format(i)
str2 = " joint_cost: {0:.4f}".format(costs[0])
str3 = " nll_bound : {0:.4f}".format(costs[1])
str4 = " nll_cost : {0:.4f}".format(costs[2])
str5 = " kld_cost : {0:.4f}".format(costs[3])
str6 = " reg_cost : {0:.4f}".format(costs[4])
joint_str = "\n".join([str1, str2, str3, str4, str5, str6])
print(joint_str)
out_file.write(joint_str+"\n")
out_file.flush()
costs = [0.0 for v in costs]
if ((i % 1000) == 0):
Xva = row_shuffle(Xva)
# record an estimate of performance on the test set
xi, xo, xm = construct_masked_data(Xva[0:5000], drop_prob=drop_prob, \
occ_dim=occ_dim, data_mean=data_mean)
nll, kld = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10)
vfe = np.mean(nll) + np.mean(kld)
str1 = " va_nll_bound : {}".format(vfe)
str2 = " va_nll_term : {}".format(np.mean(nll))
str3 = " va_kld_q2p : {}".format(np.mean(kld))
joint_str = "\n".join([str1, str2, str3])
print(joint_str)
out_file.write(joint_str+"\n")
out_file.flush()
if ((i % 2000) == 0):
GPSI.save_to_file("{}_PARAMS.pkl".format(result_tag))
# Get some validation samples for evaluating model performance
xb = to_fX( Xva[0:100] )
xi, xo, xm = construct_masked_data(xb, drop_prob=drop_prob, \
occ_dim=occ_dim, data_mean=data_mean)
xi = np.repeat(xi, 2, axis=0)
xo = np.repeat(xo, 2, axis=0)
xm = np.repeat(xm, 2, axis=0)
# draw some sample imputations from the model
samp_count = xi.shape[0]
_, model_samps = GPSI.sample_imputer(xi, xo, xm, use_guide_policy=False)
seq_len = len(model_samps)
seq_samps = np.zeros((seq_len*samp_count, model_samps[0].shape[1]))
idx = 0
for s1 in range(samp_count):
for s2 in range(seq_len):
seq_samps[idx] = model_samps[s2][s1]
idx += 1
file_name = "{0:s}_samples_ng_b{1:d}.png".format(result_tag, i)
utils.visualize_samples(seq_samps, file_name, num_rows=20)
# get visualizations of policy parameters
# file_name = "{0:s}_gen_step_weights_b{1:d}.png".format(result_tag, i)
# W = GPSI.gen_step_weights.get_value(borrow=False)
# utils.visualize_samples(W[:,:x_dim], file_name, num_rows=20)
# file_name = "{0:s}_gen_write_gate_weights_b{1:d}.png".format(result_tag, i)
# W = GPSI.gen_write_gate_weights.get_value(borrow=False)
# utils.visualize_samples(W[:,:x_dim], file_name, num_rows=20)
# file_name = "{0:s}_gen_erase_gate_weights_b{1:d}.png".format(result_tag, i)
# W = GPSI.gen_erase_gate_weights.get_value(borrow=False)
# utils.visualize_samples(W[:,:x_dim], file_name, num_rows=20)
# file_name = "{0:s}_gen_inf_weights_b{1:d}.png".format(result_tag, i)
# W = GPSI.gen_inf_weights.get_value(borrow=False).T
# utils.visualize_samples(W[:,:x_dim], file_name, num_rows=20)
#################################
#################################
## CHECK MNIST IMPUTER RESULTS ##
#################################
#################################
def test_mnist_results(step_type='add',
imp_steps=6,
occ_dim=15,
drop_prob=0.0):
#########################################
# Format the result tag more thoroughly #
#########################################
dp_int = int(100.0 * drop_prob)
result_tag = "{}GPSI_OD{}_DP{}_IS{}_{}_NA".format(RESULT_PATH, occ_dim, dp_int, imp_steps, step_type)
##########################
# Get some training data #
##########################
rng = np.random.RandomState(1234)
Xtr, Xva, Xte = load_binarized_mnist(data_path='./data/')
Xtr = np.vstack((Xtr, Xva))
Xva = Xte
#del Xte
tr_samples = Xtr.shape[0]
va_samples = Xva.shape[0]
##########################
# Get some training data #
##########################
# rng = np.random.RandomState(1234)
# dataset = 'data/mnist.pkl.gz'
# datasets = load_udm(dataset, as_shared=False, zero_mean=False)
# Xtr = datasets[0][0]
# Xva = datasets[1][0]
# Xte = datasets[2][0]
# # Merge validation set and training set, and test on test set.
# #Xtr = np.concatenate((Xtr, Xva), axis=0)
# #Xva = Xte
# Xtr = to_fX(shift_and_scale_into_01(Xtr))
# Xva = to_fX(shift_and_scale_into_01(Xva))
# tr_samples = Xtr.shape[0]
# va_samples = Xva.shape[0]
batch_size = 250
batch_reps = 1
all_pix_mean = np.mean(np.mean(Xtr, axis=1))
data_mean = to_fX( all_pix_mean * np.ones((Xtr.shape[1],)) )
# Load parameters from a previously trained model
print("Testing model load from file...")
GPSI = load_gpsimputer_from_file(f_name="{}_PARAMS.pkl".format(result_tag), \
rng=rng)
################################################################
# Apply some updates, to check that they aren't totally broken #
################################################################
log_name = "{}_FINAL_RESULTS_NEW.txt".format(result_tag)
out_file = open(log_name, 'wb')
Xva = row_shuffle(Xva)
# record an estimate of performance on the test set
str0 = "GUIDED SAMPLE BOUND:"
print(str0)
xi, xo, xm = construct_masked_data(Xva[:5000], drop_prob=drop_prob, \
occ_dim=occ_dim, data_mean=data_mean)
nll_0, kld_0 = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10, \
use_guide_policy=True)
xi, xo, xm = construct_masked_data(Xva[5000:], drop_prob=drop_prob, \
occ_dim=occ_dim, data_mean=data_mean)
nll_1, kld_1 = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10, \
use_guide_policy=True)
nll = np.concatenate((nll_0, nll_1))
kld = np.concatenate((kld_0, kld_1))
vfe = np.mean(nll) + np.mean(kld)
str1 = " va_nll_bound : {}".format(vfe)
str2 = " va_nll_term : {}".format(np.mean(nll))
str3 = " va_kld_q2p : {}".format(np.mean(kld))
joint_str = "\n".join([str0, str1, str2, str3])
print(joint_str)
out_file.write(joint_str+"\n")
out_file.flush()
# record an estimate of performance on the test set
str0 = "UNGUIDED SAMPLE BOUND:"
print(str0)
xi, xo, xm = construct_masked_data(Xva[:5000], drop_prob=drop_prob, \
occ_dim=occ_dim, data_mean=data_mean)
nll_0, kld_0 = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10, \
use_guide_policy=False)
xi, xo, xm = construct_masked_data(Xva[5000:], drop_prob=drop_prob, \
occ_dim=occ_dim, data_mean=data_mean)
nll_1, kld_1 = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10, \
use_guide_policy=False)
nll = np.concatenate((nll_0, nll_1))
kld = np.concatenate((kld_0, kld_1))
str1 = " va_nll_bound : {}".format(np.mean(nll))
str2 = " va_nll_term : {}".format(np.mean(nll))
str3 = " va_kld_q2p : {}".format(np.mean(kld))
joint_str = "\n".join([str0, str1, str2, str3])
print(joint_str)
out_file.write(joint_str+"\n")
out_file.flush()
if __name__=="__main__":
#########
# MNIST #
#########
# TRAINING
#test_mnist(step_type='add', occ_dim=14, drop_prob=0.0)
#test_mnist(step_type='add', occ_dim=16, drop_prob=0.0)
#test_mnist(step_type='add', occ_dim=0, drop_prob=0.6)
#test_mnist(step_type='add', occ_dim=0, drop_prob=0.8)
#test_mnist(step_type='jump', occ_dim=14, drop_prob=0.0)
#test_mnist(step_type='jump', occ_dim=16, drop_prob=0.0)
#test_mnist(step_type='jump', occ_dim=0, drop_prob=0.6)
#test_mnist(step_type='jump', occ_dim=0, drop_prob=0.8)
#test_mnist(step_type='add', imp_steps=1, occ_dim=0, drop_prob=0.9)
#test_mnist(step_type='add', imp_steps=2, occ_dim=0, drop_prob=0.9)
test_mnist(step_type='add', imp_steps=5, occ_dim=0, drop_prob=0.9)
#test_mnist(step_type='add', imp_steps=10, occ_dim=0, drop_prob=0.9)
#test_mnist(step_type='add', imp_steps=15, occ_dim=0, drop_prob=0.9)
# RESULTS
# test_mnist_results(step_type='add', occ_dim=14, drop_prob=0.0)
# test_mnist_results(step_type='add', occ_dim=16, drop_prob=0.0)
# test_mnist_results(step_type='add', occ_dim=0, drop_prob=0.6)
# test_mnist_results(step_type='add', occ_dim=0, drop_prob=0.7)
# test_mnist_results(step_type='add', occ_dim=0, drop_prob=0.8)
# test_mnist_results(step_type='add', occ_dim=0, drop_prob=0.9)
# test_mnist_results(step_type='jump', occ_dim=14, drop_prob=0.0)
# test_mnist_results(step_type='jump', occ_dim=16, drop_prob=0.0)
# test_mnist_results(step_type='jump', occ_dim=0, drop_prob=0.6)
# test_mnist_results(step_type='jump', occ_dim=0, drop_prob=0.7)
# test_mnist_results(step_type='jump', occ_dim=0, drop_prob=0.8)
# test_mnist_results(step_type='jump', occ_dim=0, drop_prob=0.9)
#test_mnist_results(step_type='add', imp_steps=1, occ_dim=0, drop_prob=0.9)
#test_mnist_results(step_type='add', imp_steps=2, occ_dim=0, drop_prob=0.9)
test_mnist_results(step_type='add', imp_steps=5, occ_dim=0, drop_prob=0.9)
#test_mnist_results(step_type='add', imp_steps=10, occ_dim=0, drop_prob=0.9)
#test_mnist_results(step_type='add', imp_steps=15, occ_dim=0, drop_prob=0.9)<|fim▁end|> | ##########################
# Get some training data #
##########################
# rng = np.random.RandomState(1234) |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>// This file is generated automatically by `scripts/build/fp.js`. Please, don't change it.
import fn from '../../isWeekend/index'<|fim▁hole|>export default convertToFP(fn, 1)<|fim▁end|> | import convertToFP from '../_lib/convertToFP/index'
|
<|file_name|>all.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | //= require mes/modeler |
<|file_name|>Tools.Type.Spec.ts<|end_file_name|><|fim▁begin|>/// <reference path="../typings/tsd.d.ts" />
import Forge = require('forge-di');
import Smithy = require('../src/index');
class CustomType {};
describe('Smithy.Tools.Type', () => {
beforeEach(() => {<|fim▁hole|> var tool = new Smithy.Tools.Type({name: 'name', target: CustomType});
expect(tool.targetType).toEqual(Smithy.TargetType.Type);
});
it('should throw exception when target is not a function', () => {
expect(() => {
var tool = new Smithy.Tools.Type({name: 'name', target: <any>{}});
}).toThrow("'target' is required and must be a function");
});
});<|fim▁end|> | });
it('should set targetType property to Smithy.TargetType.Type on new instance', () => { |
<|file_name|>Lab2InputOutput.java<|end_file_name|><|fim▁begin|>// John Meyer
// CSE 271 F
// Dr. Angel Bravo
import java.util.Scanner;
import java.io.*;
/**
* Copies a file with line numbers prefixed to every line
*/
public class Lab2InputOutput {
public static void main(String[] args) throws Exception {
// Define variables
Scanner keyboardReader = new Scanner(System.in);
String inputFileName;
String outputFileName;
// Check arguments
if (args.length == 0) {
System.out.println("Usage: java Lab2InputOutput /path/to/file");
return;
}
inputFileName = args[0];
// Find input file
File inputFile = new File(inputFileName);
Scanner fileInput = new Scanner(inputFile);
// Get output file name
System.out.print("Output File Name: ");
outputFileName = keyboardReader.next();
File outputFile = new File(outputFileName);<|fim▁hole|> PrintWriter fileOutput = new PrintWriter(outputFile);
String lineContent;
for (int lineNumber = 1; fileInput.hasNext(); lineNumber++) {
lineContent = fileInput.nextLine();
fileOutput.printf("/* %d */ %s%n", lineNumber, lineContent);
}
fileInput.close();
fileOutput.close();
} // end method main
} // end class Lab2InputOutput<|fim▁end|> |
// Start copying |
<|file_name|>usage.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Usage(Model):
"""Describes Storage Resource Usage.
:param unit: The unit of measurement. Possible values include: 'Count',
'Bytes', 'Seconds', 'Percent', 'CountsPerSecond', 'BytesPerSecond'
:type unit: str or :class:`UsageUnit
<azure.mgmt.storage.v2015_06_15.models.UsageUnit>`
:param current_value: The current count of the allocated resources in the
subscription.
:type current_value: int
:param limit: The maximum count of the resources that can be allocated in
the subscription.
:type limit: int
:param name: The name of the type of usage.
:type name: :class:`UsageName
<azure.mgmt.storage.v2015_06_15.models.UsageName>`
"""
_validation = {
'unit': {'required': True},
'current_value': {'required': True},
'limit': {'required': True},
'name': {'required': True},<|fim▁hole|> }
_attribute_map = {
'unit': {'key': 'unit', 'type': 'UsageUnit'},
'current_value': {'key': 'currentValue', 'type': 'int'},
'limit': {'key': 'limit', 'type': 'int'},
'name': {'key': 'name', 'type': 'UsageName'},
}
def __init__(self, unit, current_value, limit, name):
self.unit = unit
self.current_value = current_value
self.limit = limit
self.name = name<|fim▁end|> | |
<|file_name|>EntityToAnalyticalServiceMapper.java<|end_file_name|><|fim▁begin|>/*L
* Copyright Georgetown University, Washington University.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cab2b/LICENSE.txt for details.
*/
package edu.wustl.cab2b.server.analyticalservice;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import edu.common.dynamicextensions.domaininterface.EntityInterface;
import edu.wustl.cab2b.common.analyticalservice.ServiceDetailsInterface;
import edu.wustl.cab2b.common.exception.RuntimeException;
/**
* This is a singleton class which parses the EntityToAnalyticalServiceMapping.xml file and stores the mapping information into an internal map.
* This class provieds the methods to get the service interface and the service invoker interface.
* @author chetan_patil
*/
public class EntityToAnalyticalServiceMapper {
/**
* Self reference
*/
private static EntityToAnalyticalServiceMapper entityServiceMapper = null;
/**
* Map to store the entity to service mapping
*/
private Map<String, List<String>> entityServiceNameMap = new HashMap<String, List<String>>();
/**
* Map to store the service to method mapping
*/
private Map<String, List<String>> serviceNameDetailClassNameMap = new HashMap<String, List<String>>();
/**
* Map to store the service detail class and the corresponding service invoker class.
*/
private Map<String, String> serviceDetailInvokerMap = new HashMap<String, String>();
/**
* Name of the Entity Service Mapper file
*/
private static final String ENTITY_SERVICE_MAPPER_FILENAME = "EntityToAnalyticalServiceMapping.xml";
/**
* Entity tag
*/
private static final String ENTITY = "entity";
/**
* Service tag
*/
private static final String SERVICE = "service";
/**
* Method tag
*/
private static final String METHOD = "method";
/**
* Name attribute
*/
private static final String ATTRIBUTE_NAME = "name";
/**
* Service Detail Class attribute
*/
private static final String ATTRIBUTE_SERVICE_DETAIL_CLASS = "serviceDetailClass";
/**
* Service Invoker Class attribute
*/
private static final String ATTRIBUTE_SERVICE_INVOKER_CLASS = "serviceInvokerClass";
/**
* Service name attribute
*/
private static final String ATTRIBUTE_SERVICE_NAME = "serviceName";
/**
* Private constructor
*/
private EntityToAnalyticalServiceMapper() {
parseEntityServiceMapperXMLFile();
}
/**
* This method returns an instance of this class
* @return an instance of this class
*/
public static synchronized EntityToAnalyticalServiceMapper getInstance() {
if (entityServiceMapper == null) {
entityServiceMapper = new EntityToAnalyticalServiceMapper();
}
return entityServiceMapper;
}
/**
* This method parses the EntityServiceMapper.XML file and stores the parsed data into an internally maintained Maps.
*/
private void parseEntityServiceMapperXMLFile() {
//Read the xml file
InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream(
ENTITY_SERVICE_MAPPER_FILENAME);
if (inputStream == null) {
throw new RuntimeException("File not found: " + ENTITY_SERVICE_MAPPER_FILENAME);
}
//Parse xml into the Document
Document document = null;
try {
document = new SAXReader().read(inputStream);
} catch (DocumentException e) {
throw new RuntimeException("Unable to parse XML file: " + ENTITY_SERVICE_MAPPER_FILENAME, e);
}
//Traverse and fetch the data from the Document
Element entityServiceMapperElement = document.getRootElement();
if (entityServiceMapperElement != null) {
List<Element> serviceElementList = entityServiceMapperElement.elements(SERVICE);
if (serviceElementList == null || serviceElementList.isEmpty()) {
throw new RuntimeException("Invalid XML file: Service entries not found.");
}
registerServiceElements(serviceElementList);
List<Element> entityElementList = entityServiceMapperElement.elements(ENTITY);
if (entityElementList == null || entityElementList.isEmpty()) {
throw new RuntimeException("Invalid XML file: Entity entries not found.");
}
registerEntityElements(entityElementList);
} else {
throw new RuntimeException("Invalid XML file: Root element not found.");
}
}
/**
* This method stores the data of all the service tags into serviceMethodMap
* @param serviceElementList the root element of the XML document
*/
private void registerServiceElements(List<Element> serviceElementList) {
for (Element serviceElement : serviceElementList) {
List<String> serviceDetailClassList = new ArrayList<String>();
List<Element> methodElementList = serviceElement.elements(METHOD);
for (Element methodElement : methodElementList) {
String serviceDetailClassName = methodElement.attributeValue(ATTRIBUTE_SERVICE_DETAIL_CLASS);
String serviceInvokerClassName = methodElement.attributeValue(ATTRIBUTE_SERVICE_INVOKER_CLASS);
if (!serviceDetailClassList.contains(serviceDetailClassName)) {
serviceDetailClassList.add(serviceDetailClassName);
}
if (serviceDetailInvokerMap.get(serviceDetailClassName) == null) {
serviceDetailInvokerMap.put(serviceDetailClassName, serviceInvokerClassName);
}
}
String serviceName = serviceElement.attributeValue(ATTRIBUTE_NAME);
if (serviceNameDetailClassNameMap.get(serviceName) == null) {
serviceNameDetailClassNameMap.put(serviceName, serviceDetailClassList);
}
}
}
/**
* This method stores the data of all the entity tags into entityServiceMap
* @param entityElementList the root element of the XML document
*/
private void registerEntityElements(List<Element> entityElementList) {
for (Element entityElement : entityElementList) {
String entityName = entityElement.attributeValue(ATTRIBUTE_NAME);
String serviceName = entityElement.attributeValue(ATTRIBUTE_SERVICE_NAME);
List<String> serviceList = entityServiceNameMap.get(entityName);
if (serviceList == null) {
serviceList = new ArrayList<String>();
entityServiceNameMap.put(entityName, serviceList);
}
serviceList.add(serviceName);
}
}
/**
* This method returns the instance given the respective class name.
* @param className name of the class
* @return an instance of the given class name
*/
private <E> E getInstance(String className, Class<E> clazz) {
Object instance = null;
try {
Class classDefinition = Class.forName(className);
instance = classDefinition.newInstance();
} catch (InstantiationException e) {
} catch (IllegalAccessException e) {
} catch (ClassNotFoundException e) {
}
Class<?> instanceClass = instance.getClass();
if (!clazz.isAssignableFrom(instanceClass)) {
throw new RuntimeException(instanceClass.getName() + " does not implement the interface "
+ clazz.getName());
}
return (E) instance;
}
/**
* This method returns the List of the service names of the respective givne entity name.
* @param entityName the name of the entity
* @return the List of the service names
*/
private List<String> getServiceDetailClassNames(String entityName) {
List<String> serviceDetailClassList = new ArrayList<String>();
List<String> serviceNameList = entityServiceNameMap.get(entityName);
if (serviceNameList != null) {
for (String serviceName : serviceNameList) {
List<String> serviceDetailClassNameList = serviceNameDetailClassNameMap.get(serviceName);
if (serviceDetailClassNameList != null) {
<|fim▁hole|> }
}
}
return serviceDetailClassList;
}
/*
* (non-Javadoc)
* @see java.lang.Object#clone()
*/
/**
* Clones this object
* @return Cloned object
* @throws CloneNotSupportedException
*/
public Object clone() throws CloneNotSupportedException {
throw new CloneNotSupportedException();
}
/**
* This method returns the List of all the corresponding ServiceDetailsClass given the Entity.
* @param entity an instance of Entity
* @return List of all the ServiceDetailClass corresponding to the given Entity
*/
public List<ServiceDetailsInterface> getServices(EntityInterface entity) {
List<ServiceDetailsInterface> serviceDetailsInstanceList = new ArrayList<ServiceDetailsInterface>();
List<String> serviceDetailClassList = getServiceDetailClassNames(entity.getName());
//TODO This is a hack. To be deleted after testing.
//List<String> serviceDetailClassList = getServiceDetailClassNames("gov.nih.nci.mageom.domain.BioAssay.BioAssay");
for (String serviceDetailClassName : serviceDetailClassList) {
ServiceDetailsInterface serviceDetails = getInstance(serviceDetailClassName,
ServiceDetailsInterface.class);
serviceDetailsInstanceList.add(serviceDetails);
}
return serviceDetailsInstanceList;
}
/**
* This method returns an instance of the Service Invoker Class given the respective Service Details Class.
* @param serviceDetails the instance of the Service Details class
* @return an instance of the Service Invoker Class
*/
public ServiceInvokerInterface getServiceInvoker(ServiceDetailsInterface serviceDetails) {
String serviceDetailClassName = serviceDetails.getClass().getName();
String serviceInvokerClassName = serviceDetailInvokerMap.get(serviceDetailClassName);
ServiceInvokerInterface serviceInvoker = null;
if (serviceInvokerClassName != null) {
serviceInvoker = getInstance(serviceInvokerClassName, ServiceInvokerInterface.class);
}
return serviceInvoker;
}
// public static void main(String[] args) {
// EntityInterface entity = new Entity();
// entity.setName("Entity1");
//
// EntityToAnalyticalServiceMapper entityServiceMapper = EntityToAnalyticalServiceMapper.getInstance();
// List<ServiceDetailsInterface> serviceList = entityServiceMapper.getServices(entity);
// ServiceInvokerInterface serviceInvoker1 = entityServiceMapper.getServiceInvoker(serviceList.get(0));
// ServiceInvokerInterface serviceInvoker2 = entityServiceMapper.getServiceInvoker(serviceList.get(1));
// }
}<|fim▁end|> | serviceDetailClassList.addAll(serviceDetailClassNameList);
|
<|file_name|>slot.py<|end_file_name|><|fim▁begin|># This file is part of PARPG.
# PARPG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# PARPG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PARPG. If not, see <http://www.gnu.org/licenses/>.
from fife.extensions.pychan import Icon
class Slot(Icon):
<|fim▁hole|>
def _getImage(self):
return self._image
image = property(_getImage, _setImage)<|fim▁end|> | def _setImage(self, source):
self._image = source |
<|file_name|>assertType.js<|end_file_name|><|fim▁begin|>var Accumulator, errorTypes, isConstructor, isType, isValidator, throwFailedValidator, throwFailure, throwInvalidType;
throwFailure = require("failure").throwFailure;
Accumulator = require("accumulator");
isConstructor = require("./isConstructor");
isValidator = require("./isValidator");
errorTypes = require("../errorTypes");
isType = require("./isType");
module.exports = function(value, type, key) {
var relevantData, result;
if (isConstructor(key, Object)) {
relevantData = key;
key = relevantData.key;
} else {
relevantData = {
key: key
};
}
if (!type) {
throwFailure(Error("Must provide a 'type'!"), {
value: value,
type: type,
key: key,
relevantData: relevantData
});
}
if (isValidator(type)) {
result = type.validate(value, key);
if (result !== true) {
throwFailedValidator(type, result, relevantData);
}
} else if (!isType(value, type)) {
throwInvalidType(type, value, relevantData);
}
};
throwFailedValidator = function(type, result, relevantData) {
var accumulated;
accumulated = Accumulator();
accumulated.push(result);
if (relevantData) {
accumulated.push(relevantData);
}
return type.fail(accumulated.flatten());
};
throwInvalidType = function(type, value, relevantData) {
var accumulated, error;
accumulated = Accumulator();
accumulated.push({
type: type,
value: value
});
if (relevantData) {
accumulated.push(relevantData);
}
error = errorTypes.invalidType(type, relevantData.key);
return throwFailure(error, accumulated.flatten());<|fim▁hole|>//# sourceMappingURL=../../../map/src/core/assertType.map<|fim▁end|> | };
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from utils import (FeedRecordUpdater, FeedRecordCleaner)<|fim▁end|> | from readers import (autodiscover, FeedReader, TimestampedModelFeedReader,
RSSFeedReader)
from library import (FeedLibrary)
from dispatch import (SubscriptionDispatcher, SubscriptionEmailer) |
<|file_name|>UTC2GPS.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2/3/2015
@author: Antonio Hermosilla Rodrigo.
@contact: [email protected]
@organization: Antonio Hermosilla Rodrigo.
@copyright: (C) 2015 by Antonio Hermosilla Rodrigo
@version: 1.0.0
'''
def UTC2GPS(fecha):
'''
@brief: Método para convertir un objeto de la clase datetime a tiempo GPS
@param fecha datetime: Objeto de la clase datetine con la fecha a transformar en tiempo GPS.
'''
#doy=fecha.strftime('%j')
name=fecha.strftime('%A')
if name=="Sunday" or name=="Domingo":
return 0+int(fecha.strftime('%H'))*3600+int(fecha.strftime('%M'))*60+int(fecha.strftime('%S'))
if name=="Monday" or name=="Lunes":
return 86400+int(fecha.strftime('%H'))*3600+int(fecha.strftime('%M'))*60+int(fecha.strftime('%S'))
if name=="Tuesday" or name=="Martes":
return 172800+int(fecha.strftime('%H'))*3600+int(fecha.strftime('%M'))*60+int(fecha.strftime('%S'))<|fim▁hole|> if name=="Wednesday" or name=="Miércoles":
return 259200+int(fecha.strftime('%H'))*3600+int(fecha.strftime('%M'))*60+int(fecha.strftime('%S'))
if name=="Thursday" or name=="Jueves":
return 345600+int(fecha.strftime('%H'))*3600+int(fecha.strftime('%M'))*60+int(fecha.strftime('%S'))
if name=="Friday" or name=="Viernes":
return 432000+int(fecha.strftime('%H'))*3600+int(fecha.strftime('%M'))*60+int(fecha.strftime('%S'))
if name=="Saturday" or name=="Sábado":
return 518400+int(fecha.strftime('%H'))*3600+int(fecha.strftime('%M'))*60+int(fecha.strftime('%S'))
def main():
from datetime import datetime
print(UTC2GPS(datetime(2014,10,28,17,0,0)))
if __name__=="__main__":
main()<|fim▁end|> | |
<|file_name|>ConfWriter.py<|end_file_name|><|fim▁begin|>from itertools import chain
from pyprint.ClosableObject import ClosableObject
from coalib.parsing.StringProcessing import escape
from coalib.settings.Section import Section
class ConfWriter(ClosableObject):
def __init__(self,
file_name,
key_value_delimiters=('=',),
comment_seperators=('#',),
key_delimiters=(',', ' '),
section_name_surroundings=None,
section_override_delimiters=(".",),
unsavable_keys=("save",)):
section_name_surroundings = section_name_surroundings or {"[": "]"}
ClosableObject.__init__(self)
self.__file_name = file_name
self.__file = open(self.__file_name, "w")
self.__key_value_delimiters = key_value_delimiters
self.__comment_seperators = comment_seperators
self.__key_delimiters = key_delimiters
self.__section_name_surroundings = section_name_surroundings
self.__section_override_delimiters = section_override_delimiters
self.__unsavable_keys = unsavable_keys
self.__wrote_newline = True
self.__closed = False
self.__key_delimiter = self.__key_delimiters[0]
self.__key_value_delimiter = self.__key_value_delimiters[0]
(self.__section_name_surrounding_beg,
self.__section_name_surrounding_end) = (
tuple(self.__section_name_surroundings.items())[0])
def _close(self):
self.__file.close()
def write_sections(self, sections):
assert not self.__closed
self.__wrote_newline = True
for section in sections:
self.write_section(sections[section])
def write_section(self, section):
assert not self.__closed
if not isinstance(section, Section):
raise TypeError
<|fim▁hole|> keys = []
val = None
section_iter = section.__iter__(ignore_defaults=True)
try:
while True:
setting = section[next(section_iter)]
if (str(setting) == val and
not self.is_comment(setting.key) and
(
(setting.key not in self.__unsavable_keys) or
(not setting.from_cli))):
keys.append(setting.key)
elif ((setting.key not in self.__unsavable_keys) or
(not setting.from_cli)):
self.__write_key_val(keys, val)
keys = [setting.key]
val = str(setting)
except StopIteration:
self.__write_key_val(keys, val)
def __write_section_name(self, name):
assert not self.__closed
if not self.__wrote_newline:
self.__file.write("\n")
self.__file.write(self.__section_name_surrounding_beg + name +
self.__section_name_surrounding_end + '\n')
self.__wrote_newline = False
def __write_key_val(self, keys, val):
assert not self.__closed
if keys == []:
return
if all(self.is_comment(key) for key in keys):
self.__file.write(val + "\n")
self.__wrote_newline = val == ""
return
# Add escape characters as appropriate
keys = [escape(key, chain(['\\'],
self.__key_value_delimiters,
self.__comment_seperators,
self.__key_delimiters,
self.__section_override_delimiters))
for key in keys]
val = escape(val, chain(['\\'], self.__comment_seperators))
self.__file.write((self.__key_delimiter + " ").join(keys) + " " +
self.__key_value_delimiter + " " + val + "\n")
self.__wrote_newline = False
@staticmethod
def is_comment(key):
return key.lower().startswith("comment")<|fim▁end|> | self.__write_section_name(section.name)
|
<|file_name|>model.py<|end_file_name|><|fim▁begin|>import codecs
import os
import re
import json
from . import WIKI_DIR
from collections import defaultdict
def _get_filename(slug):
return os.path.join(WIKI_DIR, '%s.md' % (slug,))
class Index(object):
def __init__(self):
self.texts, self.words = {}, set()
self.finvindex = defaultdict(set)
def update_index(self, doc_id, words):
for w in words:
self.finvindex[w].add((doc_id, self.texts[doc_id].index(w)))
def put(self, doc_id, content):
self.remove(doc_id)
txt = filter(None, map(lambda x: re.sub('[^a-z0-9]', '', x.lower()), filter(lambda w: len(w) > 3, content.split())))
self.texts[doc_id] = txt
self.update_index(doc_id, set(txt))
def remove(self, doc_id):
for k, v in self.finvindex.items():
to_delete = []
for w in v:
if w[0] == doc_id:
to_delete.append(w)
for t in to_delete:
v.remove(t)
def term_search(self, terms):
if not set(terms).issubset(set(self.finvindex.keys())):
return set()
return reduce(set.intersection,
(set(x[0] for x in txtindx)
for term, txtindx in self.finvindex.items()
if term in terms),
set(self.texts.keys()))
def search(self, phrase):
import difflib
wordsinphrase = phrase.strip().split()
tmp = []
for w in wordsinphrase:
r = difflib.get_close_matches(w, self.finvindex.keys(), cutoff=0.8)
if r:
tmp.append(r[0])
else:<|fim▁hole|>
if not set(wordsinphrase).issubset(set(self.finvindex.keys())):
return set()
if len(wordsinphrase) < 2:
firstword, otherwords = wordsinphrase[0], wordsinphrase[1:]
else:
firstword, otherwords = wordsinphrase[0], []
found = []
for txt in self.term_search(wordsinphrase):
for firstindx in (indx for t,indx in self.finvindex[firstword] if t == txt):
if all((txt, firstindx+1 + otherindx) in self.finvindex[otherword]
for otherindx, otherword in enumerate(otherwords)):
found.append(txt)
return found
class Post(object):
def __init__(self, title, body, created=None, modified=None, tags=None, **kwargs):
self.title = str(title).strip()
self.body = str(body.strip()) if body else None
self.slug = str(Post.build_slug(self.title))
self.tags = filter(None, tags.split(',') if isinstance(tags, basestring) else tags if tags else [])
self.created = str(created) if created else None
self.modified = str(modified) if modified else None
def __cmp__(self, other):
if not other:
return -1
return (int(self.created > other.created) or -1) if self.created != other.created else 0
def serialize(self):
buf = ['<!---']
for k, v in self.__dict__.items():
if k not in ['body', 'slug', 'tags']:
buf.append('='.join((str(k), str(v))))
elif k == 'tags':
buf.append('%s=%s' % (k, ','.join(self.tags)))
buf.append('--->')
buf.append(self.body)
return '\n'.join(buf)
@staticmethod
def build_slug(title):
return re.sub(r'[\.!,;/\?#\ ]+', '-', title).strip().lower()
@staticmethod
def build(data, title=None):
tmp = {}
body = []
header = False
for line in data.split('\n'):
if line == '<!---':
header = True
elif line == '--->':
header = False
elif header:
(k, v) = [v.strip() for v in line.split('=')]
tmp[k] = v
body.append(line)
tmp['body'] = '\n'.join(body)
if not tmp.get('title'):
tmp['title'] = ' '.join(title.replace('.md', '').split('-'))
return Post(**tmp)
class PostProxy(object):
def __init__(self, slug):
self.slug = slug
self.post = None
def __getattr__(self, name):
if not self.post:
with codecs.open(_get_filename(self.slug), 'r', 'utf8') as f:
self.post = Post.build(f.read())
if name == 'body' and not getattr(self.post, 'body', None):
with codecs.open(os.path.join(WIKI_DIR, '%s.md' % (self.slug,)), 'r', 'utf8') as f:
self.post.body = f.read()
return getattr(self.post, name)
class Wiki(object):
def add_post(self, post):
self._save_post(post)
def del_post(self, post):
os.remove(_get_filename(post.slug))
def get_post(self, slug):
if os.path.exists(_get_filename(slug)):
with codecs.open(_get_filename(slug), 'r', 'utf8') as f:
return Post.build(f.read())
def find_all(self):
return [PostProxy(f.replace('.md', '')) for f in os.listdir(WIKI_DIR)]
def _save_post(self, post):
with codecs.open(_get_filename(post.slug), 'w', 'utf8') as f:
tmp = post.__dict__.items()
body = tmp.pop('body', '')
f.write('<!---\n%s\n--->\n' % '\n'.join(['%s = %s' % (k, v) for k,v in tmp.items()]))
f.write(post.body)<|fim▁end|> | tmp.append(w)
wordsinphrase = tmp |
<|file_name|>qt_gui.py<|end_file_name|><|fim▁begin|># Irish Dictionary GUI app
# saved as qt_gui.py
# Last edit by Davis Sandefur 15.07.2015
import sys
import os
from PyQt5 import QtCore, QtWidgets, QtGui, QtMultimedia
from PyQt5 import QtNetwork
from irish_dictionary import irish_dictionary, gaeilge_gaeilge
from audio_grabber import entry_search, related_matches
class Text(QtWidgets.QWidget):
""" This class creates the text widget"""
def __init__(self, parent=None):
super().__init__(parent)
self.text_entry = QtWidgets.QTextEdit(parent)
self.text_entry.setReadOnly(True)
class IrishLabel(QtWidgets.QWidget):
def __init__(self, parent=None):
""" This class creates the Irish language label, entry box, and version switcher """
super().__init__(parent)
self.irish_label = QtWidgets.QLabel("Cuir d'fhocal anseo:")
self.irish_entry = QtWidgets.QLineEdit()
self.english_language_button = QtWidgets.QPushButton("English Version")
self.english_language_button.clicked.connect(lambda: self.irish_to_english())
@staticmethod
def irish_to_english():
""" This method converts the Irish language version to English """
irish_version.hide()
english_version.show()
irish_version.layout().removeWidget(irish_version.text_entry)
english_version.layout().addWidget(english_version.text_entry, 3, 0, 24, 8)
english_version.resize(200, 400)
english_version.center()
class IrishButtons(IrishLabel):
    """ this class creates the Irish language buttons"""

    def __init__(self, parent=None):
        super().__init__(parent)
        # Set buttons and enabled status
        self.bearla_button = QtWidgets.QPushButton("Béarla")
        self.gaeilge_button = QtWidgets.QPushButton("Gaeilge")
        self.connacht_button = QtWidgets.QPushButton("Cúige Chonnacht")
        self.ulster_button = QtWidgets.QPushButton("Cúige Uladh")
        self.munster_button = QtWidgets.QPushButton("Cúige Mhumhan")
        # Dialect buttons stay disabled until a search finds audio.
        self.connacht_button.setEnabled(False)
        self.ulster_button.setEnabled(False)
        self.munster_button.setEnabled(False)
        # Set callbacks
        self.bearla_button.clicked.connect(lambda: self.audio_check('English'))
        self.gaeilge_button.clicked.connect(lambda: self.audio_check('Irish'))
        self.munster_button.clicked.connect(lambda: self.play_audio('Munster'))
        self.connacht_button.clicked.connect(lambda: self.play_audio('Connacht'))
        self.ulster_button.clicked.connect(lambda: self.play_audio('Ulster'))

    def audio_check(self, language):
        """Run the search; enable the dialect buttons only when the searched
        word has an audio recording."""
        audio = self.callback(language)
        if audio:
            self.ulster_button.setEnabled(True)
            self.connacht_button.setEnabled(True)
            self.munster_button.setEnabled(True)
        if not audio:
            self.ulster_button.setEnabled(False)
            self.connacht_button.setEnabled(False)
            self.munster_button.setEnabled(False)

    def callback(self, language):
        """ Irish version search """
        entry = str(self.irish_entry.text()).lower()
        entries, suggestions, wordlist, grammatical = irish_dictionary(entry, language, 'gaeilge')
        entries2 = None
        # Irish-language searches additionally query the Irish-Irish dictionary.
        if language == 'Irish':
            entries2 = gaeilge_gaeilge(entry)
        audio_exists = entry_search(entry)
        if audio_exists:
            related = related_matches(entry)
        else:
            related = 'Níl aon rud ann'
        # Append grammatical info, then every entry, to the results area.
        if grammatical is not None:
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
            self.text_entry.insertPlainText(grammatical + '\n\n')
        for i in entries:
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
            self.text_entry.insertPlainText(i + '\n\n')
        self.text_entry.moveCursor(QtGui.QTextCursor.End)
        if entries2:
            self.text_entry.insertPlainText("As Gaeilge:\n\n")
            for i in entries2:
                self.text_entry.moveCursor(QtGui.QTextCursor.End)
                self.text_entry.insertPlainText(i + '\n\n')
        # Trailer: suggestions, recent words and related audio matches.
        self.text_entry.insertPlainText(suggestions + "\n\nNa focail is déanaí: " + str(wordlist) +
                                        "\n\n" + '(Fuaim) Torthaí gaolmhara:' + str(related) + '\n\n')
        self.text_entry.moveCursor(QtGui.QTextCursor.End)
        return audio_exists

    @staticmethod
    def play_audio(dialect):
        """Play the downloaded pronunciation file for the given dialect.

        NOTE(review): `player` is a local QMediaPlayer; only the connected
        stateChanged lambda keeps it referenced, so it may be garbage
        collected before playback finishes — confirm audio plays reliably.
        """
        appdata = os.getenv('APPDATA')
        file_names = {'Munster': 'CanM.mp3', 'Connacht': 'CanC.mp3', 'Ulster': 'CanU.mp3'}
        # Files live in %APPDATA% on Windows, otherwise the working directory.
        if appdata:
            url = QtCore.QUrl.fromLocalFile(os.path.abspath(os.path.join(appdata, file_names[dialect])))
        else:
            url = QtCore.QUrl.fromLocalFile(os.path.abspath(os.path.join("./", file_names[dialect])))
        content = QtMultimedia.QMediaContent(url)
        player = QtMultimedia.QMediaPlayer()
        player.setMedia(content)
        player.play()
        player.stateChanged.connect(lambda: player.disconnect())
class IrishVersion(IrishButtons, Text):
    """ This class brings together all the Irish version widgets and
    lays them out in the correct order. Also controls window title and maximize button
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        grid = QtWidgets.QGridLayout()
        grid.setSpacing(5)
        # Row 0: label, entry box, language switcher.
        grid.addWidget(self.irish_label, 0, 0)
        grid.addWidget(self.irish_entry, 0, 1, 1, 4)
        grid.addWidget(self.english_language_button, 0, 6)
        # Row 1: search-language buttons.
        grid.addWidget(self.bearla_button, 1, 2)
        grid.addWidget(self.gaeilge_button, 1, 4)
        # Row 2: dialect audio buttons.
        grid.addWidget(self.ulster_button, 2, 2)
        grid.addWidget(self.connacht_button, 2, 3)
        grid.addWidget(self.munster_button, 2, 4)
        # NOTE(review): no setLayout(grid) call is visible in this region —
        # confirm the layout is actually installed on the widget.
        self.setWindowTitle("Foclóir")
        self.resize(200, 400)

    def center(self):
        """Move the window to the centre of the available screen area."""
        qr = self.frameGeometry()
        cp = QtWidgets.QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())
class EnglishLabel(QtWidgets.QWidget):
    """ This class Creates English labels"""

    def __init__(self, parent=None):
        super().__init__(parent)
        self.english_label = QtWidgets.QLabel("Enter your word here:")
        self.english_entry = QtWidgets.QLineEdit()
        self.irish_language_button = QtWidgets.QPushButton("Leagan Gaeilge")
        # Switch the UI to the Irish-language window.
        self.irish_language_button.clicked.connect(lambda: self.english_to_irish())

    @staticmethod
    def english_to_irish():
        """ This method converts the English language version to Irish"""
        english_version.hide()
        # The Irish window is created lazily on first switch and kept in a
        # module-level global so other callbacks can reach it.
        global irish_version
        irish_version = IrishVersion()
        irish_version.show()
        # Hand the shared QTextEdit over to the Irish window's grid.
        english_version.layout().removeWidget(english_version.text_entry)
        irish_version.layout().addWidget(irish_version.text_entry, 3, 0, 24, 8)
        irish_version.resize(200, 400)
        irish_version.center()
class EnglishButtons(EnglishLabel):
    """ This class creates the English version buttons"""

    def __init__(self, parent=None):
        super().__init__(parent)
        # Define buttons
        self.english_button = QtWidgets.QPushButton("English")
        self.irish_button = QtWidgets.QPushButton("Irish")
        self.audio = False  # Initial audio setting
        self.ulster_button = QtWidgets.QPushButton("Ulster Dialect")
        self.connacht_button = QtWidgets.QPushButton("Connacht Dialect")
        self.munster_button = QtWidgets.QPushButton("Munster Dialect")
        # Define Callback procedures
        self.english_button.clicked.connect(lambda: self.audio_check("English"))
        self.irish_button.clicked.connect(lambda: self.audio_check('Irish'))
        self.munster_button.clicked.connect(lambda: self.play_audio('Munster'))
        self.connacht_button.clicked.connect(lambda: self.play_audio('Connacht'))
        self.ulster_button.clicked.connect(lambda: self.play_audio('Ulster'))
        # Initial disabling of audio buttons
        self.ulster_button.setEnabled(False)
        self.munster_button.setEnabled(False)
        self.connacht_button.setEnabled(False)

    def audio_check(self, language):
        """ Runs callback which prints all entries, suggestions, grammatical forms, etc. Callback also determines if
        an audio recording exists for the word in <language>. If it doesn't, it disables audio buttons. If audio exists,
        it enables buttons.
        """
        self.audio = self.callback(language)
        if self.audio:
            self.ulster_button.setEnabled(True)
            self.connacht_button.setEnabled(True)
            self.munster_button.setEnabled(True)
        if not self.audio:
            self.ulster_button.setEnabled(False)
            self.connacht_button.setEnabled(False)
            self.munster_button.setEnabled(False)

    def callback(self, language):
        """ Callback function that prints entries, suggestions, etc. and returns a boolean for whether the word(s)
        contain(s) audio."""
        entry = str(self.english_entry.text()).lower()
        entries, suggestions, wordlist, grammatical = irish_dictionary(entry, language, 'english')
        entries2 = None
        # Irish-language searches additionally query the Irish-Irish dictionary.
        if language == 'Irish':
            entries2 = gaeilge_gaeilge(entry)
        audio_exists = entry_search(entry)
        if audio_exists:
            related = related_matches(entry)
        else:
            related = 'None'
        # Append grammatical info, then every entry, to the results area.
        if grammatical is not None:
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
            self.text_entry.insertPlainText(grammatical + '\n\n')
        for i in entries:
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
            self.text_entry.insertPlainText(i + '\n\n')
        if entries2:
            self.text_entry.insertPlainText("In Irish:\n\n")
            for i in entries2:
                self.text_entry.moveCursor(QtGui.QTextCursor.End)
                self.text_entry.insertPlainText(i + '\n\n')
        self.text_entry.moveCursor(QtGui.QTextCursor.End)
        # Trailer: suggestions, recent words and related audio matches.
        self.text_entry.insertPlainText(suggestions + "\n\nRecently used words: " + str(wordlist) +
                                        "\n\n" + 'Related Audio Matches: ' + str(related) + '\n\n')
        self.text_entry.moveCursor(QtGui.QTextCursor.End)
        return audio_exists

    @staticmethod
    def play_audio(dialect):
        """Play the downloaded pronunciation file for the given dialect.

        NOTE(review): duplicate of IrishButtons.play_audio — consider a
        shared helper. As there, the local QMediaPlayer may be garbage
        collected before playback finishes — confirm behavior.
        """
        appdata = os.getenv('APPDATA')
        file_names = {'Munster': 'CanM.mp3', 'Connacht': 'CanC.mp3', 'Ulster': 'CanU.mp3'}
        # Files live in %APPDATA% on Windows, otherwise the working directory.
        if appdata:
            url = QtCore.QUrl.fromLocalFile(os.path.abspath(os.path.join(appdata, file_names[dialect])))
        else:
            url = QtCore.QUrl.fromLocalFile(os.path.abspath(os.path.join("./", file_names[dialect])))
        content = QtMultimedia.QMediaContent(url)
        player = QtMultimedia.QMediaPlayer()
        player.setMedia(content)
        player.play()
        player.stateChanged.connect(lambda: player.disconnect())
class EnglishVersion(EnglishButtons, Text):
    """ This class brings together all the English version widgets and lays them out in the correct
    order. Also controls the English version window title and disables the maximize button
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        grid = QtWidgets.QGridLayout()
        grid.setSpacing(5)
        # Row 0: entry label, entry box, language switcher.
        grid.addWidget(self.english_label, 0, 0)
        grid.addWidget(self.english_entry, 0, 1, 1, 4)
        grid.addWidget(self.irish_language_button, 0, 6)
        # Row 1: search-language buttons.
        grid.addWidget(self.english_button, 1, 2)
        grid.addWidget(self.irish_button, 1, 4)
        # Row 2: dialect audio buttons.
        grid.addWidget(self.ulster_button, 2, 2)
        grid.addWidget(self.connacht_button, 2, 3)
        grid.addWidget(self.munster_button, 2, 4)
        # Remaining rows: the shared read-only results area.
        grid.addWidget(self.text_entry, 3, 0, 24, 8)
        self.setLayout(grid)
        # Combine the hints in a single call: setWindowFlags() REPLACES the
        # whole flag set, so the original back-to-back calls discarded the
        # minimize hint and kept only the close hint.
        self.setWindowFlags(QtCore.Qt.WindowMinimizeButtonHint |
                            QtCore.Qt.WindowCloseButtonHint)
        self.setWindowTitle("teanglann.ie Searcher")
        self.resize(200, 400)

    def center(self):
        """Move the window to the centre of the available screen area."""
        qr = self.frameGeometry()
        cp = QtWidgets.QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())
def main():
    """Application entry point: build and show the English version window."""
    app = QtWidgets.QApplication(sys.argv)
    # Module-level global: the language-switch callbacks reference
    # `english_version` directly.
    global english_version
    english_version = EnglishVersion()
    english_version.show()
    english_version.center()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
self.setWindowFlags(QtCore.Qt.WindowMinimizeButtonHint)
self.setWindowFlags(QtCore.Qt.WindowCloseButtonHint) |
<|file_name|>references_and_borrowing.rs<|end_file_name|><|fim▁begin|>pub fn references_and_borrowing() {
println!("***References and Borrowing***");
let v1 = vec![1, 2, 3, 4];
let v2 = vec![3, 4, 5, 6];
let res = foo_one(&v1, &v2); // borrowing a reference
println!("result is {}", res);
println!("v1[0] is {}", v1[0]);
let mut x = 5;
{
let y = &mut x;
*y += 1;
}
println!("x is now {}", x);
let v3 = vec![1, 2, 3];
for i in &v3 {
print!("{} ", i);
}
println!("");
let mut v4 = vec![1, 2, 3];
{
let v_r = &mut v4;
v_r.push(8);
}
println!("v4[3] is {}", v4[3]);
println!("");<|fim▁hole|>fn foo_one(v1: &Vec<i32>, v2: &Vec<i32>) -> i32 {
v1[0] + v2[0]
}<|fim▁end|> | }
|
<|file_name|>header.component.spec.ts<|end_file_name|><|fim▁begin|>import {async, ComponentFixture, TestBed} from '@angular/core/testing';
<|fim▁hole|> let component: HeaderComponent;
let fixture: ComponentFixture<HeaderComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [HeaderComponent]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(HeaderComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should be created', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|> | import {HeaderComponent} from './header.component';
describe('HeaderComponent', () => { |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! This module contains things that aren't really part of the gba
//! but are part of the emulator. e.g. the debug module should actually be in
//! here but it's temporary until I can start using a better UI library so
//! it gets to stay where it is for now.
pub mod settings;
<|fim▁hole|>}
#[macro_export]
macro_rules! psetting {
($setting_name:ident) => ({
let settings = unsafe {
::pyrite::PYRITE_SETTINGS.get().as_mut().expect("Failed to get an instance of pyrite settings.")
};
settings.$setting_name
})
}<|fim▁end|> | use ::util::sync_unsafe_cell::SyncUnsafeCell;
lazy_static! {
pub static ref PYRITE_SETTINGS: SyncUnsafeCell<settings::PyriteSettings> = SyncUnsafeCell::new(Default::default()); |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for lodash.camelCase 4.3
// Project: http://lodash.com/
// Definitions by: Brian Zengel <https://github.com/bczengel>, Ilya Mochalov <https://github.com/chrootsu>, Stepan Mikhaylyuk <https://github.com/stepancar>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
<|fim▁hole|>import { camelCase } from "lodash";
export = camelCase;<|fim▁end|> | |
<|file_name|>gridlayout.py<|end_file_name|><|fim▁begin|>'''
Grid Layout
===========
.. only:: html
.. image:: images/gridlayout.gif
:align: right
.. only:: latex
.. image:: images/gridlayout.png
:align: right
.. versionadded:: 1.0.4
The :class:`GridLayout` arranges children in a matrix. It takes the available
space and divides it into columns and rows, then adds widgets to the resulting
"cells".
.. versionchanged:: 1.0.7
The implementation has changed to use the widget size_hint for calculating
column/row sizes. `uniform_width` and `uniform_height` have been removed
    and other properties have been added to give you more control.
Background
----------
Unlike many other toolkits, you cannot explicitly place a widget in a specific
column/row. Each child is automatically assigned a position determined by the
layout configuration and the child's index in the children list.
A GridLayout must always have at least one input constraint:
:attr:`GridLayout.cols` or :attr:`GridLayout.rows`. If you do not specify cols
or rows, the Layout will throw an exception.
Column Width and Row Height
---------------------------
The column width/row height are determined in 3 steps:
- The initial size is given by the :attr:`col_default_width` and
:attr:`row_default_height` properties. To customize the size of a single
column or row, use :attr:`cols_minimum` or :attr:`rows_minimum`.
- The `size_hint_x`/`size_hint_y` of the children are taken into account.
If no widgets have a size hint, the maximum size is used for all
children.
- You can force the default size by setting the :attr:`col_force_default`
or :attr:`row_force_default` property. This will force the layout to
ignore the `width` and `size_hint` properties of children and use the
default size.
Using a GridLayout
------------------
In the example below, all widgets will have an equal size. By default, the
`size_hint` is (1, 1), so a Widget will take the full size of the parent::
layout = GridLayout(cols=2)
layout.add_widget(Button(text='Hello 1'))
layout.add_widget(Button(text='World 1'))
layout.add_widget(Button(text='Hello 2'))
layout.add_widget(Button(text='World 2'))
.. image:: images/gridlayout_1.jpg
Now, let's fix the size of Hello buttons to 100px instead of using
size_hint_x=1::
layout = GridLayout(cols=2)
layout.add_widget(Button(text='Hello 1', size_hint_x=None, width=100))
layout.add_widget(Button(text='World 1'))
layout.add_widget(Button(text='Hello 2', size_hint_x=None, width=100))
layout.add_widget(Button(text='World 2'))
.. image:: images/gridlayout_2.jpg
Next, let's fix the row height to a specific size::
layout = GridLayout(cols=2, row_force_default=True, row_default_height=40)
layout.add_widget(Button(text='Hello 1', size_hint_x=None, width=100))
layout.add_widget(Button(text='World 1'))
layout.add_widget(Button(text='Hello 2', size_hint_x=None, width=100))
layout.add_widget(Button(text='World 2'))
.. image:: images/gridlayout_3.jpg
'''
__all__ = ('GridLayout', 'GridLayoutException')
from kivy.logger import Logger
from kivy.uix.layout import Layout
from kivy.properties import NumericProperty, BooleanProperty, DictProperty, \
BoundedNumericProperty, ReferenceListProperty, VariableListProperty, \
ObjectProperty, StringProperty
from math import ceil
def nmax(*args):
    """Return the maximum of the given values, ignoring any ``None`` entries."""
    present = [value for value in args if value is not None]
    return max(present)
def nmin(*args):
    """Return the minimum of the given values, ignoring any ``None`` entries."""
    present = [value for value in args if value is not None]
    return min(present)
class GridLayoutException(Exception):
    """Raised when a grid layout manipulation fails."""
class GridLayout(Layout):
    '''Grid layout class. See module documentation for more information.
    '''

    spacing = VariableListProperty([0, 0], length=2)
    '''Spacing between children: [spacing_horizontal, spacing_vertical].

    spacing also accepts a one argument form [spacing].

    :attr:`spacing` is a
    :class:`~kivy.properties.VariableListProperty` and defaults to [0, 0].
    '''

    padding = VariableListProperty([0, 0, 0, 0])
    '''Padding between the layout box and its children: [padding_left,
    padding_top, padding_right, padding_bottom].

    padding also accepts a two argument form [padding_horizontal,
    padding_vertical] and a one argument form [padding].

    .. versionchanged:: 1.7.0
        Replaced NumericProperty with VariableListProperty.

    :attr:`padding` is a :class:`~kivy.properties.VariableListProperty` and
    defaults to [0, 0, 0, 0].
    '''

    cols = BoundedNumericProperty(None, min=0, allownone=True)
    '''Number of columns in the grid.

    .. versionchanged:: 1.0.8
        Changed from a NumericProperty to BoundedNumericProperty. You can no
        longer set this to a negative value.

    :attr:`cols` is a :class:`~kivy.properties.NumericProperty` and defaults to
    0.
    '''

    rows = BoundedNumericProperty(None, min=0, allownone=True)
    '''Number of rows in the grid.

    .. versionchanged:: 1.0.8
        Changed from a NumericProperty to a BoundedNumericProperty. You can no
        longer set this to a negative value.

    :attr:`rows` is a :class:`~kivy.properties.NumericProperty` and defaults to
    0.
    '''

    col_default_width = NumericProperty(0)
    '''Default minimum size to use for a column.

    .. versionadded:: 1.0.7

    :attr:`col_default_width` is a :class:`~kivy.properties.NumericProperty`
    and defaults to 0.
    '''

    row_default_height = NumericProperty(0)
    '''Default minimum size to use for row.

    .. versionadded:: 1.0.7

    :attr:`row_default_height` is a :class:`~kivy.properties.NumericProperty`
    and defaults to 0.
    '''

    col_force_default = BooleanProperty(False)
    '''If True, ignore the width and size_hint_x of the child and use the
    default column width.

    .. versionadded:: 1.0.7

    :attr:`col_force_default` is a :class:`~kivy.properties.BooleanProperty`
    and defaults to False.
    '''

    row_force_default = BooleanProperty(False)
    '''If True, ignore the height and size_hint_y of the child and use the
    default row height.

    .. versionadded:: 1.0.7

    :attr:`row_force_default` is a :class:`~kivy.properties.BooleanProperty`
    and defaults to False.
    '''

    cols_minimum = DictProperty({})
    '''Dict of minimum width for each column. The dictionary keys are the
    column numbers, e.g. 0, 1, 2...

    .. versionadded:: 1.0.7

    :attr:`cols_minimum` is a :class:`~kivy.properties.DictProperty` and
    defaults to {}.
    '''

    rows_minimum = DictProperty({})
    '''Dict of minimum height for each row. The dictionary keys are the
    row numbers, e.g. 0, 1, 2...

    .. versionadded:: 1.0.7

    :attr:`rows_minimum` is a :class:`~kivy.properties.DictProperty` and
    defaults to {}.
    '''

    minimum_width = NumericProperty(0)
    '''Automatically computed minimum width needed to contain all children.

    .. versionadded:: 1.0.8

    :attr:`minimum_width` is a :class:`~kivy.properties.NumericProperty` and
    defaults to 0. It is read only.
    '''

    minimum_height = NumericProperty(0)
    '''Automatically computed minimum height needed to contain all children.

    .. versionadded:: 1.0.8

    :attr:`minimum_height` is a :class:`~kivy.properties.NumericProperty` and
    defaults to 0. It is read only.
    '''

    minimum_size = ReferenceListProperty(minimum_width, minimum_height)
    '''Automatically computed minimum size needed to contain all children.

    .. versionadded:: 1.0.8

    :attr:`minimum_size` is a
    :class:`~kivy.properties.ReferenceListProperty` of
    (:attr:`minimum_width`, :attr:`minimum_height`) properties. It is read
    only.
    '''
def __init__(self, **kwargs):
    """Initialize the layout and re-trigger it whenever a layout-affecting
    property changes."""
    self._cols = self._rows = None
    super(GridLayout, self).__init__(**kwargs)
    relayout = self._trigger_layout
    # Every one of these properties influences the computed grid, so each
    # rebinds to the same layout trigger (same order as before).
    for prop_name in ('col_default_width', 'row_default_height',
                      'col_force_default', 'row_force_default',
                      'cols', 'rows', 'parent', 'spacing', 'padding',
                      'children', 'size', 'pos'):
        self.fbind(prop_name, relayout)
def get_max_widgets(self):
    """Return the cell count (rows * cols) when both dimensions are set,
    otherwise None to signal that there is no upper bound."""
    if not (self.cols and self.rows):
        return None
    return self.rows * self.cols
def on_children(self, instance, value):
    """Reject child lists larger than the grid can hold."""
    # If this makes it impossible to construct things with a deferred
    # method, migrate this test into do_layout and/or issue a warning.
    limit = self.get_max_widgets()
    if limit and len(value) > limit:
        raise GridLayoutException(
            'Too many children in GridLayout. Increase rows/cols!')
def _init_rows_cols_sizes(self, count):
    # the goal here is to calculate the minimum size of every cols/rows
    # and determine if they have stretch or not
    current_cols = self.cols
    current_rows = self.rows

    # if no cols or rows are set, we can't calculate minimum size.
    # the grid must be constrained at least on one side
    if not current_cols and not current_rows:
        Logger.warning('%r have no cols or rows set, '
                       'layout is not triggered.' % self)
        return
    # Derive the unset dimension from the child count.
    if current_cols is None:
        current_cols = int(ceil(count / float(current_rows)))
    elif current_rows is None:
        current_rows = int(ceil(count / float(current_cols)))

    current_cols = max(1, current_cols)
    current_rows = max(1, current_rows)

    # Reset the per-axis bookkeeping for this layout pass.
    self._has_hint_bound_x = False
    self._has_hint_bound_y = False
    self._cols_min_size_none = 0.  # min size from all the None hint
    self._rows_min_size_none = 0.  # min size from all the None hint
    self._cols = cols = [self.col_default_width] * current_cols
    self._cols_sh = [None] * current_cols
    self._cols_sh_min = [None] * current_cols
    self._cols_sh_max = [None] * current_cols
    self._rows = rows = [self.row_default_height] * current_rows
    self._rows_sh = [None] * current_rows
    self._rows_sh_min = [None] * current_rows
    self._rows_sh_max = [None] * current_rows

    # update minimum size from the dicts (ignoring out-of-range indices)
    items = (i for i in self.cols_minimum.items() if i[0] < len(cols))
    for index, value in items:
        cols[index] = max(value, cols[index])
    items = (i for i in self.rows_minimum.items() if i[0] < len(rows))
    for index, value in items:
        rows[index] = max(value, rows[index])
    # Truthy return tells do_layout that sizing data is ready.
    return True
def _fill_rows_cols_sizes(self):
    """Accumulate, per column and row, the fixed minimum sizes and the
    stretch (size_hint) information of all children."""
    cols, rows = self._cols, self._rows
    cols_sh, rows_sh = self._cols_sh, self._rows_sh
    cols_sh_min, rows_sh_min = self._cols_sh_min, self._rows_sh_min
    cols_sh_max, rows_sh_max = self._cols_sh_max, self._rows_sh_max

    # calculate minimum size for each columns and rows
    n_cols = len(cols)
    has_bound_y = has_bound_x = False
    # reversed(): children are filled into cells in the reverse of the
    # children-list order (same convention as _iterate_layout/do_layout).
    for i, child in enumerate(reversed(self.children)):
        (shw, shh), (w, h) = child.size_hint, child.size
        shw_min, shh_min = child.size_hint_min
        shw_max, shh_max = child.size_hint_max
        # Cells are filled row-major: cell i lives at (i // n_cols, i % n_cols).
        row, col = divmod(i, n_cols)

        # compute minimum size / maximum stretch needed
        if shw is None:
            # No horizontal hint: the fixed width drives the column minimum.
            cols[col] = nmax(cols[col], w)
        else:
            cols_sh[col] = nmax(cols_sh[col], shw)
            if shw_min is not None:
                has_bound_x = True
                cols_sh_min[col] = nmax(cols_sh_min[col], shw_min)
            if shw_max is not None:
                has_bound_x = True
                cols_sh_max[col] = nmin(cols_sh_max[col], shw_max)
        if shh is None:
            # No vertical hint: the fixed height drives the row minimum.
            rows[row] = nmax(rows[row], h)
        else:
            rows_sh[row] = nmax(rows_sh[row], shh)
            if shh_min is not None:
                has_bound_y = True
                rows_sh_min[row] = nmax(rows_sh_min[row], shh_min)
            if shh_max is not None:
                has_bound_y = True
                rows_sh_max[row] = nmin(rows_sh_max[row], shh_max)
    self._has_hint_bound_x = has_bound_x
    self._has_hint_bound_y = has_bound_y
def _update_minimum_size(self):
    """Compute and publish :attr:`minimum_size` from the per-column/row
    minima gathered by _fill_rows_cols_sizes."""
    # calculate minimum width/height needed, starting from padding +
    # spacing
    l, t, r, b = self.padding
    spacing_x, spacing_y = self.spacing
    cols, rows = self._cols, self._rows

    width = l + r + spacing_x * (len(cols) - 1)
    self._cols_min_size_none = sum(cols) + width
    # we need to subtract for the sh_max/min the already guaranteed size
    # due to having a None in the col. So sh_min gets smaller by that size
    # since it's already covered. Similarly for sh_max, because if we
    # already exceeded the max, the subtracted max will be zero, so
    # it won't get larger
    if self._has_hint_bound_x:
        cols_sh_min = self._cols_sh_min
        cols_sh_max = self._cols_sh_max

        for i, (c, sh_min, sh_max) in enumerate(
                zip(cols, cols_sh_min, cols_sh_max)):
            if sh_min is not None:
                width += max(c, sh_min)
                cols_sh_min[i] = max(0., sh_min - c)
            else:
                width += c

            if sh_max is not None:
                cols_sh_max[i] = max(0., sh_max - c)
    else:
        # No bounded hints: the None-hint minimum is the whole minimum.
        width = self._cols_min_size_none

    height = t + b + spacing_y * (len(rows) - 1)
    self._rows_min_size_none = sum(rows) + height
    if self._has_hint_bound_y:
        rows_sh_min = self._rows_sh_min
        rows_sh_max = self._rows_sh_max

        for i, (r, sh_min, sh_max) in enumerate(
                zip(rows, rows_sh_min, rows_sh_max)):
            if sh_min is not None:
                height += max(r, sh_min)
                rows_sh_min[i] = max(0., sh_min - r)
            else:
                height += r

            if sh_max is not None:
                rows_sh_max[i] = max(0., sh_max - r)
    else:
        height = self._rows_min_size_none

    # finally, set the minimum size
    self.minimum_size = (width, height)
def _finalize_rows_cols_sizes(self):
    """Distribute the layout's remaining free space across stretchable
    columns and rows, proportionally to their size hints."""
    selfw = self.width
    selfh = self.height

    # resolve size for each column
    if self.col_force_default:
        # Forced mode: every column gets the default width, overridden
        # only by explicit cols_minimum entries.
        cols = [self.col_default_width] * len(self._cols)
        for index, value in self.cols_minimum.items():
            cols[index] = value
        self._cols = cols
    else:
        cols = self._cols
        cols_sh = self._cols_sh
        cols_sh_min = self._cols_sh_min
        cols_weight = float(sum((x for x in cols_sh if x is not None)))
        # Free space left after the None-hint minimum is satisfied.
        stretch_w = max(0., selfw - self._cols_min_size_none)

        if stretch_w > 1e-9:
            if self._has_hint_bound_x:
                # fix the hints to be within bounds
                self.layout_hint_with_bounds(
                    cols_weight, stretch_w,
                    sum((c for c in cols_sh_min if c is not None)),
                    cols_sh_min, self._cols_sh_max, cols_sh)

            for index, col_stretch in enumerate(cols_sh):
                # if the col doesn't have stretch information, nothing to do
                if not col_stretch:
                    continue

                # add to the min width whatever remains from size_hint
                cols[index] += stretch_w * col_stretch / cols_weight

    # same algo for rows
    if self.row_force_default:
        rows = [self.row_default_height] * len(self._rows)
        for index, value in self.rows_minimum.items():
            rows[index] = value
        self._rows = rows
    else:
        rows = self._rows
        rows_sh = self._rows_sh
        rows_sh_min = self._rows_sh_min
        rows_weight = float(sum((x for x in rows_sh if x is not None)))
        stretch_h = max(0., selfh - self._rows_min_size_none)

        if stretch_h > 1e-9:
            if self._has_hint_bound_y:
                # fix the hints to be within bounds
                self.layout_hint_with_bounds(
                    rows_weight, stretch_h,
                    sum((r for r in rows_sh_min if r is not None)),
                    rows_sh_min, self._rows_sh_max, rows_sh)

            for index, row_stretch in enumerate(rows_sh):
                # if the row doesn't have stretch information, nothing to do
                if not row_stretch:
                    continue

                # add to the min height whatever remains from size_hint
                rows[index] += stretch_h * row_stretch / rows_weight
def _iterate_layout(self, count):
    """Yield ``(child_index, x, y, width, height)`` for every cell.

    Cells are visited row by row from the top-left; the child index counts
    down from ``count - 1`` to 0, matching the ``reversed(self.children)``
    ordering used in _fill_rows_cols_sizes.
    """
    selfx = self.x
    padding_left = self.padding[0]
    padding_top = self.padding[1]
    spacing_x, spacing_y = self.spacing

    i = count - 1
    # Start just below the top padding; each yielded y is the cell's
    # bottom edge (y - row_height).
    y = self.top - padding_top
    cols = self._cols
    for row_height in self._rows:
        x = selfx + padding_left
        for col_width in cols:
            if i < 0:
                break

            yield i, x, y - row_height, col_width, row_height
            i = i - 1
            x = x + col_width + spacing_x
        y -= row_height + spacing_y
def do_layout(self, *largs):
    """Run a full layout pass: size the grid, then position and size each
    child in its cell, clamped to the child's size_hint_min/max."""
    children = self.children
    if not children or not self._init_rows_cols_sizes(len(children)):
        # Nothing to lay out (or grid is unconstrained): minimum size is
        # just the padding.
        l, t, r, b = self.padding
        self.minimum_size = l + r, t + b
        return
    self._fill_rows_cols_sizes()
    self._update_minimum_size()
    self._finalize_rows_cols_sizes()

    for i, x, y, w, h in self._iterate_layout(len(children)):
        c = children[i]
        c.pos = x, y
        shw, shh = c.size_hint
        shw_min, shh_min = c.size_hint_min
        shw_max, shh_max = c.size_hint_max

        # Clamp the cell width into [shw_min, shw_max] where bounds exist.
        if shw_min is not None:
            if shw_max is not None:
                w = max(min(w, shw_max), shw_min)
            else:
                w = max(w, shw_min)
        else:
            if shw_max is not None:
                w = min(w, shw_max)

        # Same clamping for the cell height.
        if shh_min is not None:
            if shh_max is not None:
                h = max(min(h, shh_max), shh_min)
            else:
                h = max(h, shh_min)
        else:
            if shh_max is not None:
                h = min(h, shh_max)

        # Only the hinted axes are assigned; a None hint leaves that axis
        # of the child's size untouched.
        if shw is None:
            if shh is not None:
                c.height = h
        else:
            if shh is None:
                c.width = w
            else:
                c.size = (w, h)
<|file_name|>converter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=utf-8
import sys
import argparse
# Command-line interface: accepts one or more bracketed hostname patterns.
# (Help-text typos fixed: "non-standord" -> "non-standard", "a expansion"
# -> "an expansion", and the Tips sentence rewritten in plain English.)
parser = argparse.ArgumentParser(
    description='convert a non-standard hostname like xx-xx-[1-3] to an '
                'expansion state',
    formatter_class=argparse.RawDescriptionHelpFormatter,
    epilog="""
Sample:
$ ./converter.py xxx-xxx-\[1-3\]
xxx-xxx-1
xxx-xxx-2
xxx-xxx-3

Tips: you can pass several patterns after the command; don't forget to
escape the [ and ] characters.
""")
parser.add_argument(
    'hostname_pattern',
    help='hostname pattern of the form prefix-[start-end]',
    type=str,
    nargs='+')
args = parser.parse_args()
def expand_pattern(pattern):
    """Expand 'prefix-[start-end]' into ['prefix-start', ..., 'prefix-end'].

    The last two '-'-separated tokens are '[start' and 'end]'; everything
    before them is the literal hostname prefix.
    """
    parts = pattern.split('-')
    prefix = '-'.join(parts[:-2])
    start_tok, end_tok = parts[-2:]
    start_num = int(start_tok[1:])   # strip the leading '['
    end_num = int(end_tok[:-1])      # strip the trailing ']'
    return [prefix + '-' + str(i) for i in range(start_num, end_num + 1)]


if __name__ == '__main__':
    for arg in args.hostname_pattern:
        for hostname in expand_pattern(arg):
            # print(x) with a single argument behaves identically under
            # Python 2 and 3; the original py2-only `print x` statement
            # was a SyntaxError under Python 3.
            print(hostname)
<|file_name|>instructions.rs<|end_file_name|><|fim▁begin|>/// A 16-bit address `NNN`
pub struct Address;
/// 8-bit information in an opcode. `NN`
pub struct Byte;
/// 4-bit information in an opcode. `N`
pub struct Nibble;
/// Represents an arbitrary 8-bit register.
pub type Vx = u8;
/// Represents an arbitrary 8-bit register.
pub type Vy = u8;
/// This contains all of the instructions in the Chip-8 instruction set.
pub enum Instructions {
    /// Clear the screen.
    ///
    /// Opcode: `00E0`
    Clear,
    /// Return from a subroutine.
    ///
    /// Opcode: `00EE`
    Return,
    /// Jump to address `NNN`.
    ///
    /// Opcode: `1NNN`
    Jump(Address),
    /// Call the subroutine at address `NNN`.
    ///
    /// Opcode: `2NNN`
    Call(Address),
    /// Skip the following instruction
    /// if the value of register `VX` equals `NNN`.
    ///
    /// Opcode: `3XNN`
    SkipEqualK(Vx, Byte),
    /// Skip the following instruction
    /// if the value of register `VX` does not equal `NNN`.
    ///
    /// Opcode: `4XNN`
    SkipNotEqualK(Vx, Byte),
    /// Skip the following instruction if the value of register `VX`
    /// is equal to the value of register `VY`.
    ///
    /// Opcode: `5XY0`
    SkipEqual(Vx, Vy),
    /// Store number `NNN` in register `VX`.
    ///
    /// Opcode: `6XNN`
    StoreK(Vx, Byte),
    /// Add the value `NNN` to register `VX`.
    ///
    /// Opcode: `7XNN`
    AddK(Vx, Byte),
    /// Store the value of register `VY` in register `VX`.
    ///
    /// Opcode: `8XY0`
    Store(Vx, Vy),
    /// Set `VX` to `VX` OR `VY`.
    ///
    /// Opcode: `8XY1`
    Or(Vx, Vy),
    /// Set `VX` to `VX` AND `VY`.
    ///
    /// Opcode: `8XY2`
    And(Vx, Vy),
    /// Set `VX` to `VX` XOR `VY`.
    ///
    /// Opcode: `8XY3`
    Xor(Vx, Vy),
    /// Add the value of register `VY` to register `VX`.
    ///
    /// * Set VF to `01` if a carry occurs.
    /// * Set VF to `00` if a carry does not occur.
    ///
    /// Opcode: `8XY4`
    Add(Vx, Vy),
    /// Subtract the value of register `VY` from register `VX`.
    ///
    /// * Set `VF` to `00` if a borrow occurs.
    /// * Set `VF` to `01` if a borrow does not occur.
    ///
    /// Opcode: `8XY5`
    Sub(Vx, Vy),
    /// Store the value of register `VY` shifted right one bit in register `VX`.
    ///
    /// * Set register `VF` to the least significant bit prior to the shift.
    ///
    /// Opcode: `8XY6`
    ShiftRight(Vx, Vy),
    /// Set register `VX` to the value of `VY` minus `VX`.
    ///
    /// * Set `VF` to `00` if a borrow occurs.
    /// * Set `VF` to `01` if a borrow does not occur.
    ///
    /// Opcode: `8XY7`
    SubReversed(Vx, Vy),
    /// Store the value of register `VY` shifted left one bit in register `VX`.
    ///
    /// * Set register `VF` to the most significant bit prior to the shift.
    ///
    /// Opcode: `8XYE`
    ShiftLeft(Vx, Vy),
    /// Skip the following instruction if the value of register `VX`
    /// is not equal to the value of register `VY`.
    ///
    /// Opcode: `9XY0`
    SkipNotEqual(Vx, Vy),
    /// Store memory address `NNN` in register `I`.
    ///
    /// Opcode: `ANNN`
    StoreI(Address),
    /// Jump to address `NNN` + `V0`.
    ///
    /// Opcode: `BNNN`
    JumpOffset(Address),
    /// Set `VX` to a random number with a mask of `NN`.
    ///
    /// Opcode: `CXNN`
    Random(Vx, Byte),
    /// Draw a sprite at position `VX`,`VY` with `N` bytes of sprite
    /// data starting at the address stored in `I`.
    ///
    /// * Set `VF` to `01` if any set pixels are changed to unset, and `00` otherwise.
    ///
    /// Opcode: `DXYN`
    Draw(Vx, Vy, Nibble),
    /// Skip the following instruction if the key corresponding to
    /// the hex value currently stored in register `VX` is pressed.
    ///
    /// Opcode: `EX9E`
    SkipKeyPressed(Vx),
    /// Skip the following instruction if the key corresponding to
    /// the hex value currently stored in register `VX` is not pressed.
    ///
    /// Opcode: `EXA1`
    SkipKeyNotPressed(Vx),
    /// Store the current value of the delay timer in register `VX`.
    ///
    /// Opcode: `FX07`
    StoreDelayTimer(Vx),
    /// Wait for a keypress and store the result in register `VX`.
    ///
    /// Opcode: `FX0A`
    WaitForKeypressAndStore(Vx),
    /// Set the delay timer to the value of register `VX`.
    ///
    /// Opcode: `FX15`
    SetDelayTimer(Vx),
    /// Set the sound timer to the value of register `VX`.
    ///
    /// Opcode: `FX18`
    SetSoundTimer(Vx),
    /// Add the value stored in register `VX` to register `I`.
    ///
    /// Opcode: `FX1E`
    AddToI(Vx),
    /// Set `I` to the memory address of the sprite data corresponding
    /// to the hexadecimal digit stored in register `VX`.
    ///
    /// Opcode: `FX29`
    LoadHexGlyph(Vx),
    /// Store the binary-coded decimal equivalent of the value
    /// stored in register `VX` at addresses `I`, `I+1`, and `I+2`.
    ///
    /// Opcode: `FX33`
    StoreBCD(Vx),
    /// Store the values of register `V0` to `VX` inclusive in memory
    /// starting at address `I`.
    ///
    /// * `I` is set to `I + X + 1` after operation.
    ///
    /// Opcode: `FX55`
    StoreRegisters(Vx),
    /// Fill registers `V0` to `VX` inclusive with the values stored
    /// in memory starting at address `I`.
    ///
    /// * `I` is set to `I + X + 1` after operation.
    ///
    /// Opcode: `FX65`
    LoadRegisters(Vx),
    /// Any instruction received that does not belong to the Chip-8 instruction set.
    ///
    /// Opcode: `Unknown`
    Invalid,
}
<|file_name|>metadata.ts<|end_file_name|><|fim▁begin|>import {Type, DirectiveMetadata} from 'angular2/core';
import {DirectiveResolver} from 'angular2/compiler';
import {stringify} from './util';
var COMPONENT_SELECTOR = /^[\w|-]*$/;
var SKEWER_CASE = /-(\w)/g;
var directiveResolver = new DirectiveResolver();
export interface AttrProp {
prop: string;
attr: string;
bracketAttr: string;
bracketParenAttr: string;
parenAttr: string;
onAttr: string;
bindAttr: string;
bindonAttr: string;
}
export interface ComponentInfo {
type: Type;
selector: string;
inputs: AttrProp[];
outputs: AttrProp[];
}
export function getComponentInfo(type: Type): ComponentInfo {
var resolvedMetadata: DirectiveMetadata = directiveResolver.resolve(type);
var selector = resolvedMetadata.selector;
if (!selector.match(COMPONENT_SELECTOR)) {
throw new Error('Only selectors matching element names are supported, got: ' + selector);
}
var selector = selector.replace(SKEWER_CASE, (all, letter: string) => letter.toUpperCase());
return {
type: type,
selector: selector,
inputs: parseFields(resolvedMetadata.inputs),
outputs: parseFields(resolvedMetadata.outputs)
};
}
export function parseFields(names: string[]): AttrProp[] {
var attrProps: AttrProp[] = [];
if (names) {<|fim▁hole|> var attr = (parts[1] || parts[0]).trim();
var capitalAttr = attr.charAt(0).toUpperCase() + attr.substr(1);
attrProps.push(<AttrProp>{
prop: prop,
attr: attr,
bracketAttr: `[${attr}]`,
parenAttr: `(${attr})`,
bracketParenAttr: `[(${attr})]`,
onAttr: `on${capitalAttr}`,
bindAttr: `bind${capitalAttr}`,
bindonAttr: `bindon${capitalAttr}`
});
}
}
return attrProps;
}<|fim▁end|> | for (var i = 0; i < names.length; i++) {
var parts = names[i].split(':');
var prop = parts[0].trim(); |
<|file_name|>one.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-gogo.
// source: combos/unmarshaler/one.proto
// DO NOT EDIT!
/*
Package one is a generated protocol buffer package.
It is generated from these files:
combos/unmarshaler/one.proto
It has these top-level messages:
Subby
SampleOneOf
*/
package one
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "github.com/gogo/protobuf/gogoproto"
import github_com_gogo_protobuf_protoc_gen_gogo_descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
import github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
import compress_gzip "compress/gzip"
import bytes "bytes"
import io_ioutil "io/ioutil"
import strings "strings"
import reflect "reflect"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
type Subby struct {
Sub string `protobuf:"bytes,1,opt,name=sub,proto3" json:"sub,omitempty"`
}
func (m *Subby) Reset() { *m = Subby{} }
func (*Subby) ProtoMessage() {}
func (*Subby) Descriptor() ([]byte, []int) { return fileDescriptorOne, []int{0} }
type SampleOneOf struct {
// Types that are valid to be assigned to TestOneof:
// *SampleOneOf_Field1
// *SampleOneOf_Field2
// *SampleOneOf_Field3
// *SampleOneOf_Field4
// *SampleOneOf_Field5
// *SampleOneOf_Field6
// *SampleOneOf_Field7
// *SampleOneOf_Field8
// *SampleOneOf_Field9
// *SampleOneOf_Field10
// *SampleOneOf_Field11
// *SampleOneOf_Field12
// *SampleOneOf_Field13
// *SampleOneOf_Field14
// *SampleOneOf_Field15
// *SampleOneOf_SubMessage
TestOneof isSampleOneOf_TestOneof `protobuf_oneof:"test_oneof"`
}
func (m *SampleOneOf) Reset() { *m = SampleOneOf{} }
func (*SampleOneOf) ProtoMessage() {}
func (*SampleOneOf) Descriptor() ([]byte, []int) { return fileDescriptorOne, []int{1} }
type isSampleOneOf_TestOneof interface {
isSampleOneOf_TestOneof()
Equal(interface{}) bool
VerboseEqual(interface{}) error
Size() int
}
type SampleOneOf_Field1 struct {
Field1 float64 `protobuf:"fixed64,1,opt,name=Field1,proto3,oneof"`
}
type SampleOneOf_Field2 struct {
Field2 float32 `protobuf:"fixed32,2,opt,name=Field2,proto3,oneof"`
}
type SampleOneOf_Field3 struct {
Field3 int32 `protobuf:"varint,3,opt,name=Field3,proto3,oneof"`
}
type SampleOneOf_Field4 struct {
Field4 int64 `protobuf:"varint,4,opt,name=Field4,proto3,oneof"`
}
type SampleOneOf_Field5 struct {
Field5 uint32 `protobuf:"varint,5,opt,name=Field5,proto3,oneof"`
}
type SampleOneOf_Field6 struct {
Field6 uint64 `protobuf:"varint,6,opt,name=Field6,proto3,oneof"`
}
type SampleOneOf_Field7 struct {
Field7 int32 `protobuf:"zigzag32,7,opt,name=Field7,proto3,oneof"`
}
type SampleOneOf_Field8 struct {
Field8 int64 `protobuf:"zigzag64,8,opt,name=Field8,proto3,oneof"`
}
type SampleOneOf_Field9 struct {
Field9 uint32 `protobuf:"fixed32,9,opt,name=Field9,proto3,oneof"`
}
type SampleOneOf_Field10 struct {
Field10 int32 `protobuf:"fixed32,10,opt,name=Field10,proto3,oneof"`
}
type SampleOneOf_Field11 struct {
Field11 uint64 `protobuf:"fixed64,11,opt,name=Field11,proto3,oneof"`
}
type SampleOneOf_Field12 struct {
Field12 int64 `protobuf:"fixed64,12,opt,name=Field12,proto3,oneof"`
}
type SampleOneOf_Field13 struct {
Field13 bool `protobuf:"varint,13,opt,name=Field13,proto3,oneof"`
}
type SampleOneOf_Field14 struct {
Field14 string `protobuf:"bytes,14,opt,name=Field14,proto3,oneof"`
}
type SampleOneOf_Field15 struct {
Field15 []byte `protobuf:"bytes,15,opt,name=Field15,proto3,oneof"`
}
type SampleOneOf_SubMessage struct {
SubMessage *Subby `protobuf:"bytes,16,opt,name=sub_message,json=subMessage,oneof"`
}
func (*SampleOneOf_Field1) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field2) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field3) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field4) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field5) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field6) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field7) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field8) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field9) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field10) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field11) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field12) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field13) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field14) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_Field15) isSampleOneOf_TestOneof() {}
func (*SampleOneOf_SubMessage) isSampleOneOf_TestOneof() {}
func (m *SampleOneOf) GetTestOneof() isSampleOneOf_TestOneof {
if m != nil {
return m.TestOneof
}
return nil
}
func (m *SampleOneOf) GetField1() float64 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field1); ok {
return x.Field1
}
return 0
}
func (m *SampleOneOf) GetField2() float32 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field2); ok {
return x.Field2
}
return 0
}
func (m *SampleOneOf) GetField3() int32 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field3); ok {
return x.Field3
}
return 0
}
func (m *SampleOneOf) GetField4() int64 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field4); ok {
return x.Field4
}
return 0
}
func (m *SampleOneOf) GetField5() uint32 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field5); ok {
return x.Field5
}
return 0
}
func (m *SampleOneOf) GetField6() uint64 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field6); ok {
return x.Field6
}
return 0
}
func (m *SampleOneOf) GetField7() int32 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field7); ok {
return x.Field7
}
return 0
}
func (m *SampleOneOf) GetField8() int64 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field8); ok {
return x.Field8
}
return 0
}
func (m *SampleOneOf) GetField9() uint32 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field9); ok {
return x.Field9
}
return 0
}
func (m *SampleOneOf) GetField10() int32 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field10); ok {
return x.Field10
}
return 0
}
func (m *SampleOneOf) GetField11() uint64 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field11); ok {
return x.Field11
}
return 0
}
func (m *SampleOneOf) GetField12() int64 {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field12); ok {
return x.Field12
}
return 0
}
func (m *SampleOneOf) GetField13() bool {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field13); ok {
return x.Field13
}
return false
}
func (m *SampleOneOf) GetField14() string {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field14); ok {
return x.Field14
}
return ""
}
func (m *SampleOneOf) GetField15() []byte {
if x, ok := m.GetTestOneof().(*SampleOneOf_Field15); ok {
return x.Field15
}
return nil
}
func (m *SampleOneOf) GetSubMessage() *Subby {
if x, ok := m.GetTestOneof().(*SampleOneOf_SubMessage); ok {
return x.SubMessage
}
return nil
}
// XXX_OneofFuncs is for the internal use of the proto package.
func (*SampleOneOf) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
return _SampleOneOf_OneofMarshaler, _SampleOneOf_OneofUnmarshaler, _SampleOneOf_OneofSizer, []interface{}{
(*SampleOneOf_Field1)(nil),
(*SampleOneOf_Field2)(nil),
(*SampleOneOf_Field3)(nil),
(*SampleOneOf_Field4)(nil),
(*SampleOneOf_Field5)(nil),
(*SampleOneOf_Field6)(nil),
(*SampleOneOf_Field7)(nil),
(*SampleOneOf_Field8)(nil),
(*SampleOneOf_Field9)(nil),
(*SampleOneOf_Field10)(nil),
(*SampleOneOf_Field11)(nil),
(*SampleOneOf_Field12)(nil),
(*SampleOneOf_Field13)(nil),
(*SampleOneOf_Field14)(nil),
(*SampleOneOf_Field15)(nil),
(*SampleOneOf_SubMessage)(nil),
}
}
func _SampleOneOf_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
m := msg.(*SampleOneOf)
// test_oneof
switch x := m.TestOneof.(type) {
case *SampleOneOf_Field1:
_ = b.EncodeVarint(1<<3 | proto.WireFixed64)
_ = b.EncodeFixed64(math.Float64bits(x.Field1))
case *SampleOneOf_Field2:
_ = b.EncodeVarint(2<<3 | proto.WireFixed32)
_ = b.EncodeFixed32(uint64(math.Float32bits(x.Field2)))
case *SampleOneOf_Field3:
_ = b.EncodeVarint(3<<3 | proto.WireVarint)
_ = b.EncodeVarint(uint64(x.Field3))
case *SampleOneOf_Field4:
_ = b.EncodeVarint(4<<3 | proto.WireVarint)
_ = b.EncodeVarint(uint64(x.Field4))
case *SampleOneOf_Field5:
_ = b.EncodeVarint(5<<3 | proto.WireVarint)
_ = b.EncodeVarint(uint64(x.Field5))
case *SampleOneOf_Field6:
_ = b.EncodeVarint(6<<3 | proto.WireVarint)
_ = b.EncodeVarint(uint64(x.Field6))
case *SampleOneOf_Field7:
_ = b.EncodeVarint(7<<3 | proto.WireVarint)
_ = b.EncodeZigzag32(uint64(x.Field7))
case *SampleOneOf_Field8:
_ = b.EncodeVarint(8<<3 | proto.WireVarint)
_ = b.EncodeZigzag64(uint64(x.Field8))
case *SampleOneOf_Field9:
_ = b.EncodeVarint(9<<3 | proto.WireFixed32)
_ = b.EncodeFixed32(uint64(x.Field9))
case *SampleOneOf_Field10:
_ = b.EncodeVarint(10<<3 | proto.WireFixed32)
_ = b.EncodeFixed32(uint64(x.Field10))
case *SampleOneOf_Field11:
_ = b.EncodeVarint(11<<3 | proto.WireFixed64)
_ = b.EncodeFixed64(uint64(x.Field11))
case *SampleOneOf_Field12:
_ = b.EncodeVarint(12<<3 | proto.WireFixed64)
_ = b.EncodeFixed64(uint64(x.Field12))
case *SampleOneOf_Field13:
t := uint64(0)
if x.Field13 {
t = 1
}
_ = b.EncodeVarint(13<<3 | proto.WireVarint)
_ = b.EncodeVarint(t)
case *SampleOneOf_Field14:
_ = b.EncodeVarint(14<<3 | proto.WireBytes)
_ = b.EncodeStringBytes(x.Field14)
case *SampleOneOf_Field15:
_ = b.EncodeVarint(15<<3 | proto.WireBytes)
_ = b.EncodeRawBytes(x.Field15)
case *SampleOneOf_SubMessage:
_ = b.EncodeVarint(16<<3 | proto.WireBytes)
if err := b.EncodeMessage(x.SubMessage); err != nil {
return err
}
case nil:
default:
return fmt.Errorf("SampleOneOf.TestOneof has unexpected type %T", x)
}
return nil
}
func _SampleOneOf_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
m := msg.(*SampleOneOf)
switch tag {
case 1: // test_oneof.Field1
if wire != proto.WireFixed64 {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeFixed64()
m.TestOneof = &SampleOneOf_Field1{math.Float64frombits(x)}
return true, err
case 2: // test_oneof.Field2
if wire != proto.WireFixed32 {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeFixed32()
m.TestOneof = &SampleOneOf_Field2{math.Float32frombits(uint32(x))}
return true, err
case 3: // test_oneof.Field3
if wire != proto.WireVarint {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeVarint()
m.TestOneof = &SampleOneOf_Field3{int32(x)}
return true, err
case 4: // test_oneof.Field4
if wire != proto.WireVarint {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeVarint()
m.TestOneof = &SampleOneOf_Field4{int64(x)}
return true, err
case 5: // test_oneof.Field5
if wire != proto.WireVarint {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeVarint()
m.TestOneof = &SampleOneOf_Field5{uint32(x)}
return true, err
case 6: // test_oneof.Field6
if wire != proto.WireVarint {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeVarint()
m.TestOneof = &SampleOneOf_Field6{x}
return true, err
case 7: // test_oneof.Field7
if wire != proto.WireVarint {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeZigzag32()
m.TestOneof = &SampleOneOf_Field7{int32(x)}
return true, err
case 8: // test_oneof.Field8
if wire != proto.WireVarint {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeZigzag64()
m.TestOneof = &SampleOneOf_Field8{int64(x)}
return true, err
case 9: // test_oneof.Field9
if wire != proto.WireFixed32 {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeFixed32()
m.TestOneof = &SampleOneOf_Field9{uint32(x)}
return true, err
case 10: // test_oneof.Field10
if wire != proto.WireFixed32 {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeFixed32()
m.TestOneof = &SampleOneOf_Field10{int32(x)}
return true, err
case 11: // test_oneof.Field11
if wire != proto.WireFixed64 {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeFixed64()
m.TestOneof = &SampleOneOf_Field11{x}
return true, err
case 12: // test_oneof.Field12
if wire != proto.WireFixed64 {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeFixed64()
m.TestOneof = &SampleOneOf_Field12{int64(x)}
return true, err
case 13: // test_oneof.Field13
if wire != proto.WireVarint {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeVarint()
m.TestOneof = &SampleOneOf_Field13{x != 0}
return true, err
case 14: // test_oneof.Field14
if wire != proto.WireBytes {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeStringBytes()
m.TestOneof = &SampleOneOf_Field14{x}
return true, err
case 15: // test_oneof.Field15
if wire != proto.WireBytes {
return true, proto.ErrInternalBadWireType
}
x, err := b.DecodeRawBytes(true)
m.TestOneof = &SampleOneOf_Field15{x}
return true, err
case 16: // test_oneof.sub_message
if wire != proto.WireBytes {
return true, proto.ErrInternalBadWireType
}
msg := new(Subby)
err := b.DecodeMessage(msg)
m.TestOneof = &SampleOneOf_SubMessage{msg}
return true, err
default:
return false, nil
}
}
func _SampleOneOf_OneofSizer(msg proto.Message) (n int) {
m := msg.(*SampleOneOf)
// test_oneof
switch x := m.TestOneof.(type) {
case *SampleOneOf_Field1:
n += proto.SizeVarint(1<<3 | proto.WireFixed64)
n += 8
case *SampleOneOf_Field2:
n += proto.SizeVarint(2<<3 | proto.WireFixed32)
n += 4
case *SampleOneOf_Field3:
n += proto.SizeVarint(3<<3 | proto.WireVarint)
n += proto.SizeVarint(uint64(x.Field3))
case *SampleOneOf_Field4:
n += proto.SizeVarint(4<<3 | proto.WireVarint)
n += proto.SizeVarint(uint64(x.Field4))
case *SampleOneOf_Field5:
n += proto.SizeVarint(5<<3 | proto.WireVarint)
n += proto.SizeVarint(uint64(x.Field5))
case *SampleOneOf_Field6:
n += proto.SizeVarint(6<<3 | proto.WireVarint)
n += proto.SizeVarint(uint64(x.Field6))
case *SampleOneOf_Field7:
n += proto.SizeVarint(7<<3 | proto.WireVarint)
n += proto.SizeVarint(uint64((uint32(x.Field7) << 1) ^ uint32((int32(x.Field7) >> 31))))
case *SampleOneOf_Field8:
n += proto.SizeVarint(8<<3 | proto.WireVarint)
n += proto.SizeVarint(uint64(uint64(x.Field8<<1) ^ uint64((int64(x.Field8) >> 63))))
case *SampleOneOf_Field9:
n += proto.SizeVarint(9<<3 | proto.WireFixed32)
n += 4
case *SampleOneOf_Field10:
n += proto.SizeVarint(10<<3 | proto.WireFixed32)
n += 4
case *SampleOneOf_Field11:
n += proto.SizeVarint(11<<3 | proto.WireFixed64)
n += 8
case *SampleOneOf_Field12:
n += proto.SizeVarint(12<<3 | proto.WireFixed64)
n += 8
case *SampleOneOf_Field13:
n += proto.SizeVarint(13<<3 | proto.WireVarint)
n += 1
case *SampleOneOf_Field14:
n += proto.SizeVarint(14<<3 | proto.WireBytes)
n += proto.SizeVarint(uint64(len(x.Field14)))
n += len(x.Field14)
case *SampleOneOf_Field15:
n += proto.SizeVarint(15<<3 | proto.WireBytes)
n += proto.SizeVarint(uint64(len(x.Field15)))
n += len(x.Field15)
case *SampleOneOf_SubMessage:
s := proto.Size(x.SubMessage)
n += proto.SizeVarint(16<<3 | proto.WireBytes)
n += proto.SizeVarint(uint64(s))
n += s
case nil:
default:
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
}
return n
}
func init() {
proto.RegisterType((*Subby)(nil), "one.Subby")
proto.RegisterType((*SampleOneOf)(nil), "one.SampleOneOf")
}
func (this *Subby) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
return OneDescription()
}
func (this *SampleOneOf) Description() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
return OneDescription()
}
func OneDescription() (desc *github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet) {
d := &github_com_gogo_protobuf_protoc_gen_gogo_descriptor.FileDescriptorSet{}
var gzipped = []byte{
// 3748 bytes of a gzipped FileDescriptorSet
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x5a, 0x5b, 0x6c, 0xe4, 0xe6,
0x75, 0x16, 0xe7, 0xa6, 0x99, 0x33, 0xa3, 0x11, 0xf5, 0x4b, 0x5e, 0x73, 0x65, 0x7b, 0x56, 0xab,
0xd8, 0xb1, 0x6c, 0xd7, 0x5a, 0x5b, 0x97, 0xbd, 0xcc, 0x36, 0x31, 0x46, 0xd2, 0x58, 0xab, 0x85,
0x6e, 0xe1, 0x48, 0x89, 0x9d, 0x3c, 0x10, 0x1c, 0xce, 0x3f, 0x23, 0xee, 0x72, 0xc8, 0x29, 0xc9,
0x59, 0x5b, 0x7e, 0xda, 0xc0, 0xbd, 0x20, 0x08, 0x7a, 0x2f, 0xd0, 0xc4, 0x71, 0xdc, 0x34, 0x40,
0xeb, 0x34, 0xbd, 0x25, 0xbd, 0xa4, 0x41, 0x9f, 0xfa, 0x92, 0xd6, 0x4f, 0x45, 0xf2, 0xd6, 0x87,
0x3c, 0x64, 0x55, 0x03, 0x4d, 0x5b, 0xb7, 0x75, 0x1b, 0x03, 0x0d, 0xb0, 0x2f, 0xc5, 0x7f, 0x23,
0x39, 0x17, 0x2d, 0x47, 0x01, 0x12, 0xf7, 0x49, 0xe2, 0x39, 0xe7, 0xfb, 0x78, 0x78, 0xfe, 0xf3,
0x9f, 0x73, 0xf8, 0x0f, 0xe1, 0xb3, 0x2b, 0x30, 0xd7, 0x72, 0x9c, 0x96, 0x85, 0x2f, 0x75, 0x5c,
0xc7, 0x77, 0xea, 0xdd, 0xe6, 0xa5, 0x06, 0xf6, 0x0c, 0xd7, 0xec, 0xf8, 0x8e, 0xbb, 0x48, 0x65,
0x68, 0x92, 0x59, 0x2c, 0x0a, 0x8b, 0xf9, 0x1d, 0x98, 0x7a, 0xd1, 0xb4, 0xf0, 0x46, 0x60, 0x58,
0xc3, 0x3e, 0xba, 0x0a, 0xa9, 0xa6, 0x69, 0x61, 0x45, 0x9a, 0x4b, 0x2e, 0xe4, 0x97, 0x1e, 0x5f,
0xec, 0x03, 0x2d, 0xf6, 0x22, 0xf6, 0x89, 0x58, 0xa5, 0x88, 0xf9, 0x77, 0x53, 0x30, 0x3d, 0x44,
0x8b, 0x10, 0xa4, 0x6c, 0xbd, 0x4d, 0x18, 0xa5, 0x85, 0x9c, 0x4a, 0xff, 0x47, 0x0a, 0x8c, 0x77,
0x74, 0xe3, 0xb6, 0xde, 0xc2, 0x4a, 0x82, 0x8a, 0xc5, 0x25, 0x2a, 0x01, 0x34, 0x70, 0x07, 0xdb,
0x0d, 0x6c, 0x1b, 0xc7, 0x4a, 0x72, 0x2e, 0xb9, 0x90, 0x53, 0x23, 0x12, 0xf4, 0x0c, 0x4c, 0x75,
0xba, 0x75, 0xcb, 0x34, 0xb4, 0x88, 0x19, 0xcc, 0x25, 0x17, 0xd2, 0xaa, 0xcc, 0x14, 0x1b, 0xa1,
0xf1, 0x93, 0x30, 0xf9, 0x0a, 0xd6, 0x6f, 0x47, 0x4d, 0xf3, 0xd4, 0xb4, 0x48, 0xc4, 0x11, 0xc3,
0x75, 0x28, 0xb4, 0xb1, 0xe7, 0xe9, 0x2d, 0xac, 0xf9, 0xc7, 0x1d, 0xac, 0xa4, 0xe8, 0xd3, 0xcf,
0x0d, 0x3c, 0x7d, 0xff, 0x93, 0xe7, 0x39, 0xea, 0xe0, 0xb8, 0x83, 0x51, 0x05, 0x72, 0xd8, 0xee,
0xb6, 0x19, 0x43, 0xfa, 0x94, 0xf8, 0x55, 0xed, 0x6e, 0xbb, 0x9f, 0x25, 0x4b, 0x60, 0x9c, 0x62,
0xdc, 0xc3, 0xee, 0x1d, 0xd3, 0xc0, 0x4a, 0x86, 0x12, 0x3c, 0x39, 0x40, 0x50, 0x63, 0xfa, 0x7e,
0x0e, 0x81, 0x43, 0xeb, 0x90, 0xc3, 0xaf, 0xfa, 0xd8, 0xf6, 0x4c, 0xc7, 0x56, 0xc6, 0x29, 0xc9,
0x13, 0x43, 0x56, 0x11, 0x5b, 0x8d, 0x7e, 0x8a, 0x10, 0x87, 0x2e, 0xc3, 0xb8, 0xd3, 0xf1, 0x4d,
0xc7, 0xf6, 0x94, 0xec, 0x9c, 0xb4, 0x90, 0x5f, 0x7a, 0x74, 0x68, 0x22, 0xec, 0x31, 0x1b, 0x55,
0x18, 0xa3, 0x2d, 0x90, 0x3d, 0xa7, 0xeb, 0x1a, 0x58, 0x33, 0x9c, 0x06, 0xd6, 0x4c, 0xbb, 0xe9,
0x28, 0x39, 0x4a, 0x70, 0x61, 0xf0, 0x41, 0xa8, 0xe1, 0xba, 0xd3, 0xc0, 0x5b, 0x76, 0xd3, 0x51,
0x8b, 0x5e, 0xcf, 0x35, 0x3a, 0x07, 0x19, 0xef, 0xd8, 0xf6, 0xf5, 0x57, 0x95, 0x02, 0xcd, 0x10,
0x7e, 0x35, 0xff, 0xbf, 0x69, 0x98, 0x1c, 0x25, 0xc5, 0xae, 0x43, 0xba, 0x49, 0x9e, 0x52, 0x49,
0x9c, 0x25, 0x06, 0x0c, 0xd3, 0x1b, 0xc4, 0xcc, 0x4f, 0x18, 0xc4, 0x0a, 0xe4, 0x6d, 0xec, 0xf9,
0xb8, 0xc1, 0x32, 0x22, 0x39, 0x62, 0x4e, 0x01, 0x03, 0x0d, 0xa6, 0x54, 0xea, 0x27, 0x4a, 0xa9,
0x97, 0x60, 0x32, 0x70, 0x49, 0x73, 0x75, 0xbb, 0x25, 0x72, 0xf3, 0x52, 0x9c, 0x27, 0x8b, 0x55,
0x81, 0x53, 0x09, 0x4c, 0x2d, 0xe2, 0x9e, 0x6b, 0xb4, 0x01, 0xe0, 0xd8, 0xd8, 0x69, 0x6a, 0x0d,
0x6c, 0x58, 0x4a, 0xf6, 0x94, 0x28, 0xed, 0x11, 0x93, 0x81, 0x28, 0x39, 0x4c, 0x6a, 0x58, 0xe8,
0x5a, 0x98, 0x6a, 0xe3, 0xa7, 0x64, 0xca, 0x0e, 0xdb, 0x64, 0x03, 0xd9, 0x76, 0x08, 0x45, 0x17,
0x93, 0xbc, 0xc7, 0x0d, 0xfe, 0x64, 0x39, 0xea, 0xc4, 0x62, 0xec, 0x93, 0xa9, 0x1c, 0xc6, 0x1e,
0x6c, 0xc2, 0x8d, 0x5e, 0xa2, 0x8f, 0x40, 0x20, 0xd0, 0x68, 0x5a, 0x01, 0xad, 0x42, 0x05, 0x21,
0xdc, 0xd5, 0xdb, 0x78, 0xf6, 0x2a, 0x14, 0x7b, 0xc3, 0x83, 0x66, 0x20, 0xed, 0xf9, 0xba, 0xeb,
0xd3, 0x2c, 0x4c, 0xab, 0xec, 0x02, 0xc9, 0x90, 0xc4, 0x76, 0x83, 0x56, 0xb9, 0xb4, 0x4a, 0xfe,
0x9d, 0xbd, 0x02, 0x13, 0x3d, 0xb7, 0x1f, 0x15, 0x38, 0xff, 0x85, 0x0c, 0xcc, 0x0c, 0xcb, 0xb9,
0xa1, 0xe9, 0x7f, 0x0e, 0x32, 0x76, 0xb7, 0x5d, 0xc7, 0xae, 0x92, 0xa4, 0x0c, 0xfc, 0x0a, 0x55,
0x20, 0x6d, 0xe9, 0x75, 0x6c, 0x29, 0xa9, 0x39, 0x69, 0xa1, 0xb8, 0xf4, 0xcc, 0x48, 0x59, 0xbd,
0xb8, 0x4d, 0x20, 0x2a, 0x43, 0xa2, 0x8f, 0x43, 0x8a, 0x97, 0x38, 0xc2, 0xf0, 0xf4, 0x68, 0x0c,
0x24, 0x17, 0x55, 0x8a, 0x43, 0x8f, 0x40, 0x8e, 0xfc, 0x65, 0xb1, 0xcd, 0x50, 0x9f, 0xb3, 0x44,
0x40, 0xe2, 0x8a, 0x66, 0x21, 0x4b, 0xd3, 0xac, 0x81, 0x45, 0x6b, 0x08, 0xae, 0xc9, 0xc2, 0x34,
0x70, 0x53, 0xef, 0x5a, 0xbe, 0x76, 0x47, 0xb7, 0xba, 0x98, 0x26, 0x4c, 0x4e, 0x2d, 0x70, 0xe1,
0x27, 0x89, 0x0c, 0x5d, 0x80, 0x3c, 0xcb, 0x4a, 0xd3, 0x6e, 0xe0, 0x57, 0x69, 0xf5, 0x49, 0xab,
0x2c, 0x51, 0xb7, 0x88, 0x84, 0xdc, 0xfe, 0x96, 0xe7, 0xd8, 0x62, 0x69, 0xe9, 0x2d, 0x88, 0x80,
0xde, 0xfe, 0x4a, 0x7f, 0xe1, 0x7b, 0x6c, 0xf8, 0xe3, 0xf5, 0xe7, 0xe2, 0xfc, 0xb7, 0x12, 0x90,
0xa2, 0xfb, 0x6d, 0x12, 0xf2, 0x07, 0x2f, 0xef, 0x57, 0xb5, 0x8d, 0xbd, 0xc3, 0xb5, 0xed, 0xaa,
0x2c, 0xa1, 0x22, 0x00, 0x15, 0xbc, 0xb8, 0xbd, 0x57, 0x39, 0x90, 0x13, 0xc1, 0xf5, 0xd6, 0xee,
0xc1, 0xe5, 0x15, 0x39, 0x19, 0x00, 0x0e, 0x99, 0x20, 0x15, 0x35, 0x58, 0x5e, 0x92, 0xd3, 0x48,
0x86, 0x02, 0x23, 0xd8, 0x7a, 0xa9, 0xba, 0x71, 0x79, 0x45, 0xce, 0xf4, 0x4a, 0x96, 0x97, 0xe4,
0x71, 0x34, 0x01, 0x39, 0x2a, 0x59, 0xdb, 0xdb, 0xdb, 0x96, 0xb3, 0x01, 0x67, 0xed, 0x40, 0xdd,
0xda, 0xdd, 0x94, 0x73, 0x01, 0xe7, 0xa6, 0xba, 0x77, 0xb8, 0x2f, 0x43, 0xc0, 0xb0, 0x53, 0xad,
0xd5, 0x2a, 0x9b, 0x55, 0x39, 0x1f, 0x58, 0xac, 0xbd, 0x7c, 0x50, 0xad, 0xc9, 0x85, 0x1e, 0xb7,
0x96, 0x97, 0xe4, 0x89, 0xe0, 0x16, 0xd5, 0xdd, 0xc3, 0x1d, 0xb9, 0x88, 0xa6, 0x60, 0x82, 0xdd,
0x42, 0x38, 0x31, 0xd9, 0x27, 0xba, 0xbc, 0x22, 0xcb, 0xa1, 0x23, 0x8c, 0x65, 0xaa, 0x47, 0x70,
0x79, 0x45, 0x46, 0xf3, 0xeb, 0x90, 0xa6, 0xd9, 0x85, 0x10, 0x14, 0xb7, 0x2b, 0x6b, 0xd5, 0x6d,
0x6d, 0x6f, 0xff, 0x60, 0x6b, 0x6f, 0xb7, 0xb2, 0x2d, 0x4b, 0xa1, 0x4c, 0xad, 0x7e, 0xe2, 0x70,
0x4b, 0xad, 0x6e, 0xc8, 0x89, 0xa8, 0x6c, 0xbf, 0x5a, 0x39, 0xa8, 0x6e, 0xc8, 0xc9, 0x79, 0x03,
0x66, 0x86, 0xd5, 0x99, 0xa1, 0x3b, 0x23, 0xb2, 0xc4, 0x89, 0x53, 0x96, 0x98, 0x72, 0x0d, 0x2c,
0xf1, 0x57, 0x25, 0x98, 0x1e, 0x52, 0x6b, 0x87, 0xde, 0xe4, 0x05, 0x48, 0xb3, 0x14, 0x65, 0xdd,
0xe7, 0xa9, 0xa1, 0x45, 0x9b, 0x26, 0xec, 0x40, 0x07, 0xa2, 0xb8, 0x68, 0x07, 0x4e, 0x9e, 0xd2,
0x81, 0x09, 0xc5, 0x80, 0x93, 0xaf, 0x4b, 0xa0, 0x9c, 0xc6, 0x1d, 0x53, 0x28, 0x12, 0x3d, 0x85,
0xe2, 0x7a, 0xbf, 0x03, 0x17, 0x4f, 0x7f, 0x86, 0x01, 0x2f, 0xde, 0x96, 0xe0, 0xdc, 0xf0, 0x41,
0x65, 0xa8, 0x0f, 0x1f, 0x87, 0x4c, 0x1b, 0xfb, 0x47, 0x8e, 0x68, 0xd6, 0x1f, 0x1d, 0xd2, 0x02,
0x88, 0xba, 0x3f, 0x56, 0x1c, 0x15, 0xed, 0x21, 0xc9, 0xd3, 0xa6, 0x0d, 0xe6, 0xcd, 0x80, 0xa7,
0x9f, 0x4b, 0xc0, 0x43, 0x43, 0xc9, 0x87, 0x3a, 0xfa, 0x18, 0x80, 0x69, 0x77, 0xba, 0x3e, 0x6b,
0xc8, 0xac, 0x3e, 0xe5, 0xa8, 0x84, 0xee, 0x7d, 0x52, 0x7b, 0xba, 0x7e, 0xa0, 0x4f, 0x52, 0x3d,
0x30, 0x11, 0x35, 0xb8, 0x1a, 0x3a, 0x9a, 0xa2, 0x8e, 0x96, 0x4e, 0x79, 0xd2, 0x81, 0x5e, 0xf7,
0x1c, 0xc8, 0x86, 0x65, 0x62, 0xdb, 0xd7, 0x3c, 0xdf, 0xc5, 0x7a, 0xdb, 0xb4, 0x5b, 0xb4, 0x00,
0x67, 0xcb, 0xe9, 0xa6, 0x6e, 0x79, 0x58, 0x9d, 0x64, 0xea, 0x9a, 0xd0, 0x12, 0x04, 0xed, 0x32,
0x6e, 0x04, 0x91, 0xe9, 0x41, 0x30, 0x75, 0x80, 0x98, 0xff, 0xfc, 0x38, 0xe4, 0x23, 0x63, 0x1d,
0xba, 0x08, 0x85, 0x5b, 0xfa, 0x1d, 0x5d, 0x13, 0xa3, 0x3a, 0x8b, 0x44, 0x9e, 0xc8, 0xf6, 0xf9,
0xb8, 0xfe, 0x1c, 0xcc, 0x50, 0x13, 0xa7, 0xeb, 0x63, 0x57, 0x33, 0x2c, 0xdd, 0xf3, 0x68, 0xd0,
0xb2, 0xd4, 0x14, 0x11, 0xdd, 0x1e, 0x51, 0xad, 0x0b, 0x0d, 0x5a, 0x85, 0x69, 0x8a, 0x68, 0x77,
0x2d, 0xdf, 0xec, 0x58, 0x58, 0x23, 0x2f, 0x0f, 0x1e, 0x2d, 0xc4, 0x81, 0x67, 0x53, 0xc4, 0x62,
0x87, 0x1b, 0x10, 0x8f, 0x3c, 0xb4, 0x09, 0x8f, 0x51, 0x58, 0x0b, 0xdb, 0xd8, 0xd5, 0x7d, 0xac,
0xe1, 0x5f, 0xe8, 0xea, 0x96, 0xa7, 0xe9, 0x76, 0x43, 0x3b, 0xd2, 0xbd, 0x23, 0x65, 0x26, 0x4a,
0x70, 0x9e, 0xd8, 0x6e, 0x72, 0xd3, 0x2a, 0xb5, 0xac, 0xd8, 0x8d, 0x1b, 0xba, 0x77, 0x84, 0xca,
0x70, 0x8e, 0x12, 0x79, 0xbe, 0x6b, 0xda, 0x2d, 0xcd, 0x38, 0xc2, 0xc6, 0x6d, 0xad, 0xeb, 0x37,
0xaf, 0x2a, 0x8f, 0x44, 0x19, 0xa8, 0x93, 0x35, 0x6a, 0xb3, 0x4e, 0x4c, 0x0e, 0xfd, 0xe6, 0x55,
0x54, 0x83, 0x02, 0x59, 0x8f, 0xb6, 0xf9, 0x1a, 0xd6, 0x9a, 0x8e, 0x4b, 0x9b, 0x4b, 0x71, 0xc8,
0xe6, 0x8e, 0x04, 0x71, 0x71, 0x8f, 0x03, 0x76, 0x9c, 0x06, 0x2e, 0xa7, 0x6b, 0xfb, 0xd5, 0xea,
0x86, 0x9a, 0x17, 0x2c, 0x2f, 0x3a, 0x2e, 0xc9, 0xa9, 0x96, 0x13, 0xc4, 0x38, 0xcf, 0x72, 0xaa,
0xe5, 0x88, 0x08, 0xaf, 0xc2, 0xb4, 0x61, 0xb0, 0xc7, 0x36, 0x0d, 0x8d, 0x4f, 0xf9, 0x9e, 0x22,
0xf7, 0xc4, 0xcb, 0x30, 0x36, 0x99, 0x01, 0x4f, 0x73, 0x0f, 0x5d, 0x83, 0x87, 0xc2, 0x78, 0x45,
0x81, 0x53, 0x03, 0x4f, 0xd9, 0x0f, 0x5d, 0x85, 0xe9, 0xce, 0xf1, 0x20, 0x10, 0xf5, 0xdc, 0xb1,
0x73, 0xdc, 0x0f, 0x7b, 0x82, 0xbe, 0xb9, 0xb9, 0xd8, 0xd0, 0x7d, 0xdc, 0x50, 0x1e, 0x8e, 0x5a,
0x47, 0x14, 0xe8, 0x12, 0xc8, 0x86, 0xa1, 0x61, 0x5b, 0xaf, 0x5b, 0x58, 0xd3, 0x5d, 0x6c, 0xeb,
0x9e, 0x72, 0x21, 0x6a, 0x5c, 0x34, 0x8c, 0x2a, 0xd5, 0x56, 0xa8, 0x12, 0x3d, 0x0d, 0x53, 0x4e,
0xfd, 0x96, 0xc1, 0x92, 0x4b, 0xeb, 0xb8, 0xb8, 0x69, 0xbe, 0xaa, 0x3c, 0x4e, 0xc3, 0x34, 0x49,
0x14, 0x34, 0xb5, 0xf6, 0xa9, 0x18, 0x3d, 0x05, 0xb2, 0xe1, 0x1d, 0xe9, 0x6e, 0x87, 0x76, 0x77,
0xaf, 0xa3, 0x1b, 0x58, 0x79, 0x82, 0x99, 0x32, 0xf9, 0xae, 0x10, 0xa3, 0x97, 0x60, 0xa6, 0x6b,
0x9b, 0xb6, 0x8f, 0xdd, 0x8e, 0x8b, 0xc9, 0x90, 0xce, 0x76, 0x9a, 0xf2, 0x2f, 0xe3, 0xa7, 0x8c,
0xd9, 0x87, 0x51, 0x6b, 0xb6, 0xba, 0xea, 0x74, 0x77, 0x50, 0x38, 0x5f, 0x86, 0x42, 0x74, 0xd1,
0x51, 0x0e, 0xd8, 0xb2, 0xcb, 0x12, 0xe9, 0xa1, 0xeb, 0x7b, 0x1b, 0xa4, 0xfb, 0x7d, 0xba, 0x2a,
0x27, 0x48, 0x17, 0xde, 0xde, 0x3a, 0xa8, 0x6a, 0xea, 0xe1, 0xee, 0xc1, 0xd6, 0x4e, 0x55, 0x4e,
0x3e, 0x9d, 0xcb, 0xfe, 0x70, 0x5c, 0xbe, 0x7b, 0xf7, 0xee, 0xdd, 0xc4, 0xfc, 0x77, 0x12, 0x50,
0xec, 0x9d, 0x7c, 0xd1, 0xcf, 0xc3, 0xc3, 0xe2, 0x35, 0xd5, 0xc3, 0xbe, 0xf6, 0x8a, 0xe9, 0xd2,
0x3c, 0x6c, 0xeb, 0x6c, 0x76, 0x0c, 0x42, 0x38, 0xc3, 0xad, 0x6a, 0xd8, 0xff, 0x94, 0xe9, 0x92,
0x2c, 0x6b, 0xeb, 0x3e, 0xda, 0x86, 0x0b, 0xb6, 0xa3, 0x79, 0xbe, 0x6e, 0x37, 0x74, 0xb7, 0xa1,
0x85, 0x07, 0x04, 0x9a, 0x6e, 0x18, 0xd8, 0xf3, 0x1c, 0xd6, 0x02, 0x02, 0x96, 0x47, 0x6d, 0xa7,
0xc6, 0x8d, 0xc3, 0xda, 0x58, 0xe1, 0xa6, 0x7d, 0xcb, 0x9d, 0x3c, 0x6d, 0xb9, 0x1f, 0x81, 0x5c,
0x5b, 0xef, 0x68, 0xd8, 0xf6, 0xdd, 0x63, 0x3a, 0xaf, 0x65, 0xd5, 0x6c, 0x5b, 0xef, 0x54, 0xc9,
0xf5, 0x4f, 0x6f, 0x0d, 0xa2, 0x71, 0xfc, 0x7e, 0x12, 0x0a, 0xd1, 0x99, 0x8d, 0x8c, 0xc0, 0x06,
0xad, 0xcf, 0x12, 0xdd, 0xbe, 0x1f, 0x79, 0xe0, 0x84, 0xb7, 0xb8, 0x4e, 0x0a, 0x77, 0x39, 0xc3,
0x26, 0x29, 0x95, 0x21, 0x49, 0xd3, 0x24, 0x1b, 0x16, 0xb3, 0xf9, 0x3c, 0xab, 0xf2, 0x2b, 0xb4,
0x09, 0x99, 0x5b, 0x1e, 0xe5, 0xce, 0x50, 0xee, 0xc7, 0x1f, 0xcc, 0x7d, 0xb3, 0x46, 0xc9, 0x73,
0x37, 0x6b, 0xda, 0xee, 0x9e, 0xba, 0x53, 0xd9, 0x56, 0x39, 0x1c, 0x9d, 0x87, 0x94, 0xa5, 0xbf,
0x76, 0xdc, 0x5b, 0xe2, 0xa9, 0x68, 0xd4, 0xc0, 0x9f, 0x87, 0xd4, 0x2b, 0x58, 0xbf, 0xdd, 0x5b,
0x58, 0xa9, 0xe8, 0xa7, 0x98, 0xfa, 0x97, 0x20, 0x4d, 0xe3, 0x85, 0x00, 0x78, 0xc4, 0xe4, 0x31,
0x94, 0x85, 0xd4, 0xfa, 0x9e, 0x4a, 0xd2, 0x5f, 0x86, 0x02, 0x93, 0x6a, 0xfb, 0x5b, 0xd5, 0xf5,
0xaa, 0x9c, 0x98, 0x5f, 0x85, 0x0c, 0x0b, 0x02, 0xd9, 0x1a, 0x41, 0x18, 0xe4, 0x31, 0x7e, 0xc9,
0x39, 0x24, 0xa1, 0x3d, 0xdc, 0x59, 0xab, 0xaa, 0x72, 0x22, 0xba, 0xbc, 0x1e, 0x14, 0xa2, 0xe3,
0xda, 0xcf, 0x26, 0xa7, 0xfe, 0x56, 0x82, 0x7c, 0x64, 0xfc, 0x22, 0x8d, 0x5f, 0xb7, 0x2c, 0xe7,
0x15, 0x4d, 0xb7, 0x4c, 0xdd, 0xe3, 0x49, 0x01, 0x54, 0x54, 0x21, 0x92, 0x51, 0x17, 0xed, 0x67,
0xe2, 0xfc, 0x5b, 0x12, 0xc8, 0xfd, 0xa3, 0x5b, 0x9f, 0x83, 0xd2, 0x87, 0xea, 0xe0, 0x9b, 0x12,
0x14, 0x7b, 0xe7, 0xb5, 0x3e, 0xf7, 0x2e, 0x7e, 0xa8, 0xee, 0x7d, 0x49, 0x82, 0x89, 0x9e, 0x29,
0xed, 0xff, 0x95, 0x77, 0x6f, 0x24, 0x61, 0x7a, 0x08, 0x0e, 0x55, 0xf8, 0x38, 0xcb, 0x26, 0xec,
0x67, 0x47, 0xb9, 0xd7, 0x22, 0xe9, 0x96, 0xfb, 0xba, 0xeb, 0xf3, 0xe9, 0xf7, 0x29, 0x90, 0xcd,
0x06, 0xb6, 0x7d, 0xb3, 0x69, 0x62, 0x97, 0xbf, 0x82, 0xb3, 0x19, 0x77, 0x32, 0x94, 0xb3, 0xb7,
0xf0, 0x9f, 0x03, 0xd4, 0x71, 0x3c, 0xd3, 0x37, 0xef, 0x60, 0xcd, 0xb4, 0xc5, 0xfb, 0x3a, 0x99,
0x79, 0x53, 0xaa, 0x2c, 0x34, 0x5b, 0xb6, 0x1f, 0x58, 0xdb, 0xb8, 0xa5, 0xf7, 0x59, 0x93, 0xda,
0x97, 0x54, 0x65, 0xa1, 0x09, 0xac, 0x2f, 0x42, 0xa1, 0xe1, 0x74, 0xc9, 0xf8, 0xc0, 0xec, 0x48,
0xa9, 0x95, 0xd4, 0x3c, 0x93, 0x05, 0x26, 0x7c, 0xbe, 0x0b, 0x0f, 0x0a, 0x0a, 0x6a, 0x9e, 0xc9,
0x98, 0xc9, 0x93, 0x30, 0xa9, 0xb7, 0x5a, 0x2e, 0x21, 0x17, 0x44, 0x6c, 0x68, 0x2d, 0x06, 0x62,
0x6a, 0x38, 0x7b, 0x13, 0xb2, 0x22, 0x0e, 0xa4, 0x9b, 0x91, 0x48, 0x68, 0x1d, 0x76, 0x5c, 0x93,
0x58, 0xc8, 0xa9, 0x59, 0x5b, 0x28, 0x2f, 0x42, 0xc1, 0xf4, 0xb4, 0xf0, 0xdc, 0x30, 0x31, 0x97,
0x58, 0xc8, 0xaa, 0x79, 0xd3, 0x0b, 0x0e, 0x8a, 0xe6, 0xdf, 0x4e, 0x40, 0xb1, 0xf7, 0xdc, 0x13,
0x6d, 0x40, 0xd6, 0x72, 0x0c, 0x9d, 0x26, 0x02, 0x3b, 0x74, 0x5f, 0x88, 0x39, 0x2a, 0x5d, 0xdc,
0xe6, 0xf6, 0x6a, 0x80, 0x9c, 0xfd, 0x47, 0x09, 0xb2, 0x42, 0x8c, 0xce, 0x41, 0xaa, 0xa3, 0xfb,
0x47, 0x94, 0x2e, 0xbd, 0x96, 0x90, 0x25, 0x95, 0x5e, 0x13, 0xb9, 0xd7, 0xd1, 0x6d, 0x9a, 0x02,
0x5c, 0x4e, 0xae, 0xc9, 0xba, 0x5a, 0x58, 0x6f, 0xd0, 0x71, 0xd8, 0x69, 0xb7, 0xb1, 0xed, 0x7b,
0x62, 0x5d, 0xb9, 0x7c, 0x9d, 0x8b, 0xd1, 0x33, 0x30, 0xe5, 0xbb, 0xba, 0x69, 0xf5, 0xd8, 0xa6,
0xa8, 0xad, 0x2c, 0x14, 0x81, 0x71, 0x19, 0xce, 0x0b, 0xde, 0x06, 0xf6, 0x75, 0xe3, 0x08, 0x37,
0x42, 0x50, 0x86, 0x1e, 0xaa, 0x3d, 0xcc, 0x0d, 0x36, 0xb8, 0x5e, 0x60, 0xe7, 0xbf, 0x27, 0xc1,
0x94, 0x18, 0xe0, 0x1b, 0x41, 0xb0, 0x76, 0x00, 0x74, 0xdb, 0x76, 0xfc, 0x68, 0xb8, 0x06, 0x53,
0x79, 0x00, 0xb7, 0x58, 0x09, 0x40, 0x6a, 0x84, 0x60, 0xb6, 0x0d, 0x10, 0x6a, 0x4e, 0x0d, 0xdb,
0x05, 0xc8, 0xf3, 0x43, 0x6d, 0xfa, 0xcb, 0x08, 0x7b, 0xeb, 0x03, 0x26, 0x22, 0x93, 0x3e, 0x9a,
0x81, 0x74, 0x1d, 0xb7, 0x4c, 0x9b, 0x1f, 0xb5, 0xb1, 0x0b, 0x71, 0x80, 0x97, 0x0a, 0x0e, 0xf0,
0xd6, 0x3e, 0x03, 0xd3, 0x86, 0xd3, 0xee, 0x77, 0x77, 0x4d, 0xee, 0x7b, 0xf3, 0xf4, 0x6e, 0x48,
0x9f, 0x86, 0x70, 0x3a, 0xfb, 0x8a, 0x24, 0x7d, 0x35, 0x91, 0xdc, 0xdc, 0x5f, 0xfb, 0x7a, 0x62,
0x76, 0x93, 0x41, 0xf7, 0xc5, 0x93, 0xaa, 0xb8, 0x69, 0x61, 0x83, 0x78, 0x0f, 0x3f, 0xfa, 0x28,
0x3c, 0xdb, 0x32, 0xfd, 0xa3, 0x6e, 0x7d, 0xd1, 0x70, 0xda, 0x97, 0x5a, 0x4e, 0xcb, 0x09, 0x7f,
0x0c, 0x22, 0x57, 0xf4, 0x82, 0xfe, 0xc7, 0x7f, 0x10, 0xca, 0x05, 0xd2, 0xd9, 0xd8, 0x5f, 0x8f,
0xca, 0xbb, 0x30, 0xcd, 0x8d, 0x35, 0x7a, 0x22, 0xcd, 0xe6, 0x70, 0xf4, 0xc0, 0x53, 0x09, 0xe5,
0x9b, 0xef, 0xd2, 0x4e, 0xa7, 0x4e, 0x71, 0x28, 0xd1, 0xb1, 0x49, 0xbd, 0xac, 0xc2, 0x43, 0x3d,
0x7c, 0x6c, 0x6b, 0x62, 0x37, 0x86, 0xf1, 0x3b, 0x9c, 0x71, 0x3a, 0xc2, 0x58, 0xe3, 0xd0, 0xf2,
0x3a, 0x4c, 0x9c, 0x85, 0xeb, 0xef, 0x39, 0x57, 0x01, 0x47, 0x49, 0x36, 0x61, 0x92, 0x92, 0x18,
0x5d, 0xcf, 0x77, 0xda, 0xb4, 0xee, 0x3d, 0x98, 0xe6, 0x1f, 0xde, 0x65, 0x7b, 0xa5, 0x48, 0x60,
0xeb, 0x01, 0xaa, 0x5c, 0x06, 0x7a, 0x08, 0xdf, 0xc0, 0x86, 0x15, 0xc3, 0xf0, 0x0e, 0x77, 0x24,
0xb0, 0x2f, 0x7f, 0x12, 0x66, 0xc8, 0xff, 0xb4, 0x2c, 0x45, 0x3d, 0x89, 0x3f, 0x83, 0x51, 0xbe,
0xf7, 0x3a, 0xdb, 0x8e, 0xd3, 0x01, 0x41, 0xc4, 0xa7, 0xc8, 0x2a, 0xb6, 0xb0, 0xef, 0x63, 0xd7,
0xd3, 0x74, 0x6b, 0x98, 0x7b, 0x91, 0x37, 0x58, 0xe5, 0x8b, 0xef, 0xf5, 0xae, 0xe2, 0x26, 0x43,
0x56, 0x2c, 0xab, 0x7c, 0x08, 0x0f, 0x0f, 0xc9, 0x8a, 0x11, 0x38, 0xdf, 0xe0, 0x9c, 0x33, 0x03,
0x99, 0x41, 0x68, 0xf7, 0x41, 0xc8, 0x83, 0xb5, 0x1c, 0x81, 0xf3, 0x4b, 0x9c, 0x13, 0x71, 0xac,
0x58, 0x52, 0xc2, 0x78, 0x13, 0xa6, 0xee, 0x60, 0xb7, 0xee, 0x78, 0xfc, 0xe0, 0x60, 0x04, 0xba,
0x37, 0x39, 0xdd, 0x24, 0x07, 0xd2, 0x63, 0x04, 0xc2, 0x75, 0x0d, 0xb2, 0x4d, 0xdd, 0xc0, 0x23,
0x50, 0x7c, 0x99, 0x53, 0x8c, 0x13, 0x7b, 0x02, 0xad, 0x40, 0xa1, 0xe5, 0xf0, 0xce, 0x14, 0x0f,
0x7f, 0x8b, 0xc3, 0xf3, 0x02, 0xc3, 0x29, 0x3a, 0x4e, 0xa7, 0x6b, 0x91, 0xb6, 0x15, 0x4f, 0xf1,
0x7b, 0x82, 0x42, 0x60, 0x38, 0xc5, 0x19, 0xc2, 0xfa, 0x15, 0x41, 0xe1, 0x45, 0xe2, 0xf9, 0x02,
0xe4, 0x1d, 0xdb, 0x3a, 0x76, 0xec, 0x51, 0x9c, 0xf8, 0x7d, 0xce, 0x00, 0x1c, 0x42, 0x08, 0xae,
0x43, 0x6e, 0xd4, 0x85, 0xf8, 0x83, 0xf7, 0xc4, 0xf6, 0x10, 0x2b, 0xb0, 0x09, 0x93, 0xa2, 0x40,
0x99, 0x8e, 0x3d, 0x02, 0xc5, 0x1f, 0x72, 0x8a, 0x62, 0x04, 0xc6, 0x1f, 0xc3, 0xc7, 0x9e, 0xdf,
0xc2, 0xa3, 0x90, 0xbc, 0x2d, 0x1e, 0x83, 0x43, 0x78, 0x28, 0xeb, 0xd8, 0x36, 0x8e, 0x46, 0x63,
0xf8, 0x9a, 0x08, 0xa5, 0xc0, 0x10, 0x8a, 0x75, 0x98, 0x68, 0xeb, 0xae, 0x77, 0xa4, 0x5b, 0x23,
0x2d, 0xc7, 0x1f, 0x71, 0x8e, 0x42, 0x00, 0xe2, 0x11, 0xe9, 0xda, 0x67, 0xa1, 0xf9, 0xba, 0x88,
0x48, 0x04, 0xc6, 0xb7, 0x9e, 0xe7, 0xd3, 0xb3, 0x99, 0xb3, 0xb0, 0xfd, 0xb1, 0xd8, 0x7a, 0x0c,
0xbb, 0x13, 0x65, 0xbc, 0x0e, 0x39, 0xcf, 0x7c, 0x6d, 0x24, 0x9a, 0x3f, 0x11, 0x2b, 0x4d, 0x01,
0x04, 0xfc, 0x32, 0x9c, 0x1f, 0xda, 0x26, 0x46, 0x20, 0xfb, 0x53, 0x4e, 0x76, 0x6e, 0x48, 0xab,
0xe0, 0x25, 0xe1, 0xac, 0x94, 0x7f, 0x26, 0x4a, 0x02, 0xee, 0xe3, 0xda, 0x27, 0x93, 0xbd, 0xa7,
0x37, 0xcf, 0x16, 0xb5, 0x3f, 0x17, 0x51, 0x63, 0xd8, 0x9e, 0xa8, 0x1d, 0xc0, 0x39, 0xce, 0x78,
0xb6, 0x75, 0xfd, 0x86, 0x28, 0xac, 0x0c, 0x7d, 0xd8, 0xbb, 0xba, 0x9f, 0x81, 0xd9, 0x20, 0x9c,
0x62, 0x28, 0xf5, 0xb4, 0xb6, 0xde, 0x19, 0x81, 0xf9, 0x9b, 0x9c, 0x59, 0x54, 0xfc, 0x60, 0xaa,
0xf5, 0x76, 0xf4, 0x0e, 0x21, 0x7f, 0x09, 0x14, 0x41, 0xde, 0xb5, 0x5d, 0x6c, 0x38, 0x2d, 0xdb,
0x7c, 0x0d, 0x37, 0x46, 0xa0, 0xfe, 0x8b, 0xbe, 0xa5, 0x3a, 0x8c, 0xc0, 0x09, 0xf3, 0x16, 0xc8,
0xc1, 0xac, 0xa2, 0x99, 0xed, 0x8e, 0xe3, 0xfa, 0x31, 0x8c, 0x7f, 0x29, 0x56, 0x2a, 0xc0, 0x6d,
0x51, 0x58, 0xb9, 0x0a, 0x45, 0x7a, 0x39, 0x6a, 0x4a, 0xfe, 0x15, 0x27, 0x9a, 0x08, 0x51, 0xbc,
0x70, 0x18, 0x4e, 0xbb, 0xa3, 0xbb, 0xa3, 0xd4, 0xbf, 0xbf, 0x16, 0x85, 0x83, 0x43, 0x78, 0xe1,
0xf0, 0x8f, 0x3b, 0x98, 0x74, 0xfb, 0x11, 0x18, 0xbe, 0x25, 0x0a, 0x87, 0xc0, 0x70, 0x0a, 0x31,
0x30, 0x8c, 0x40, 0xf1, 0x37, 0x82, 0x42, 0x60, 0x08, 0xc5, 0x27, 0xc2, 0x46, 0xeb, 0xe2, 0x96,
0xe9, 0xf9, 0x2e, 0x1b, 0x85, 0x1f, 0x4c, 0xf5, 0xed, 0xf7, 0x7a, 0x87, 0x30, 0x35, 0x02, 0x2d,
0xdf, 0x84, 0xc9, 0xbe, 0x11, 0x03, 0xc5, 0xfd, 0xa2, 0xaf, 0x7c, 0xf6, 0x03, 0x5e, 0x8c, 0x7a,
0x27, 0x8c, 0xf2, 0x36, 0x59, 0xf7, 0xde, 0x39, 0x20, 0x9e, 0xec, 0xf5, 0x0f, 0x82, 0xa5, 0xef,
0x19, 0x03, 0xca, 0x2f, 0xc2, 0x44, 0xcf, 0x0c, 0x10, 0x4f, 0xf5, 0x8b, 0x9c, 0xaa, 0x10, 0x1d,
0x01, 0xca, 0xab, 0x90, 0x22, 0xfd, 0x3c, 0x1e, 0xfe, 0x4b, 0x1c, 0x4e, 0xcd, 0xcb, 0x1f, 0x83,
0xac, 0xe8, 0xe3, 0xf1, 0xd0, 0x5f, 0xe6, 0xd0, 0x00, 0x42, 0xe0, 0xa2, 0x87, 0xc7, 0xc3, 0x7f,
0x45, 0xc0, 0x05, 0x84, 0xc0, 0x47, 0x0f, 0xe1, 0xdf, 0x7d, 0x3e, 0xc5, 0xeb, 0xb0, 0x88, 0xdd,
0x75, 0x18, 0xe7, 0xcd, 0x3b, 0x1e, 0xfd, 0x39, 0x7e, 0x73, 0x81, 0x28, 0x5f, 0x81, 0xf4, 0x88,
0x01, 0xff, 0x55, 0x0e, 0x65, 0xf6, 0xe5, 0x75, 0xc8, 0x47, 0x1a, 0x76, 0x3c, 0xfc, 0xd7, 0x38,
0x3c, 0x8a, 0x22, 0xae, 0xf3, 0x86, 0x1d, 0x4f, 0xf0, 0xeb, 0xc2, 0x75, 0x8e, 0x20, 0x61, 0x13,
0xbd, 0x3a, 0x1e, 0xfd, 0x1b, 0x22, 0xea, 0x02, 0x52, 0x7e, 0x01, 0x72, 0x41, 0xfd, 0x8d, 0xc7,
0xff, 0x26, 0xc7, 0x87, 0x18, 0x12, 0x81, 0x48, 0xfd, 0x8f, 0xa7, 0xf8, 0x2d, 0x11, 0x81, 0x08,
0x8a, 0x6c, 0xa3, 0xfe, 0x9e, 0x1e, 0xcf, 0xf4, 0xdb, 0x62, 0x1b, 0xf5, 0xb5, 0x74, 0xb2, 0x9a,
0xb4, 0x0c, 0xc6, 0x53, 0xfc, 0x8e, 0x58, 0x4d, 0x6a, 0x4f, 0xdc, 0xe8, 0x6f, 0x92, 0xf1, 0x1c,
0xbf, 0x2b, 0xdc, 0xe8, 0xeb, 0x91, 0xe5, 0x7d, 0x40, 0x83, 0x0d, 0x32, 0x9e, 0xef, 0x0b, 0x9c,
0x6f, 0x6a, 0xa0, 0x3f, 0x96, 0x3f, 0x05, 0xe7, 0x86, 0x37, 0xc7, 0x78, 0xd6, 0x2f, 0x7e, 0xd0,
0xf7, 0x3a, 0x13, 0xed, 0x8d, 0xe5, 0x83, 0xb0, 0xca, 0x46, 0x1b, 0x63, 0x3c, 0xed, 0x1b, 0x1f,
0xf4, 0x16, 0xda, 0x68, 0x5f, 0x2c, 0x57, 0x00, 0xc2, 0x9e, 0x14, 0xcf, 0xf5, 0x26, 0xe7, 0x8a,
0x80, 0xc8, 0xd6, 0xe0, 0x2d, 0x29, 0x1e, 0xff, 0x65, 0xb1, 0x35, 0x38, 0x82, 0x6c, 0x0d, 0xd1,
0x8d, 0xe2, 0xd1, 0x6f, 0x89, 0xad, 0x21, 0x20, 0xe5, 0xeb, 0x90, 0xb5, 0xbb, 0x96, 0x45, 0x72,
0x0b, 0x3d, 0xf8, 0x23, 0x1b, 0xe5, 0x5f, 0xef, 0x73, 0xb0, 0x00, 0x94, 0x57, 0x21, 0x8d, 0xdb,
0x75, 0xdc, 0x88, 0x43, 0xfe, 0xdb, 0x7d, 0x51, 0x4f, 0x88, 0x75, 0xf9, 0x05, 0x00, 0xf6, 0x32,
0x4d, 0x7f, 0x63, 0x89, 0xc1, 0xfe, 0xfb, 0x7d, 0xfe, 0xfb, 0x7d, 0x08, 0x09, 0x09, 0xd8, 0xd7,
0x00, 0x0f, 0x26, 0x78, 0xaf, 0x97, 0x80, 0xbe, 0x80, 0x5f, 0x83, 0xf1, 0x5b, 0x9e, 0x63, 0xfb,
0x7a, 0x2b, 0x0e, 0xfd, 0x1f, 0x1c, 0x2d, 0xec, 0x49, 0xc0, 0xda, 0x8e, 0x8b, 0x7d, 0xbd, 0xe5,
0xc5, 0x61, 0xff, 0x93, 0x63, 0x03, 0x00, 0x01, 0x1b, 0xba, 0xe7, 0x8f, 0xf2, 0xdc, 0xff, 0x25,
0xc0, 0x02, 0x40, 0x9c, 0x26, 0xff, 0xdf, 0xc6, 0xc7, 0x71, 0xd8, 0xf7, 0x85, 0xd3, 0xdc, 0xbe,
0xfc, 0x31, 0xc8, 0x91, 0x7f, 0xd9, 0x37, 0x2d, 0x31, 0xe0, 0xff, 0xe6, 0xe0, 0x10, 0x41, 0xee,
0xec, 0xf9, 0x0d, 0xdf, 0x8c, 0x0f, 0xf6, 0xff, 0xf0, 0x95, 0x16, 0xf6, 0xe5, 0x0a, 0xe4, 0x3d,
0xbf, 0xd1, 0xe8, 0xf2, 0x89, 0x26, 0x06, 0xfe, 0xa3, 0xfb, 0xc1, 0x4b, 0x6e, 0x80, 0x59, 0xbb,
0x38, 0xfc, 0xbc, 0x0e, 0x36, 0x9d, 0x4d, 0x87, 0x9d, 0xd4, 0xc1, 0x1b, 0x69, 0x78, 0xd4, 0x70,
0xda, 0x75, 0xc7, 0xbb, 0x14, 0x29, 0x43, 0x97, 0x1c, 0x9b, 0xdb, 0xa3, 0xa4, 0x63, 0xe3, 0xd9,
0xb3, 0x1d, 0xcc, 0xcd, 0x9f, 0x87, 0x74, 0xad, 0x5b, 0xaf, 0x1f, 0x23, 0x19, 0x92, 0x5e, 0xb7,
0xce, 0xbf, 0xbb, 0x20, 0xff, 0xce, 0x7f, 0x3f, 0x09, 0xf9, 0x9a, 0xde, 0xee, 0x58, 0x78, 0xcf,
0xc6, 0x7b, 0x4d, 0xa4, 0x40, 0x86, 0x3e, 0xc7, 0xf3, 0xd4, 0x48, 0xba, 0x31, 0xa6, 0xf2, 0xeb,
0x40, 0xb3, 0x44, 0x0f, 0x2c, 0x13, 0x81, 0x66, 0x29, 0xd0, 0x2c, 0xb3, 0xf3, 0xca, 0x40, 0xb3,
0x1c, 0x68, 0x56, 0xe8, 0xa9, 0x65, 0x32, 0xd0, 0xac, 0x04, 0x9a, 0x55, 0x7a, 0x2a, 0x3f, 0x11,
0x68, 0x56, 0x03, 0xcd, 0x65, 0x7a, 0x0e, 0x9f, 0x0a, 0x34, 0x97, 0x03, 0xcd, 0x15, 0x7a, 0xfc,
0x3e, 0x15, 0x68, 0xae, 0x04, 0x9a, 0xab, 0xf4, 0xc8, 0x1d, 0x05, 0x9a, 0xab, 0x81, 0xe6, 0x1a,
0xfd, 0xb6, 0x62, 0x3c, 0xd0, 0x5c, 0x43, 0xb3, 0x30, 0xce, 0x9e, 0xec, 0x39, 0xfa, 0x93, 0xe6,
0xe4, 0x8d, 0x31, 0x55, 0x08, 0x42, 0xdd, 0xf3, 0xf4, 0xfb, 0x89, 0x4c, 0xa8, 0x7b, 0x3e, 0xd4,
0x2d, 0xd1, 0x0f, 0x89, 0xe5, 0x50, 0xb7, 0x14, 0xea, 0x96, 0x95, 0x09, 0xb2, 0xfc, 0xa1, 0x6e,
0x39, 0xd4, 0xad, 0x28, 0x45, 0x12, 0xff, 0x50, 0xb7, 0x12, 0xea, 0x56, 0x95, 0xc9, 0x39, 0x69,
0xa1, 0x10, 0xea, 0x56, 0xd1, 0xb3, 0x90, 0xf7, 0xba, 0x75, 0x8d, 0xff, 0x02, 0x4f, 0xbf, 0xd3,
0xc8, 0x2f, 0xc1, 0x22, 0xc9, 0x08, 0xba, 0xa8, 0x37, 0xc6, 0x54, 0xf0, 0xba, 0x75, 0x5e, 0x1f,
0xd7, 0x0a, 0x40, 0x8f, 0x13, 0x34, 0xfa, 0x81, 0xe2, 0xda, 0xc6, 0x3b, 0xf7, 0x4a, 0x63, 0xdf,
0xbd, 0x57, 0x1a, 0xfb, 0xa7, 0x7b, 0xa5, 0xb1, 0x1f, 0xdc, 0x2b, 0x49, 0xef, 0xdf, 0x2b, 0x49,
0x3f, 0xbe, 0x57, 0x92, 0xee, 0x9e, 0x94, 0xa4, 0xaf, 0x9d, 0x94, 0xa4, 0x6f, 0x9c, 0x94, 0xa4,
0x6f, 0x9f, 0x94, 0xa4, 0x77, 0x4e, 0x4a, 0x63, 0xdf, 0x3d, 0x29, 0x49, 0x3f, 0x38, 0x29, 0x49,
0x3f, 0x3c, 0x29, 0x8d, 0xbd, 0x7f, 0x52, 0x92, 0x7e, 0x7c, 0x52, 0x1a, 0xbb, 0xfb, 0xcf, 0xa5,
0xb1, 0x7a, 0x86, 0xa6, 0xd1, 0xf2, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xe7, 0x35, 0x67, 0x61,
0x17, 0x30, 0x00, 0x00,
}
r := bytes.NewReader(gzipped)
gzipr, err := compress_gzip.NewReader(r)
if err != nil {
panic(err)
}
ungzipped, err := io_ioutil.ReadAll(gzipr)
if err != nil {
panic(err)
}
if err := github_com_gogo_protobuf_proto.Unmarshal(ungzipped, d); err != nil {
panic(err)
}
return d
}
// VerboseEqual reports, as a descriptive error, the first way in which this
// and that differ; it returns nil when the two values are equal.
func (this *Subby) VerboseEqual(that interface{}) error {
	// Resolve the dynamic type of that into a *Subby (value or pointer form).
	var other *Subby
	switch v := that.(type) {
	case *Subby:
		other = v
	case Subby:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *Subby")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *Subby but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *Subby but is not nil && this == nil")
	}
	if this.Sub != other.Sub {
		return fmt.Errorf("Sub this(%v) Not Equal that(%v)", this.Sub, other.Sub)
	}
	return nil
}
// Equal reports whether this and that hold the same Sub value.
func (this *Subby) Equal(that interface{}) bool {
	var other *Subby
	switch v := that.(type) {
	case *Subby:
		other = v
	case Subby:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Sub == other.Sub
}
// VerboseEqual reports, as a descriptive error, the first way in which this
// and that differ; it returns nil when the two values are equal.
func (this *SampleOneOf) VerboseEqual(that interface{}) error {
	var other *SampleOneOf
	switch v := that.(type) {
	case *SampleOneOf:
		other = v
	case SampleOneOf:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf but is not nil && this == nil")
	}
	// Compare the oneof wrapper; nil-ness must match before delegating.
	if other.TestOneof == nil {
		if this.TestOneof != nil {
			return fmt.Errorf("this.TestOneof != nil && that1.TestOneof == nil")
		}
		return nil
	}
	if this.TestOneof == nil {
		return fmt.Errorf("this.TestOneof == nil && that1.TestOneof != nil")
	}
	return this.TestOneof.VerboseEqual(other.TestOneof)
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field1) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field1
	switch v := that.(type) {
	case *SampleOneOf_Field1:
		other = v
	case SampleOneOf_Field1:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field1")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field1 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field1 but is not nil && this == nil")
	}
	if this.Field1 != other.Field1 {
		return fmt.Errorf("Field1 this(%v) Not Equal that(%v)", this.Field1, other.Field1)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field2) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field2
	switch v := that.(type) {
	case *SampleOneOf_Field2:
		other = v
	case SampleOneOf_Field2:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field2")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field2 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field2 but is not nil && this == nil")
	}
	if this.Field2 != other.Field2 {
		return fmt.Errorf("Field2 this(%v) Not Equal that(%v)", this.Field2, other.Field2)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field3) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field3
	switch v := that.(type) {
	case *SampleOneOf_Field3:
		other = v
	case SampleOneOf_Field3:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field3")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field3 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field3 but is not nil && this == nil")
	}
	if this.Field3 != other.Field3 {
		return fmt.Errorf("Field3 this(%v) Not Equal that(%v)", this.Field3, other.Field3)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field4) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field4
	switch v := that.(type) {
	case *SampleOneOf_Field4:
		other = v
	case SampleOneOf_Field4:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field4")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field4 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field4 but is not nil && this == nil")
	}
	if this.Field4 != other.Field4 {
		return fmt.Errorf("Field4 this(%v) Not Equal that(%v)", this.Field4, other.Field4)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field5) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field5
	switch v := that.(type) {
	case *SampleOneOf_Field5:
		other = v
	case SampleOneOf_Field5:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field5")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field5 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field5 but is not nil && this == nil")
	}
	if this.Field5 != other.Field5 {
		return fmt.Errorf("Field5 this(%v) Not Equal that(%v)", this.Field5, other.Field5)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field6) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field6
	switch v := that.(type) {
	case *SampleOneOf_Field6:
		other = v
	case SampleOneOf_Field6:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field6")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field6 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field6 but is not nil && this == nil")
	}
	if this.Field6 != other.Field6 {
		return fmt.Errorf("Field6 this(%v) Not Equal that(%v)", this.Field6, other.Field6)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field7) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field7
	switch v := that.(type) {
	case *SampleOneOf_Field7:
		other = v
	case SampleOneOf_Field7:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field7")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field7 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field7 but is not nil && this == nil")
	}
	if this.Field7 != other.Field7 {
		return fmt.Errorf("Field7 this(%v) Not Equal that(%v)", this.Field7, other.Field7)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field8) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field8
	switch v := that.(type) {
	case *SampleOneOf_Field8:
		other = v
	case SampleOneOf_Field8:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field8")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field8 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field8 but is not nil && this == nil")
	}
	if this.Field8 != other.Field8 {
		return fmt.Errorf("Field8 this(%v) Not Equal that(%v)", this.Field8, other.Field8)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field9) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field9
	switch v := that.(type) {
	case *SampleOneOf_Field9:
		other = v
	case SampleOneOf_Field9:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field9")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field9 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field9 but is not nil && this == nil")
	}
	if this.Field9 != other.Field9 {
		return fmt.Errorf("Field9 this(%v) Not Equal that(%v)", this.Field9, other.Field9)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field10) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field10
	switch v := that.(type) {
	case *SampleOneOf_Field10:
		other = v
	case SampleOneOf_Field10:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field10")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field10 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field10 but is not nil && this == nil")
	}
	if this.Field10 != other.Field10 {
		return fmt.Errorf("Field10 this(%v) Not Equal that(%v)", this.Field10, other.Field10)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field11) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field11
	switch v := that.(type) {
	case *SampleOneOf_Field11:
		other = v
	case SampleOneOf_Field11:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field11")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field11 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field11 but is not nil && this == nil")
	}
	if this.Field11 != other.Field11 {
		return fmt.Errorf("Field11 this(%v) Not Equal that(%v)", this.Field11, other.Field11)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field12) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field12
	switch v := that.(type) {
	case *SampleOneOf_Field12:
		other = v
	case SampleOneOf_Field12:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field12")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field12 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field12 but is not nil && this == nil")
	}
	if this.Field12 != other.Field12 {
		return fmt.Errorf("Field12 this(%v) Not Equal that(%v)", this.Field12, other.Field12)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field13) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field13
	switch v := that.(type) {
	case *SampleOneOf_Field13:
		other = v
	case SampleOneOf_Field13:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field13")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field13 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field13 but is not nil && this == nil")
	}
	if this.Field13 != other.Field13 {
		return fmt.Errorf("Field13 this(%v) Not Equal that(%v)", this.Field13, other.Field13)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
func (this *SampleOneOf_Field14) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field14
	switch v := that.(type) {
	case *SampleOneOf_Field14:
		other = v
	case SampleOneOf_Field14:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field14")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field14 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field14 but is not nil && this == nil")
	}
	if this.Field14 != other.Field14 {
		return fmt.Errorf("Field14 this(%v) Not Equal that(%v)", this.Field14, other.Field14)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
// Field15 is a byte slice, so bytes.Equal is used instead of ==.
func (this *SampleOneOf_Field15) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_Field15
	switch v := that.(type) {
	case *SampleOneOf_Field15:
		other = v
	case SampleOneOf_Field15:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_Field15")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_Field15 but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_Field15 but is not nil && this == nil")
	}
	if !bytes.Equal(this.Field15, other.Field15) {
		return fmt.Errorf("Field15 this(%v) Not Equal that(%v)", this.Field15, other.Field15)
	}
	return nil
}
// VerboseEqual returns a descriptive error if this and that differ, else nil.
// Comparison of the nested message is delegated to SubMessage.Equal.
func (this *SampleOneOf_SubMessage) VerboseEqual(that interface{}) error {
	var other *SampleOneOf_SubMessage
	switch v := that.(type) {
	case *SampleOneOf_SubMessage:
		other = v
	case SampleOneOf_SubMessage:
		other = &v
	case nil:
		if this == nil {
			return nil
		}
		return fmt.Errorf("that == nil && this != nil")
	default:
		return fmt.Errorf("that is not of type *SampleOneOf_SubMessage")
	}
	if other == nil {
		if this == nil {
			return nil
		}
		return fmt.Errorf("that is type *SampleOneOf_SubMessage but is nil && this != nil")
	}
	if this == nil {
		return fmt.Errorf("that is type *SampleOneOf_SubMessage but is not nil && this == nil")
	}
	if !this.SubMessage.Equal(other.SubMessage) {
		return fmt.Errorf("SubMessage this(%v) Not Equal that(%v)", this.SubMessage, other.SubMessage)
	}
	return nil
}
// Equal reports whether this and that carry an equal oneof value.
func (this *SampleOneOf) Equal(that interface{}) bool {
	var other *SampleOneOf
	switch v := that.(type) {
	case *SampleOneOf:
		other = v
	case SampleOneOf:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	// nil-ness of the oneof wrapper must match before delegating.
	if other.TestOneof == nil {
		return this.TestOneof == nil
	}
	if this.TestOneof == nil {
		return false
	}
	return this.TestOneof.Equal(other.TestOneof)
}
// Equal reports whether this and that hold the same Field1 value.
func (this *SampleOneOf_Field1) Equal(that interface{}) bool {
	var other *SampleOneOf_Field1
	switch v := that.(type) {
	case *SampleOneOf_Field1:
		other = v
	case SampleOneOf_Field1:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field1 == other.Field1
}
// Equal reports whether this and that hold the same Field2 value.
func (this *SampleOneOf_Field2) Equal(that interface{}) bool {
	var other *SampleOneOf_Field2
	switch v := that.(type) {
	case *SampleOneOf_Field2:
		other = v
	case SampleOneOf_Field2:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field2 == other.Field2
}
// Equal reports whether this and that hold the same Field3 value.
func (this *SampleOneOf_Field3) Equal(that interface{}) bool {
	var other *SampleOneOf_Field3
	switch v := that.(type) {
	case *SampleOneOf_Field3:
		other = v
	case SampleOneOf_Field3:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field3 == other.Field3
}
// Equal reports whether this and that hold the same Field4 value.
func (this *SampleOneOf_Field4) Equal(that interface{}) bool {
	var other *SampleOneOf_Field4
	switch v := that.(type) {
	case *SampleOneOf_Field4:
		other = v
	case SampleOneOf_Field4:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field4 == other.Field4
}
// Equal reports whether this and that hold the same Field5 value.
func (this *SampleOneOf_Field5) Equal(that interface{}) bool {
	var other *SampleOneOf_Field5
	switch v := that.(type) {
	case *SampleOneOf_Field5:
		other = v
	case SampleOneOf_Field5:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field5 == other.Field5
}
// Equal reports whether this and that hold the same Field6 value.
func (this *SampleOneOf_Field6) Equal(that interface{}) bool {
	var other *SampleOneOf_Field6
	switch v := that.(type) {
	case *SampleOneOf_Field6:
		other = v
	case SampleOneOf_Field6:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field6 == other.Field6
}
// Equal reports whether this and that hold the same Field7 value.
func (this *SampleOneOf_Field7) Equal(that interface{}) bool {
	var other *SampleOneOf_Field7
	switch v := that.(type) {
	case *SampleOneOf_Field7:
		other = v
	case SampleOneOf_Field7:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field7 == other.Field7
}
// Equal reports whether this and that hold the same Field8 value.
func (this *SampleOneOf_Field8) Equal(that interface{}) bool {
	var other *SampleOneOf_Field8
	switch v := that.(type) {
	case *SampleOneOf_Field8:
		other = v
	case SampleOneOf_Field8:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field8 == other.Field8
}
// Equal reports whether this and that hold the same Field9 value.
func (this *SampleOneOf_Field9) Equal(that interface{}) bool {
	var other *SampleOneOf_Field9
	switch v := that.(type) {
	case *SampleOneOf_Field9:
		other = v
	case SampleOneOf_Field9:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field9 == other.Field9
}
// Equal reports whether this and that hold the same Field10 value.
func (this *SampleOneOf_Field10) Equal(that interface{}) bool {
	var other *SampleOneOf_Field10
	switch v := that.(type) {
	case *SampleOneOf_Field10:
		other = v
	case SampleOneOf_Field10:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field10 == other.Field10
}
// Equal reports whether this and that hold the same Field11 value.
func (this *SampleOneOf_Field11) Equal(that interface{}) bool {
	var other *SampleOneOf_Field11
	switch v := that.(type) {
	case *SampleOneOf_Field11:
		other = v
	case SampleOneOf_Field11:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field11 == other.Field11
}
// Equal reports whether this and that hold the same Field12 value.
func (this *SampleOneOf_Field12) Equal(that interface{}) bool {
	var other *SampleOneOf_Field12
	switch v := that.(type) {
	case *SampleOneOf_Field12:
		other = v
	case SampleOneOf_Field12:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field12 == other.Field12
}
// Equal reports whether this and that hold the same Field13 value.
func (this *SampleOneOf_Field13) Equal(that interface{}) bool {
	var other *SampleOneOf_Field13
	switch v := that.(type) {
	case *SampleOneOf_Field13:
		other = v
	case SampleOneOf_Field13:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field13 == other.Field13
}
// Equal reports whether this and that hold the same Field14 value.
func (this *SampleOneOf_Field14) Equal(that interface{}) bool {
	var other *SampleOneOf_Field14
	switch v := that.(type) {
	case *SampleOneOf_Field14:
		other = v
	case SampleOneOf_Field14:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.Field14 == other.Field14
}
// Equal reports whether this and that hold the same Field15 bytes.
func (this *SampleOneOf_Field15) Equal(that interface{}) bool {
	var other *SampleOneOf_Field15
	switch v := that.(type) {
	case *SampleOneOf_Field15:
		other = v
	case SampleOneOf_Field15:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return bytes.Equal(this.Field15, other.Field15)
}
// Equal reports whether this and that wrap equal sub-messages.
func (this *SampleOneOf_SubMessage) Equal(that interface{}) bool {
	var other *SampleOneOf_SubMessage
	switch v := that.(type) {
	case *SampleOneOf_SubMessage:
		other = v
	case SampleOneOf_SubMessage:
		other = &v
	case nil:
		return this == nil
	default:
		return false
	}
	if other == nil {
		return this == nil
	}
	if this == nil {
		return false
	}
	return this.SubMessage.Equal(other.SubMessage)
}
// GoString returns Go-syntax source text that reconstructs this value.
func (this *Subby) GoString() string {
	if this == nil {
		return "nil"
	}
	// Single format call produces the same text the generated
	// append/Join sequence built piecewise.
	return fmt.Sprintf("&one.Subby{Sub: %#v,\n}", this.Sub)
}
// GoString returns Go-syntax source text that reconstructs this value.
func (this *SampleOneOf) GoString() string {
	if this == nil {
		return "nil"
	}
	out := "&one.SampleOneOf{"
	// The oneof field is emitted only when set.
	if this.TestOneof != nil {
		out += "TestOneof: " + fmt.Sprintf("%#v", this.TestOneof) + ",\n"
	}
	return out + "}"
}
// GoString returns Go-syntax source text that reconstructs this value.
func (this *SampleOneOf_Field1) GoString() string {
	if this == nil {
		return "nil"
	}
	return fmt.Sprintf("&one.SampleOneOf_Field1{Field1:%#v}", this.Field1)
}
// GoString returns Go-syntax source text that reconstructs this value.
func (this *SampleOneOf_Field2) GoString() string {
	if this == nil {
		return "nil"
	}
	return fmt.Sprintf("&one.SampleOneOf_Field2{Field2:%#v}", this.Field2)
}
// GoString returns Go-syntax source text that reconstructs this value.
func (this *SampleOneOf_Field3) GoString() string {
	if this == nil {
		return "nil"
	}
	return fmt.Sprintf("&one.SampleOneOf_Field3{Field3:%#v}", this.Field3)
}
// GoString returns Go-syntax source text that reconstructs this value.
func (this *SampleOneOf_Field4) GoString() string {
	if this == nil {
		return "nil"
	}
	return fmt.Sprintf("&one.SampleOneOf_Field4{Field4:%#v}", this.Field4)
}
// GoString returns Go-syntax source text that reconstructs this value.
func (this *SampleOneOf_Field5) GoString() string {
	if this == nil {
		return "nil"
	}
	return fmt.Sprintf("&one.SampleOneOf_Field5{Field5:%#v}", this.Field5)
}
func (this *SampleOneOf_Field6) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field6{` +
`Field6:` + fmt.Sprintf("%#v", this.Field6) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field7) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field7{` +
`Field7:` + fmt.Sprintf("%#v", this.Field7) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field8) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field8{` +
`Field8:` + fmt.Sprintf("%#v", this.Field8) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field9) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field9{` +
`Field9:` + fmt.Sprintf("%#v", this.Field9) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field10) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field10{` +
`Field10:` + fmt.Sprintf("%#v", this.Field10) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field11) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field11{` +
`Field11:` + fmt.Sprintf("%#v", this.Field11) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field12) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field12{` +
`Field12:` + fmt.Sprintf("%#v", this.Field12) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field13) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field13{` +
`Field13:` + fmt.Sprintf("%#v", this.Field13) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field14) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field14{` +
`Field14:` + fmt.Sprintf("%#v", this.Field14) + `}`}, ", ")
return s
}
func (this *SampleOneOf_Field15) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_Field15{` +
`Field15:` + fmt.Sprintf("%#v", this.Field15) + `}`}, ", ")
return s
}
func (this *SampleOneOf_SubMessage) GoString() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&one.SampleOneOf_SubMessage{` +
`SubMessage:` + fmt.Sprintf("%#v", this.SubMessage) + `}`}, ", ")
return s
}
// valueToGoStringOne formats the pointee of v as a Go-source expression
// that reconstructs the pointer value.
// NOTE(review): reflect.Value.IsNil panics for non-nilable kinds; the
// generated callers are assumed to pass only pointers — confirm before
// reusing this helper elsewhere.
func valueToGoStringOne(v interface{}, typ string) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	pv := reflect.Indirect(rv).Interface()
	return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv)
}
// NewPopulatedSubby returns a Subby whose Sub field is a random
// alphanumeric string drawn from r. The easy flag is part of the generated
// populate API; this message carries no unrecognized-field blob, so the
// trailing branch is intentionally empty.
func NewPopulatedSubby(r randyOne, easy bool) *Subby {
	this := &Subby{}
	this.Sub = string(randStringOne(r))
	if !easy && r.Intn(10) != 0 {
	}
	return this
}
// NewPopulatedSampleOneOf returns a SampleOneOf with exactly one of its
// sixteen test_oneof variants populated, chosen uniformly at random from r.
// The easy flag is forwarded to the per-variant populate helpers.
func NewPopulatedSampleOneOf(r randyOne, easy bool) *SampleOneOf {
	this := &SampleOneOf{}
	// Pick one of the oneof field numbers (1..16) uniformly.
	oneofNumber_TestOneof := []int32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}[r.Intn(16)]
	switch oneofNumber_TestOneof {
	case 1:
		this.TestOneof = NewPopulatedSampleOneOf_Field1(r, easy)
	case 2:
		this.TestOneof = NewPopulatedSampleOneOf_Field2(r, easy)
	case 3:
		this.TestOneof = NewPopulatedSampleOneOf_Field3(r, easy)
	case 4:
		this.TestOneof = NewPopulatedSampleOneOf_Field4(r, easy)
	case 5:
		this.TestOneof = NewPopulatedSampleOneOf_Field5(r, easy)
	case 6:
		this.TestOneof = NewPopulatedSampleOneOf_Field6(r, easy)
	case 7:
		this.TestOneof = NewPopulatedSampleOneOf_Field7(r, easy)
	case 8:
		this.TestOneof = NewPopulatedSampleOneOf_Field8(r, easy)
	case 9:
		this.TestOneof = NewPopulatedSampleOneOf_Field9(r, easy)
	case 10:
		this.TestOneof = NewPopulatedSampleOneOf_Field10(r, easy)
	case 11:
		this.TestOneof = NewPopulatedSampleOneOf_Field11(r, easy)
	case 12:
		this.TestOneof = NewPopulatedSampleOneOf_Field12(r, easy)
	case 13:
		this.TestOneof = NewPopulatedSampleOneOf_Field13(r, easy)
	case 14:
		this.TestOneof = NewPopulatedSampleOneOf_Field14(r, easy)
	case 15:
		this.TestOneof = NewPopulatedSampleOneOf_Field15(r, easy)
	case 16:
		this.TestOneof = NewPopulatedSampleOneOf_SubMessage(r, easy)
	}
	// Placeholder for populating XXX_unrecognized; empty for this message.
	if !easy && r.Intn(10) != 0 {
	}
	return this
}
// NewPopulatedSampleOneOf_Field1 returns a Field1 oneof wrapper holding a
// random double in [0,1), negated with probability 1/2.
func NewPopulatedSampleOneOf_Field1(r randyOne, easy bool) *SampleOneOf_Field1 {
	this := &SampleOneOf_Field1{}
	this.Field1 = float64(r.Float64())
	if r.Intn(2) == 0 {
		this.Field1 *= -1
	}
	return this
}
func NewPopulatedSampleOneOf_Field2(r randyOne, easy bool) *SampleOneOf_Field2 {
this := &SampleOneOf_Field2{}
this.Field2 = float32(r.Float32())
if r.Intn(2) == 0 {
this.Field2 *= -1
}
return this
}
func NewPopulatedSampleOneOf_Field3(r randyOne, easy bool) *SampleOneOf_Field3 {
this := &SampleOneOf_Field3{}
this.Field3 = int32(r.Int31())
if r.Intn(2) == 0 {
this.Field3 *= -1
}
return this
}
func NewPopulatedSampleOneOf_Field4(r randyOne, easy bool) *SampleOneOf_Field4 {
this := &SampleOneOf_Field4{}
this.Field4 = int64(r.Int63())
if r.Intn(2) == 0 {
this.Field4 *= -1
}
return this
}
func NewPopulatedSampleOneOf_Field5(r randyOne, easy bool) *SampleOneOf_Field5 {
this := &SampleOneOf_Field5{}
this.Field5 = uint32(r.Uint32())
return this
}
func NewPopulatedSampleOneOf_Field6(r randyOne, easy bool) *SampleOneOf_Field6 {
this := &SampleOneOf_Field6{}
this.Field6 = uint64(uint64(r.Uint32()))
return this
}
func NewPopulatedSampleOneOf_Field7(r randyOne, easy bool) *SampleOneOf_Field7 {
this := &SampleOneOf_Field7{}
this.Field7 = int32(r.Int31())
if r.Intn(2) == 0 {
this.Field7 *= -1
}
return this
}
func NewPopulatedSampleOneOf_Field8(r randyOne, easy bool) *SampleOneOf_Field8 {
this := &SampleOneOf_Field8{}
this.Field8 = int64(r.Int63())
if r.Intn(2) == 0 {
this.Field8 *= -1
}
return this
}
func NewPopulatedSampleOneOf_Field9(r randyOne, easy bool) *SampleOneOf_Field9 {
this := &SampleOneOf_Field9{}
this.Field9 = uint32(r.Uint32())
return this
}
func NewPopulatedSampleOneOf_Field10(r randyOne, easy bool) *SampleOneOf_Field10 {
this := &SampleOneOf_Field10{}
this.Field10 = int32(r.Int31())
if r.Intn(2) == 0 {
this.Field10 *= -1
}
return this
}
func NewPopulatedSampleOneOf_Field11(r randyOne, easy bool) *SampleOneOf_Field11 {
this := &SampleOneOf_Field11{}
this.Field11 = uint64(uint64(r.Uint32()))
return this
}
func NewPopulatedSampleOneOf_Field12(r randyOne, easy bool) *SampleOneOf_Field12 {
this := &SampleOneOf_Field12{}
this.Field12 = int64(r.Int63())
if r.Intn(2) == 0 {
this.Field12 *= -1
}
return this
}
func NewPopulatedSampleOneOf_Field13(r randyOne, easy bool) *SampleOneOf_Field13 {
this := &SampleOneOf_Field13{}
this.Field13 = bool(bool(r.Intn(2) == 0))
return this
}
func NewPopulatedSampleOneOf_Field14(r randyOne, easy bool) *SampleOneOf_Field14 {
this := &SampleOneOf_Field14{}
this.Field14 = string(randStringOne(r))
return this
}
func NewPopulatedSampleOneOf_Field15(r randyOne, easy bool) *SampleOneOf_Field15 {
this := &SampleOneOf_Field15{}
v1 := r.Intn(100)
this.Field15 = make([]byte, v1)
for i := 0; i < v1; i++ {
this.Field15[i] = byte(r.Intn(256))
}
return this
}
func NewPopulatedSampleOneOf_SubMessage(r randyOne, easy bool) *SampleOneOf_SubMessage {
this := &SampleOneOf_SubMessage{}
this.SubMessage = NewPopulatedSubby(r, easy)
return this
}
// randyOne is the source of randomness consumed by the NewPopulated* and
// randField helpers in this file; *math/rand.Rand satisfies it.
type randyOne interface {
	Float32() float32
	Float64() float64
	Int63() int64
	Int31() int32
	Uint32() uint32
	Intn(n int) int
}
// randUTF8RuneOne returns a uniformly random ASCII alphanumeric rune
// (one of 0-9, A-Z, a-z) — plain ASCII despite the UTF-8 name of this
// generated helper.
func randUTF8RuneOne(r randyOne) rune {
	ru := r.Intn(62)
	switch {
	case ru < 10:
		return rune('0' + ru) // digits
	case ru < 36:
		return rune('A' + ru - 10) // uppercase letters
	default:
		return rune('a' + ru - 36) // lowercase letters
	}
}
// randStringOne returns a random alphanumeric string of length 0..99
// drawn from r.
func randStringOne(r randyOne) string {
	n := r.Intn(100)
	out := make([]rune, 0, n)
	for i := 0; i < n; i++ {
		out = append(out, randUTF8RuneOne(r))
	}
	return string(out)
}
// randUnrecognizedOne generates up to 4 random, well-formed protobuf
// fields whose field numbers lie above maxFieldNumber, for exercising
// unknown-field handling. Wire type 3 (start-group) is remapped to 5
// (fixed32) so every emitted field is self-delimiting.
func randUnrecognizedOne(r randyOne, maxFieldNumber int) (dAtA []byte) {
	l := r.Intn(5)
	for i := 0; i < l; i++ {
		wire := r.Intn(4)
		if wire == 3 {
			wire = 5
		}
		fieldNumber := maxFieldNumber + r.Intn(100)
		dAtA = randFieldOne(dAtA, r, fieldNumber, wire)
	}
	return dAtA
}
// randFieldOne appends one random protobuf field with the given field
// number and wire type to dAtA and returns the extended buffer.
func randFieldOne(dAtA []byte, r randyOne, fieldNumber int, wire int) []byte {
	// Tag = field number shifted left 3 bits, OR'd with the wire type.
	key := uint32(fieldNumber)<<3 | uint32(wire)
	switch wire {
	case 0:
		// Varint: random (possibly negated) int64 payload.
		dAtA = encodeVarintPopulateOne(dAtA, uint64(key))
		v3 := r.Int63()
		if r.Intn(2) == 0 {
			v3 *= -1
		}
		dAtA = encodeVarintPopulateOne(dAtA, uint64(v3))
	case 1:
		// Fixed64: 8 random bytes.
		dAtA = encodeVarintPopulateOne(dAtA, uint64(key))
		dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
	case 2:
		// Length-delimited: varint length followed by that many random bytes.
		dAtA = encodeVarintPopulateOne(dAtA, uint64(key))
		ll := r.Intn(100)
		dAtA = encodeVarintPopulateOne(dAtA, uint64(ll))
		for j := 0; j < ll; j++ {
			dAtA = append(dAtA, byte(r.Intn(256)))
		}
	default:
		// Any other wire value is emitted as fixed32: 4 random bytes.
		dAtA = encodeVarintPopulateOne(dAtA, uint64(key))
		dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
	}
	return dAtA
}
// encodeVarintPopulateOne appends v to dAtA as a protobuf base-128 varint
// (little-endian groups of 7 bits, continuation bit 0x80 set on all but
// the final byte) and returns the extended slice.
func encodeVarintPopulateOne(dAtA []byte, v uint64) []byte {
	for ; v >= 0x80; v >>= 7 {
		// byte(v)|0x80 keeps the low 7 bits and forces the continuation bit.
		dAtA = append(dAtA, byte(v)|0x80)
	}
	return append(dAtA, byte(v))
}
// Size returns the encoded protobuf size of m in bytes. The Sub string
// (field 1, wire type 2) is counted only when non-empty: one tag byte,
// a varint length prefix, and the payload.
func (m *Subby) Size() (n int) {
	var l int
	_ = l
	l = len(m.Sub)
	if l > 0 {
		n += 1 + l + sovOne(uint64(l))
	}
	return n
}
// Size returns the encoded protobuf size of m: just the size of whichever
// oneof wrapper is set (each wrapper type implements its own Size).
func (m *SampleOneOf) Size() (n int) {
	var l int
	_ = l
	if m.TestOneof != nil {
		n += m.TestOneof.Size()
	}
	return n
}
// Size returns the encoded size of the Field1 variant: one tag byte plus
// an 8-byte fixed64 double payload.
func (m *SampleOneOf_Field1) Size() (n int) {
	var l int
	_ = l
	n += 9
	return n
}
func (m *SampleOneOf_Field2) Size() (n int) {
var l int
_ = l
n += 5
return n
}
func (m *SampleOneOf_Field3) Size() (n int) {
var l int
_ = l
n += 1 + sovOne(uint64(m.Field3))
return n
}
func (m *SampleOneOf_Field4) Size() (n int) {
var l int
_ = l
n += 1 + sovOne(uint64(m.Field4))
return n
}
func (m *SampleOneOf_Field5) Size() (n int) {
var l int
_ = l
n += 1 + sovOne(uint64(m.Field5))
return n
}
func (m *SampleOneOf_Field6) Size() (n int) {
var l int
_ = l
n += 1 + sovOne(uint64(m.Field6))
return n
}
func (m *SampleOneOf_Field7) Size() (n int) {
var l int
_ = l
n += 1 + sozOne(uint64(m.Field7))
return n
}
func (m *SampleOneOf_Field8) Size() (n int) {
var l int
_ = l
n += 1 + sozOne(uint64(m.Field8))
return n
}
func (m *SampleOneOf_Field9) Size() (n int) {
var l int
_ = l
n += 5
return n
}
func (m *SampleOneOf_Field10) Size() (n int) {
var l int
_ = l
n += 5
return n
}
func (m *SampleOneOf_Field11) Size() (n int) {
var l int
_ = l
n += 9
return n
}
func (m *SampleOneOf_Field12) Size() (n int) {
var l int
_ = l
n += 9
return n
}
func (m *SampleOneOf_Field13) Size() (n int) {
var l int
_ = l
n += 2
return n
}
func (m *SampleOneOf_Field14) Size() (n int) {
var l int
_ = l
l = len(m.Field14)
n += 1 + l + sovOne(uint64(l))
return n
}
func (m *SampleOneOf_Field15) Size() (n int) {
var l int
_ = l
if m.Field15 != nil {
l = len(m.Field15)
n += 1 + l + sovOne(uint64(l))
}
return n
}
func (m *SampleOneOf_SubMessage) Size() (n int) {
var l int
_ = l
if m.SubMessage != nil {
l = m.SubMessage.Size()
n += 2 + l + sovOne(uint64(l))
}
return n
}
// sovOne returns the number of bytes needed to encode x as a protobuf
// base-128 varint: one byte per started group of 7 bits, minimum one.
func sovOne(x uint64) (n int) {
	n = 1
	for x >= 0x80 {
		x >>= 7
		n++
	}
	return n
}
// sozOne returns the varint-encoded size of x after zig-zag encoding,
// the wire mapping used for sint32/sint64 fields.
func sozOne(x uint64) (n int) {
	zz := (x << 1) ^ uint64(int64(x)>>63)
	return sovOne(zz)
}
// String implements fmt.Stringer with the compact generated
// representation; a nil receiver prints as "nil".
func (this *Subby) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Subby{`,
		`Sub:` + fmt.Sprintf("%v", this.Sub) + `,`,
		`}`,
	}, "")
	return s
}
func (this *SampleOneOf) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf{`,
`TestOneof:` + fmt.Sprintf("%v", this.TestOneof) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field1) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field1{`,
`Field1:` + fmt.Sprintf("%v", this.Field1) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field2) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field2{`,
`Field2:` + fmt.Sprintf("%v", this.Field2) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field3) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field3{`,
`Field3:` + fmt.Sprintf("%v", this.Field3) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field4) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field4{`,
`Field4:` + fmt.Sprintf("%v", this.Field4) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field5) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field5{`,
`Field5:` + fmt.Sprintf("%v", this.Field5) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field6) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field6{`,
`Field6:` + fmt.Sprintf("%v", this.Field6) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field7) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field7{`,
`Field7:` + fmt.Sprintf("%v", this.Field7) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field8) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field8{`,
`Field8:` + fmt.Sprintf("%v", this.Field8) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field9) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field9{`,
`Field9:` + fmt.Sprintf("%v", this.Field9) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field10) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field10{`,
`Field10:` + fmt.Sprintf("%v", this.Field10) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field11) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field11{`,
`Field11:` + fmt.Sprintf("%v", this.Field11) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field12) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field12{`,
`Field12:` + fmt.Sprintf("%v", this.Field12) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field13) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field13{`,
`Field13:` + fmt.Sprintf("%v", this.Field13) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field14) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field14{`,
`Field14:` + fmt.Sprintf("%v", this.Field14) + `,`,
`}`,
}, "")
return s
}
func (this *SampleOneOf_Field15) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SampleOneOf_Field15{`,
`Field15:` + fmt.Sprintf("%v", this.Field15) + `,`,
`}`,
}, "")
return s
}
// String implements fmt.Stringer for the SubMessage oneof wrapper.
// The original generated code ran strings.Replace(s, "Subby", "Subby", 1)
// over the formatted value — a no-op — so plain formatting yields the
// identical string.
func (this *SampleOneOf_SubMessage) String() string {
	if this == nil {
		return "nil"
	}
	return "&SampleOneOf_SubMessage{" +
		"SubMessage:" + fmt.Sprintf("%v", this.SubMessage) + "," +
		"}"
}
// valueToStringOne formats the pointee of v for String output, guarding
// against nil pointers.
// NOTE(review): reflect.Value.IsNil panics for non-nilable kinds; the
// generated callers are assumed to pass only pointers.
func valueToStringOne(v interface{}) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	pv := reflect.Indirect(rv).Interface()
	return fmt.Sprintf("*%v", pv)
}
func (m *Subby) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Subby: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Subby: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Sub", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthOne
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Sub = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipOne(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthOne
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}<|fim▁hole|> return io.ErrUnexpectedEOF
}
return nil
}
func (m *SampleOneOf) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: SampleOneOf: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: SampleOneOf: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 1 {
return fmt.Errorf("proto: wrong wireType = %d for field Field1", wireType)
}
var v uint64
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
iNdEx += 8
v = uint64(dAtA[iNdEx-8])
v |= uint64(dAtA[iNdEx-7]) << 8
v |= uint64(dAtA[iNdEx-6]) << 16
v |= uint64(dAtA[iNdEx-5]) << 24
v |= uint64(dAtA[iNdEx-4]) << 32
v |= uint64(dAtA[iNdEx-3]) << 40
v |= uint64(dAtA[iNdEx-2]) << 48
v |= uint64(dAtA[iNdEx-1]) << 56
m.TestOneof = &SampleOneOf_Field1{float64(math.Float64frombits(v))}
case 2:
if wireType != 5 {
return fmt.Errorf("proto: wrong wireType = %d for field Field2", wireType)
}
var v uint32
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
iNdEx += 4
v = uint32(dAtA[iNdEx-4])
v |= uint32(dAtA[iNdEx-3]) << 8
v |= uint32(dAtA[iNdEx-2]) << 16
v |= uint32(dAtA[iNdEx-1]) << 24
m.TestOneof = &SampleOneOf_Field2{float32(math.Float32frombits(v))}
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Field3", wireType)
}
var v int32
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (int32(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.TestOneof = &SampleOneOf_Field3{v}
case 4:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Field4", wireType)
}
var v int64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.TestOneof = &SampleOneOf_Field4{v}
case 5:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Field5", wireType)
}
var v uint32
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (uint32(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.TestOneof = &SampleOneOf_Field5{v}
case 6:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Field6", wireType)
}
var v uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.TestOneof = &SampleOneOf_Field6{v}
case 7:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Field7", wireType)
}
var v int32
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (int32(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
v = int32((uint32(v) >> 1) ^ uint32(((v&1)<<31)>>31))
m.TestOneof = &SampleOneOf_Field7{v}
case 8:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Field8", wireType)
}
var v uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
v = (v >> 1) ^ uint64((int64(v&1)<<63)>>63)
m.TestOneof = &SampleOneOf_Field8{int64(v)}
case 9:
if wireType != 5 {
return fmt.Errorf("proto: wrong wireType = %d for field Field9", wireType)
}
var v uint32
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
iNdEx += 4
v = uint32(dAtA[iNdEx-4])
v |= uint32(dAtA[iNdEx-3]) << 8
v |= uint32(dAtA[iNdEx-2]) << 16
v |= uint32(dAtA[iNdEx-1]) << 24
m.TestOneof = &SampleOneOf_Field9{v}
case 10:
if wireType != 5 {
return fmt.Errorf("proto: wrong wireType = %d for field Field10", wireType)
}
var v int32
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
iNdEx += 4
v = int32(dAtA[iNdEx-4])
v |= int32(dAtA[iNdEx-3]) << 8
v |= int32(dAtA[iNdEx-2]) << 16
v |= int32(dAtA[iNdEx-1]) << 24
m.TestOneof = &SampleOneOf_Field10{v}
case 11:
if wireType != 1 {
return fmt.Errorf("proto: wrong wireType = %d for field Field11", wireType)
}
var v uint64
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
iNdEx += 8
v = uint64(dAtA[iNdEx-8])
v |= uint64(dAtA[iNdEx-7]) << 8
v |= uint64(dAtA[iNdEx-6]) << 16
v |= uint64(dAtA[iNdEx-5]) << 24
v |= uint64(dAtA[iNdEx-4]) << 32
v |= uint64(dAtA[iNdEx-3]) << 40
v |= uint64(dAtA[iNdEx-2]) << 48
v |= uint64(dAtA[iNdEx-1]) << 56
m.TestOneof = &SampleOneOf_Field11{v}
case 12:
if wireType != 1 {
return fmt.Errorf("proto: wrong wireType = %d for field Field12", wireType)
}
var v int64
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
iNdEx += 8
v = int64(dAtA[iNdEx-8])
v |= int64(dAtA[iNdEx-7]) << 8
v |= int64(dAtA[iNdEx-6]) << 16
v |= int64(dAtA[iNdEx-5]) << 24
v |= int64(dAtA[iNdEx-4]) << 32
v |= int64(dAtA[iNdEx-3]) << 40
v |= int64(dAtA[iNdEx-2]) << 48
v |= int64(dAtA[iNdEx-1]) << 56
m.TestOneof = &SampleOneOf_Field12{v}
case 13:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Field13", wireType)
}
var v int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
b := bool(v != 0)
m.TestOneof = &SampleOneOf_Field13{b}
case 14:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Field14", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthOne
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.TestOneof = &SampleOneOf_Field14{string(dAtA[iNdEx:postIndex])}
iNdEx = postIndex
case 15:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Field15", wireType)
}
var byteLen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if byteLen < 0 {
return ErrInvalidLengthOne
}
postIndex := iNdEx + byteLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
v := make([]byte, postIndex-iNdEx)
copy(v, dAtA[iNdEx:postIndex])
m.TestOneof = &SampleOneOf_Field15{v}
iNdEx = postIndex
case 16:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field SubMessage", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOne
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthOne
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
v := &Subby{}
if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
m.TestOneof = &SampleOneOf_SubMessage{v}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipOne(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthOne
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
// skipOne returns the number of bytes occupied by the next complete field
// (tag plus payload) at the start of dAtA, so callers can skip unknown
// fields. It handles every wire type, recursing for (deprecated) groups,
// and reports ErrIntOverflowOne for over-long varints,
// ErrInvalidLengthOne for negative lengths, and io.ErrUnexpectedEOF for
// truncated input.
func skipOne(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		// Decode the field tag (varint).
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowOne
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint payload: advance to its terminating byte.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowOne
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
			return iNdEx, nil
		case 1:
			// Fixed64 payload.
			iNdEx += 8
			return iNdEx, nil
		case 2:
			// Length-delimited payload: varint length, then that many bytes.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowOne
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			// Validate the length before advancing the index; the original
			// advanced first, which yielded the same error but obscured it.
			if length < 0 {
				return 0, ErrInvalidLengthOne
			}
			iNdEx += length
			return iNdEx, nil
		case 3:
			// Start-group: skip nested fields until the matching end-group tag.
			for {
				var innerWire uint64
				var start int = iNdEx
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return 0, ErrIntOverflowOne
					}
					if iNdEx >= l {
						return 0, io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					innerWire |= (uint64(b) & 0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				innerWireType := int(innerWire & 0x7)
				if innerWireType == 4 {
					break
				}
				next, err := skipOne(dAtA[start:])
				if err != nil {
					return 0, err
				}
				iNdEx = start + next
			}
			return iNdEx, nil
		case 4:
			// Bare end-group tag: the tag itself is the whole field.
			return iNdEx, nil
		case 5:
			// Fixed32 payload.
			iNdEx += 4
			return iNdEx, nil
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
	}
	panic("unreachable")
}
// Sentinel errors shared by the generated Unmarshal and skip helpers in
// this file.
var (
	ErrInvalidLengthOne = fmt.Errorf("proto: negative length found during unmarshaling")
	ErrIntOverflowOne   = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("combos/unmarshaler/one.proto", fileDescriptorOne) }
var fileDescriptorOne = []byte{
// 409 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x44, 0xd2, 0xbf, 0x4f, 0x1b, 0x31,
0x14, 0x07, 0x70, 0x3f, 0x8e, 0x24, 0xe0, 0x84, 0x92, 0xde, 0xf4, 0x8a, 0xaa, 0x27, 0x8b, 0xc9,
0x0b, 0x49, 0x73, 0x97, 0xf0, 0x63, 0x45, 0x55, 0x95, 0xa5, 0x42, 0x0a, 0x7f, 0x00, 0x8a, 0xa9,
0x13, 0x90, 0x72, 0x67, 0x94, 0xcb, 0x0d, 0xdd, 0xf8, 0x73, 0x3a, 0x76, 0xec, 0x9f, 0xc0, 0xc8,
0xd8, 0xa1, 0x03, 0xe7, 0x2e, 0x1d, 0x19, 0x33, 0x56, 0xb9, 0xb4, 0xcf, 0xdb, 0xfb, 0xfa, 0x63,
0x0f, 0xb6, 0xbf, 0xf2, 0xfd, 0xad, 0xcb, 0x8c, 0x2b, 0xfa, 0x65, 0x9e, 0x4d, 0x97, 0xc5, 0xdd,
0x74, 0x61, 0x97, 0x7d, 0x97, 0xdb, 0xde, 0xc3, 0xd2, 0xad, 0x5c, 0x1c, 0xb9, 0xdc, 0x1e, 0x9d,
0xcc, 0xef, 0x57, 0x77, 0xa5, 0xe9, 0xdd, 0xba, 0xac, 0x3f, 0x77, 0x73, 0xd7, 0xaf, 0xcd, 0x94,
0xb3, 0x3a, 0xd5, 0xa1, 0x9e, 0xb6, 0x67, 0x8e, 0xdf, 0xc9, 0xc6, 0x75, 0x69, 0xcc, 0xd7, 0xb8,
0x2b, 0xa3, 0xa2, 0x34, 0x08, 0x0a, 0xf4, 0xfe, 0x64, 0x33, 0x1e, 0xff, 0x8a, 0x64, 0xfb, 0x7a,
0x9a, 0x3d, 0x2c, 0xec, 0x55, 0x6e, 0xaf, 0x66, 0x31, 0xca, 0xe6, 0xa7, 0x7b, 0xbb, 0xf8, 0x32,
0xa8, 0x37, 0xc1, 0x58, 0x4c, 0xfe, 0x65, 0x96, 0x04, 0x77, 0x14, 0xe8, 0x1d, 0x96, 0x84, 0x25,
0xc5, 0x48, 0x81, 0x6e, 0xb0, 0xa4, 0x2c, 0x43, 0xdc, 0x55, 0xa0, 0x23, 0x96, 0x21, 0xcb, 0x08,
0x1b, 0x0a, 0xf4, 0x01, 0xcb, 0x88, 0xe5, 0x14, 0x9b, 0x0a, 0xf4, 0x2e, 0xcb, 0x29, 0xcb, 0x19,
0xb6, 0x14, 0xe8, 0xb7, 0x2c, 0x67, 0x2c, 0xe7, 0xb8, 0xa7, 0x40, 0xc7, 0x2c, 0xe7, 0x2c, 0x17,
0xb8, 0xaf, 0x40, 0xb7, 0x58, 0x2e, 0xe2, 0x23, 0xd9, 0xda, 0xde, 0xec, 0x03, 0x4a, 0x05, 0xfa,
0x70, 0x2c, 0x26, 0xff, 0x17, 0x82, 0x0d, 0xb0, 0xad, 0x40, 0x37, 0x83, 0x0d, 0x82, 0x25, 0xd8,
0x51, 0xa0, 0xbb, 0xc1, 0x92, 0x60, 0x29, 0x1e, 0x28, 0xd0, 0x7b, 0xc1, 0xd2, 0x60, 0x43, 0x7c,
0xb3, 0x79, 0xff, 0x60, 0xc3, 0x60, 0x23, 0x3c, 0x54, 0xa0, 0x3b, 0xc1, 0x46, 0xf1, 0x89, 0x6c,
0x17, 0xa5, 0xb9, 0xc9, 0x6c, 0x51, 0x4c, 0xe7, 0x16, 0xbb, 0x0a, 0x74, 0x3b, 0x91, 0xbd, 0x4d,
0x23, 0xea, 0x4f, 0x1d, 0x8b, 0x89, 0x2c, 0x4a, 0xf3, 0x79, 0xeb, 0x97, 0x1d, 0x29, 0x57, 0xb6,
0x58, 0xdd, 0xb8, 0xdc, 0xba, 0xd9, 0xe5, 0xc7, 0xa7, 0x8a, 0xc4, 0x73, 0x45, 0xe2, 0x67, 0x45,
0xe2, 0xa5, 0x22, 0x78, 0xad, 0x08, 0xd6, 0x15, 0xc1, 0xa3, 0x27, 0xf8, 0xe6, 0x09, 0xbe, 0x7b,
0x82, 0x1f, 0x9e, 0xe0, 0xc9, 0x93, 0x78, 0xf6, 0x04, 0x2f, 0x9e, 0xe0, 0x8f, 0x27, 0xf1, 0xea,
0x09, 0xd6, 0x9e, 0xc4, 0xe3, 0x6f, 0x12, 0xa6, 0x59, 0xd7, 0x28, 0xfd, 0x1b, 0x00, 0x00, 0xff,
0xff, 0x3b, 0xfb, 0xd3, 0x99, 0x9a, 0x02, 0x00, 0x00,
}<|fim▁end|> |
if iNdEx > l { |
<|file_name|>gl_headers.hpp<|end_file_name|><|fim▁begin|>#ifndef GL_HEADER_HPP
#define GL_HEADER_HPP
#define GLEW_STATIC
extern "C" {<|fim▁hole|>}
#include <cinttypes>
#if defined(__APPLE__)
# include <OpenGL/gl.h>
# include <OpenGL/gl3.h>
# define OGL32CTX
# ifdef GL_ARB_instanced_arrays
# define glVertexAttribDivisor glVertexAttribDivisorARB
# endif
# ifndef GL_TEXTURE_SWIZZLE_RGBA
# define GL_TEXTURE_SWIZZLE_RGBA 0x8E46
# endif
#elif defined(ANDROID)
# include <GLES/gl.h>
#elif defined(WIN32)
# define _WINSOCKAPI_
# include <windows.h>
#else
#define GL_GLEXT_PROTOTYPES
#define DEBUG_OUTPUT_DECLARED
# include <GL/gl.h>
# include <GL/glext.h>
#endif
struct DrawElementsIndirectCommand{
GLuint count;
GLuint instanceCount;
GLuint firstIndex;
GLuint baseVertex;
GLuint baseInstance;
};
#endif<|fim▁end|> | #include <GL/glew.h> |
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>/**
* Angular 2 decorators and services
*/
import { Component, OnInit, ViewEncapsulation } from '@angular/core';
import { environment } from 'environments/environment';
import { AppState } from './app.service';
/**
* App Component
* Top Level Component
*/
@Component({
selector: 'my-app',
encapsulation: ViewEncapsulation.None,
template: `
<nav>
<a [routerLink]=" ['./'] "
routerLinkActive="active" [routerLinkActiveOptions]= "{exact: true}">
Index<|fim▁hole|>
<main>
<router-outlet></router-outlet>
</main>
<pre class="app-state">this.appState.state = {{ appState.state | json }}</pre>
<footer>
<span>Angular Starter by <a [href]="twitter">@gdi2290</a></span>
<div>
<a [href]="url">
<img [src]="tipe" width="25%">
</a>
</div>
</footer>
`
})
export class AppComponent implements OnInit {
public name = 'Angular Starter';
public tipe = 'assets/img/tipe.png';
public twitter = 'https://twitter.com/gdi2290';
public url = 'https://tipe.io';
public showDevModule: boolean = environment.showDevModule;
constructor(
public appState: AppState
) {}
public ngOnInit() {
console.log('Initial App State', this.appState.state);
}
}
/**
* Please review the https://github.com/AngularClass/angular2-examples/ repo for
* more angular app examples that you may copy/paste
* (The examples may not be updated as quickly. Please open an issue on github for us to update it)
* For help or questions please contact us at @AngularClass on twitter
* or our chat on Slack at https://AngularClass.com/slack-join
*/<|fim▁end|> | </a>
</nav> |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class QuoteAdmin(admin.ModelAdmin):
list_display = ('message', 'name', 'program', 'class_of',
'submission_time')
admin.site.register(Quote, QuoteAdmin)<|fim▁end|> | from quotes.models import Quote
from django.contrib import admin |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from .plot import * |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>try:
from django.conf.urls import *
except ImportError: # django < 1.4
from django.conf.urls.defaults import *
<|fim▁hole|> url(r"^$", EventList.as_view(template_name='events/event_list_calendar.html'), name='list'),
#url(r"^$", EventList.as_view(), name='list'),
url(r"^create/$", EventCreate.as_view(), name='create'),
url(r"^create/json/$", EventCreateJSON.as_view(), name='create_json'),
url(r"^(?P<pk>\d+)/$", EventDetail.as_view(), name='detail'),
url(r"^(?P<pk>\d+)/update$", EventUpdate.as_view(), name='update'),
url(r"^(?P<pk>\d+)/delete/$", EventDelete.as_view(), name='delete'),
url(r"^(?P<event_id>\d+)/rsvp/$", 'rsvp_event', name='rsvp'),
url(r"^(?P<event_id>\d+)/attend/$", 'attend_event', name='attend'),
#url(r"^calendar/(?P<year>\d+)/(?P<month>\d+)/$", 'calendar', name='calendar'),
#url(r"^calendar/$", CalendarRedirectView.as_view(), name='calendar-redirect'),
)<|fim▁end|> | from .views import EventDetail, EventList, EventCreate, EventCreateJSON, EventDelete, EventUpdate
urlpatterns = patterns("events.views", |
<|file_name|>download_entered_data_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
class DownloadEnteredDataTest(unittest.TestCase):
    def setUp(self):
        """Launch a Firefox WebDriver and initialise per-test fixtures.

        NOTE(review): opens a real browser session; requires Firefox and its
        driver binary on the machine running the suite.
        """
        self.driver = webdriver.Firefox()
        # Poll up to 30s for elements before raising NoSuchElementException.
        self.driver.implicitly_wait(30)
        self.base_url = "http://kc.kbtdev.org/"
        self.verificationErrors = []
        self.accept_next_alert = True
    def test_download_entered_data(self):
        """End-to-end check: open KoBoCAT, find the Selenium test form,
        request an XLS export, wait for it to finish, and download it.

        NOTE(review): relies on ``self.DEFAULT_WAIT_SECONDS`` and
        ``self.check_timeout`` which are not defined in this class —
        presumably supplied by a base class or mixin; confirm before running.
        Uses ``xrange`` and old ``find_element_by_*`` APIs, so this targets
        Python 2 and an older Selenium release.
        """
        # Open KoBoCAT.
        driver = self.driver
        driver.get(self.base_url + "")
        # Assert that our form's title is in the list of projects and follow its link.
        self.assertTrue(self.is_element_present(By.LINK_TEXT, "Selenium test form title."))
        driver.find_element_by_link_text("Selenium test form title.").click()
        # Wait for and click the "Download data" link.
        for _ in xrange(self.DEFAULT_WAIT_SECONDS):
            self.check_timeout('Waiting for "Download data" link.')
            try:
                if self.is_element_present(By.LINK_TEXT, "Download data"): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        driver.find_element_by_link_text("Download data").click()
        # Wait for and click the "XLS" link.
        for _ in xrange(self.DEFAULT_WAIT_SECONDS):
            self.check_timeout('Waiting for "XLS" link.')
            try:
                if self.is_element_present(By.LINK_TEXT, "XLS"): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        driver.find_element_by_link_text("XLS").click()
        # Wait for the download page's header and ensure it contains the word "excel" (case insensitive).
        for _ in xrange(self.DEFAULT_WAIT_SECONDS):
            self.check_timeout('Waiting for download page\'s header.')
            try:
                if self.is_element_present(By.CSS_SELECTOR, ".data-page__header"): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        self.assertIsNotNone(re.compile('excel', re.IGNORECASE).search(driver.find_element_by_css_selector(".data-page__header").text))
        # Wait for the export progress status.
        for _ in xrange(self.DEFAULT_WAIT_SECONDS):
            self.check_timeout('Waiting for the export progress status.')
            try:
                if self.is_element_present(By.CSS_SELECTOR, ".refresh-export-progress"): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        # Wait (a little more than usual) for the export's download link and click it.
        for _ in xrange(30):
            self.check_timeout('Waiting for the export\'s download link.')
            try:
                if re.search(r"^Selenium_test_form_title_[\s\S]*$", driver.find_element_by_css_selector("#forms-table a").text): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        driver.find_element_by_css_selector("#forms-table a").click()
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text<|fim▁hole|> def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | finally: self.accept_next_alert = True
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#~ # -*- coding: utf-8 -*-
#~ from os.path import join
from distutils.core import setup
from yamlweb import __version__, __progname as name
# readme is needed at register/upload time, not install time
try:
with open('readme.rst') as f:
long_description = f.read()
except IOError:
long_description = ''
setup(
name = name,<|fim▁hole|> version = __version__,
description = 'Converts YAML to HTML and CSS.',
author = 'Mike Miller',
author_email = '[email protected]',
url = 'https://github.com/mixmastamyk/%s' % name,
download_url = ('https://github.com/mixmastamyk/%s/archive/master.zip'
% name),
license = 'GPLv3+',
requires = ['PyYAML(>=3.10,<4.0)', ], #+ requires, # for pypi page
install_requires = ['PyYAML>=3.10,<4.0a0', ], #+ requires, # real reqs
packages = [name],
scripts = ['yaml2html', 'yaml2css'],
#~ package_data = {name: ['', '']},
#~ extras_require = {
#~ 'name': ['pkg1', 'pkg2'],
#~ },
long_description = long_description,
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3'
' or later (GPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)<|fim▁end|> | |
<|file_name|>graph.py<|end_file_name|><|fim▁begin|>from typing import (
IO,
Any,
BinaryIO,
Iterable,
Optional,
TextIO,
Union,
Type,
cast,
overload,
Generator,
Tuple,
)
import logging
from warnings import warn
import random
from rdflib.namespace import Namespace, RDF
from rdflib import plugin, exceptions, query, namespace
import rdflib.term
from rdflib.term import BNode, IdentifiedNode, Node, URIRef, Literal, Genid
from rdflib.paths import Path
from rdflib.store import Store
from rdflib.serializer import Serializer
from rdflib.parser import InputSource, Parser, create_input_source
from rdflib.namespace import NamespaceManager
from rdflib.resource import Resource
from rdflib.collection import Collection
import rdflib.util # avoid circular dependency
from rdflib.exceptions import ParserError
import os
import shutil
import tempfile
import pathlib
from io import BytesIO
from urllib.parse import urlparse
from urllib.request import url2pathname
assert Literal # avoid warning
assert Namespace # avoid warning
logger = logging.getLogger(__name__)
__doc__ = """\
RDFLib defines the following kinds of Graphs:
* :class:`~rdflib.graph.Graph`
* :class:`~rdflib.graph.QuotedGraph`
* :class:`~rdflib.graph.ConjunctiveGraph`
* :class:`~rdflib.graph.Dataset`
Graph
-----
An RDF graph is a set of RDF triples. Graphs support the python ``in``
operator, as well as iteration and some operations like union,
difference and intersection.
see :class:`~rdflib.graph.Graph`
Conjunctive Graph
-----------------
A Conjunctive Graph is the most relevant collection of graphs that are
considered to be the boundary for closed world assumptions. This
boundary is equivalent to that of the store instance (which is itself
uniquely identified and distinct from other instances of
:class:`Store` that signify other Conjunctive Graphs). It is
equivalent to all the named graphs within it and associated with a
``_default_`` graph which is automatically assigned a :class:`BNode`
for an identifier - if one isn't given.
see :class:`~rdflib.graph.ConjunctiveGraph`
Quoted graph
------------
The notion of an RDF graph [14] is extended to include the concept of
a formula node. A formula node may occur wherever any other kind of
node can appear. Associated with a formula node is an RDF graph that
is completely disjoint from all other graphs; i.e. has no nodes in
common with any other graph. (It may contain the same labels as other
RDF graphs; because this is, by definition, a separate graph,
considerations of tidiness do not apply between the graph at a formula
node and any other graph.)
This is intended to map the idea of "{ N3-expression }" that is used
by N3 into an RDF graph upon which RDF semantics is defined.
see :class:`~rdflib.graph.QuotedGraph`
Dataset
-------
The RDF 1.1 Dataset, a small extension to the Conjunctive Graph. The
primary term is "graphs in the datasets" and not "contexts with quads"
so there is a separate method to set/retrieve a graph in a dataset and
to operate with dataset graphs. As a consequence of this approach,
dataset graphs cannot be identified with blank nodes, a name is always
required (RDFLib will automatically add a name if one is not provided
at creation time). This implementation includes a convenience method
to directly add a single quad to a dataset graph.
see :class:`~rdflib.graph.Dataset`
Working with graphs
===================
Instantiating Graphs with default store (Memory) and default identifier
(a BNode):
>>> g = Graph()
>>> g.store.__class__
<class 'rdflib.plugins.stores.memory.Memory'>
>>> g.identifier.__class__
<class 'rdflib.term.BNode'>
Instantiating Graphs with a Memory store and an identifier -
<http://rdflib.net>:
>>> g = Graph('Memory', URIRef("http://rdflib.net"))
>>> g.identifier
rdflib.term.URIRef('http://rdflib.net')
>>> str(g) # doctest: +NORMALIZE_WHITESPACE
"<http://rdflib.net> a rdfg:Graph;rdflib:storage
[a rdflib:Store;rdfs:label 'Memory']."
Creating a ConjunctiveGraph - The top level container for all named Graphs
in a "database":
>>> g = ConjunctiveGraph()
>>> str(g.default_context)
"[a rdfg:Graph;rdflib:storage [a rdflib:Store;rdfs:label 'Memory']]."
Adding / removing reified triples to Graph and iterating over it directly or
via triple pattern:
>>> g = Graph()
>>> statementId = BNode()
>>> print(len(g))
0
>>> g.add((statementId, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g.add((statementId, RDF.subject,
... URIRef("http://rdflib.net/store/ConjunctiveGraph"))) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g.add((statementId, RDF.predicate, namespace.RDFS.label)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g.add((statementId, RDF.object, Literal("Conjunctive Graph"))) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> print(len(g))
4
>>> for s, p, o in g:
... print(type(s))
...
<class 'rdflib.term.BNode'>
<class 'rdflib.term.BNode'>
<class 'rdflib.term.BNode'>
<class 'rdflib.term.BNode'>
>>> for s, p, o in g.triples((None, RDF.object, None)):
... print(o)
...
Conjunctive Graph
>>> g.remove((statementId, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> print(len(g))
3
``None`` terms in calls to :meth:`~rdflib.graph.Graph.triples` can be
thought of as "open variables".
Graph support set-theoretic operators, you can add/subtract graphs, as
well as intersection (with multiplication operator g1*g2) and xor (g1
^ g2).
Note that BNode IDs are kept when doing set-theoretic operations, this
may or may not be what you want. Two named graphs within the same
application probably want share BNode IDs, two graphs with data from
different sources probably not. If your BNode IDs are all generated
by RDFLib they are UUIDs and unique.
>>> g1 = Graph()
>>> g2 = Graph()
>>> u = URIRef("http://example.com/foo")
>>> g1.add([u, namespace.RDFS.label, Literal("foo")]) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g1.add([u, namespace.RDFS.label, Literal("bar")]) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g2.add([u, namespace.RDFS.label, Literal("foo")]) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g2.add([u, namespace.RDFS.label, Literal("bing")]) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> len(g1 + g2) # adds bing as label
3
>>> len(g1 - g2) # removes foo
1
>>> len(g1 * g2) # only foo
1
>>> g1 += g2 # now g1 contains everything
Graph Aggregation - ConjunctiveGraphs and ReadOnlyGraphAggregate within
the same store:
>>> store = plugin.get("Memory", Store)()
>>> g1 = Graph(store)
>>> g2 = Graph(store)
>>> g3 = Graph(store)
>>> stmt1 = BNode()
>>> stmt2 = BNode()
>>> stmt3 = BNode()
>>> g1.add((stmt1, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g1.add((stmt1, RDF.subject,
... URIRef('http://rdflib.net/store/ConjunctiveGraph'))) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g1.add((stmt1, RDF.predicate, namespace.RDFS.label)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g1.add((stmt1, RDF.object, Literal('Conjunctive Graph'))) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g2.add((stmt2, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g2.add((stmt2, RDF.subject,
... URIRef('http://rdflib.net/store/ConjunctiveGraph'))) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g2.add((stmt2, RDF.predicate, RDF.type)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g2.add((stmt2, RDF.object, namespace.RDFS.Class)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g3.add((stmt3, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g3.add((stmt3, RDF.subject,
... URIRef('http://rdflib.net/store/ConjunctiveGraph'))) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g3.add((stmt3, RDF.predicate, namespace.RDFS.comment)) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> g3.add((stmt3, RDF.object, Literal(
... 'The top-level aggregate graph - The sum ' +
... 'of all named graphs within a Store'))) # doctest: +ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> len(list(ConjunctiveGraph(store).subjects(RDF.type, RDF.Statement)))
3
>>> len(list(ReadOnlyGraphAggregate([g1,g2]).subjects(
... RDF.type, RDF.Statement)))
2
ConjunctiveGraphs have a :meth:`~rdflib.graph.ConjunctiveGraph.quads` method
which returns quads instead of triples, where the fourth item is the Graph
(or subclass thereof) instance in which the triple was asserted:
>>> uniqueGraphNames = set(
... [graph.identifier for s, p, o, graph in ConjunctiveGraph(store
... ).quads((None, RDF.predicate, None))])
>>> len(uniqueGraphNames)
3
>>> unionGraph = ReadOnlyGraphAggregate([g1, g2])
>>> uniqueGraphNames = set(
... [graph.identifier for s, p, o, graph in unionGraph.quads(
... (None, RDF.predicate, None))])
>>> len(uniqueGraphNames)
2
Parsing N3 from a string
>>> g2 = Graph()
>>> src = '''
... @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
... @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
... [ a rdf:Statement ;
... rdf:subject <http://rdflib.net/store#ConjunctiveGraph>;
... rdf:predicate rdfs:label;
... rdf:object "Conjunctive Graph" ] .
... '''
>>> g2 = g2.parse(data=src, format="n3")
>>> print(len(g2))
4
Using Namespace class:
>>> RDFLib = Namespace("http://rdflib.net/")
>>> RDFLib.ConjunctiveGraph
rdflib.term.URIRef('http://rdflib.net/ConjunctiveGraph')
>>> RDFLib["Graph"]
rdflib.term.URIRef('http://rdflib.net/Graph')
"""
__all__ = [
"Graph",
"ConjunctiveGraph",
"QuotedGraph",
"Seq",
"ModificationException",
"Dataset",
"UnSupportedAggregateOperation",
"ReadOnlyGraphAggregate",
"BatchAddGraph",
]
class Graph(Node):
"""An RDF Graph
The constructor accepts one argument, the "store"
that will be used to store the graph data (see the "store"
package for stores currently shipped with rdflib).
Stores can be context-aware or unaware. Unaware stores take up
(some) less space but cannot support features that require
context, such as true merging/demerging of sub-graphs and
provenance.
Even if used with a context-aware store, Graph will only expose the quads which
belong to the default graph. To access the rest of the data, `ConjunctiveGraph` or
`Dataset` classes can be used instead.
The Graph constructor can take an identifier which identifies the Graph
by name. If none is given, the graph is assigned a BNode for its
identifier.
For more on named graphs, see: http://www.w3.org/2004/03/trix/
"""
    def __init__(
        self,
        store: Union[Store, str] = "default",
        identifier: Optional[Union[IdentifiedNode, str]] = None,
        namespace_manager: Optional[NamespaceManager] = None,
        base: Optional[str] = None,
    ):
        """Create a Graph backed by ``store`` (a Store instance or a plugin
        name). ``identifier`` names the graph; a fresh BNode is used when
        omitted, and a plain string is coerced to a URIRef."""
        super(Graph, self).__init__()
        self.base = base
        self.__identifier: Node
        self.__identifier = identifier or BNode()  # type: ignore[assignment]
        if not isinstance(self.__identifier, Node):
            self.__identifier = URIRef(self.__identifier)  # type: ignore[unreachable]
        self.__store: Store
        if not isinstance(store, Store):
            # TODO: error handling
            # A string selects a registered store plugin by name.
            self.__store = store = plugin.get(store, Store)()
        else:
            self.__store = store
        # The NamespaceManager is created lazily by the property when None.
        self.__namespace_manager = namespace_manager
        self.context_aware = False
        self.formula_aware = False
        self.default_union = False
    @property
    def store(self):
        """The backing :class:`~rdflib.store.Store` instance (read-only)."""
        return self.__store
    @property
    def identifier(self):
        """This graph's identifier node (read-only)."""
        return self.__identifier
    @property
    def namespace_manager(self):
        """
        this graph's namespace-manager
        """
        if self.__namespace_manager is None:
            # Built on first access so unused graphs pay no cost.
            self.__namespace_manager = NamespaceManager(self)
        return self.__namespace_manager
    @namespace_manager.setter
    def namespace_manager(self, nm):
        self.__namespace_manager = nm
    def __repr__(self):
        return "<Graph identifier=%s (%s)>" % (self.identifier, type(self))
    def __str__(self):
        # N3-flavoured description; URIRef identifiers are rendered via n3().
        if isinstance(self.identifier, URIRef):
            return (
                "%s a rdfg:Graph;rdflib:storage " + "[a rdflib:Store;rdfs:label '%s']."
            ) % (self.identifier.n3(), self.store.__class__.__name__)
        else:
            return (
                "[a rdfg:Graph;rdflib:storage " + "[a rdflib:Store;rdfs:label '%s']]."
            ) % self.store.__class__.__name__
    def toPython(self):
        """A Graph converts to itself."""
        return self
    def destroy(self, configuration):
        """Destroy the store identified by `configuration` if supported"""
        self.__store.destroy(configuration)
        return self
    # Transactional interfaces (optional)
    def commit(self):
        """Commits active transactions"""
        self.__store.commit()
        return self
    def rollback(self):
        """Rollback active transactions"""
        self.__store.rollback()
        return self
    def open(self, configuration, create=False):
        """Open the graph store
        Might be necessary for stores that require opening a connection to a
        database or acquiring some resource.
        """
        return self.__store.open(configuration, create)
    def close(self, commit_pending_transaction=False):
        """Close the graph store
        Might be necessary for stores that require closing a connection to a
        database or releasing some resource.
        """
        return self.__store.close(commit_pending_transaction=commit_pending_transaction)
def add(self, triple: Tuple[Node, Node, Node]):
"""Add a triple with self as context"""
s, p, o = triple
assert isinstance(s, Node), "Subject %s must be an rdflib term" % (s,)
assert isinstance(p, Node), "Predicate %s must be an rdflib term" % (p,)
assert isinstance(o, Node), "Object %s must be an rdflib term" % (o,)
self.__store.add((s, p, o), self, quoted=False)
return self
    def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]):
        """Add a sequence of triple with context"""
        # Quads whose context is not this graph are silently dropped; note
        # the identity (`is`) comparison of identifiers. `_assertnode` is
        # defined elsewhere in this module — presumably it asserts the terms
        # are Nodes as a side effect (TODO confirm).
        self.__store.addN(
            (s, p, o, c)
            for s, p, o, c in quads
            if isinstance(c, Graph)
            and c.identifier is self.identifier
            and _assertnode(s, p, o)
        )
        return self
    def remove(self, triple):
        """Remove a triple from the graph
        If the triple does not provide a context attribute, removes the triple
        from all contexts.
        """
        # None positions in `triple` act as wildcards (store semantics).
        self.__store.remove(triple, context=self)
        return self
    @overload
    def triples(
        self,
        triple: Tuple[
            Optional[IdentifiedNode], Optional[IdentifiedNode], Optional[Node]
        ],
    ) -> Iterable[Tuple[IdentifiedNode, IdentifiedNode, Node]]:
        ...
    @overload
    def triples(
        self,
        triple: Tuple[Optional[IdentifiedNode], Path, Optional[Node]],
    ) -> Iterable[Tuple[IdentifiedNode, Path, Node]]:
        ...
    @overload
    def triples(
        self,
        triple: Tuple[
            Optional[IdentifiedNode], Union[None, Path, IdentifiedNode], Optional[Node]
        ],
    ) -> Iterable[Tuple[IdentifiedNode, Union[IdentifiedNode, Path], Node]]:
        ...
    def triples(
        self,
        triple: Tuple[
            Optional[IdentifiedNode], Union[None, Path, IdentifiedNode], Optional[Node]
        ],
    ) -> Iterable[Tuple[IdentifiedNode, Union[IdentifiedNode, Path], Node]]:
        """Generator over the triple store
        Returns triples that match the given triple pattern. If triple pattern
        does not provide a context, all contexts will be searched.
        """
        s, p, o = triple
        if isinstance(p, Path):
            # SPARQL-style property paths evaluate themselves against the
            # graph; the Path object is echoed back in the predicate slot.
            for _s, _o in p.eval(self, s, o):
                yield _s, p, _o
        else:
            for (_s, _p, _o), cg in self.__store.triples((s, p, o), context=self):
                yield _s, _p, _o
    def __getitem__(self, item):
        """
        A graph can be "sliced" as a shortcut for the triples method
        The python slice syntax is (ab)used for specifying triples.
        A generator over matches is returned,
        the returned tuples include only the parts not given
        >>> import rdflib
        >>> g = rdflib.Graph()
        >>> g.add((rdflib.URIRef("urn:bob"), namespace.RDFS.label, rdflib.Literal("Bob"))) # doctest: +ELLIPSIS
        <Graph identifier=... (<class 'rdflib.graph.Graph'>)>
        >>> list(g[rdflib.URIRef("urn:bob")]) # all triples about bob
        [(rdflib.term.URIRef('http://www.w3.org/2000/01/rdf-schema#label'), rdflib.term.Literal('Bob'))]
        >>> list(g[:namespace.RDFS.label]) # all label triples
        [(rdflib.term.URIRef('urn:bob'), rdflib.term.Literal('Bob'))]
        >>> list(g[::rdflib.Literal("Bob")]) # all triples with bob as object
        [(rdflib.term.URIRef('urn:bob'), rdflib.term.URIRef('http://www.w3.org/2000/01/rdf-schema#label'))]
        Combined with SPARQL paths, more complex queries can be
        written concisely:
        Name of all Bobs friends:
        g[bob : FOAF.knows/FOAF.name ]
        Some label for Bob:
        g[bob : DC.title|FOAF.name|RDFS.label]
        All friends and friends of friends of Bob
        g[bob : FOAF.knows * "+"]
        etc.
        .. versionadded:: 4.0
        """
        # Slice start:stop:step is reinterpreted as subject:predicate:object;
        # each combination of missing parts dispatches to the matching helper.
        if isinstance(item, slice):
            s, p, o = item.start, item.stop, item.step
            if s is None and p is None and o is None:
                return self.triples((s, p, o))
            elif s is None and p is None:
                return self.subject_predicates(o)
            elif s is None and o is None:
                return self.subject_objects(p)
            elif p is None and o is None:
                return self.predicate_objects(s)
            elif s is None:
                return self.subjects(p, o)
            elif p is None:
                return self.predicates(s, o)
            elif o is None:
                return self.objects(s, p)
            else:
                # all given
                return (s, p, o) in self
        elif isinstance(item, (Path, Node)):
            return self.predicate_objects(item)
        else:
            raise TypeError(
                "You can only index a graph by a single rdflib term or path, or a slice of rdflib terms."
            )
    def __len__(self):
        """Returns the number of triples in the graph
        If context is specified then the number of triples in the context is
        returned instead.
        """
        # Delegated to the store, restricted to this graph as the context.
        return self.__store.__len__(context=self)
    def __iter__(self):
        """Iterates over all triples in the store"""
        # Equivalent to matching the fully-open pattern (None, None, None).
        return self.triples((None, None, None))
def __contains__(self, triple):
"""Support for 'triple in graph' syntax"""
for triple in self.triples(triple):
return True
return False
    def __hash__(self):
        # Graphs hash (and compare) by identifier, never by contents.
        return hash(self.identifier)
    def __cmp__(self, other):
        # Python 2 legacy three-way comparison, kept for compatibility.
        if other is None:
            return -1
        elif isinstance(other, Graph):
            return (self.identifier > other.identifier) - (
                self.identifier < other.identifier
            )
        else:
            # Note if None is considered equivalent to owl:Nothing
            # Then perhaps a graph with length 0 should be considered
            # equivalent to None (if compared to it)?
            return 1
    def __eq__(self, other):
        return isinstance(other, Graph) and self.identifier == other.identifier
    def __lt__(self, other):
        # A graph sorts below None and below graphs with larger identifiers.
        return (other is None) or (
            isinstance(other, Graph) and self.identifier < other.identifier
        )
    def __le__(self, other):
        return self < other or self == other
def __gt__(self, other):
return (isinstance(other, Graph) and self.identifier > other.identifier) or (
other is not None
)
    def __ge__(self, other):
        return self > other or self == other
    def __iadd__(self, other):
        """Add all triples in Graph other to Graph.
        BNode IDs are not changed."""
        # Bulk-add via addN, tagging every triple with self as its context.
        self.addN((s, p, o, self) for s, p, o in other)
        return self
    def __isub__(self, other):
        """Subtract all triples in Graph other from Graph.
        BNode IDs are not changed."""
        for triple in other:
            self.remove(triple)
        return self
def __add__(self, other):
"""Set-theoretic union
BNode IDs are not changed."""
try:
retval = type(self)()
except TypeError:
retval = Graph()
for (prefix, uri) in set(list(self.namespaces()) + list(other.namespaces())):
retval.bind(prefix, uri)
for x in self:
retval.add(x)
for y in other:
retval.add(y)
return retval
def __mul__(self, other):
"""Set-theoretic intersection.
BNode IDs are not changed."""
try:
retval = type(self)()
except TypeError:
retval = Graph()
for x in other:
if x in self:
retval.add(x)
return retval
def __sub__(self, other):
"""Set-theoretic difference.
BNode IDs are not changed."""
try:
retval = type(self)()
except TypeError:
retval = Graph()
for x in self:
if x not in other:
retval.add(x)
return retval
    def __xor__(self, other):
        """Set-theoretic XOR.
        BNode IDs are not changed."""
        return (self - other) + (other - self)
    # Operator aliases: ``|`` mirrors union, ``&`` mirrors intersection.
    __or__ = __add__
    __and__ = __mul__
    # Conv. methods
    def set(self, triple):
        """Convenience method to update the value of object
        Remove any existing triples for subject and predicate before adding
        (subject, predicate, object).
        """
        (subject, predicate, object_) = triple
        assert (
            subject is not None
        ), "s can't be None in .set([s,p,o]), as it would remove (*, p, *)"
        assert (
            predicate is not None
        ), "p can't be None in .set([s,p,o]), as it would remove (s, *, *)"
        # Replace-then-add: wipes every (subject, predicate, *) triple first.
        self.remove((subject, predicate, None))
        self.add((subject, predicate, object_))
        return self
def subjects(
self,
predicate: Union[None, Path, IdentifiedNode] = None,
object: Optional[Node] = None,
unique: bool = False,
) -> Iterable[IdentifiedNode]:
"""A generator of (optionally unique) subjects with the given
predicate and object"""
if not unique:
for s, p, o in self.triples((None, predicate, object)):
yield s
else:
subs = set()
for s, p, o in self.triples((None, predicate, object)):
if s not in subs:
yield s
try:
subs.add(s)
except MemoryError as e:
logger.error(
f"{e}. Consider not setting parameter 'unique' to True"
)
raise
    def predicates(
        self,
        subject: Optional[IdentifiedNode] = None,
        object: Optional[Node] = None,
        unique: bool = False,
    ) -> Iterable[IdentifiedNode]:
        """A generator of (optionally unique) predicates with the given
        subject and object"""
        if not unique:
            for s, p, o in self.triples((subject, None, object)):
                yield p
        else:
            # Track yielded values so each predicate is produced at most once.
            preds = set()
            for s, p, o in self.triples((subject, None, object)):
                if p not in preds:
                    yield p
                    try:
                        preds.add(p)
                    except MemoryError as e:
                        logger.error(
                            f"{e}. Consider not setting parameter 'unique' to True"
                        )
                        raise
    def objects(
        self,
        subject: Optional[IdentifiedNode] = None,
        predicate: Union[None, Path, IdentifiedNode] = None,
        unique: bool = False,
    ) -> Iterable[Node]:
        """A generator of (optionally unique) objects with the given
        subject and predicate"""
        if not unique:
            for s, p, o in self.triples((subject, predicate, None)):
                yield o
        else:
            # Track yielded values so each object is produced at most once.
            objs = set()
            for s, p, o in self.triples((subject, predicate, None)):
                if o not in objs:
                    yield o
                    try:
                        objs.add(o)
                    except MemoryError as e:
                        logger.error(
                            f"{e}. Consider not setting parameter 'unique' to True"
                        )
                        raise
    def subject_predicates(
        self, object: Optional[Node] = None, unique: bool = False
    ) -> Generator[Tuple[IdentifiedNode, IdentifiedNode], None, None]:
        """A generator of (optionally unique) (subject, predicate) tuples
        for the given object"""
        if not unique:
            for s, p, o in self.triples((None, None, object)):
                yield s, p
        else:
            # Track yielded pairs so each (subject, predicate) appears once.
            subj_preds = set()
            for s, p, o in self.triples((None, None, object)):
                if (s, p) not in subj_preds:
                    yield s, p
                    try:
                        subj_preds.add((s, p))
                    except MemoryError as e:
                        logger.error(
                            f"{e}. Consider not setting parameter 'unique' to True"
                        )
                        raise
    def subject_objects(
        self, predicate: Union[None, Path, IdentifiedNode] = None, unique: bool = False
    ) -> Generator[Tuple[IdentifiedNode, Node], None, None]:
        """A generator of (optionally unique) (subject, object) tuples
        for the given predicate"""
        if not unique:
            for s, p, o in self.triples((None, predicate, None)):
                yield s, o
        else:
            # Track yielded pairs so each (subject, object) appears once.
            subj_objs = set()
            for s, p, o in self.triples((None, predicate, None)):
                if (s, o) not in subj_objs:
                    yield s, o
                    try:
                        subj_objs.add((s, o))
                    except MemoryError as e:
                        logger.error(
                            f"{e}. Consider not setting parameter 'unique' to True"
                        )
                        raise
    def predicate_objects(
        self, subject: Optional[IdentifiedNode] = None, unique: bool = False
    ) -> Generator[Tuple[IdentifiedNode, Node], None, None]:
        """A generator of (optionally unique) (predicate, object) tuples
        for the given subject"""
        if not unique:
            for s, p, o in self.triples((subject, None, None)):
                yield p, o
        else:
            # Track yielded pairs so each (predicate, object) appears once.
            pred_objs = set()
            for s, p, o in self.triples((subject, None, None)):
                if (p, o) not in pred_objs:
                    yield p, o
                    try:
                        pred_objs.add((p, o))
                    except MemoryError as e:
                        logger.error(
                            f"{e}. Consider not setting parameter 'unique' to True"
                        )
                        raise
    def triples_choices(self, triple, context=None):
        """Generator over triples matching a pattern in which one position
        may hold a list of alternatives (delegated to the store).

        NOTE(review): the ``context`` parameter is accepted but unused —
        the store is always queried with ``context=self``; confirm intent.
        """
        subject, predicate, object_ = triple
        for (s, p, o), cg in self.store.triples_choices(
            (subject, predicate, object_), context=self
        ):
            yield s, p, o
    def value(
        self, subject=None, predicate=RDF.value, object=None, default=None, any=True
    ):
        """Get a value for a pair of two criteria
        Exactly one of subject, predicate, object must be None. Useful if one
        knows that there may only be one value.
        It is one of those situations that occur a lot, hence this
        'macro' like utility
        Parameters:
        subject, predicate, object -- exactly one must be None
        default -- value to be returned if no values found
        any -- if True, return any value in the case there is more than one,
        else, raise UniquenessError
        """
        retval = default
        # With two or more unbound positions the query is ambiguous; bail out.
        if (
            (subject is None and predicate is None)
            or (subject is None and object is None)
            or (predicate is None and object is None)
        ):
            return None
        if object is None:
            values = self.objects(subject, predicate)
        if subject is None:
            values = self.subjects(predicate, object)
        if predicate is None:
            values = self.predicates(subject, object)
        try:
            retval = next(values)
        except StopIteration:
            retval = default
        else:
            # any=False: a second match is an error (UniquenessError below).
            if any is False:
                try:
                    next(values)
                    msg = (
                        "While trying to find a value for (%s, %s, %s) the"
                        " following multiple values where found:\n"
                        % (subject, predicate, object)
                    )
                    triples = self.store.triples((subject, predicate, object), None)
                    for (s, p, o), contexts in triples:
                        msg += "(%s, %s, %s)\n (contexts: %s)\n" % (
                            s,
                            p,
                            o,
                            list(contexts),
                        )
                    raise exceptions.UniquenessError(msg)
                except StopIteration:
                    pass
        return retval
    def items(self, list):
        """Generator over all items in the resource specified by list
        list is an RDF collection.
        """
        # `chain` records visited list nodes to detect cyclic rdf:rest chains.
        chain = set([list])
        while list:
            item = self.value(list, RDF.first)
            if item is not None:
                yield item
            list = self.value(list, RDF.rest)
            if list in chain:
                raise ValueError("List contains a recursive rdf:rest reference")
            chain.add(list)
    def transitiveClosure(self, func, arg, seen=None):
        """
        Generates transitive closure of a user-defined
        function against the graph
        >>> from rdflib.collection import Collection
        >>> g=Graph()
        >>> a=BNode("foo")
        >>> b=BNode("bar")
        >>> c=BNode("baz")
        >>> g.add((a,RDF.first,RDF.type)) # doctest: +ELLIPSIS
        <Graph identifier=... (<class 'rdflib.graph.Graph'>)>
        >>> g.add((a,RDF.rest,b)) # doctest: +ELLIPSIS
        <Graph identifier=... (<class 'rdflib.graph.Graph'>)>
        >>> g.add((b,RDF.first,namespace.RDFS.label)) # doctest: +ELLIPSIS
        <Graph identifier=... (<class 'rdflib.graph.Graph'>)>
        >>> g.add((b,RDF.rest,c)) # doctest: +ELLIPSIS
        <Graph identifier=... (<class 'rdflib.graph.Graph'>)>
        >>> g.add((c,RDF.first,namespace.RDFS.comment)) # doctest: +ELLIPSIS
        <Graph identifier=... (<class 'rdflib.graph.Graph'>)>
        >>> g.add((c,RDF.rest,RDF.nil)) # doctest: +ELLIPSIS
        <Graph identifier=... (<class 'rdflib.graph.Graph'>)>
        >>> def topList(node,g):
        ...    for s in g.subjects(RDF.rest, node):
        ...       yield s
        >>> def reverseList(node,g):
        ...    for f in g.objects(node, RDF.first):
        ...       print(f)
        ...    for s in g.subjects(RDF.rest, node):
        ...       yield s
        >>> [rt for rt in g.transitiveClosure(
        ...     topList,RDF.nil)] # doctest: +NORMALIZE_WHITESPACE
        [rdflib.term.BNode('baz'),
         rdflib.term.BNode('bar'),
         rdflib.term.BNode('foo')]
        >>> [rt for rt in g.transitiveClosure(
        ...     reverseList,RDF.nil)] # doctest: +NORMALIZE_WHITESPACE
        http://www.w3.org/2000/01/rdf-schema#comment
        http://www.w3.org/2000/01/rdf-schema#label
        http://www.w3.org/1999/02/22-rdf-syntax-ns#type
        [rdflib.term.BNode('baz'),
         rdflib.term.BNode('bar'),
         rdflib.term.BNode('foo')]
        """
        # `seen` marks visited arguments so cyclic data cannot recurse forever.
        if seen is None:
            seen = {}
        elif arg in seen:
            return
        seen[arg] = 1
        # Depth-first: yield each direct result, then recurse into it.
        for rt in func(arg, self):
            yield rt
            for rt_2 in self.transitiveClosure(func, rt, seen):
                yield rt_2
def transitive_objects(self, subject, predicate, remember=None):
"""Transitively generate objects for the ``predicate`` relationship
Generated objects belong to the depth first transitive closure of the
``predicate`` relationship starting at ``subject``.
"""
if remember is None:
remember = {}
if subject in remember:
return
remember[subject] = 1
yield subject
for object in self.objects(subject, predicate):
for o in self.transitive_objects(object, predicate, remember):
yield o
def transitive_subjects(self, predicate, object, remember=None):
"""Transitively generate subjects for the ``predicate`` relationship
Generated subjects belong to the depth first transitive closure of the
``predicate`` relationship starting at ``object``.
"""
if remember is None:
remember = {}
if object in remember:
return
remember[object] = 1
yield object
for subject in self.subjects(predicate, object):
for s in self.transitive_subjects(predicate, subject, remember):
yield s
def qname(self, uri):
return self.namespace_manager.qname(uri)
def compute_qname(self, uri, generate=True):
return self.namespace_manager.compute_qname(uri, generate)
def bind(self, prefix, namespace, override=True, replace=False) -> None:
"""Bind prefix to namespace
If override is True will bind namespace to given prefix even
if namespace was already bound to a different prefix.
if replace, replace any existing prefix with the new namespace
for example: graph.bind("foaf", "http://xmlns.com/foaf/0.1/")
"""
return self.namespace_manager.bind(
prefix, namespace, override=override, replace=replace
)
def namespaces(self):
"""Generator over all the prefix, namespace tuples"""
for prefix, namespace in self.namespace_manager.namespaces():
yield prefix, namespace
def absolutize(self, uri, defrag=1):
"""Turn uri into an absolute URI if it's not one already"""
return self.namespace_manager.absolutize(uri, defrag)
    # Overload 1: no destination, encoding given positionally -> bytes
    @overload
    def serialize(
        self, destination: None, format: str, base: Optional[str], encoding: str, **args
    ) -> bytes:
        ...
    # Overload 2: no destination, encoding given by keyword -> bytes
    @overload
    def serialize(
        self,
        destination: None = ...,
        format: str = ...,
        base: Optional[str] = ...,
        *,
        encoding: str,
        **args,
    ) -> bytes:
        ...
    # Overload 3: no destination and no encoding -> decoded str
    @overload
    def serialize(
        self,
        destination: None = ...,
        format: str = ...,
        base: Optional[str] = ...,
        encoding: None = ...,
        **args,
    ) -> str:
        ...
    # Overload 4: a concrete destination (path or stream) -> returns self
    @overload
    def serialize(
        self,
        destination: Union[str, pathlib.PurePath, IO[bytes]],
        format: str = ...,
        base: Optional[str] = ...,
        encoding: Optional[str] = ...,
        **args,
    ) -> "Graph":
        ...
    # Overload 5: fallback covering all remaining combinations
    @overload
    def serialize(
        self,
        destination: Optional[Union[str, pathlib.PurePath, IO[bytes]]] = ...,
        format: str = ...,
        base: Optional[str] = ...,
        encoding: Optional[str] = ...,
        **args,
    ) -> Union[bytes, str, "Graph"]:
        ...
    def serialize(
        self,
        destination: Optional[Union[str, pathlib.PurePath, IO[bytes]]] = None,
        format: str = "turtle",
        base: Optional[str] = None,
        encoding: Optional[str] = None,
        **args: Any,
    ) -> Union[bytes, str, "Graph"]:
        """Serialize the Graph to destination
        If destination is None serialize method returns the serialization as
        bytes or string.
        If encoding is None and destination is None, returns a string
        If encoding is set, and Destination is None, returns bytes
        Format defaults to turtle.
        Format support can be extended with plugins,
        but "xml", "n3", "turtle", "nt", "pretty-xml", "trix", "trig" and "nquads" are built in.
        """
        # if base is not given as attribute use the base set for the graph
        if base is None:
            base = self.base
        serializer = plugin.get(format, Serializer)(self)
        stream: IO[bytes]
        if destination is None:
            # In-memory serialization: decode to str unless a specific
            # encoding was requested, in which case return the raw bytes.
            stream = BytesIO()
            if encoding is None:
                serializer.serialize(stream, base=base, encoding="utf-8", **args)
                return stream.getvalue().decode("utf-8")
            else:
                serializer.serialize(stream, base=base, encoding=encoding, **args)
                return stream.getvalue()
        if hasattr(destination, "write"):
            # File-like destination: write directly to the supplied stream.
            stream = cast(IO[bytes], destination)
            serializer.serialize(stream, base=base, encoding=encoding, **args)
        else:
            # Path or URL destination: only local file references are valid.
            if isinstance(destination, pathlib.PurePath):
                location = str(destination)
            else:
                location = cast(str, destination)
            scheme, netloc, path, params, _query, fragment = urlparse(location)
            if netloc != "":
                raise ValueError(
                    f"destination {destination} is not a local file reference"
                )
            # Serialize into a temp file first, then move it into place, so a
            # failed serialization never leaves a half-written destination.
            fd, name = tempfile.mkstemp()
            stream = os.fdopen(fd, "wb")
            serializer.serialize(stream, base=base, encoding=encoding, **args)
            stream.close()
            dest = url2pathname(path) if scheme == "file" else location
            if hasattr(shutil, "move"):
                shutil.move(name, dest)
            else:
                # Fallback for environments without shutil.move: copy + remove.
                shutil.copy(name, dest)
                os.remove(name)
        return self
    def print(self, format="turtle", encoding="utf-8", out=None):
        """Serialize the graph and print the result to ``out`` (stdout by default).

        ``format`` and ``encoding`` are passed through to :meth:`serialize`;
        the bytes returned by serialize are decoded before printing.
        """
        print(
            self.serialize(None, format=format, encoding=encoding).decode(encoding),
            file=out,
            flush=True,
        )
    def parse(
        self,
        source: Optional[
            Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath]
        ] = None,
        publicID: Optional[str] = None,
        format: Optional[str] = None,
        location: Optional[str] = None,
        file: Optional[Union[BinaryIO, TextIO]] = None,
        data: Optional[Union[str, bytes]] = None,
        **args,
    ):
        """
        Parse an RDF source adding the resulting triples to the Graph.
        The source is specified using one of source, location, file or
        data.
        :Parameters:
        - `source`: An InputSource, file-like object, or string. In the case
        of a string the string is the location of the source.
        - `location`: A string indicating the relative or absolute URL of the
        source. Graph's absolutize method is used if a relative location
        is specified.
        - `file`: A file-like object.
        - `data`: A string containing the data to be parsed.
        - `format`: Used if format can not be determined from source, e.g. file
        extension or Media Type. Defaults to text/turtle. Format support can
        be extended with plugins, but "xml", "n3" (use for turtle), "nt" &
        "trix" are built in.
        - `publicID`: the logical URI to use as the document base. If None
        specified the document location is used (at least in the case where
        there is a document location).
        :Returns:
        - self, the graph instance.
        Examples:
        >>> my_data = '''
        ... <rdf:RDF
        ...   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
        ...   xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
        ... >
        ...   <rdf:Description>
        ...     <rdfs:label>Example</rdfs:label>
        ...     <rdfs:comment>This is really just an example.</rdfs:comment>
        ...   </rdf:Description>
        ... </rdf:RDF>
        ... '''
        >>> import tempfile
        >>> fd, file_name = tempfile.mkstemp()
        >>> f = os.fdopen(fd, "w")
        >>> dummy = f.write(my_data)  # Returns num bytes written
        >>> f.close()
        >>> g = Graph()
        >>> result = g.parse(data=my_data, format="application/rdf+xml")
        >>> len(g)
        2
        >>> g = Graph()
        >>> result = g.parse(location=file_name, format="application/rdf+xml")
        >>> len(g)
        2
        >>> g = Graph()
        >>> with open(file_name, "r") as f:
        ...     result = g.parse(f, format="application/rdf+xml")
        >>> len(g)
        2
        >>> os.remove(file_name)
        >>> # default turtle parsing
        >>> result = g.parse(data="<http://example.com/a> <http://example.com/a> <http://example.com/a> .")
        >>> len(g)
        3
        """
        source = create_input_source(
            source=source,
            publicID=publicID,
            location=location,
            file=file,
            data=data,
            format=format,
        )
        if format is None:
            format = source.content_type
        could_not_guess_format = False
        if format is None:
            # No media type available: fall back to guessing from the file
            # extension, then to Turtle. Remember that we guessed so the
            # error message below can say so on failure.
            if (
                hasattr(source, "file")
                and getattr(source.file, "name", None)  # type: ignore[attr-defined]
                and isinstance(source.file.name, str)  # type: ignore[attr-defined]
            ):
                format = rdflib.util.guess_format(source.file.name)  # type: ignore[attr-defined]
            if format is None:
                format = "turtle"
                could_not_guess_format = True
        parser = plugin.get(format, Parser)()
        try:
            # TODO FIXME: Parser.parse should have **kwargs argument.
            parser.parse(source, self, **args)
        except SyntaxError as se:
            if could_not_guess_format:
                raise ParserError(
                    "Could not guess RDF format for %r from file extension so tried Turtle but failed."
                    "You can explicitly specify format using the format argument."
                    % source
                )
            else:
                raise se
        finally:
            # The input source may hold an open file handle; always release it.
            if source.auto_close:
                source.close()
        return self
    def query(
        self,
        query_object,
        processor: Union[str, query.Processor] = "sparql",
        result: Union[str, Type[query.Result]] = "sparql",
        initNs=None,
        initBindings=None,
        use_store_provided: bool = True,
        **kwargs,
    ) -> query.Result:
        """
        Query this graph.
        A type of 'prepared queries' can be realised by providing
        initial variable bindings with initBindings
        Initial namespaces are used to resolve prefixes used in the query,
        if none are given, the namespaces from the graph's namespace manager
        are used.
        :returntype: rdflib.query.Result
        """
        initBindings = initBindings or {}
        initNs = initNs or dict(self.namespaces())
        # Prefer the store's native query implementation when available
        # (e.g. a remote SPARQL store); fall back to the plugin processor
        # when the store raises NotImplementedError.
        if hasattr(self.store, "query") and use_store_provided:
            try:
                return self.store.query(
                    query_object,
                    initNs,
                    initBindings,
                    self.default_union and "__UNION__" or self.identifier,
                    **kwargs,
                )
            except NotImplementedError:
                pass  # store has no own implementation
        # Resolve plugin names (e.g. "sparql") into concrete processor and
        # result classes before executing the query.
        if not isinstance(result, query.Result):
            result = plugin.get(cast(str, result), query.Result)
        if not isinstance(processor, query.Processor):
            processor = plugin.get(processor, query.Processor)(self)
        return result(processor.query(query_object, initBindings, initNs, **kwargs))
    def update(
        self,
        update_object,
        processor="sparql",
        initNs=None,
        initBindings=None,
        use_store_provided=True,
        **kwargs,
    ):
        """Update this graph with the given update query."""
        initBindings = initBindings or {}
        initNs = initNs or dict(self.namespaces())
        # Mirror of query(): try the store's native update first, then fall
        # back to the plugin update processor.
        if hasattr(self.store, "update") and use_store_provided:
            try:
                return self.store.update(
                    update_object,
                    initNs,
                    initBindings,
                    self.default_union and "__UNION__" or self.identifier,
                    **kwargs,
                )
            except NotImplementedError:
                pass  # store has no own implementation
        if not isinstance(processor, query.UpdateProcessor):
            processor = plugin.get(processor, query.UpdateProcessor)(self)
        return processor.update(update_object, initBindings, initNs, **kwargs)
def n3(self):
"""Return an n3 identifier for the Graph"""
return "[%s]" % self.identifier.n3()
def __reduce__(self):
return (
Graph,
(
self.store,
self.identifier,
),
)
def isomorphic(self, other):
"""
does a very basic check if these graphs are the same
If no BNodes are involved, this is accurate.
See rdflib.compare for a correct implementation of isomorphism checks
"""
# TODO: this is only an approximation.
if len(self) != len(other):
return False
for s, p, o in self:
if not isinstance(s, BNode) and not isinstance(o, BNode):
if not (s, p, o) in other:
return False
for s, p, o in other:
if not isinstance(s, BNode) and not isinstance(o, BNode):
if not (s, p, o) in self:
return False
# TODO: very well could be a false positive at this point yet.
return True
def connected(self):
"""Check if the Graph is connected
The Graph is considered undirectional.
Performs a search on the Graph, starting from a random node. Then
iteratively goes depth-first through the triplets where the node is<|fim▁hole|> discovered = []
# take a random one, could also always take the first one, doesn't
# really matter.
if not all_nodes:
return False
visiting = [all_nodes[random.randrange(len(all_nodes))]]
while visiting:
x = visiting.pop()
if x not in discovered:
discovered.append(x)
for new_x in self.objects(subject=x):
if new_x not in discovered and new_x not in visiting:
visiting.append(new_x)
for new_x in self.subjects(object=x):
if new_x not in discovered and new_x not in visiting:
visiting.append(new_x)
# optimisation by only considering length, since no new objects can
# be introduced anywhere.
if len(all_nodes) == len(discovered):
return True
else:
return False
def all_nodes(self):
res = set(self.objects())
res.update(self.subjects())
return res
    def collection(self, identifier):
        """Create a new ``Collection`` instance.
        Parameters:
        - ``identifier``: a URIRef or BNode instance.
        Example::
            >>> graph = Graph()
            >>> uri = URIRef("http://example.org/resource")
            >>> collection = graph.collection(uri)
            >>> assert isinstance(collection, Collection)
            >>> assert collection.uri is uri
            >>> assert collection.graph is graph
            >>> collection += [ Literal(1), Literal(2) ]
        """
        # Collection gives list-style access to the RDF Collection (rdf:first /
        # rdf:rest chain) rooted at ``identifier`` within this graph.
        return Collection(self, identifier)
def resource(self, identifier):
"""Create a new ``Resource`` instance.
Parameters:
- ``identifier``: a URIRef or BNode instance.
Example::
>>> graph = Graph()
>>> uri = URIRef("http://example.org/resource")
>>> resource = graph.resource(uri)
>>> assert isinstance(resource, Resource)
>>> assert resource.identifier is uri
>>> assert resource.graph is graph
"""
if not isinstance(identifier, Node):
identifier = URIRef(identifier)
return Resource(self, identifier)
def _process_skolem_tuples(self, target, func):
for t in self.triples((None, None, None)):
target.add(func(t))
    def skolemize(self, new_graph=None, bnode=None, authority=None, basepath=None):
        """Return a graph in which blank nodes are replaced by skolem IRIs.

        If ``bnode`` is given, only that specific blank node is skolemized;
        otherwise every blank node is. Results go into ``new_graph`` when
        provided, else into a fresh Graph.
        """
        # Skolemize only the one requested bnode (subject and/or object slot).
        def do_skolemize(bnode, t):
            (s, p, o) = t
            if s == bnode:
                s = s.skolemize(authority=authority, basepath=basepath)
            if o == bnode:
                o = o.skolemize(authority=authority, basepath=basepath)
            return s, p, o
        # Skolemize every blank node encountered.
        def do_skolemize2(t):
            (s, p, o) = t
            if isinstance(s, BNode):
                s = s.skolemize(authority=authority, basepath=basepath)
            if isinstance(o, BNode):
                o = o.skolemize(authority=authority, basepath=basepath)
            return s, p, o
        retval = Graph() if new_graph is None else new_graph
        if bnode is None:
            self._process_skolem_tuples(retval, do_skolemize2)
        elif isinstance(bnode, BNode):
            self._process_skolem_tuples(retval, lambda t: do_skolemize(bnode, t))
        return retval
    def de_skolemize(self, new_graph=None, uriref=None):
        """Return a graph in which skolem IRIs are turned back into blank nodes.

        If ``uriref`` is given, only that specific skolem IRI is converted;
        otherwise every ``Genid`` IRI is. Results go into ``new_graph`` when
        provided, else into a fresh Graph.
        """
        # De-skolemize only the one requested IRI (subject and/or object slot).
        def do_de_skolemize(uriref, t):
            (s, p, o) = t
            if s == uriref:
                s = s.de_skolemize()
            if o == uriref:
                o = o.de_skolemize()
            return s, p, o
        # De-skolemize every Genid IRI encountered.
        def do_de_skolemize2(t):
            (s, p, o) = t
            if isinstance(s, Genid):
                s = s.de_skolemize()
            if isinstance(o, Genid):
                o = o.de_skolemize()
            return s, p, o
        retval = Graph() if new_graph is None else new_graph
        if uriref is None:
            self._process_skolem_tuples(retval, do_de_skolemize2)
        elif isinstance(uriref, Genid):
            self._process_skolem_tuples(retval, lambda t: do_de_skolemize(uriref, t))
        return retval
def cbd(self, resource):
"""Retrieves the Concise Bounded Description of a Resource from a Graph
Concise Bounded Description (CBD) is defined in [1] as:
Given a particular node (the starting node) in a particular RDF graph (the source graph), a subgraph of that
particular graph, taken to comprise a concise bounded description of the resource denoted by the starting node,
can be identified as follows:
1. Include in the subgraph all statements in the source graph where the subject of the statement is the
starting node;
2. Recursively, for all statements identified in the subgraph thus far having a blank node object, include
in the subgraph all statements in the source graph where the subject of the statement is the blank node
in question and which are not already included in the subgraph.
3. Recursively, for all statements included in the subgraph thus far, for all reifications of each statement
in the source graph, include the concise bounded description beginning from the rdf:Statement node of
each reification.
This results in a subgraph where the object nodes are either URI references, literals, or blank nodes not
serving as the subject of any statement in the graph.
[1] https://www.w3.org/Submission/CBD/
:param resource: a URIRef object, of the Resource for queried for
:return: a Graph, subgraph of self
"""
subgraph = Graph()
def add_to_cbd(uri):
for s, p, o in self.triples((uri, None, None)):
subgraph.add((s, p, o))
# recurse 'down' through ll Blank Nodes
if type(o) == BNode and not (o, None, None) in subgraph:
add_to_cbd(o)
# for Rule 3 (reification)
# for any rdf:Statement in the graph with the given URI as the object of rdf:subject,
# get all triples with that rdf:Statement instance as subject
# find any subject s where the predicate is rdf:subject and this uri is the object
# (these subjects are of type rdf:Statement, given the domain of rdf:subject)
for s, p, o in self.triples((None, RDF.subject, uri)):
# find all triples with s as the subject and add these to the subgraph
for s2, p2, o2 in self.triples((s, None, None)):
subgraph.add((s2, p2, o2))
add_to_cbd(resource)
return subgraph
class ConjunctiveGraph(Graph):
    """A ConjunctiveGraph is an (unnamed) aggregation of all the named
    graphs in a store.
    It has a ``default`` graph, whose name is associated with the
    graph throughout its life. :meth:`__init__` can take an identifier
    to use as the name of this default graph or it will assign a
    BNode.
    All methods that add triples work against this default graph.
    All queries are carried out against the union of all graphs.
    """
    def __init__(
        self,
        store: Union[Store, str] = "default",
        identifier: Optional[Union[IdentifiedNode, str]] = None,
        default_graph_base: Optional[str] = None,
    ):
        """Create a ConjunctiveGraph over ``store``; ``identifier`` (or a
        fresh BNode) names the default graph."""
        super(ConjunctiveGraph, self).__init__(store, identifier=identifier)
        # A ConjunctiveGraph only makes sense over a store that keeps track
        # of which context (named graph) each triple belongs to.
        assert self.store.context_aware, (
            "ConjunctiveGraph must be backed by" " a context aware store."
        )
        self.context_aware = True
        self.default_union = True  # Conjunctive!
        self.default_context = Graph(
            store=self.store, identifier=identifier or BNode(), base=default_graph_base
        )
    def __str__(self):
        """Readable description naming the backing store class."""
        pattern = (
            "[a rdflib:ConjunctiveGraph;rdflib:storage "
            "[a rdflib:Store;rdfs:label '%s']]"
        )
        return pattern % self.store.__class__.__name__
    # Overload: a concrete triple/quad always yields a concrete s, p, o.
    @overload
    def _spoc(
        self,
        triple_or_quad: Union[
            Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]
        ],
        default: bool = False,
    ) -> Tuple[Node, Node, Node, Optional[Graph]]:
        ...
    # Overload: None yields an all-wildcard pattern.
    @overload
    def _spoc(
        self,
        triple_or_quad: None,
        default: bool = False,
    ) -> Tuple[None, None, None, Optional[Graph]]:
        ...
    def _spoc(
        self,
        triple_or_quad: Optional[
            Union[Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]]
        ],
        default: bool = False,
    ) -> Tuple[Optional[Node], Optional[Node], Optional[Node], Optional[Graph]]:
        """
        helper method for having methods that support
        either triples or quads
        """
        # NOTE(review): tuples of length other than 3 or 4 would leave
        # s/p/o/c unbound and raise UnboundLocalError below.
        if triple_or_quad is None:
            return (None, None, None, self.default_context if default else None)
        if len(triple_or_quad) == 3:
            c = self.default_context if default else None
            (s, p, o) = triple_or_quad  # type: ignore[misc]
        elif len(triple_or_quad) == 4:
            (s, p, o, c) = triple_or_quad  # type: ignore[misc]
            c = self._graph(c)
        return s, p, o, c
    def __contains__(self, triple_or_quad):
        """Support for 'triple/quad in graph' syntax"""
        s, p, o, c = self._spoc(triple_or_quad)
        # Membership is true as soon as one matching triple exists.
        for t in self.triples((s, p, o), context=c):
            return True
        return False
    def add(
        self,
        triple_or_quad: Union[
            Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]
        ],
    ) -> "ConjunctiveGraph":
        """
        Add a triple or quad to the store.
        if a triple is given it is added to the default context
        """
        s, p, o, c = self._spoc(triple_or_quad, default=True)
        _assertnode(s, p, o)
        self.store.add((s, p, o), context=c, quoted=False)
        return self
    # Overload: a non-None context value always resolves to a Graph.
    @overload
    def _graph(self, c: Union[Graph, Node, str]) -> Graph:
        ...
    # Overload: None stays None (wildcard context).
    @overload
    def _graph(self, c: None) -> None:
        ...
    def _graph(self, c: Optional[Union[Graph, Node, str]]) -> Optional[Graph]:
        """Coerce ``c`` (Graph, identifier, or None) into a Graph or None."""
        if c is None:
            return None
        if not isinstance(c, Graph):
            return self.get_context(c)
        else:
            return c
    def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]):
        """Add a sequence of triples with context"""
        self.store.addN(
            (s, p, o, self._graph(c)) for s, p, o, c in quads if _assertnode(s, p, o)
        )
        return self
    def remove(self, triple_or_quad):
        """
        Removes a triple or quads
        if a triple is given it is removed from all contexts
        a quad is removed from the given context only
        """
        s, p, o, c = self._spoc(triple_or_quad)
        self.store.remove((s, p, o), context=c)
        return self
    def triples(self, triple_or_quad, context=None):
        """
        Iterate over all the triples in the entire conjunctive graph
        For legacy reasons, this can take the context to query either
        as a fourth element of the quad, or as the explicit context
        keyword parameter. The kw param takes precedence.
        """
        s, p, o, c = self._spoc(triple_or_quad)
        context = self._graph(context or c)
        # In union mode the default context means "all graphs" (None);
        # otherwise an unspecified context means the default graph.
        if self.default_union:
            if context == self.default_context:
                context = None
        else:
            if context is None:
                context = self.default_context
        if isinstance(p, Path):
            # Property paths are evaluated against a graph, not the store.
            if context is None:
                context = self
            for s, o in p.eval(context, s, o):
                yield s, p, o
        else:
            for (s, p, o), cg in self.store.triples((s, p, o), context=context):
                yield s, p, o
    def quads(self, triple_or_quad=None):
        """Iterate over all the quads in the entire conjunctive graph"""
        s, p, o, c = self._spoc(triple_or_quad)
        for (s, p, o), cg in self.store.triples((s, p, o), context=c):
            for ctx in cg:
                yield s, p, o, ctx
    def triples_choices(self, triple, context=None):
        """Iterate over all the triples in the entire conjunctive graph"""
        s, p, o = triple
        if context is None:
            if not self.default_union:
                context = self.default_context
        else:
            context = self._graph(context)
        for (s1, p1, o1), cg in self.store.triples_choices((s, p, o), context=context):
            yield s1, p1, o1
    def __len__(self):
        """Number of triples in the entire conjunctive graph"""
        return self.store.__len__()
    def contexts(self, triple=None):
        """Iterate over all contexts in the graph
        If triple is specified, iterate over all contexts the triple is in.
        """
        for context in self.store.contexts(triple):
            if isinstance(context, Graph):
                # TODO: One of these should never happen and probably
                # should raise an exception rather than smoothing over
                # the weirdness - see #225
                yield context
            else:
                yield self.get_context(context)
    def get_context(
        self,
        identifier: Optional[Union[Node, str]],
        quoted: bool = False,
        base: Optional[str] = None,
    ) -> Graph:
        """Return a context graph for the given identifier
        identifier must be a URIRef or BNode.
        """
        # TODO: FIXME - why is ConjunctiveGraph passed as namespace_manager?
        return Graph(
            store=self.store, identifier=identifier, namespace_manager=self, base=base  # type: ignore[arg-type]
        )
    def remove_context(self, context):
        """Removes the given context from the graph"""
        self.store.remove((None, None, None), context)
    def context_id(self, uri, context_id=None):
        """URI#context"""
        # Strip any existing fragment before appending the context fragment.
        uri = uri.split("#", 1)[0]
        if context_id is None:
            context_id = "#context"
        return URIRef(context_id, base=uri)
    def parse(
        self,
        source: Optional[
            Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath]
        ] = None,
        publicID: Optional[str] = None,
        format: Optional[str] = None,
        location: Optional[str] = None,
        file: Optional[Union[BinaryIO, TextIO]] = None,
        data: Optional[Union[str, bytes]] = None,
        **args,
    ):
        """
        Parse source adding the resulting triples to its own context
        (sub graph of this graph).
        See :meth:`rdflib.graph.Graph.parse` for documentation on arguments.
        :Returns:
        The graph into which the source was parsed. In the case of n3
        it returns the root context.
        """
        source = create_input_source(
            source=source,
            publicID=publicID,
            location=location,
            file=file,
            data=data,
            format=format,
        )
        # NOTE on type hint: `xml.sax.xmlreader.InputSource.getPublicId` has no
        # type annotations but given that systemId should be a string, and
        # given that there is no specific mention of type for publicId, it
        # seems reasonable to assume it should also be a string. Furthermore,
        # create_input_source will ensure that publicId is not None, though it
        # would be good if this guaruntee was made more explicit i.e. by type
        # hint on InputSource (TODO/FIXME).
        g_id: str = publicID and publicID or source.getPublicId()
        if not isinstance(g_id, Node):
            g_id = URIRef(g_id)
        # Parse into the context named by the public ID / document location.
        context = Graph(store=self.store, identifier=g_id)
        context.remove((None, None, None))  # hmm ?
        context.parse(source, publicID=publicID, format=format, **args)
        # TODO: FIXME: This should not return context, but self.
        return context
    def __reduce__(self):
        """Pickle support: rebuild from store and identifier."""
        return ConjunctiveGraph, (self.store, self.identifier)
# Identifier used for the default graph of a Dataset.
DATASET_DEFAULT_GRAPH_ID = URIRef("urn:x-rdflib:default")
class Dataset(ConjunctiveGraph):
    __doc__ = """
    RDF 1.1 Dataset. Small extension to the Conjunctive Graph:
    - the primary term is graphs in the datasets and not contexts with quads,
    so there is a separate method to set/retrieve a graph in a dataset and
    operate with graphs
    - graphs cannot be identified with blank nodes
    - added a method to directly add a single quad
    Examples of usage:
    >>> # Create a new Dataset
    >>> ds = Dataset()
    >>> # simple triples goes to default graph
    >>> ds.add((URIRef("http://example.org/a"),
    ...    URIRef("http://www.example.org/b"),
    ...    Literal("foo")))  # doctest: +ELLIPSIS
    <Graph identifier=... (<class 'rdflib.graph.Dataset'>)>
    >>>
    >>> # Create a graph in the dataset, if the graph name has already been
    >>> # used, the corresponding graph will be returned
    >>> # (ie, the Dataset keeps track of the constituent graphs)
    >>> g = ds.graph(URIRef("http://www.example.com/gr"))
    >>>
    >>> # add triples to the new graph as usual
    >>> g.add(
    ...     (URIRef("http://example.org/x"),
    ...     URIRef("http://example.org/y"),
    ...     Literal("bar")) ) # doctest: +ELLIPSIS
    <Graph identifier=... (<class 'rdflib.graph.Graph'>)>
    >>> # alternatively: add a quad to the dataset -> goes to the graph
    >>> ds.add(
    ...     (URIRef("http://example.org/x"),
    ...     URIRef("http://example.org/z"),
    ...     Literal("foo-bar"),g) ) # doctest: +ELLIPSIS
    <Graph identifier=... (<class 'rdflib.graph.Dataset'>)>
    >>>
    >>> # querying triples return them all regardless of the graph
    >>> for t in ds.triples((None,None,None)):  # doctest: +SKIP
    ...     print(t)  # doctest: +NORMALIZE_WHITESPACE
    (rdflib.term.URIRef("http://example.org/a"),
     rdflib.term.URIRef("http://www.example.org/b"),
     rdflib.term.Literal("foo"))
    (rdflib.term.URIRef("http://example.org/x"),
     rdflib.term.URIRef("http://example.org/z"),
     rdflib.term.Literal("foo-bar"))
    (rdflib.term.URIRef("http://example.org/x"),
     rdflib.term.URIRef("http://example.org/y"),
     rdflib.term.Literal("bar"))
    >>>
    >>> # querying quads() return quads; the fourth argument can be unrestricted
    >>> # (None) or restricted to a graph
    >>> for q in ds.quads((None, None, None, None)):  # doctest: +SKIP
    ...     print(q)  # doctest: +NORMALIZE_WHITESPACE
    (rdflib.term.URIRef("http://example.org/a"),
     rdflib.term.URIRef("http://www.example.org/b"),
     rdflib.term.Literal("foo"),
     None)
    (rdflib.term.URIRef("http://example.org/x"),
     rdflib.term.URIRef("http://example.org/y"),
     rdflib.term.Literal("bar"),
     rdflib.term.URIRef("http://www.example.com/gr"))
    (rdflib.term.URIRef("http://example.org/x"),
     rdflib.term.URIRef("http://example.org/z"),
     rdflib.term.Literal("foo-bar"),
     rdflib.term.URIRef("http://www.example.com/gr"))
    >>>
    >>> # unrestricted looping is equivalent to iterating over the entire Dataset
    >>> for q in ds:  # doctest: +SKIP
    ...     print(q)  # doctest: +NORMALIZE_WHITESPACE
    (rdflib.term.URIRef("http://example.org/a"),
     rdflib.term.URIRef("http://www.example.org/b"),
     rdflib.term.Literal("foo"),
     None)
    (rdflib.term.URIRef("http://example.org/x"),
     rdflib.term.URIRef("http://example.org/y"),
     rdflib.term.Literal("bar"),
     rdflib.term.URIRef("http://www.example.com/gr"))
    (rdflib.term.URIRef("http://example.org/x"),
     rdflib.term.URIRef("http://example.org/z"),
     rdflib.term.Literal("foo-bar"),
     rdflib.term.URIRef("http://www.example.com/gr"))
    >>>
    >>> # resticting iteration to a graph:
    >>> for q in ds.quads((None, None, None, g)):  # doctest: +SKIP
    ...     print(q)  # doctest: +NORMALIZE_WHITESPACE
    (rdflib.term.URIRef("http://example.org/x"),
     rdflib.term.URIRef("http://example.org/y"),
     rdflib.term.Literal("bar"),
     rdflib.term.URIRef("http://www.example.com/gr"))
    (rdflib.term.URIRef("http://example.org/x"),
     rdflib.term.URIRef("http://example.org/z"),
     rdflib.term.Literal("foo-bar"),
     rdflib.term.URIRef("http://www.example.com/gr"))
    >>> # Note that in the call above -
    >>> # ds.quads((None,None,None,"http://www.example.com/gr"))
    >>> # would have been accepted, too
    >>>
    >>> # graph names in the dataset can be queried:
    >>> for c in ds.graphs():  # doctest: +SKIP
    ...     print(c)  # doctest:
    DEFAULT
    http://www.example.com/gr
    >>> # A graph can be created without specifying a name; a skolemized genid
    >>> # is created on the fly
    >>> h = ds.graph()
    >>> for c in ds.graphs():  # doctest: +SKIP
    ...     print(c)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
    DEFAULT
    http://rdlib.net/.well-known/genid/rdflib/N...
    http://www.example.com/gr
    >>> # Note that the Dataset.graphs() call returns names of empty graphs,
    >>> # too. This can be restricted:
    >>> for c in ds.graphs(empty=False):  # doctest: +SKIP
    ...     print(c)  # doctest: +NORMALIZE_WHITESPACE
    DEFAULT
    http://www.example.com/gr
    >>>
    >>> # a graph can also be removed from a dataset via ds.remove_graph(g)
    .. versionadded:: 4.0
    """
    def __init__(self, store="default", default_union=False, default_graph_base=None):
        """Create a Dataset; the default graph is named DATASET_DEFAULT_GRAPH_ID.

        ``default_union`` controls whether queries against the default graph
        see the union of all graphs (True) or only the default graph (False).
        """
        super(Dataset, self).__init__(store=store, identifier=None)
        # Dataset relies on the store knowing about (possibly empty) graphs.
        if not self.store.graph_aware:
            raise Exception("DataSet must be backed by a graph-aware store!")
        self.default_context = Graph(
            store=self.store,
            identifier=DATASET_DEFAULT_GRAPH_ID,
            base=default_graph_base,
        )
        self.default_union = default_union
    def __str__(self):
        """Readable description naming the backing store class."""
        pattern = (
            "[a rdflib:Dataset;rdflib:storage " "[a rdflib:Store;rdfs:label '%s']]"
        )
        return pattern % self.store.__class__.__name__
    def __reduce__(self):
        """Pickle support: rebuild from store and default_union flag."""
        return (type(self), (self.store, self.default_union))
    def __getstate__(self):
        return self.store, self.identifier, self.default_context, self.default_union
    def __setstate__(self, state):
        self.store, self.identifier, self.default_context, self.default_union = state
    def graph(self, identifier=None, base=None):
        """Return (and register) the graph named ``identifier``.

        When ``identifier`` is None a skolemized genid is generated for it.
        """
        if identifier is None:
            from rdflib.term import rdflib_skolem_genid
            self.bind(
                "genid", "http://rdflib.net" + rdflib_skolem_genid, override=False
            )
            identifier = BNode().skolemize()
        g = self._graph(identifier)
        g.base = base
        # Register with the store so the graph is tracked even while empty.
        self.store.add_graph(g)
        return g
    def parse(
        self,
        source=None,
        publicID=None,
        format=None,
        location=None,
        file=None,
        data=None,
        **args,
    ):
        """Parse into a named graph of this dataset and register that graph.

        See :meth:`rdflib.graph.Graph.parse` for the argument semantics.
        """
        c = ConjunctiveGraph.parse(
            self, source, publicID, format, location, file, data, **args
        )
        self.graph(c)
        return c
    def add_graph(self, g):
        """alias of graph for consistency"""
        return self.graph(g)
    def remove_graph(self, g):
        """Remove graph ``g`` (a Graph or an identifier) from the dataset."""
        if not isinstance(g, Graph):
            g = self.get_context(g)
        self.store.remove_graph(g)
        if g is None or g == self.default_context:
            # default graph cannot be removed
            # only triples deleted, so add it back in
            self.store.add_graph(self.default_context)
        return self
    def contexts(self, triple=None):
        """Iterate over all graphs, always including the default graph."""
        default = False
        for c in super(Dataset, self).contexts(triple):
            default |= c.identifier == DATASET_DEFAULT_GRAPH_ID
            yield c
        if not default:
            yield self.graph(DATASET_DEFAULT_GRAPH_ID)
    graphs = contexts
    def quads(self, quad):
        """Iterate over quads, yielding None as context for default-graph quads."""
        for s, p, o, c in super(Dataset, self).quads(quad):
            # Compare graph identifiers. The previous code compared the
            # URIRef identifier ``c.identifier`` against the Graph object
            # ``self.default_context``, which never matched, so quads from
            # the default graph were not normalized to a None context.
            if c.identifier == self.default_context.identifier:
                yield s, p, o, None
            else:
                yield s, p, o, c.identifier
    def __iter__(
        self,
    ) -> Generator[Tuple[Node, URIRef, Node, Optional[IdentifiedNode]], None, None]:
        """Iterates over all quads in the store"""
        return self.quads((None, None, None, None))
class QuotedGraph(Graph):
    """
    Quoted Graphs are intended to implement Notation 3 formulae. They are
    associated with a required identifier that the N3 parser *must* provide
    in order to maintain consistent formulae identification for scenarios
    such as implication and other such processing.
    """
    def __init__(self, store, identifier):
        super(QuotedGraph, self).__init__(store, identifier)
    def add(self, triple: Tuple[Node, Node, Node]):
        """Add a triple with self as context"""
        s, p, o = triple
        assert isinstance(s, Node), "Subject %s must be an rdflib term" % (s,)
        assert isinstance(p, Node), "Predicate %s must be an rdflib term" % (p,)
        assert isinstance(o, Node), "Object %s must be an rdflib term" % (o,)
        # quoted=True marks the triple as part of an N3 formula rather than
        # an asserted statement.
        self.store.add((s, p, o), self, quoted=True)
        return self
    def addN(self, quads: Tuple[Node, Node, Node, Any]) -> "QuotedGraph":  # type: ignore[override]
        """Add a sequence of triple with context"""
        # Only quads whose context is this very formula are accepted.
        self.store.addN(
            (s, p, o, c)
            for s, p, o, c in quads
            if isinstance(c, QuotedGraph)
            and c.identifier is self.identifier
            and _assertnode(s, p, o)
        )
        return self
    def n3(self):
        """Return an n3 identifier for the Graph"""
        return "{%s}" % self.identifier.n3()
    def __str__(self):
        """Readable description with identifier and backing store class."""
        identifier = self.identifier.n3()
        label = self.store.__class__.__name__
        pattern = (
            "{this rdflib.identifier %s;rdflib:storage "
            "[a rdflib:Store;rdfs:label '%s']}"
        )
        return pattern % (identifier, label)
    def __reduce__(self):
        """Pickle support: rebuild from store and identifier."""
        return QuotedGraph, (self.store, self.identifier)
# Make sure QuotedGraph is ordered correctly
# with respect to other Terms.
# This must be done here, as QuotedGraph cannot be
# circularly imported in term.py.
rdflib.term._ORDERING[QuotedGraph] = 11
class Seq(object):
    """Wrapper around an RDF Seq resource

    It implements a container type in Python with the order of the items
    returned corresponding to the Seq content. It is based on the natural
    ordering of the predicate names _1, _2, _3, etc, which is the
    'implementation' of a sequence in RDF terms.
    """

    def __init__(self, graph, subject):
        """Parameters:

        - graph:
            the graph containing the Seq
        - subject:
            the subject of a Seq. Note that the init does not
            check whether this is a Seq, this is done in whoever
            creates this instance!
        """
        _list = self._list = list()
        LI_INDEX = URIRef(str(RDF) + "_")
        for (p, o) in graph.predicate_objects(subject):
            if p.startswith(LI_INDEX):  # != RDF.Seq: #
                # Predicate is rdf:_N — extract N as the integer sort key.
                i = int(p.replace(LI_INDEX, ""))
                _list.append((i, o))
        # here is the trick: the predicates are _1, _2, _3, etc. Ie,
        # by sorting the keys (by integer) we have what we want!
        _list.sort()

    def toPython(self):
        # A Seq converts to itself; iteration yields the members in order.
        return self

    def __iter__(self):
        """Generator over the items in the Seq"""
        for _, item in self._list:
            yield item

    def __len__(self):
        """Length of the Seq"""
        return len(self._list)

    def __getitem__(self, index):
        """Item given by index from the Seq"""
        # NOTE(review): a slice argument would make this unpack fail —
        # presumably only integer indexing is supported; confirm.
        index, item = self._list.__getitem__(index)
        return item
class ModificationException(Exception):
    """Raised when a mutating or transactional call is made on a
    read-only graph aggregate."""

    _MESSAGE = (
        "Modifications and transactional operations not allowed on "
        "ReadOnlyGraphAggregate instances"
    )

    def __init__(self):
        pass

    def __str__(self):
        return self._MESSAGE
class UnSupportedAggregateOperation(Exception):
    """Raised for operations that have no meaning on a graph aggregate."""

    def __init__(self):
        pass

    def __str__(self):
        return (
            "This operation is not supported by "
            "ReadOnlyGraphAggregate instances"
        )
class ReadOnlyGraphAggregate(ConjunctiveGraph):
    """Utility class for treating a set of graphs as a single graph

    Only read operations are supported (hence the name). Essentially a
    ConjunctiveGraph over an explicit subset of the entire store.
    """

    def __init__(self, graphs, store="default"):
        if store is not None:
            super(ReadOnlyGraphAggregate, self).__init__(store)
            Graph.__init__(self, store)
            self.__namespace_manager = None

        assert (
            isinstance(graphs, list)
            and graphs
            and [g for g in graphs if isinstance(g, Graph)]
        ), "graphs argument must be a list of Graphs!!"
        self.graphs = graphs

    def __repr__(self):
        return "<ReadOnlyGraphAggregate: %s graphs>" % len(self.graphs)

    def destroy(self, configuration):
        raise ModificationException()

    # Transactional interfaces (optional)
    def commit(self):
        raise ModificationException()

    def rollback(self):
        raise ModificationException()

    def open(self, configuration, create=False):
        # TODO: is there a use case for this method?
        for graph in self.graphs:
            # Fixed: previously `self` was passed as the first argument,
            # which shifted `configuration` into Graph.open's `create`
            # parameter slot.
            graph.open(configuration, create)

    def close(self):
        for graph in self.graphs:
            graph.close()

    def add(self, triple):
        raise ModificationException()

    def addN(self, quads):
        raise ModificationException()

    def remove(self, triple):
        raise ModificationException()

    def triples(self, triple):
        """Yield triples matching the pattern from every member graph.

        Path predicates are evaluated against the aggregate itself.
        """
        s, p, o = triple
        for graph in self.graphs:
            if isinstance(p, Path):
                for s, o in p.eval(self, s, o):
                    yield s, p, o
            else:
                for s1, p1, o1 in graph.triples((s, p, o)):
                    yield s1, p1, o1

    def __contains__(self, triple_or_quad):
        # Accepts either a triple or a quad; a quad's 4th element
        # restricts the search to the matching member graph.
        context = None
        if len(triple_or_quad) == 4:
            context = triple_or_quad[3]
        for graph in self.graphs:
            if context is None or graph.identifier == context.identifier:
                if triple_or_quad[:3] in graph:
                    return True
        return False

    def quads(self, triple_or_quad):
        """Iterate over all the quads in the entire aggregate graph"""
        c = None
        if len(triple_or_quad) == 4:
            s, p, o, c = triple_or_quad
        else:
            s, p, o = triple_or_quad

        if c is not None:
            for graph in [g for g in self.graphs if g == c]:
                for s1, p1, o1 in graph.triples((s, p, o)):
                    yield s1, p1, o1, graph
        else:
            for graph in self.graphs:
                for s1, p1, o1 in graph.triples((s, p, o)):
                    yield s1, p1, o1, graph

    def __len__(self):
        return sum(len(g) for g in self.graphs)

    def __hash__(self):
        raise UnSupportedAggregateOperation()

    def __cmp__(self, other):
        if other is None:
            return -1
        elif isinstance(other, Graph):
            return -1
        elif isinstance(other, ReadOnlyGraphAggregate):
            return (self.graphs > other.graphs) - (self.graphs < other.graphs)
        else:
            return -1

    def __iadd__(self, other):
        raise ModificationException()

    def __isub__(self, other):
        raise ModificationException()

    # Conv. methods

    def triples_choices(self, triple, context=None):
        subject, predicate, object_ = triple
        for graph in self.graphs:
            choices = graph.triples_choices((subject, predicate, object_))
            for (s, p, o) in choices:
                yield s, p, o

    def qname(self, uri):
        if hasattr(self, "namespace_manager") and self.namespace_manager:
            return self.namespace_manager.qname(uri)
        raise UnSupportedAggregateOperation()

    def compute_qname(self, uri, generate=True):
        if hasattr(self, "namespace_manager") and self.namespace_manager:
            return self.namespace_manager.compute_qname(uri, generate)
        raise UnSupportedAggregateOperation()

    def bind(self, prefix, namespace, override=True):
        raise UnSupportedAggregateOperation()

    def namespaces(self):
        if hasattr(self, "namespace_manager"):
            for prefix, namespace in self.namespace_manager.namespaces():
                yield prefix, namespace
        else:
            for graph in self.graphs:
                for prefix, namespace in graph.namespaces():
                    yield prefix, namespace

    def absolutize(self, uri, defrag=1):
        raise UnSupportedAggregateOperation()

    def parse(self, source, publicID=None, format=None, **args):
        raise ModificationException()

    def n3(self):
        raise UnSupportedAggregateOperation()

    def __reduce__(self):
        raise UnSupportedAggregateOperation()
def _assertnode(*terms):
    """Assert that every argument is an rdflib ``Node``; always returns True
    so it can be used inside generator-expression filters."""
    for term in terms:
        assert isinstance(term, Node), "Term %s must be an rdflib term" % (term,)
    return True
class BatchAddGraph(object):
    """
    Wrapper around a graph that turns batches of calls to Graph's add
    (and optionally, addN) into batched calls to addN.

    :Parameters:

      - graph: The graph to wrap
      - batch_size: The maximum number of triples to buffer before passing to
        Graph's addN
      - batch_addn: If True, then even calls to `addN` will be batched according to
        batch_size

    graph: The wrapped graph
    count: The number of triples buffered since initialization or the last call to reset
    batch: The current buffer of triples
    """

    def __init__(self, graph: Graph, batch_size: int = 1000, batch_addn: bool = False):
        # NOTE(review): a batch_size of 1 is also rejected even though the
        # message says "positive" — presumably a one-element batch defeats
        # the purpose of batching; confirm the intended minimum.
        if not batch_size or batch_size < 2:
            raise ValueError("batch_size must be a positive number")
        self.graph = graph
        # Pre-built 1-tuple appended to triples to form quads cheaply.
        self.__graph_tuple = (graph,)
        self.__batch_size = batch_size
        self.__batch_addn = batch_addn
        self.reset()

    def reset(self):
        """
        Manually clear the buffered triples and reset the count to zero
        """
        self.batch = []
        self.count = 0
        return self

    def add(
        self,
        triple_or_quad: Union[Tuple[Node, Node, Node], Tuple[Node, Node, Node, Any]],
    ) -> "BatchAddGraph":
        """
        Add a triple to the buffer

        :param triple: The triple to add
        """
        # Flush *before* appending so the buffer never exceeds batch_size.
        if len(self.batch) >= self.__batch_size:
            self.graph.addN(self.batch)
            self.batch = []
        self.count += 1
        if len(triple_or_quad) == 3:
            # Promote the triple to a quad in the wrapped graph's context.
            self.batch.append(triple_or_quad + self.__graph_tuple)
        else:
            self.batch.append(triple_or_quad)
        return self

    def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]):
        if self.__batch_addn:
            # Route each quad through add() so it obeys batch_size.
            for q in quads:
                self.add(q)
        else:
            # Unbatched: pass straight through to the wrapped graph.
            self.graph.addN(quads)
        return self

    def __enter__(self):
        self.reset()
        return self

    def __exit__(self, *exc):
        # Flush the remaining buffer only on a clean exit; on exception the
        # partial batch is discarded.
        if exc[0] is None:
            self.graph.addN(self.batch)
def test():
    """Run this module's doctests."""
    import doctest

    doctest.testmod()


if __name__ == "__main__":
    test()
False if it cannot continue and there are still unvisited nodes left.
"""
all_nodes = list(self.all_nodes()) |
<|file_name|>fetcher.go<|end_file_name|><|fim▁begin|>/*
Copyright 2011 Google Inc.
Modifications Copyright (c) 2014 Simon Zimmermann
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at<|fim▁hole|>distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package blob
import (
"io"
)
// Fetcher is the minimal interface for retrieving a blob from storage.
// The full storage interface is blobserver.Storage.
type Fetcher interface {
	// Fetch returns a blob. If the blob is not found then
	// os.ErrNotExist should be returned for the error (not a wrapped
	// error with an ErrNotExist inside).
	//
	// The caller should close blob when done reading it.
	Fetch(Ref) (blob io.ReadCloser, size uint32, err error)
}
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software |
<|file_name|>gtest-printers.cc<|end_file_name|><|fim▁begin|>// Copyright 2007, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Google Test - The Google C++ Testing and Mocking Framework
//
// This file implements a universal value printer that can print a
// value of any type T:
//
// void ::testing::internal::UniversalPrinter<T>::Print(value, ostream_ptr);
//
// It uses the << operator when possible, and prints the bytes in the
// object otherwise. A user can override its behavior for a class
// type Foo by defining either operator<<(::std::ostream&, const Foo&)
// or void PrintTo(const Foo&, ::std::ostream*) in the namespace that
// defines Foo.
#include "gtest/gtest-printers.h"
#include <stdio.h>
#include <cctype>
#include <cwchar>
#include <ostream> // NOLINT
#include <string>
#include "gtest/internal/gtest-port.h"
#include "src/gtest-internal-inl.h"
namespace testing {
namespace {
using ::std::ostream;
// Prints a segment of bytes in the given object, as uppercase hex pairs
// separated alternately by ' ' and '-' for readability.
GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
void PrintByteSegmentInObjectTo(const unsigned char* obj_bytes, size_t start,
                                size_t count, ostream* os) {
  // Scratch buffer for one formatted byte ("%02X" + NUL fits easily).
  char text[5] = "";
  for (size_t i = 0; i != count; i++) {
    const size_t j = start + i;
    if (i != 0) {
      // Organizes the bytes into groups of 2 for easy parsing by
      // human.
      if ((j % 2) == 0)
        *os << ' ';
      else
        *os << '-';
    }
    GTEST_SNPRINTF_(text, sizeof(text), "%02X", obj_bytes[j]);
    *os << text;
  }
}
// Prints the bytes in the given value to the given ostream.
void PrintBytesInObjectToImpl(const unsigned char* obj_bytes, size_t count,
                              ostream* os) {
  // Tells the user how big the object is.
  *os << count << "-byte object <";

  // kThreshold/kChunkSize are chosen so that a truncated dump still shows
  // both the beginning and the end of a large object.
  const size_t kThreshold = 132;
  const size_t kChunkSize = 64;

  // If the object size is bigger than kThreshold, we'll have to omit
  // some details by printing only the first and the last kChunkSize
  // bytes.
  if (count < kThreshold) {
    PrintByteSegmentInObjectTo(obj_bytes, 0, count, os);
  } else {
    PrintByteSegmentInObjectTo(obj_bytes, 0, kChunkSize, os);
    *os << " ... ";
    // Rounds up to 2-byte boundary.
    const size_t resume_pos = (count - kChunkSize + 1)/2*2;
    PrintByteSegmentInObjectTo(obj_bytes, resume_pos, count - resume_pos, os);
  }
  *os << ">";
}
} // namespace
namespace internal2 {
// Delegates to PrintBytesInObjectToImpl() to print the bytes in the
// given object. The delegation simplifies the implementation, which
// uses the << operator and thus is easier done outside of the
// ::testing::internal namespace, which contains a << operator that
// sometimes conflicts with the one in STL.
//
// obj_bytes points at the first byte of the object; count is its size.
void PrintBytesInObjectTo(const unsigned char* obj_bytes, size_t count,
                          ostream* os) {
  PrintBytesInObjectToImpl(obj_bytes, count, os);
}
} // namespace internal2
namespace internal {
// Depending on the value of a char (or wchar_t), we print it in one
// of three formats:
//   - as is if it's a printable ASCII (e.g. 'a', '2', ' '),
//   - as a hexadecimal escape sequence (e.g. '\x7F'), or
//   - as a special escape sequence (e.g. '\r', '\n').
enum CharFormat {
  kAsIs,          // printed verbatim
  kHexEscape,     // printed as '\xNN'
  kSpecialEscape  // printed as a C escape such as '\n'
};
// Returns true if c is a printable ASCII character. We test the
// value of c directly instead of calling isprint(), which is buggy on
// Windows Mobile.
// Returns true if c is a printable ASCII character. We test the
// value of c directly instead of calling isprint(), which is buggy on
// Windows Mobile. Printable ASCII spans space (0x20) through tilde (0x7E).
inline bool IsPrintableAscii(wchar_t c) { return c >= 0x20 && c <= 0x7E; }
// Prints a wide or narrow char c as a character literal without the
// quotes, escaping it when necessary; returns how c was formatted.
// The template argument UnsignedChar is the unsigned version of Char,
// which is the type of c.
template <typename UnsignedChar, typename Char>
static CharFormat PrintAsCharLiteralTo(Char c, ostream* os) {
  wchar_t w_c = static_cast<wchar_t>(c);
  switch (w_c) {
    case L'\0':
      *os << "\\0";
      break;
    case L'\'':
      *os << "\\'";
      break;
    case L'\\':
      *os << "\\\\";
      break;
    case L'\a':
      *os << "\\a";
      break;
    case L'\b':
      *os << "\\b";
      break;
    case L'\f':
      *os << "\\f";
      break;
    case L'\n':
      *os << "\\n";
      break;
    case L'\r':
      *os << "\\r";
      break;
    case L'\t':
      *os << "\\t";
      break;
    case L'\v':
      *os << "\\v";
      break;
    default:
      if (IsPrintableAscii(w_c)) {
        *os << static_cast<char>(c);
        return kAsIs;
      } else {
        // Emit "\xNN" in uppercase hex; save and restore the stream's
        // formatting flags since we temporarily change its base and case.
        ostream::fmtflags flags = os->flags();
        *os << "\\x" << std::hex << std::uppercase
            << static_cast<int>(static_cast<UnsignedChar>(c));
        os->flags(flags);
        return kHexEscape;
      }
  }
  // All special-case escapes above fall through to here.
  return kSpecialEscape;
}
// Prints a wchar_t c as if it's part of a string literal, escaping it when
// necessary; returns how c was formatted.
static CharFormat PrintAsStringLiteralTo(wchar_t c, ostream* os) {
  switch (c) {
    case L'\'':
      // Inside a string literal a single quote needs no escaping.
      *os << "'";
      return kAsIs;
    case L'"':
      *os << "\\\"";
      return kSpecialEscape;
    default:
      return PrintAsCharLiteralTo<wchar_t>(c, os);
  }
}
// Prints a char c as if it's part of a string literal, escaping it when
// necessary; returns how c was formatted.
static CharFormat PrintAsStringLiteralTo(char c, ostream* os) {
  // Widen through unsigned char first so bytes >= 0x80 don't sign-extend.
  return PrintAsStringLiteralTo(
      static_cast<wchar_t>(static_cast<unsigned char>(c)), os);
}
// Prints a wide or narrow character c and its code. '\0' is printed
// as "'\\0'", other unprintable characters are also properly escaped
// using the standard C++ escape sequence. The template argument
// UnsignedChar is the unsigned version of Char, which is the type of c.
template <typename UnsignedChar, typename Char>
void PrintCharAndCodeTo(Char c, ostream* os) {
  // First, print c as a literal in the most readable form we can find.
  // Wide characters get an L prefix on the literal.
  *os << ((sizeof(c) > 1) ? "L'" : "'");
  const CharFormat format = PrintAsCharLiteralTo<UnsignedChar>(c, os);
  *os << "'";

  // To aid user debugging, we also print c's code in decimal, unless
  // it's 0 (in which case c was printed as '\\0', making the code
  // obvious).
  if (c == 0)
    return;
  *os << " (" << static_cast<int>(c);

  // For more convenience, we print c's code again in hexadecimal,
  // unless c was already printed in the form '\x##' or the code is in
  // [1, 9].
  if (format == kHexEscape || (1 <= c && c <= 9)) {
    // Do nothing.
  } else {
    *os << ", 0x" << String::FormatHexInt(static_cast<int>(c));
  }
  *os << ")";
}
// Prints an unsigned char both as a character literal and as its code.
void PrintTo(unsigned char c, ::std::ostream* os) {
  PrintCharAndCodeTo<unsigned char>(c, os);
}
// Prints a signed char; the escape form is computed from the unsigned
// representation of the byte.
void PrintTo(signed char c, ::std::ostream* os) {
  PrintCharAndCodeTo<unsigned char>(c, os);
}
// Prints a wchar_t as a symbol if it is printable or as its internal
// code otherwise, and also as its code. L'\0' is printed as "L'\\0'".
void PrintTo(wchar_t wc, ostream* os) {
  PrintCharAndCodeTo<wchar_t>(wc, os);
}
// Prints the given array of characters to the ostream. CharType must be either
// char or wchar_t.
// The array starts at begin, the length is len, it may include '\0' characters
// and may not be NUL-terminated.
// Returns kHexEscape if any character required a hex escape, kAsIs otherwise.
template <typename CharType>
GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
static CharFormat PrintCharsAsStringTo(
    const CharType* begin, size_t len, ostream* os) {
  // Narrow strings are quoted "...", wide strings L"...".
  const char* const kQuoteBegin = sizeof(CharType) == 1 ? "\"" : "L\"";
  *os << kQuoteBegin;
  bool is_previous_hex = false;
  CharFormat print_format = kAsIs;
  for (size_t index = 0; index < len; ++index) {
    const CharType cur = begin[index];
    if (is_previous_hex && IsXDigit(cur)) {
      // Previous character is of '\x..' form and this character can be
      // interpreted as another hexadecimal digit in its number. Break string to
      // disambiguate.
      *os << "\" " << kQuoteBegin;
    }
    is_previous_hex = PrintAsStringLiteralTo(cur, os) == kHexEscape;
    // Remember if any characters required hex escaping.
    if (is_previous_hex) {
      print_format = kHexEscape;
    }
  }
  *os << "\"";
  return print_format;
}
// Prints a (const) char/wchar_t array of 'len' elements, starting at address
// 'begin'. CharType must be either char or wchar_t.
template <typename CharType>
GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
static void UniversalPrintCharArray(
    const CharType* begin, size_t len, ostream* os) {
  // The code
  //   const char kFoo[] = "foo";
  // generates an array of 4, not 3, elements, with the last one being '\0'.
  //
  // Therefore when printing a char array, we don't print the last element if
  // it's '\0', such that the output matches the string literal as it's
  // written in the source code.
  if (len > 0 && begin[len - 1] == '\0') {
    PrintCharsAsStringTo(begin, len - 1, os);
    return;
  }

  // If, however, the last element in the array is not '\0', e.g.
  //   const char kFoo[] = { 'f', 'o', 'o' };
  // we must print the entire array. We also print a message to indicate
  // that the array is not NUL-terminated.
  PrintCharsAsStringTo(begin, len, os);
  *os << " (no terminating NUL)";
}
// Prints a (const) char array of 'len' elements, starting at address 'begin'.
void UniversalPrintArray(const char* begin, size_t len, ostream* os) {
  // Delegates to the shared char/wchar_t implementation.
  UniversalPrintCharArray(begin, len, os);
}
// Prints a (const) wchar_t array of 'len' elements, starting at address
// 'begin'.
void UniversalPrintArray(const wchar_t* begin, size_t len, ostream* os) {
  // Delegates to the shared char/wchar_t implementation.
  UniversalPrintCharArray(begin, len, os);
}
// Prints the given C string to the ostream.
void PrintTo(const char* s, ostream* os) {
  if (s == nullptr) {
    *os << "NULL";
  } else {
    // Print the pointer value first, then the text it points to, so that
    // both identity and content mismatches can be diagnosed.
    *os << ImplicitCast_<const void*>(s) << " pointing to ";
    PrintCharsAsStringTo(s, strlen(s), os);
  }
}
// MSVC compiler can be configured to define wchar_t as a typedef
// of unsigned short. Defining an overload for const wchar_t* in that case
// would cause pointers to unsigned shorts be printed as wide strings,
// possibly accessing more memory than intended and causing invalid
// memory accesses. MSVC defines _NATIVE_WCHAR_T_DEFINED symbol when
// wchar_t is implemented as a native type.
#if !defined(_MSC_VER) || defined(_NATIVE_WCHAR_T_DEFINED)
// Prints the given wide C string to the ostream.
void PrintTo(const wchar_t* s, ostream* os) {
  if (s == nullptr) {
    *os << "NULL";
  } else {
    // Same layout as the narrow overload: pointer, then the pointed-to text.
    *os << ImplicitCast_<const void*>(s) << " pointing to ";
    PrintCharsAsStringTo(s, wcslen(s), os);
  }
}
#endif // wchar_t is native
namespace {
// Returns true if str[0..length) contains any control byte other than the
// whitespace controls TAB, LF and CR.
bool ContainsUnprintableControlCodes(const char* str, size_t length) {
  const unsigned char* bytes = reinterpret_cast<const unsigned char*>(str);
  for (size_t i = 0; i < length; ++i) {
    const unsigned char ch = bytes[i];
    if (!std::iscntrl(ch)) continue;
    // TAB/LF/CR are acceptable; anything else is unprintable.
    if (ch != '\t' && ch != '\n' && ch != '\r') return true;
  }
  return false;
}
bool IsUTF8TrailByte(unsigned char t) { return 0x80 <= t && t<= 0xbf; }
// Returns true if str[0..length) is a well-formed UTF-8 byte sequence:
// shortest-form encodings only, no surrogates (U+D800..U+DFFF), and
// nothing above U+10FFFF.
bool IsValidUTF8(const char* str, size_t length) {
  const unsigned char* bytes = reinterpret_cast<const unsigned char*>(str);
  size_t pos = 0;
  while (pos < length) {
    const unsigned char lead = bytes[pos++];
    // A continuation byte is 10xxxxxx.
    const auto is_trail = [](unsigned char b) { return (b & 0xc0) == 0x80; };
    if (lead <= 0x7f) continue;  // single-byte (ASCII)
    if (lead < 0xc2) return false;  // stray trail byte or overlong 2-byte lead
    if (lead <= 0xdf) {
      // 2-byte sequence: one trail byte required.
      if (pos + 1 > length || !is_trail(bytes[pos])) return false;
      pos += 1;
    } else if (lead <= 0xef) {
      // 3-byte sequence: reject overlong (E0 A0..) and surrogates (ED 80..9F only).
      if (pos + 2 > length || !is_trail(bytes[pos]) ||
          !is_trail(bytes[pos + 1]) ||
          (lead == 0xe0 && bytes[pos] < 0xa0) ||
          (lead == 0xed && bytes[pos] >= 0xa0)) {
        return false;
      }
      pos += 2;
    } else if (lead <= 0xf4) {
      // 4-byte sequence: reject overlong (F0 90..) and > U+10FFFF (F4 ..8F).
      if (pos + 3 > length || !is_trail(bytes[pos]) ||
          !is_trail(bytes[pos + 1]) || !is_trail(bytes[pos + 2]) ||
          (lead == 0xf0 && bytes[pos] < 0x90) ||
          (lead == 0xf4 && bytes[pos] >= 0x90)) {
        return false;
      }
      pos += 3;
    } else {
      return false;  // 0xf5..0xff are never valid leads
    }
  }
  return true;
}
void ConditionalPrintAsText(const char* str, size_t length, ostream* os) {
  // Only echo the raw bytes as text when they are free of unprintable
  // control codes AND form valid UTF-8; otherwise only the escaped form
  // produced by the caller is shown.
  if (!ContainsUnprintableControlCodes(str, length) &&
      IsValidUTF8(str, length)) {
    *os << "\n As Text: \"" << str << "\"";
  }
}
} // anonymous namespace
void PrintStringTo(const ::std::string& s, ostream* os) {
  if (PrintCharsAsStringTo(s.data(), s.size(), os) == kHexEscape) {
    // Hex-escaped output is hard to read for non-ASCII text, so also print
    // the raw UTF-8 form when the print_utf8 flag is enabled.
    if (GTEST_FLAG(print_utf8)) {
      ConditionalPrintAsText(s.data(), s.size(), os);
    }
  }
}
<|fim▁hole|>#if GTEST_HAS_STD_WSTRING
void PrintWideStringTo(const ::std::wstring& s, ostream* os) {
PrintCharsAsStringTo(s.data(), s.size(), os);
}
#endif // GTEST_HAS_STD_WSTRING
} // namespace internal
} // namespace testing<|fim▁end|> | |
<|file_name|>TZlibTransport.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
'''
TZlibTransport provides a compressed transport and transport factory
class, using the python standard library zlib module to implement
data compression.
'''
from __future__ import division
import zlib
from cStringIO import StringIO
from TTransport import TTransportBase, CReadableTransport
class TZlibTransportFactory(object):
'''
Factory transport that builds zlib compressed transports.
This factory caches the last single client/transport that it was passed
and returns the same TZlibTransport object that was created.
This caching means the TServer class will get the _same_ transport
object for both input and output transports from this factory.
(For non-threaded scenarios only, since the cache only holds one object)
<|fim▁hole|> and makes the statistics from getCompSavings() and getCompRatio()
easier to understand.
'''
# class scoped cache of last transport given and zlibtransport returned
_last_trans = None
_last_z = None
def getTransport(self, trans, compresslevel=9):
'''Wrap a transport , trans, with the TZlibTransport
compressed transport class, returning a new
transport to the caller.
@param compresslevel: The zlib compression level, ranging
from 0 (no compression) to 9 (best compression). Defaults to 9.
@type compresslevel: int
This method returns a TZlibTransport which wraps the
passed C{trans} TTransport derived instance.
'''
if trans == self._last_trans:
return self._last_z
ztrans = TZlibTransport(trans, compresslevel)
self._last_trans = trans
self._last_z = ztrans
return ztrans
class TZlibTransport(TTransportBase, CReadableTransport):
    '''
    Class that wraps a transport with zlib, compressing writes
    and decompressing reads, using the python standard
    library zlib module.
    '''

    # Read buffer size for the python fastbinary C extension,
    # the TBinaryProtocolAccelerated class.
    DEFAULT_BUFFSIZE = 4096

    def __init__(self, trans, compresslevel=9):
        '''
        Create a new TZlibTransport, wrapping C{trans}, another
        TTransport derived object.

        @param trans: A thrift transport object, i.e. a TSocket() object.
        @type trans: TTransport
        @param compresslevel: The zlib compression level, ranging
        from 0 (no compression) to 9 (best compression). Default is 9.
        @type compresslevel: int
        '''
        self.__trans = trans
        self.compresslevel = compresslevel
        self.__rbuf = StringIO()
        self.__wbuf = StringIO()
        self._init_zlib()
        self._init_stats()

    def _reinit_buffers(self):
        '''
        Internal method to initialize/reset the internal StringIO objects
        for read and write buffers.
        '''
        self.__rbuf = StringIO()
        self.__wbuf = StringIO()

    def _init_stats(self):
        '''
        Internal method to reset the internal statistics counters
        for compression ratios and bandwidth savings.
        '''
        self.bytes_in = 0
        self.bytes_out = 0
        self.bytes_in_comp = 0
        self.bytes_out_comp = 0

    def _init_zlib(self):
        '''
        Internal method for setting up the zlib compression and
        decompression objects.
        '''
        self._zcomp_read = zlib.decompressobj()
        self._zcomp_write = zlib.compressobj(self.compresslevel)

    def getCompRatio(self):
        '''
        Get the current measured compression ratios (in,out) from
        this transport.

        Returns a tuple of:
        (inbound_compression_ratio, outbound_compression_ratio)

        The compression ratios are computed as:
            compressed / uncompressed

        E.g., data that compresses by 10x will have a ratio of: 0.10
        and data that compresses to half of its original size will
        have a ratio of 0.5

        None is returned if no bytes have yet been processed in
        a particular direction.
        '''
        r_percent, w_percent = (None, None)
        if self.bytes_in > 0:
            r_percent = self.bytes_in_comp / self.bytes_in
        if self.bytes_out > 0:
            w_percent = self.bytes_out_comp / self.bytes_out
        return (r_percent, w_percent)

    def getCompSavings(self):
        '''
        Get the current count of saved bytes due to data
        compression.

        Returns a tuple of:
        (inbound_saved_bytes, outbound_saved_bytes)

        Note: if compression is actually expanding your
        data (only likely with very tiny thrift objects), then
        the values returned will be negative.
        '''
        r_saved = self.bytes_in - self.bytes_in_comp
        w_saved = self.bytes_out - self.bytes_out_comp
        return (r_saved, w_saved)

    def isOpen(self):
        '''Return the underlying transport's open status'''
        return self.__trans.isOpen()

    def open(self):
        """Open the underlying transport"""
        self._init_stats()
        return self.__trans.open()

    def listen(self):
        '''Invoke the underlying transport's listen() method'''
        self.__trans.listen()

    def accept(self):
        '''Accept connections on the underlying transport'''
        return self.__trans.accept()

    def close(self):
        '''Close the underlying transport.'''
        # Drop buffered data and reset the codec state so this object can
        # be reused after a reconnect.
        self._reinit_buffers()
        self._init_zlib()
        return self.__trans.close()

    def read(self, sz):
        '''
        Read up to sz bytes from the decompressed bytes buffer, and
        read from the underlying transport if the decompression
        buffer is empty.
        '''
        ret = self.__rbuf.read(sz)
        if len(ret) > 0:
            return ret
        # keep reading from transport until something comes back
        while True:
            if self.readComp(sz):
                break
        ret = self.__rbuf.read(sz)
        return ret

    def readComp(self, sz):
        '''
        Read compressed data from the underlying transport, then
        decompress it and append it to the internal StringIO read buffer.
        Returns True once the read buffer is non-empty.
        '''
        zbuf = self.__trans.read(sz)
        # Prepend any compressed bytes zlib held back on the previous call.
        zbuf = self._zcomp_read.unconsumed_tail + zbuf
        buf = self._zcomp_read.decompress(zbuf)
        # NOTE(review): bytes_in counts compressed bytes and bytes_in_comp
        # counts decompressed ones, which looks inverted relative to
        # getCompRatio's documented compressed/uncompressed — confirm.
        self.bytes_in += len(zbuf)
        self.bytes_in_comp += len(buf)
        old = self.__rbuf.read()
        self.__rbuf = StringIO(old + buf)
        if len(old) + len(buf) == 0:
            return False
        return True

    def write(self, buf):
        '''
        Write some bytes, putting them into the internal write
        buffer for eventual compression.
        '''
        self.__wbuf.write(buf)

    def flush(self):
        '''
        Flush any queued up data in the write buffer and ensure the
        compression buffer is flushed out to the underlying transport
        '''
        wout = self.__wbuf.getvalue()
        if len(wout) > 0:
            zbuf = self._zcomp_write.compress(wout)
            self.bytes_out += len(wout)
            self.bytes_out_comp += len(zbuf)
        else:
            zbuf = ''
        # Z_SYNC_FLUSH emits all pending output so the peer can fully
        # decompress what has been written so far without ending the stream.
        ztail = self._zcomp_write.flush(zlib.Z_SYNC_FLUSH)
        self.bytes_out_comp += len(ztail)
        if (len(zbuf) + len(ztail)) > 0:
            self.__wbuf = StringIO()
            self.__trans.write(zbuf + ztail)
        self.__trans.flush()

    @property
    def cstringio_buf(self):
        '''Implement the CReadableTransport interface'''
        return self.__rbuf

    def cstringio_refill(self, partialread, reqlen):
        '''Implement the CReadableTransport interface for refill'''
        retstring = partialread
        if reqlen < self.DEFAULT_BUFFSIZE:
            # Opportunistically read a full buffer's worth in one call.
            retstring += self.read(self.DEFAULT_BUFFSIZE)
        while len(retstring) < reqlen:
            retstring += self.read(reqlen - len(retstring))
        self.__rbuf = StringIO(retstring)
        return self.__rbuf
only one is really needed (since it must have separate read/write buffers), |
<|file_name|>paymentflowhistorydialog.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2010 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <[email protected]>
##
"""Payment Flow History Report Dialog"""
from storm.expr import And, Eq, Or
from stoqlib.database.expr import Date
from stoqlib.gui.dialogs.daterangedialog import DateRangeDialog
from stoqlib.gui.utils.printing import print_report
from stoqlib.lib.message import info
from stoqlib.lib.translation import stoqlib_gettext
from stoqlib.reporting.payment import PaymentFlowHistoryReport
_ = stoqlib_gettext
# A few comments for the payment_flow_query:
# - The first table in the FROM clause is the list of all possible dates
# (due_date and paid_date) in the results. This is done so that the subsequent
# subselect can be joined properly
# - In that same subselect, we use IS NOT NULL to avoid an empty row for
# payments that were not received yet.
# - We filter out statuses (0, 5) to not include PREVIEW and CANCELED payments
# - payment_type = 1 are OUT_PAYMENTS and 0 are IN_PAYMENTS
# NOTE: the column order must match the tuple unpacking in
# PaymentFlowDay.__init__: (date, to_pay_count, to_pay, paid_count, paid,
# to_receive_count, to_receive, received_count, received).
# This restores the two payments_paid columns that had been dropped from
# the SELECT list (the row would otherwise only have 7 values).
payment_flow_query = """
SELECT all_payment_dates.date,
       COALESCE(payments_to_pay.count, 0) as to_pay_payments,
       COALESCE(payments_to_pay.to_pay, 0) as to_pay,
       COALESCE(payments_paid.count, 0) as paid_payments,
       COALESCE(payments_paid.paid, 0) as paid,
       COALESCE(payments_to_receive.count, 0) as to_receive_payments,
       COALESCE(payments_to_receive.to_receive, 0) as to_receive,
       COALESCE(payments_received.count, 0) as received_payments,
       COALESCE(payments_received.received, 0) as received
FROM (SELECT date(due_date) as date FROM payment
      UNION SELECT date(paid_date) as date FROM payment WHERE
      paid_date IS NOT NULL) as all_payment_dates
-- To pay (out payments)
LEFT JOIN (SELECT DATE(due_date) as date, count(1) as count, sum(value) as to_pay
           FROM payment WHERE payment_type = 'out' AND status not in ('preview', 'cancelled')
           GROUP BY DATE(due_date))
       AS payments_to_pay ON (all_payment_dates.date = payments_to_pay.date)
-- Paid (out payments)
LEFT JOIN (SELECT DATE(paid_date) as date, count(1) as count, sum(value) as paid
           FROM payment WHERE payment_type = 'out'
           AND payment.status not in ('preview', 'cancelled')
           GROUP BY DATE(paid_date))
       AS payments_paid ON (all_payment_dates.date = payments_paid.date)
-- To receive (in payments)
LEFT JOIN (SELECT DATE(due_date) as date, count(1) as count, sum(value) as to_receive
           FROM payment WHERE payment_type = 'in'
           AND payment.status not in ('preview', 'cancelled')
           GROUP BY DATE(due_date))
       AS payments_to_receive ON (all_payment_dates.date = payments_to_receive.date)
-- Received (in payments)
LEFT JOIN (SELECT DATE(paid_date) as date, count(1) as count, sum(value) as received
           FROM payment WHERE payment_type = 'in'
           AND payment.status not in ('preview', 'cancelled')
           GROUP BY DATE(paid_date))
       AS payments_received ON (all_payment_dates.date = payments_received.date)
ORDER BY all_payment_dates.date;
"""
class PaymentFlowDay(object):
    """Aggregated payment flow (in/out counts, values and running balances)
    for a single calendar date, built from one row of payment_flow_query."""

    def __init__(self, store, row, previous_day=None):
        """Payment Flow History for a given date

        :param store: a database store, kept for later queries
        :param row: A list of values from the payment_flow_query above
        :param previous_day: The `previous_day <PaymentFlowDay>`. This is used
        to calculate the expected and real balances for each day (based on the
        previous dates).
        """
        (date, to_pay_count, to_pay, paid_count, paid, to_receive_count,
         to_receive, received_count, received) = row
        self.history_date = date
        # values
        self.to_pay = to_pay
        self.to_receive = to_receive
        self.paid = paid
        self.received = received
        # counts
        self.to_pay_payments = to_pay_count
        self.to_receive_payments = to_receive_count
        self.paid_payments = paid_count
        self.received_payments = received_count
        # Balances chain day to day: start from the previous day's real
        # balance, or zero on the first day of the series.
        if previous_day:
            self.previous_balance = previous_day.balance_real
        else:
            self.previous_balance = 0
        # Today's balance is the previous day balance, plus the payments we
        # received, minus what we paid. expected is for the payments we should
        # have paid/received
        self.balance_expected = self.previous_balance + to_receive - to_pay
        self.balance_real = self.previous_balance + received - paid
        self.store = store

    def get_divergent_payments(self):
        """Returns a :class:`Payment` sequence that meet the following requirements:

        * The payment due date, paid date or cancel date is the current
          PaymentFlowHistory date.
        * The payment was paid/received with different values (eg with
          discount or surcharge).
        * The payment was scheduled to be paid/received on the current,
          but it was not.
        * The payment was not expected to be paid/received on the current date.
        """
        # Imported here to avoid a module-level import cycle with the
        # domain package -- TODO confirm against project conventions.
        from stoqlib.domain.payment.payment import Payment
        date = self.history_date
        # Payments touching this date whose paid value/date diverge from
        # what was scheduled, or that were never paid at all.
        query = And(Or(Date(Payment.due_date) == date,
                       Date(Payment.paid_date) == date,
                       Date(Payment.cancel_date) == date),
                    Or(Eq(Payment.paid_value, None),
                       Payment.value != Payment.paid_value,
                       Eq(Payment.paid_date, None),
                       Date(Payment.due_date) != Date(Payment.paid_date)))
        return self.store.find(Payment, query)

    @classmethod
    def get_flow_history(cls, store, start, end):
        """Get the payment flow history for a given date interval

        This will return a list of PaymentFlowDay, one for each date that has
        payments registered and are in the interval specified.
        """
        history = []
        previous_entry = None
        for row in store.execute(payment_flow_query).get_all():
            entry = cls(store, row, previous_entry)
            if entry.history_date > end:
                break
            # We only store entries for dates higher than the user requested,
            # but we still need to create the entries from the beginning, so
            # we have the real balances
            if entry.history_date >= start:
                history.append(entry)
            previous_entry = entry
        return history
class PaymentFlowHistoryDialog(DateRangeDialog):
    """Date-range picker that prints the PaymentFlowHistoryReport."""

    title = _(u'Payment Flow History Dialog')
    desc = _("Select a date or a range to be visualised in the report:")
    size = (-1, -1)

    def __init__(self, store):
        """A dialog to print the PaymentFlowHistoryReport report.

        :param store: a store
        """
        self.store = store
        DateRangeDialog.__init__(self, title=self.title, header_text=self.desc)

    #
    # BasicDialog
    #

    def confirm(self):
        # Let the base dialog capture the selected range into self.retval.
        DateRangeDialog.confirm(self)
        start = self.retval.start
        end = self.retval.end
        results = PaymentFlowDay.get_flow_history(self.store, start, end)
        if not results:
            # Nothing to report for the chosen interval; keep dialog open.
            info(_('No payment history found.'))
            return False
        print_report(PaymentFlowHistoryReport, payment_histories=results)
return True<|fim▁end|> | COALESCE(payments_paid.count, 0) as paid_payments,
COALESCE(payments_paid.paid, 0) as paid, |
<|file_name|>inspect.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The rkt Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"bufio"
"bytes"
"crypto/sha1"
"flag"
"fmt"
"io/ioutil"
"os"
"os/signal"
"path/filepath"
"sort"
"strconv"
"strings"
"syscall"
"time"
"unsafe"
"golang.org/x/sys/unix"
"github.com/appc/spec/pkg/device"
"github.com/coreos/rkt/common/cgroup"
"github.com/coreos/rkt/tests/testutils"
"github.com/syndtr/gocapability/capability"
)
var (
globalFlagset = flag.NewFlagSet("inspect", flag.ExitOnError)
globalFlags = struct {
ReadStdin bool
CheckTty bool
CheckPath bool
PrintExec bool
PrintMsg string
SuffixMsg string
PrintEnv string
PrintCapsPid int
PrintUser bool
PrintGroups bool
PrintCwd bool
ExitCode int
ReadFile bool
WriteFile bool
StatFile bool
HashFile bool
Sleep int
PreSleep int
PrintMemoryLimit bool
PrintCPUQuota bool
FileName string
Content string
CheckCgroupMounts bool
PrintNetNS bool
PrintIPv4 string
PrintIPv6 string
PrintDefaultGWv4 bool
PrintDefaultGWv6 bool
PrintGWv4 string
PrintGWv6 string
PrintHostname bool
GetHTTP string
ServeHTTP string
ServeHTTPTimeout int
PrintIfaceCount bool
PrintAppAnnotation string
SilentSigterm bool
CheckMountNS bool
PrintNoNewPrivs bool
CheckMknod string
}{}
)
func init() {
globalFlagset.BoolVar(&globalFlags.ReadStdin, "read-stdin", false, "Read a line from stdin")
globalFlagset.BoolVar(&globalFlags.CheckTty, "check-tty", false, "Check if stdin is a terminal")
globalFlagset.BoolVar(&globalFlags.CheckPath, "check-path", false, "Check if environment variable PATH does not contain \\n")
globalFlagset.BoolVar(&globalFlags.PrintExec, "print-exec", false, "Print the command we were execed as (i.e. argv[0])")
globalFlagset.StringVar(&globalFlags.PrintMsg, "print-msg", "", "Print the message given as parameter")
globalFlagset.StringVar(&globalFlags.SuffixMsg, "suffix-msg", "", "Print this suffix after some commands")
globalFlagset.BoolVar(&globalFlags.PrintCwd, "print-cwd", false, "Print the current working directory")
globalFlagset.StringVar(&globalFlags.PrintEnv, "print-env", "", "Print the specified environment variable")
globalFlagset.IntVar(&globalFlags.PrintCapsPid, "print-caps-pid", -1, "Print capabilities of the specified pid (or current process if pid=0)")
globalFlagset.BoolVar(&globalFlags.PrintUser, "print-user", false, "Print uid and gid")
globalFlagset.BoolVar(&globalFlags.PrintGroups, "print-groups", false, "Print all gids")
globalFlagset.IntVar(&globalFlags.ExitCode, "exit-code", 0, "Return this exit code")
globalFlagset.BoolVar(&globalFlags.ReadFile, "read-file", false, "Print the content of the file $FILE")
globalFlagset.BoolVar(&globalFlags.WriteFile, "write-file", false, "Write $CONTENT in the file $FILE")
globalFlagset.BoolVar(&globalFlags.StatFile, "stat-file", false, "Print the ownership and mode of the file $FILE")
globalFlagset.BoolVar(&globalFlags.HashFile, "hash-file", false, "Print the SHA1SUM of the file $FILE")
globalFlagset.IntVar(&globalFlags.Sleep, "sleep", -1, "Sleep before exiting (in seconds)")
globalFlagset.IntVar(&globalFlags.PreSleep, "pre-sleep", -1, "Sleep before executing (in seconds)")
globalFlagset.BoolVar(&globalFlags.PrintMemoryLimit, "print-memorylimit", false, "Print cgroup memory limit")
globalFlagset.BoolVar(&globalFlags.PrintCPUQuota, "print-cpuquota", false, "Print cgroup cpu quota in milli-cores")
globalFlagset.StringVar(&globalFlags.FileName, "file-name", "", "The file to read/write, $FILE will be ignored if this is specified")
globalFlagset.StringVar(&globalFlags.Content, "content", "", "The content to write, $CONTENT will be ignored if this is specified")
globalFlagset.BoolVar(&globalFlags.CheckCgroupMounts, "check-cgroups", false, "Try to write to the cgroup filesystem. Everything should be RO except some well-known files")
globalFlagset.BoolVar(&globalFlags.PrintNetNS, "print-netns", false, "Print the network namespace")
globalFlagset.StringVar(&globalFlags.PrintIPv4, "print-ipv4", "", "Takes an interface name and prints its IPv4")
globalFlagset.StringVar(&globalFlags.PrintIPv6, "print-ipv6", "", "Takes an interface name and prints its IPv6")
globalFlagset.BoolVar(&globalFlags.PrintDefaultGWv4, "print-defaultgwv4", false, "Print the default IPv4 gateway")
globalFlagset.BoolVar(&globalFlags.PrintDefaultGWv6, "print-defaultgwv6", false, "Print the default IPv6 gateway")
globalFlagset.StringVar(&globalFlags.PrintGWv4, "print-gwv4", "", "Takes an interface name and prints its gateway's IPv4")
globalFlagset.StringVar(&globalFlags.PrintGWv6, "print-gwv6", "", "Takes an interface name and prints its gateway's IPv6")
globalFlagset.BoolVar(&globalFlags.PrintHostname, "print-hostname", false, "Prints the pod hostname")
globalFlagset.StringVar(&globalFlags.GetHTTP, "get-http", "", "HTTP-Get from the given address")
globalFlagset.StringVar(&globalFlags.ServeHTTP, "serve-http", "", "Serve the hostname via HTTP on the given address:port")
globalFlagset.IntVar(&globalFlags.ServeHTTPTimeout, "serve-http-timeout", 30, "HTTP Timeout to wait for a client connection")
globalFlagset.BoolVar(&globalFlags.PrintIfaceCount, "print-iface-count", false, "Print the interface count")
globalFlagset.StringVar(&globalFlags.PrintAppAnnotation, "print-app-annotation", "", "Take an annotation name of the app, and prints its value")
globalFlagset.BoolVar(&globalFlags.SilentSigterm, "silent-sigterm", false, "Exit with a success exit status if we receive SIGTERM")
globalFlagset.BoolVar(&globalFlags.CheckMountNS, "check-mountns", false, "Check if app's mount ns is different than stage1's. Requires CAP_SYS_PTRACE")
globalFlagset.BoolVar(&globalFlags.PrintNoNewPrivs, "print-no-new-privs", false, "print the prctl PR_GET_NO_NEW_PRIVS value")
globalFlagset.StringVar(&globalFlags.CheckMknod, "check-mknod", "", "check whether mknod on restricted devices is allowed")
}
// in reports whether el occurs anywhere in list.
func in(list []int, el int) bool {
	found := false
	for i := 0; i < len(list) && !found; i++ {
		found = list[i] == el
	}
	return found
}
func main() {
globalFlagset.Parse(os.Args[1:])
args := globalFlagset.Args()
if len(args) > 0 {
fmt.Fprintln(os.Stderr, "Wrong parameters")
os.Exit(1)
}
if globalFlags.PrintNoNewPrivs {
r1, _, err := syscall.Syscall(
syscall.SYS_PRCTL,
uintptr(unix.PR_GET_NO_NEW_PRIVS),
uintptr(0), uintptr(0),
)
fmt.Printf("no_new_privs: %v err: %v\n", r1, err)
}
if globalFlags.CheckMknod != "" {
/* format: c:5:2:name */
dev := strings.SplitN(globalFlags.CheckMknod, ":", 4)
if len(dev) < 4 {
fmt.Fprintln(os.Stderr, "Not enough parameters for mknod")
os.Exit(1)
}
typ := dev[0]
major, err := strconv.Atoi(dev[1])
if err != nil {
fmt.Fprintln(os.Stderr, "Wrong major")
os.Exit(1)
}
minor, err := strconv.Atoi(dev[2])
if err != nil {
fmt.Fprintln(os.Stderr, "Wrong minor")
os.Exit(1)
}
nodeName := dev[3]
majorMinor := device.Makedev(uint(major), uint(minor))
mode := uint32(0777)
switch typ {
case "c":
mode |= syscall.S_IFCHR
case "b":
mode |= syscall.S_IFBLK
default:
fmt.Fprintln(os.Stderr, "Wrong device node type")
os.Exit(1)
}
if err := syscall.Mknod(nodeName, mode, int(majorMinor)); err != nil {
fmt.Fprintf(os.Stderr, "mknod %s: fail: %v\n", nodeName, err)
os.Exit(1)
} else {
fmt.Printf("mknod %s: succeed\n", nodeName)
os.Exit(0)
}
}
if globalFlags.SilentSigterm {
terminateCh := make(chan os.Signal, 1)
signal.Notify(terminateCh, syscall.SIGTERM)
go func() {
<-terminateCh
os.Exit(0)
}()
}
if globalFlags.PreSleep >= 0 {
time.Sleep(time.Duration(globalFlags.PreSleep) * time.Second)
}
if globalFlags.ReadStdin {
reader := bufio.NewReader(os.Stdin)
fmt.Printf("Enter text:\n")
text, _ := reader.ReadString('\n')
fmt.Printf("Received text: %s\n", text)
}
if globalFlags.CheckTty {
fd := int(os.Stdin.Fd())
var termios syscall.Termios
_, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), syscall.TCGETS, uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
if err == 0 {
fmt.Printf("stdin is a terminal\n")
} else {
fmt.Printf("stdin is not a terminal\n")
}
}
if globalFlags.CheckPath {
envBytes, err := ioutil.ReadFile("/proc/self/environ")
if err != nil {
fmt.Fprintf(os.Stderr, "Error reading environment from \"/proc/self/environ\": %v\n", err)
os.Exit(1)
}
for _, v := range bytes.Split(envBytes, []byte{0}) {
if len(v) == 0 {
continue
}
if strings.HasPrefix(string(v), "PATH=") {
if strings.Contains(string(v), "\n") {
fmt.Fprintf(os.Stderr, "Malformed PATH: found new line")
os.Exit(1)
} else {
fmt.Printf("PATH is good\n")
os.Exit(0)
}
} else {
continue
}
}
fmt.Fprintf(os.Stderr, "PATH not found")
os.Exit(1)
}
if globalFlags.PrintExec {
fmt.Fprintf(os.Stdout, "inspect execed as: %s\n", os.Args[0])
}
if globalFlags.PrintMsg != "" {
fmt.Fprintf(os.Stdout, "%s\n", globalFlags.PrintMsg)
messageLoopStr := os.Getenv("MESSAGE_LOOP")
messageLoop, err := strconv.Atoi(messageLoopStr)
if err == nil {
for i := 0; i < messageLoop; i++ {
time.Sleep(time.Second)
fmt.Fprintf(os.Stdout, "%s\n", globalFlags.PrintMsg)
}
}
}
if globalFlags.PrintEnv != "" {
fmt.Fprintf(os.Stdout, "%s=%s\n", globalFlags.PrintEnv, os.Getenv(globalFlags.PrintEnv))
}
if globalFlags.PrintCapsPid >= 0 {
caps, err := capability.NewPid(globalFlags.PrintCapsPid)
if err != nil {
fmt.Fprintf(os.Stderr, "Cannot get caps: %v\n", err)
os.Exit(1)
}
fmt.Printf("Capability set: effective: %s (%s)\n", caps.StringCap(capability.EFFECTIVE), globalFlags.SuffixMsg)
fmt.Printf("Capability set: permitted: %s (%s)\n", caps.StringCap(capability.PERMITTED), globalFlags.SuffixMsg)
fmt.Printf("Capability set: inheritable: %s (%s)\n", caps.StringCap(capability.INHERITABLE), globalFlags.SuffixMsg)
fmt.Printf("Capability set: bounding: %s (%s)\n", caps.StringCap(capability.BOUNDING), globalFlags.SuffixMsg)
if capStr := os.Getenv("CAPABILITY"); capStr != "" {
capInt, err := strconv.Atoi(capStr)
if err != nil {
fmt.Fprintf(os.Stderr, "Environment variable $CAPABILITY is not a valid capability number: %v\n", err)
os.Exit(1)
}
c := capability.Cap(capInt)
if caps.Get(capability.BOUNDING, c) {
fmt.Printf("%v=enabled (%s)\n", c.String(), globalFlags.SuffixMsg)
} else {
fmt.Printf("%v=disabled (%s)\n", c.String(), globalFlags.SuffixMsg)
}
}
}
if globalFlags.PrintUser {
fmt.Printf("User: uid=%d euid=%d gid=%d egid=%d\n", os.Getuid(), os.Geteuid(), os.Getgid(), os.Getegid())
}
if globalFlags.PrintGroups {
gids, err := os.Getgroups()
if err != nil {
fmt.Fprintf(os.Stderr, "Error getting groups: %v\n", err)
os.Exit(1)
}
// getgroups(2): It is unspecified whether the effective group ID of
// the calling process is included in the returned list. (Thus, an
// application should also call getegid(2) and add or remove the
// resulting value.)
egid := os.Getegid()
if !in(gids, egid) {
gids = append(gids, egid)
sort.Ints(gids)
}
var b bytes.Buffer
for _, gid := range gids {
b.WriteString(fmt.Sprintf("%d ", gid))
}
fmt.Printf("Groups: %s\n", b.String())
}
if globalFlags.WriteFile {
fileName := os.Getenv("FILE")
if globalFlags.FileName != "" {
fileName = globalFlags.FileName
}
content := os.Getenv("CONTENT")
if globalFlags.Content != "" {
content = globalFlags.Content
}
err := ioutil.WriteFile(fileName, []byte(content), 0600)
if err != nil {
fmt.Fprintf(os.Stderr, "Cannot write to file %q: %v\n", fileName, err)
os.Exit(1)
}
}
if globalFlags.ReadFile {
fileName := os.Getenv("FILE")
if globalFlags.FileName != "" {
fileName = globalFlags.FileName
}
dat, err := ioutil.ReadFile(fileName)
if err != nil {
fmt.Fprintf(os.Stderr, "Cannot read file %q: %v\n", fileName, err)
os.Exit(1)
}
fmt.Print("<<<")
fmt.Print(string(dat))
fmt.Print(">>>\n")
}
if globalFlags.StatFile {
fileName := os.Getenv("FILE")
if globalFlags.FileName != "" {
fileName = globalFlags.FileName
}
fi, err := os.Stat(fileName)
if err != nil {
fmt.Fprintf(os.Stderr, "Cannot stat file %q: %v\n", fileName, err)
os.Exit(1)
}
fmt.Printf("%s: mode: %s\n", fileName, fi.Mode().String())
fmt.Printf("%s: user: %v\n", fileName, fi.Sys().(*syscall.Stat_t).Uid)
fmt.Printf("%s: group: %v\n", fileName, fi.Sys().(*syscall.Stat_t).Gid)
}
if globalFlags.HashFile {
fileName := os.Getenv("FILE")
if globalFlags.FileName != "" {
fileName = globalFlags.FileName
}
dat, err := ioutil.ReadFile(fileName)
if err != nil {
fmt.Fprintf(os.Stderr, "Cannot read file %q: %v\n", fileName, err)
os.Exit(1)
}
fmt.Printf("sha1sum: %x\n", sha1.Sum(dat))
}
if globalFlags.PrintCwd {
wd, err := os.Getwd()
if err != nil {
fmt.Fprintf(os.Stderr, "Cannot get working directory: %v\n", err)
os.Exit(1)
}
fmt.Printf("cwd: %s\n", wd)
}
if globalFlags.Sleep >= 0 {
time.Sleep(time.Duration(globalFlags.Sleep) * time.Second)
}
if globalFlags.PrintMemoryLimit {
memCgroupPath, err := cgroup.GetOwnCgroupPath("memory")
if err != nil {
fmt.Fprintf(os.Stderr, "Error getting own memory cgroup path: %v\n", err)
os.Exit(1)
}
// we use /proc/1/root to escape the chroot we're in and read our
// memory limit
limitPath := filepath.Join("/proc/1/root/sys/fs/cgroup/memory", memCgroupPath, "memory.limit_in_bytes")
limit, err := ioutil.ReadFile(limitPath)
if err != nil {
fmt.Fprintf(os.Stderr, "Can't read memory.limit_in_bytes\n")
os.Exit(1)
}
fmt.Printf("Memory Limit: %s\n", string(limit))
}
if globalFlags.PrintCPUQuota {
cpuCgroupPath, err := cgroup.GetOwnCgroupPath("cpu")
if err != nil {
fmt.Fprintf(os.Stderr, "Error getting own cpu cgroup path: %v\n", err)
os.Exit(1)
}
// we use /proc/1/root to escape the chroot we're in and read our
// cpu quota
periodPath := filepath.Join("/proc/1/root/sys/fs/cgroup/cpu", cpuCgroupPath, "cpu.cfs_period_us")
periodBytes, err := ioutil.ReadFile(periodPath)
if err != nil {
fmt.Fprintf(os.Stderr, "Can't read cpu.cpu_period_us\n")
os.Exit(1)
}
quotaPath := filepath.Join("/proc/1/root/sys/fs/cgroup/cpu", cpuCgroupPath, "cpu.cfs_quota_us")
quotaBytes, err := ioutil.ReadFile(quotaPath)
if err != nil {
fmt.Fprintf(os.Stderr, "Can't read cpu.cpu_quota_us\n")
os.Exit(1)
}
period, err := strconv.Atoi(strings.Trim(string(periodBytes), "\n"))
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
quota, err := strconv.Atoi(strings.Trim(string(quotaBytes), "\n"))
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
quotaMilliCores := quota * 1000 / period
fmt.Printf("CPU Quota: %s\n", strconv.Itoa(quotaMilliCores))<|fim▁hole|> }
if globalFlags.CheckCgroupMounts {
rootCgroupPath := "/proc/1/root/sys/fs/cgroup"
testPaths := []string{rootCgroupPath}
// test a couple of controllers if they're available
if _, err := os.Stat(filepath.Join(rootCgroupPath, "memory")); err == nil {
testPaths = append(testPaths, filepath.Join(rootCgroupPath, "memory"))
}
if _, err := os.Stat(filepath.Join(rootCgroupPath, "cpu")); err == nil {
testPaths = append(testPaths, filepath.Join(rootCgroupPath, "cpu"))
}
for _, p := range testPaths {
if err := syscall.Mkdir(filepath.Join(p, "test"), 0600); err == nil || err != syscall.EROFS {
fmt.Fprintf(os.Stderr, "check-cgroups: FAIL (%v)", err)
os.Exit(1)
}
}
fmt.Println("check-cgroups: SUCCESS")
}
if globalFlags.PrintNetNS {
ns, err := os.Readlink("/proc/self/ns/net")
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
fmt.Printf("NetNS: %s\n", ns)
}
if globalFlags.PrintIPv4 != "" {
iface := globalFlags.PrintIPv4
ips, err := testutils.GetIPsv4(iface)
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
if len(ips) == 0 {
fmt.Fprintf(os.Stderr, "No IPv4 found for interface %+v:\n", iface)
os.Exit(1)
}
fmt.Printf("%v IPv4: %s\n", iface, ips[0])
}
if globalFlags.PrintDefaultGWv4 {
gw, err := testutils.GetDefaultGWv4()
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
fmt.Printf("DefaultGWv4: %s\n", gw)
}
if globalFlags.PrintDefaultGWv6 {
gw, err := testutils.GetDefaultGWv6()
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
fmt.Printf("DefaultGWv6: %s\n", gw)
}
if globalFlags.PrintGWv4 != "" {
// TODO: GetGW not implemented yet
iface := globalFlags.PrintGWv4
gw, err := testutils.GetGWv4(iface)
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
fmt.Printf("%v GWv4: %s\n", iface, gw)
}
if globalFlags.PrintIPv6 != "" {
// TODO
}
if globalFlags.PrintGWv6 != "" {
// TODO
}
if globalFlags.PrintHostname {
hostname, err := os.Hostname()
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
fmt.Printf("Hostname: %s\n", hostname)
}
if globalFlags.ServeHTTP != "" {
err := testutils.HTTPServe(globalFlags.ServeHTTP, globalFlags.ServeHTTPTimeout)
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
}
if globalFlags.GetHTTP != "" {
body, err := testutils.HTTPGet(globalFlags.GetHTTP)
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
fmt.Printf("HTTP-Get received: %s\n", body)
}
if globalFlags.PrintIfaceCount {
ifaceCount, err := testutils.GetIfaceCount()
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
fmt.Printf("Interface count: %d\n", ifaceCount)
}
if globalFlags.PrintAppAnnotation != "" {
mdsUrl, appName := os.Getenv("AC_METADATA_URL"), os.Getenv("AC_APP_NAME")
body, err := testutils.HTTPGet(fmt.Sprintf("%s/acMetadata/v1/apps/%s/annotations/%s", mdsUrl, appName, globalFlags.PrintAppAnnotation))
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
fmt.Printf("Annotation %s=%s\n", globalFlags.PrintAppAnnotation, body)
}
if globalFlags.CheckMountNS {
appMountNS, err := os.Readlink("/proc/self/ns/mnt")
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
s1MountNS, err := os.Readlink("/proc/1/ns/mnt")
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
if appMountNS != s1MountNS {
fmt.Println("check-mountns: DIFFERENT")
} else {
fmt.Println("check-mountns: IDENTICAL")
os.Exit(1)
}
}
os.Exit(globalFlags.ExitCode)
}<|fim▁end|> | |
#include <csapex/param/parameter.h>
#include <csapex/param/value_parameter.h>
#include <csapex/param/interval_parameter.h>
#include <csapex/param/string_list_parameter.h>
#include <csapex/param/null_parameter.h>
#include <csapex/param/parameter_factory.h>
#include <csapex/utility/delegate.h>
#include <csapex/serialization/parameter_serializer.h>
#include <csapex/utility/yaml.h>
#include <csapex_testing/csapex_test_case.h>

#include <unordered_map>
#include <typeindex>
using namespace csapex;
using namespace csapex::param;
// Test fixture for NullParameter behaviour. No shared state is required;
// the fixture only groups the related tests below.
class NullParameterTest : public CsApexTestCase
{
protected:
    NullParameterTest()
    {
    }
};
namespace csapex
{
namespace param
{
namespace factory
{
// NullParameter is never meant to be created deliberately in production
// code, so the library exports no factory for it; declare a test-local
// one here instead.
ParameterBuilder declareNull(const std::string& name)
{
    auto result = std::make_shared<NullParameter>(name, ParameterDescription());
    return ParameterBuilder(std::move(result));
}
}  // namespace factory
}  // namespace param
}  // namespace csapex
// A null parameter carries no serializable state.
TEST_F(NullParameterTest, HasState)
{
    EXPECT_FALSE(factory::declareNull("foo").build()->hasState());
}
// toString() renders a null parameter with an explicit "[null]" payload.
TEST_F(NullParameterTest, StringConversion)
{
    EXPECT_STREQ("[foo: [null]]", factory::declareNull("foo").build()->toString().c_str());
}
// Reading a typed value out of a null parameter must fail loudly.
TEST_F(NullParameterTest, GetThrows)
{
    ParameterPtr p = factory::declareNull("foo").build();
    EXPECT_ANY_THROW(p->as<int>());
}
// Round-trips a NullParameter through both serialization paths: the YAML
// name/type node and the binary SerializationBuffer, checking that
// deserialization yields a NullParameter again.
TEST_F(NullParameterTest, Serialization)
{
    YAML::Node node;
    {
        ParameterPtr p = factory::declareNull("foo");
        p->serialize_yaml(node);
        EXPECT_STREQ(p->name().c_str(), node["name"].as<std::string>().c_str());
        EXPECT_STREQ(p->getParameterType().c_str(), node["type"].as<std::string>().c_str());
    }
    {
        // Rebuild an empty parameter of the serialized type and restore it.
        ParameterPtr p = factory::makeEmpty(node["type"].as<std::string>());
        ASSERT_NE(nullptr, p);
        p->deserialize_yaml(node);
    }
    SerializationBuffer buffer;
    {
        ParameterPtr p = factory::declareNull("foo");
        ParameterSerializer::instance().serialize(*p, buffer);
    }
    {
        // The binary stream must come back as a NullParameter instance.
        StreamablePtr s = ParameterSerializer::instance().deserialize(buffer);
        ASSERT_NE(nullptr, s);
        ParameterPtr p = std::dynamic_pointer_cast<Parameter>(s);
        ASSERT_NE(nullptr, p);
        NullParameter::Ptr sp = std::dynamic_pointer_cast<NullParameter>(p);
        ASSERT_NE(nullptr, sp);
    }
}
// Round-trips a NullParameter through the binary SerializationBuffer.
// NOTE(review): the YAML block below repeats the Serialization test above
// verbatim -- looks like a copy/paste leftover; consider trimming it.
TEST_F(NullParameterTest, BinarySerialization)
{
    YAML::Node node;
    {
        ParameterPtr p = factory::declareNull("foo");
        p->serialize_yaml(node);
        EXPECT_STREQ(p->name().c_str(), node["name"].as<std::string>().c_str());
        EXPECT_STREQ(p->getParameterType().c_str(), node["type"].as<std::string>().c_str());
    }
    {
        ParameterPtr p = factory::makeEmpty(node["type"].as<std::string>());
        ASSERT_NE(nullptr, p);
        p->deserialize_yaml(node);
    }
    SerializationBuffer buffer;
    {
        ParameterPtr p = factory::declareNull("foo");
        ParameterSerializer::instance().serialize(*p, buffer);
    }
    {
        // The binary stream must come back as a NullParameter instance.
        StreamablePtr s = ParameterSerializer::instance().deserialize(buffer);
        ASSERT_NE(nullptr, s);
        ParameterPtr p = std::dynamic_pointer_cast<Parameter>(s);
        ASSERT_NE(nullptr, p);
        NullParameter::Ptr sp = std::dynamic_pointer_cast<NullParameter>(p);
        ASSERT_NE(nullptr, sp);
    }
}
}<|fim▁end|> | #include <csapex/utility/delegate.h>
#include <csapex/serialization/parameter_serializer.h>
#include <csapex/utility/yaml.h>
|
<|file_name|>importer_test.go<|end_file_name|><|fim▁begin|>package importer
import (
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"github.com/anz-bank/sysl/pkg/syslutil"
"github.com/sirupsen/logrus/hooks/test"
"github.com/stretchr/testify/require"
)
var (
	// update, when set via -update, regenerates the golden .sysl files
	// instead of comparing against them.
	update = flag.Bool("update", false, "Update golden test files")
)
// TestMain parses the test flags (notably -update) before running the suite.
func TestMain(m *testing.M) {
	flag.Parse()
	os.Exit(m.Run())
}
// testConfig describes one golden-file importer test: the human-readable
// test name, the fixture directory, the extension that identifies input
// files, and the importer format passed to Factory.
// Fix: the struct header and the first three fields had been truncated;
// they are restored here (all four fields are read by the run helpers).
type testConfig struct {
	name          string
	testDir       string
	testExtension string
	format        string
}
// runImportEqualityTests runs one parallel subtest per fixture file in
// cfg.testDir whose extension matches cfg.testExtension: each file is
// imported via the Factory for cfg.format and the generated Sysl output is
// compared against the golden .sysl file of the same base name.
// Run with -update to regenerate the golden files.
func runImportEqualityTests(t *testing.T, cfg testConfig) {
	t.Helper()
	files, err := ioutil.ReadDir(cfg.testDir)
	require.NoError(t, err)
	for _, f := range files {
		if f.IsDir() {
			continue
		}
		logger, _ := test.NewNullLogger()
		ext := filepath.Ext(f.Name())
		if strings.EqualFold(ext, cfg.testExtension) {
			filename := strings.TrimSuffix(f.Name(), ext)
			t.Run(fmt.Sprintf("%s-%s", cfg.name, filename), func(t *testing.T) {
				t.Parallel()
				syslFile := filepath.Join(cfg.testDir, filename+".sysl")
				fileToImport := syslutil.MustAbsolute(t, filepath.Join(cfg.testDir, filename+cfg.testExtension))
				input, err := ioutil.ReadFile(fileToImport)
				require.NoError(t, err)
				absFilePath, err := filepath.Abs(filepath.Join(cfg.testDir, filename+cfg.testExtension))
				require.NoError(t, err)
				// Build the importer for this format and normalize the
				// app metadata so goldens are stable.
				imp, err := Factory(absFilePath, false, cfg.format, input, logger)
				require.NoError(t, err)
				imp.WithAppName("TestApp").WithPackage("com.example.package")
				result, err := imp.LoadFile(absFilePath)
				require.NoError(t, err)
				if *update {
					// Regenerate the golden file instead of asserting.
					err = ioutil.WriteFile(syslFile, []byte(result), 0600)
					if err != nil {
						t.Error(err)
					}
				}
				expected, err := ioutil.ReadFile(syslFile)
				require.NoError(t, err)
				// Normalize CRLF so goldens compare cleanly on Windows.
				expected = syslutil.HandleCRLF(expected)
				require.NoError(t, err)
				require.Equal(t, string(expected), result)
			})
		}
	}
}
// runImportDirEqualityTests imports an entire directory (cfg.testDir) as a
// single unit and compares the generated Sysl against the golden file named
// after the directory itself (<dir>/<dir>.sysl). Unlike the per-file
// variant, this helper does not honour the -update flag.
func runImportDirEqualityTests(t *testing.T, cfg testConfig) {
	t.Helper()
	logger, _ := test.NewNullLogger()
	syslFile := filepath.Join(cfg.testDir, filepath.Base(cfg.testDir)+".sysl")
	path := syslutil.MustAbsolute(t, cfg.testDir)
	imp, err := Factory(path, true, cfg.format, nil, logger)
	require.NoError(t, err)
	out, err := imp.WithAppName("TestApp").WithPackage("com.example.package").LoadFile(path)
	require.NoError(t, err)
	expected, err := ioutil.ReadFile(syslFile)
	require.NoError(t, err)
	// Normalize CRLF so goldens compare cleanly on Windows checkouts.
	expected = syslutil.HandleCRLF(expected)
	require.NoError(t, err)
	require.Equal(t, string(expected), out)
}
func TestLoadOpenAPI2JSONFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadOpenAPI2JSONFromTestFiles",
testDir: "tests/openapi2",
testExtension: ".json",
})
}
func TestLoadOpenAPI2FromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadOpenAPI2FromTestFiles",
testDir: "tests/openapi2",
testExtension: ".yaml",
})
}
func TestLoadOpenAPI3FromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadOpenAPI3FromTestFiles",
testDir: "tests/openapi3",
testExtension: ".yaml",
})
}
func TestLoadXSDFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadXSDFromTestFiles",
testDir: "tests/xsd",
testExtension: ".xsd",
})
}
func TestLoadSpannerFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadSpannerFromTestFiles",
testDir: "sql/tests/spanner",
testExtension: ".sql",
format: "spannerSQL",
})
}
func TestLoadSpannerDirFromTestDir(t *testing.T) {
runImportDirEqualityTests(t, testConfig{
name: "TestLoadSpannerDirFromTestDir",
testDir: "sql/tests/spanner/migrations",
testExtension: "",
format: "spannerSQLdir",
})
}
func TestLoadPostgresqlFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadPostgresqlFromTestFiles",
testDir: "sql/tests/postgresql",
testExtension: ".sql",
format: "postgres",
})
}
func TestLoadPostgresqlDirFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadPostgresqlDirFromTestFiles",
testDir: "sql/tests/postgresql/migrations",
testExtension: "",
format: "postgresDir",
})
}
func TestLoadMySQLFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadMySQLFromTestFiles",
testDir: "sql/tests/mysql",
testExtension: ".sql",
format: "mysql",
})
}
func TestLoadMySQLDirFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadMySQLDirFromTestFiles",
testDir: "sql/tests/mysql/migrations",
testExtension: "",
format: "mysqlDir",
})
}
func TestLoadBigQueryFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadBigQueryFromTestFiles",
testDir: "sql/tests/bigquery",
testExtension: ".sql",
format: "bigquery",
})
}
/*
func TestLoadGrammarFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadGrammarFromTestFiles",
testDir: "tests-grammar",
testExtension: "g",
mode: ModeGrammar,
fn: LoadGrammar,
})
}
*/
func TestLoadAvroFromTestFiles(t *testing.T) {
runImportEqualityTests(t, testConfig{
name: "TestLoadAvroFromTestFiles",
testDir: "avro/tests",
testExtension: ".avsc",
})
}<|fim▁end|> | type testConfig struct {
name string
testDir string
testExtension string |
<|file_name|>memstore.go<|end_file_name|><|fim▁begin|>package gobuddyfs
import (
"sync"
"github.com/golang/glog"
)
// MemStore is an in-memory KVStore implementation backed by a plain map and
// guarded by an RWMutex for concurrent use.
type MemStore struct {
	lock *sync.RWMutex // guards store
	store map[string][]byte
	KVStore // embedded interface; only Get/Set are overridden here
}
// NewMemStore returns an empty, ready-to-use MemStore.
func NewMemStore() *MemStore {
	return &MemStore{store: make(map[string][]byte), lock: &sync.RWMutex{}}
}
// Get returns the value stored under key, or (nil, nil) when the key is
// absent. The retry flag is accepted for KVStore compatibility but unused.
func (self *MemStore) Get(key string, retry bool) ([]byte, error) {
	if glog.V(2) {
		glog.Infof("Get(%s)\n", key)
	}
	self.lock.RLock()
	defer self.lock.RUnlock()
	// comma-ok lookup: missing keys are reported as a nil value, not an error.
	if val, ok := self.store[key]; ok {
		return val, nil
	}
	return nil, nil
}
func (self *MemStore) Set(key string, value []byte) error {
if glog.V(2) {
glog.Infof("Set(%s)\n", key)
}
self.lock.Lock()<|fim▁hole|> delete(self.store, key)
} else {
self.store[key] = value
}
return nil
}
var _ KVStore = new(MemStore)<|fim▁end|> | defer self.lock.Unlock()
if value == nil {
// Implicit delete operation |
<|file_name|>functional_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import print_function
import logging
import os
import signal
import socket
import time
import traceback
from datetime import datetime
from multiprocessing import Process
from os.path import abspath
from os.path import dirname
from os.path import expanduser
from os.path import join
from os.path import realpath
import mock
import pyotp
import requests
import tbselenium.common as cm
from selenium import webdriver
from selenium.common.exceptions import NoAlertPresentException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.remote_connection import LOGGER
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
from sqlalchemy.exc import IntegrityError
from tbselenium.tbdriver import TorBrowserDriver
import journalist_app
import source_app
import tests.utils.env as env
from db import db
from models import Journalist
from sdconfig import config
os.environ["SECUREDROP_ENV"] = "test"
FUNCTIONAL_TEST_DIR = abspath(dirname(__file__))
LOGFILE_PATH = abspath(join(FUNCTIONAL_TEST_DIR, "firefox.log"))
FILES_DIR = abspath(join(dirname(realpath(__file__)), "../..", "tests/files"))
FIREFOX_PATH = "/usr/bin/firefox/firefox"
TBB_PATH = abspath(join(expanduser("~"), ".local/tbb/tor-browser_en-US/"))
os.environ["TBB_PATH"] = TBB_PATH
TBBRC = join(TBB_PATH, "Browser/TorBrowser/Data/Tor/torrc")
LOGGER.setLevel(logging.WARNING)
# https://stackoverflow.com/a/34795883/837471
class alert_is_not_present(object):
    """Selenium expected-condition: succeed when NO alert is present."""

    def __call__(self, driver):
        # Touching the alert's text raises NoAlertPresentException when no
        # popup is showing, which is exactly the condition we wait for.
        try:
            driver.switch_to.alert.text
        except NoAlertPresentException:
            return True
        return False
class FunctionalTest(object):
    """Shared harness for SecureDrop functional tests.

    setup() launches the source and journalist Flask apps in subprocesses
    and creates both a Tor Browser and a Firefox Selenium driver; tests then
    drive whichever driver ``use_firefox`` selects.
    """

    use_firefox = False          # subclasses set True to drive plain Firefox
    driver = None                # the currently active webdriver
    accept_languages = None      # optional Accept-Language preference
    _firefox_driver = None
    _torbrowser_driver = None
    gpg = None                   # GPG context created by env.init_gpg()
    new_totp = None
    timeout = 10                 # default implicit/explicit wait in seconds
    secret_message = "These documents outline a major government invasion of privacy."
def _unused_port(self):
s = socket.socket()
s.bind(("127.0.0.1", 0))
port = s.getsockname()[1]
s.close()
return port
    def _create_torbrowser_driver(self):
        """Create the TorBrowserDriver and store it on self._torbrowser_driver."""
        logging.info("Creating TorBrowserDriver")
        # NOTE(review): log_file is never closed; it is only used to stamp a
        # header into the shared Firefox log before the driver appends to it.
        log_file = open(LOGFILE_PATH, "a")
        log_file.write("\n\n[%s] Running Functional Tests\n" % str(datetime.now()))
        log_file.flush()
        # Don't use Tor when reading from localhost, and turn off private
        # browsing. We need to turn off private browsing because we won't be
        # able to access the browser's cookies in private browsing mode. Since
        # we use session cookies in SD anyway (in private browsing mode all
        # cookies are set as session cookies), this should not affect session
        # lifetime.
        pref_dict = {
            "network.proxy.no_proxies_on": "127.0.0.1",
            "browser.privatebrowsing.autostart": False,
        }
        if self.accept_languages is not None:
            pref_dict["intl.accept_languages"] = self.accept_languages
        self._torbrowser_driver = TorBrowserDriver(
            TBB_PATH, tor_cfg=cm.USE_RUNNING_TOR, pref_dict=pref_dict, tbb_logfile_path=LOGFILE_PATH
        )
        logging.info("Created Tor Browser driver")
    def _create_firefox_driver(self, profile=None):
        """Create a plain Firefox webdriver and store it on self._firefox_driver.

        A fresh FirefoxProfile is built unless one is supplied; the window is
        sized to a fixed 1024x768 so layout-dependent tests are stable.
        """
        logging.info("Creating Firefox driver")
        if profile is None:
            profile = webdriver.FirefoxProfile()
            if self.accept_languages is not None:
                profile.set_preference("intl.accept_languages", self.accept_languages)
                profile.update_preferences()
        self._firefox_driver = webdriver.Firefox(
            firefox_binary=FIREFOX_PATH, firefox_profile=profile
        )
        self._firefox_driver.set_window_position(0, 0)
        self._firefox_driver.set_window_size(1024, 768)
        self._firefox_driver.implicitly_wait(self.timeout)
        logging.info("Created Firefox driver")
    def disable_javascript(self):
        """Turn off JavaScript in the active driver's profile."""
        self.driver.profile.set_preference("javascript.enabled", False)

    def enable_javascript(self):
        """Turn on JavaScript in the active driver's profile."""
        self.driver.profile.set_preference("javascript.enabled", True)
    def switch_to_firefox_driver(self):
        """Make the plain Firefox driver the active ``self.driver``."""
        self.driver = self._firefox_driver

    def switch_to_torbrowser_driver(self):
        """Make the Tor Browser driver the active ``self.driver``."""
        self.driver = self._torbrowser_driver
    def setup(self, session_expiration=30):
        """Build the whole test environment: DB, test user, both Flask apps
        in subprocesses, and both Selenium drivers.

        ``session_expiration`` (minutes) is pushed into the source app config
        so session-lifetime tests can shorten it.
        """
        env.create_directories()
        self.gpg = env.init_gpg()
        self.__context = journalist_app.create_app(config).app_context()
        self.__context.push()
        # Patch the two-factor verification to avoid intermittent errors
        self.patcher = mock.patch("models.Journalist.verify_token")
        self.mock_journalist_verify_token = self.patcher.start()
        self.mock_journalist_verify_token.return_value = True
        # Entropy check is mocked so the source app never blocks on /dev/random.
        self.patcher2 = mock.patch("source_app.main.get_entropy_estimate")
        self.mock_get_entropy_estimate = self.patcher2.start()
        self.mock_get_entropy_estimate.return_value = 8192
        # SIGUSR1 dumps a stack trace — handy for debugging hung test runs.
        signal.signal(signal.SIGUSR1, lambda _, s: traceback.print_stack(s))
        env.create_directories()
        db.create_all()
        # Add our test user
        try:
            valid_password = "correct horse battery staple profanity oil chewy"
            user = Journalist(username="journalist", password=valid_password, is_admin=True)
            user.otp_secret = "JHCOGO7VCER3EJ4L"
            db.session.add(user)
            db.session.commit()
        except IntegrityError:
            logging.error("Test user already added")
            db.session.rollback()
        # This user is required for our tests cases to login
        self.admin_user = {
            "name": "journalist",
            "password": ("correct horse battery staple" " profanity oil chewy"),
            "secret": "JHCOGO7VCER3EJ4L",
        }
        self.admin_user["totp"] = pyotp.TOTP(self.admin_user["secret"])
        # Each app gets its own ephemeral port on localhost.
        source_port = self._unused_port()
        journalist_port = self._unused_port()
        self.source_location = "http://127.0.0.1:%d" % source_port
        self.journalist_location = "http://127.0.0.1:%d" % journalist_port
        # Allow custom session expiration lengths
        self.session_expiration = session_expiration
        self.source_app = source_app.create_app(config)
        self.journalist_app = journalist_app.create_app(config)
        def start_source_server(app):
            config.SESSION_EXPIRATION_MINUTES = self.session_expiration
            app.run(port=source_port, debug=True, use_reloader=False, threaded=True)
        def start_journalist_server(app):
            app.run(port=journalist_port, debug=True, use_reloader=False, threaded=True)
        self.source_process = Process(target=lambda: start_source_server(self.source_app))
        self.journalist_process = Process(
            target=lambda: start_journalist_server(self.journalist_app)
        )
        self.source_process.start()
        self.journalist_process.start()
        # Poll (up to ~15s) until both servers answer HTTP before proceeding.
        for tick in range(30):
            try:
                requests.get(self.source_location, timeout=1)
                requests.get(self.journalist_location, timeout=1)
            except Exception:
                time.sleep(0.5)
            else:
                break
        self._create_torbrowser_driver()
        self._create_firefox_driver()
        if self.use_firefox:
            self.switch_to_firefox_driver()
        else:
            self.switch_to_torbrowser_driver()
        # Polls the DOM to wait for elements. To read more about why
        # this is necessary:
        #
        # http://www.obeythetestinggoat.com/how-to-get-selenium-to-wait-for-page-load-after-a-click.html
        #
        # A value of 5 is known to not be enough in some cases, when
        # the machine hosting the tests is slow, reason why it was
        # raised to 10. Setting the value to 60 or more would surely
        # cover even the slowest of machine. However it also means
        # that a test failing to find the desired element in the DOM
        # will only report failure after 60 seconds which is painful
        # for quickly debugging.
        #
        self.driver.implicitly_wait(self.timeout)
    def wait_for_source_key(self, source_name):
        """Block (up to 60s) until the source's GPG key has been generated.

        Key generation happens asynchronously after first submission, so
        tests must wait before acting on the key.
        """
        filesystem_id = self.source_app.crypto_util.hash_codename(source_name)
        def key_available(filesystem_id):
            assert self.source_app.crypto_util.getkey(filesystem_id)
        self.wait_for(lambda: key_available(filesystem_id), timeout=60)
def teardown(self):
if self._torbrowser_driver:
self._torbrowser_driver.quit()
if self._firefox_driver:
self._firefox_driver.quit()
self.patcher.stop()
env.teardown()
self.source_process.terminate()
self.journalist_process.terminate()
self.__context.pop()
def create_new_totp(self, secret):
self.new_totp = pyotp.TOTP(secret)<|fim▁hole|>
def wait_for(self, function_with_assertion, timeout=None):
"""Polling wait for an arbitrary assertion."""
# Thanks to
# http://chimera.labs.oreilly.com/books/1234000000754/ch20.html#_a_common_selenium_problem_race_conditions
if timeout is None:
timeout = self.timeout
start_time = time.time()
while time.time() - start_time < timeout:
try:
return function_with_assertion()
except (AssertionError, WebDriverException):
time.sleep(0.1)
# one more try, which will raise any errors if they are outstanding
return function_with_assertion()
    def safe_click_by_id(self, element_id):
        """Wait until the element is clickable, scroll to it, then click it."""
        WebDriverWait(self.driver, self.timeout).until(
            expected_conditions.element_to_be_clickable((By.ID, element_id))
        )
        el = self.wait_for(lambda: self.driver.find_element_by_id(element_id))
        # Attribute access has the side effect of scrolling the element into view.
        el.location_once_scrolled_into_view
        ActionChains(self.driver).move_to_element(el).click().perform()

    def safe_click_by_css_selector(self, selector):
        """Like safe_click_by_id, but locates the element by CSS selector."""
        WebDriverWait(self.driver, self.timeout).until(
            expected_conditions.element_to_be_clickable((By.CSS_SELECTOR, selector))
        )
        el = self.wait_for(lambda: self.driver.find_element_by_css_selector(selector))
        el.location_once_scrolled_into_view
        ActionChains(self.driver).move_to_element(el).click().perform()

    def safe_click_all_by_css_selector(self, selector, root=None):
        """Click every element matching selector, waiting for each to be ready."""
        if root is None:
            root = self.driver
        els = self.wait_for(lambda: root.find_elements_by_css_selector(selector))
        for el in els:
            el.location_once_scrolled_into_view
            self.wait_for(lambda: el.is_enabled() and el.is_displayed())
            ActionChains(self.driver).move_to_element(el).click().perform()

    def _alert_wait(self, timeout=None):
        """Block until a JS alert/confirm popup is present."""
        if timeout is None:
            timeout = self.timeout
        WebDriverWait(self.driver, timeout).until(
            expected_conditions.alert_is_present(), "Timed out waiting for confirmation popup."
        )

    def _alert_accept(self):
        """Accept the current popup and wait for it to disappear."""
        self.driver.switch_to.alert.accept()
        WebDriverWait(self.driver, self.timeout).until(
            alert_is_not_present(), "Timed out waiting for confirmation popup to disappear."
        )

    def _alert_dismiss(self):
        """Dismiss the current popup and wait for it to disappear."""
        self.driver.switch_to.alert.dismiss()
        WebDriverWait(self.driver, self.timeout).until(
            alert_is_not_present(), "Timed out waiting for confirmation popup to disappear."
        )
<|file_name|>disk.rs<|end_file_name|><|fim▁begin|>#![feature(plugin, custom_derive, custom_attribute)]
#![plugin(serde_macros)]
extern crate drum;
extern crate serde;
use drum::*;
use std::io::*;
use std::collections::*;
use std::fs::{OpenOptions};
/// JSON-like value stored in the drum store. Derives full ordering
/// (Ord/PartialOrd) so a Value can itself be used as a BTreeMap key,
/// and Serialize/Deserialize for on-disk persistence.
#[derive(PartialEq, Ord, Eq, PartialOrd, Serialize, Deserialize)]
enum Value {
    Array(Vec<Value>),
    Object(BTreeMap<Value, Value>),
    String(String),
    Number(i64)
}
<|fim▁hole|> let msg = "Hello World";
let file =
try!(OpenOptions::new()
.read(true)
.write(true)
.create(true)
.append(true)
.open("test.db"));
let mut store = try!(Store::reopen(file));
for key in store.keys() {
println!("{}", key)
}
let previous = try!(store.get(&String::from(msg)));
try!(store.insert(
String::from(msg),
Value::Array(vec![Value::Number(100)]))
);
match previous {
Some(Value::Array(vec)) => {
match vec[0] {
Value::Number(num) => {
println!("previous: {}", num);
},
_ => panic!()
}
},
_ => ()
}
Ok(())
}
fn main() {
run().unwrap();
return;
}<|fim▁end|> | fn run() -> Result<()> { |
<|file_name|>tray_settings.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/system/settings/tray_settings.h"
#include "ash/shell.h"
#include "ash/system/power/power_status_view.h"
#include "ash/system/tray/system_tray_delegate.h"
#include "ash/system/tray/tray_constants.h"
#include "ash/system/tray/tray_views.h"
#include "base/logging.h"
#include "base/utf_string_conversions.h"
#include "grit/ash_resources.h"
#include "grit/ash_strings.h"
#include "third_party/skia/include/core/SkColor.h"
#include "ui/base/accessibility/accessible_view_state.h"
#include "ui/base/resource/resource_bundle.h"
#include "ui/gfx/image/image.h"
#include "ui/views/controls/image_view.h"
#include "ui/views/controls/label.h"
#include "ui/views/layout/box_layout.h"
#include "ui/views/layout/fill_layout.h"
#include "ui/views/view.h"
namespace ash {
namespace internal {
namespace tray {
class SettingsDefaultView : public ash::internal::ActionableView {
public:
  // Builds the row: a settings icon + label when the user may open settings
  // (i.e. logged in and not locked), plus a battery status view when a
  // battery is present.
  explicit SettingsDefaultView(user::LoginStatus status)
      : login_status_(status),
        label_(NULL),
        power_status_view_(NULL) {
    SetLayoutManager(new views::BoxLayout(views::BoxLayout::kHorizontal,
        ash::kTrayPopupPaddingHorizontal, 0,
        ash::kTrayPopupPaddingBetweenItems));
    bool power_view_right_align = false;
    if (login_status_ != user::LOGGED_IN_NONE &&
        login_status_ != user::LOGGED_IN_LOCKED) {
      ui::ResourceBundle& rb = ui::ResourceBundle::GetSharedInstance();
      views::ImageView* icon =
          new ash::internal::FixedSizedImageView(0, ash::kTrayPopupItemHeight);
      icon->SetImage(
          rb.GetImageNamed(IDR_AURA_UBER_TRAY_SETTINGS).ToImageSkia());
      AddChildView(icon);
      string16 text = rb.GetLocalizedString(IDS_ASH_STATUS_TRAY_SETTINGS);
      label_ = new views::Label(text);
      AddChildView(label_);
      SetAccessibleName(text);
      // With the label present, the power view is right-aligned in Layout().
      power_view_right_align = true;
    }
    PowerSupplyStatus power_status =
        ash::Shell::GetInstance()->tray_delegate()->GetPowerSupplyStatus();
    if (power_status.battery_is_present) {
      power_status_view_ = new ash::internal::PowerStatusView(
          ash::internal::PowerStatusView::VIEW_DEFAULT, power_view_right_align);
      AddChildView(power_status_view_);
      UpdatePowerStatus(power_status);
    }
  }

  virtual ~SettingsDefaultView() {}
void UpdatePowerStatus(const PowerSupplyStatus& status) {
if (power_status_view_)
power_status_view_->UpdatePowerStatus(status);
}
  // Overridden from ash::internal::ActionableView.
  // Opens the settings UI on click/press; no-op (returns false) when logged
  // out or on the lock screen.
  virtual bool PerformAction(const ui::Event& event) OVERRIDE {
    if (login_status_ == user::LOGGED_IN_NONE ||
        login_status_ == user::LOGGED_IN_LOCKED)
      return false;
    ash::Shell::GetInstance()->tray_delegate()->ShowSettings();
    return true;
  }
  // Overridden from views::View.
  virtual void Layout() OVERRIDE {
    views::View::Layout();
    if (label_ && power_status_view_) {
      // Let the box-layout do the layout first. Then move power_status_view_
      // to right align if it is created.
      gfx::Size size = power_status_view_->GetPreferredSize();
      gfx::Rect bounds(size);
      // Right-align horizontally, center vertically.
      bounds.set_x(width() - size.width() - ash::kTrayPopupPaddingBetweenItems);
      bounds.set_y((height() - size.height()) / 2);
      power_status_view_->SetBoundsRect(bounds);
    }
  }
<|fim▁hole|> // Overridden from views::View.
virtual void ChildPreferredSizeChanged(views::View* child) OVERRIDE {
views::View::ChildPreferredSizeChanged(child);
Layout();
}
private:
user::LoginStatus login_status_;
views::Label* label_;
ash::internal::PowerStatusView* power_status_view_;
DISALLOW_COPY_AND_ASSIGN(SettingsDefaultView);
};
} // namespace tray
TraySettings::TraySettings(SystemTray* system_tray)
    : SystemTrayItem(system_tray),
      default_view_(NULL) {
}

TraySettings::~TraySettings() {}

// This item contributes nothing to the tray (status area) itself.
views::View* TraySettings::CreateTrayView(user::LoginStatus status) {
  return NULL;
}

// Creates the row shown in the default system-tray bubble. Returns NULL when
// there is nothing to show: logged out / locked AND no battery present.
views::View* TraySettings::CreateDefaultView(user::LoginStatus status) {
  if ((status == user::LOGGED_IN_NONE || status == user::LOGGED_IN_LOCKED) &&
      (!ash::Shell::GetInstance()->tray_delegate()->
          GetPowerSupplyStatus().battery_is_present))
    return NULL;
  CHECK(default_view_ == NULL);
  default_view_ = new tray::SettingsDefaultView(status);
  return default_view_;
}

// No detailed view exists for settings.
views::View* TraySettings::CreateDetailedView(user::LoginStatus status) {
  NOTIMPLEMENTED();
  return NULL;
}

void TraySettings::DestroyTrayView() {
}

// The view itself is owned and deleted by the view hierarchy; just drop the
// raw pointer.
void TraySettings::DestroyDefaultView() {
  default_view_ = NULL;
}

void TraySettings::DestroyDetailedView() {
}

void TraySettings::UpdateAfterLoginStatusChange(user::LoginStatus status) {
}

// Overridden from PowerStatusObserver.
void TraySettings::OnPowerStatusChanged(const PowerSupplyStatus& status) {
  if (default_view_)
    default_view_->UpdatePowerStatus(status);
}

}  // namespace internal
}  // namespace ash
<|file_name|>code_interpreter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8; Mode: Python; indent-tabs-mode: nil; tab-width: 4 -*-<|fim▁hole|>@file code_interpreter.py
@package pybooster.code_interpreter
@version 2019.07.14
@author Devyn Collier Johnson <[email protected]>
@copyright LGPLv3
@section LICENSE
GNU Lesser General Public License v3
Copyright (c) Devyn Collier Johnson, All rights reserved.
This software is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software.
"""
from shlex import quote
from subprocess import getoutput  # nosec
from sys import stdout
# Public names exported via `from code_interpreter import *`, grouped by
# target language. Fixes the old list, which named `execscala` twice and
# omitted `execscalafile` entirely.
__all__: list = [
    # CLISP #
    r'execclispfile',
    # COFFEESCRIPT #
    r'execcoffeescript',
    # JAVASCRIPT #
    r'execjs',
    r'execjsfile',
    # LUA #
    r'execlua',
    r'execluafile',
    # PERL #
    r'execperl',
    r'execperlfile',
    r'initperl',
    # PHP #
    r'execphp',
    r'execphpfile',
    # RUBY #
    r'execruby',
    r'execrubyfile',
    # SCALA #
    r'execscala',
    r'execscalafile',
    # SHELL #
    r'execsh',
    r'execshfile',
    r'initsh'
]
# CLISP #


def execclispfile(_filename: str) -> str:
    """Execute a CLisp file given as a str and return the output as a str.

    The filename is shell-quoted so paths containing spaces or shell
    metacharacters cannot break the command line.
    """
    return getoutput('clisp ' + quote(_filename))
# COFFEESCRIPT #


def execcoffeescript(_code: str) -> str:
    """Execute Coffeescript code given as a str and return the output as a str.

    shlex.quote() replaces the old manual replace("'", "\\'"): a backslash
    is literal inside POSIX single quotes, so the old escaping broke (and
    could inject through) any input containing a single quote.
    """
    # NOTE(review): the CoffeeScript CLI is usually installed as `coffee`;
    # confirm a `coffeescript` executable exists on target systems.
    return getoutput('coffeescript --eval ' + quote(_code))
# JAVASCRIPT #


def execjs(_code: str) -> str:
    """Execute JavaScript code given as a str and return the output as a str."""
    # shlex.quote() replaces the broken manual single-quote escaping.
    return getoutput('jsc -e ' + quote(_code))


def execjsfile(_filename: str) -> str:
    """Execute a JavaScript file given as a str and return the output as a str."""
    # NOTE(review): `jsc -e` evaluates its argument as source text; for a
    # file argument the `-e` flag looks wrong — confirm against the jsc CLI.
    return getoutput('jsc -e ' + quote(_filename))
# LUA #


def execlua(_code: str) -> str:
    """Execute Lua code given as a str and return the output as a str."""
    # shlex.quote() replaces the broken manual single-quote escaping.
    return getoutput('lua -e ' + quote(_code))


def execluafile(_filename: str) -> str:
    """Execute a Lua script given as a str and return the output as a str."""
    return getoutput('lua ' + quote(_filename))
# PERL #


def execperl(_code: str) -> str:
    """Execute Perl code given as a str and return the output as a str."""
    # shlex.quote() replaces the broken manual single-quote escaping.
    return getoutput('perl -e ' + quote(_code))


def execperlfile(_filename: str) -> str:
    """Execute a Perl script given as a str and return the output as a str."""
    return getoutput('perl ' + quote(_filename))


def initperl() -> None:
    """Run a Perl REP-Loop (Read-Evaluate-Print-Loop)."""
    while 1:
        _input = input(r'Perl > ')  # nosec
        # `exit`/`quit` leaves the loop instead of being handed to Perl.
        if _input in {r'exit', r'quit'}:
            break
        stdout.write(getoutput('perl -e ' + quote(_input)) + '\n')
# PHP #


def execphp(_code: str) -> str:
    """Execute PHP code given as a str and return the output as a str."""
    # shlex.quote() replaces the broken manual single-quote escaping.
    return getoutput('php -r ' + quote(_code))


def execphpfile(_filename: str) -> str:
    """Execute a PHP script given as a str and return the output as a str."""
    return getoutput('php -f ' + quote(_filename))
# RUBY #


def execruby(_code: str) -> str:
    """Execute Ruby code given as a str and return the output as a str."""
    # shlex.quote() replaces the broken manual single-quote escaping.
    return getoutput('ruby -e ' + quote(_code))


def execrubyfile(_filename: str) -> str:
    """Execute a Ruby script given as a str and return the output as a str."""
    return getoutput('ruby ' + quote(_filename))
# SCALA #


def execscala(_code: str) -> str:
    """Execute Scala code given as a str and return the output as a str."""
    # shlex.quote() replaces the broken manual single-quote escaping.
    return getoutput('scala -e ' + quote(_code))


def execscalafile(_filename: str) -> str:
    """Execute a Scala file given as a str and return the output as a str."""
    return getoutput('scala ' + quote(_filename))
# SHELL #


def execsh(_code: str) -> str:
    """Execute Shell code given as a str and return the output as a str.

    shlex.quote() produces a correctly single-quoted token; the previous
    manual replace("'", "\\'") is not valid POSIX quoting and broke (or
    allowed command injection through) inputs containing single quotes.
    """
    return getoutput('sh -c ' + quote(_code))


def execshfile(_filename: str) -> str:
    """Execute a Shell script given as a str and return the output as a str."""
    return getoutput('sh ' + quote(_filename))


def initsh() -> None:
    """Run a shell REP-Loop (Read-Evaluate-Print-Loop)."""
    while 1:
        _input = input(r'Shell: $ ')  # nosec
        # `exit`/`quit` leaves the loop instead of being handed to sh.
        if _input in {r'exit', r'quit'}:
            break
        stdout.write(getoutput('sh -c ' + quote(_input)) + '\n')
# kate: encoding utf-8; bom off; syntax python; indent-mode python; eol unix; replace-tabs off; indent-width 4; tab-width 4; remove-trailing-space on;
"""@brief Interpret various computer languages using installed interpreters.
|
<|file_name|>avi_role.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
# Avi Version: 17.1.1
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify<|fim▁hole|># it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_role
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of Role Avi RESTful Object
description:
- This module is used to configure Role object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
name:
description:
- Name of the object.
required: true
privileges:
description:
- List of permission.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create Role object
avi_role:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_role
"""
RETURN = '''
obj:
description: Role (api/role) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
    """Ansible entry point: declare the Role module's argument spec and
    delegate create/update/delete handling to the shared Avi helper."""
    argument_specs = dict(
        state=dict(default='present',
                   choices=['absent', 'present']),
        avi_api_update_method=dict(default='put',
                                   choices=['put', 'patch']),
        avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
        name=dict(type='str', required=True),
        privileges=dict(type='list',),
        tenant_ref=dict(type='str',),
        url=dict(type='str',),
        uuid=dict(type='str',),
    )
    # Merge in the common Avi connection options (controller, username, ...).
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    # Fail cleanly when the optional avisdk dependency is missing.
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    return avi_ansible_api(module, 'role',
                           set([]))


if __name__ == '__main__':
    main()
<|file_name|>tables.py<|end_file_name|><|fim▁begin|># Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.template import defaultfilters<|fim▁hole|>
from openstack_horizon import api
from openstack_horizon.dashboards.identity.groups import constants
LOG = logging.getLogger(__name__)
LOGOUT_URL = 'logout'
STATUS_CHOICES = (
("true", True),
("false", False)
)
class CreateGroupLink(tables.LinkAction):
    """Modal link that opens the "create group" form."""
    name = "create"
    verbose_name = _("Create Group")
    url = constants.GROUPS_CREATE_URL
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("identity", "identity:create_group"),)

    def allowed(self, request, group):
        # Show the action only when the Keystone backend allows group edits.
        return api.keystone.keystone_can_edit_group()
class EditGroupLink(tables.LinkAction):
    """Per-row modal link that opens the "edit group" form."""
    name = "edit"
    verbose_name = _("Edit Group")
    url = constants.GROUPS_UPDATE_URL
    classes = ("ajax-modal",)
    icon = "pencil"
    policy_rules = (("identity", "identity:update_group"),)

    def allowed(self, request, group):
        return api.keystone.keystone_can_edit_group()
class DeleteGroupsAction(tables.DeleteAction):
    """Batch action that deletes the selected Keystone groups."""
    @staticmethod
    def action_present(count):
        # Button label, pluralized by how many rows are selected.
        return ungettext_lazy(
            u"Delete Group",
            u"Delete Groups",
            count
        )
    @staticmethod
    def action_past(count):
        # Past-tense label shown in success notifications.
        return ungettext_lazy(
            u"Deleted Group",
            u"Deleted Groups",
            count
        )
    name = "delete"
    policy_rules = (("identity", "identity:delete_group"),)

    def allowed(self, request, datum):
        return api.keystone.keystone_can_edit_group()

    def delete(self, request, obj_id):
        LOG.info('Deleting group "%s".' % obj_id)
        api.keystone.group_delete(request, obj_id)
class ManageUsersLink(tables.LinkAction):
    """Per-row link to the group-membership management page."""
    name = "users"
    verbose_name = _("Modify Users")
    url = constants.GROUPS_MANAGE_URL
    icon = "pencil"
    policy_rules = (("identity", "identity:get_group"),
                    ("identity", "identity:list_users"),)

    def allowed(self, request, datum):
        return api.keystone.keystone_can_edit_group()
class GroupFilterAction(tables.FilterAction):
    """Client-side filter matching groups by case-insensitive name substring."""

    def filter(self, table, groups, filter_string):
        """Naive case-insensitive search."""
        needle = filter_string.lower()
        return filter(lambda group: needle in group.name.lower(), groups)
class GroupsTable(tables.DataTable):
    """Table of Keystone groups with create/edit/delete/manage-users actions."""
    name = tables.Column('name', verbose_name=_('Name'))
    # 'description' may be absent on a group object, hence the getattr lambda.
    description = tables.Column(lambda obj: getattr(obj, 'description', None),
                                verbose_name=_('Description'))
    id = tables.Column('id', verbose_name=_('Group ID'))

    class Meta:
        name = "groups"
        verbose_name = _("Groups")
        row_actions = (ManageUsersLink, EditGroupLink, DeleteGroupsAction)
        table_actions = (GroupFilterAction, CreateGroupLink,
                         DeleteGroupsAction)
class UserFilterAction(tables.FilterAction):
    """Client-side filter matching users by name or email substring."""

    def filter(self, table, users, filter_string):
        """Naive case-insensitive search."""
        needle = filter_string.lower()
        matches = []
        for user in users:
            email = getattr(user, 'email', '').lower()
            if needle in user.name.lower() or needle in email:
                matches.append(user)
        return matches
class RemoveMembers(tables.DeleteAction):
    """Batch action removing the selected users from the current group."""
    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Remove User",
            u"Remove Users",
            count
        )
    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Removed User",
            u"Removed Users",
            count
        )
    name = "removeGroupMember"
    policy_rules = (("identity", "identity:remove_user_from_group"),)

    def allowed(self, request, user=None):
        return api.keystone.keystone_can_edit_group()

    def action(self, request, obj_id):
        user_obj = self.table.get_object_by_id(obj_id)
        # The group id comes from the table's URL kwargs.
        group_id = self.table.kwargs['group_id']
        LOG.info('Removing user %s from group %s.' % (user_obj.id,
                                                      group_id))
        api.keystone.remove_group_user(request,
                                       group_id=group_id,
                                       user_id=user_obj.id)
        # TODO(lin-hua-cheng): Fix the bug when removing current user
        # Keystone revokes the token of the user removed from the group.
        # If the logon user was removed, redirect the user to logout.
class AddMembersLink(tables.LinkAction):
    """Modal link that opens the "add group member" page for this group."""
    name = "add_user_link"
    verbose_name = _("Add...")
    classes = ("ajax-modal",)
    icon = "plus"
    url = constants.GROUPS_ADD_MEMBER_URL
    policy_rules = (("identity", "identity:list_users"),
                    ("identity", "identity:add_user_to_group"),)

    def allowed(self, request, user=None):
        return api.keystone.keystone_can_edit_group()

    def get_link_url(self, datum=None):
        # Build the URL from the table kwargs (carries the group_id).
        return reverse(self.url, kwargs=self.table.kwargs)
class UsersTable(tables.DataTable):
    """Base table of Keystone users (name, email, id, enabled)."""
    name = tables.Column('name', verbose_name=_('User Name'))
    # Email is escaped then urlized so it renders as a safe mailto link.
    email = tables.Column('email', verbose_name=_('Email'),
                          filters=[defaultfilters.escape,
                                   defaultfilters.urlize])
    id = tables.Column('id', verbose_name=_('User ID'))
    enabled = tables.Column('enabled', verbose_name=_('Enabled'),
                            status=True,
                            status_choices=STATUS_CHOICES,
                            empty_value="False")
class GroupMembersTable(UsersTable):
    """Users currently in the group, with filter/add/remove actions."""
    class Meta:
        name = "group_members"
        verbose_name = _("Group Members")
        table_actions = (UserFilterAction, AddMembersLink, RemoveMembers)
class AddMembers(tables.BatchAction):
    """Batch action adding the selected users to the current group."""
    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Add User",
            u"Add Users",
            count
        )
    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Added User",
            u"Added Users",
            count
        )
    name = "addMember"
    icon = "plus"
    requires_input = True
    success_url = constants.GROUPS_MANAGE_URL
    policy_rules = (("identity", "identity:add_user_to_group"),)

    def allowed(self, request, user=None):
        return api.keystone.keystone_can_edit_group()

    def action(self, request, obj_id):
        user_obj = self.table.get_object_by_id(obj_id)
        group_id = self.table.kwargs['group_id']
        LOG.info('Adding user %s to group %s.' % (user_obj.id,
                                                  group_id))
        api.keystone.add_group_user(request,
                                    group_id=group_id,
                                    user_id=user_obj.id)
        # TODO(lin-hua-cheng): Fix the bug when adding current user
        # Keystone revokes the token of the user added to the group.
        # If the logon user was added, redirect the user to logout.

    def get_success_url(self, request=None):
        # Redirect back to the membership page of the same group.
        group_id = self.table.kwargs.get('group_id', None)
        return reverse(self.success_url, args=[group_id])
class GroupNonMembersTable(UsersTable):
class Meta:
name = "group_non_members"
verbose_name = _("Non-Members")
table_actions = (UserFilterAction, AddMembers)<|fim▁end|> | from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon_lib import tables |
<|file_name|>grpc_test.go<|end_file_name|><|fim▁begin|>package grpc
import (
"net"
"testing"
"github.com/asim/go-micro/v3/transport"
)
// expectedPort asserts that lsn is bound to the given TCP port.
// NOTE(review): when SplitHostPort fails, port stays empty and the second
// check still runs (closing lsn and reporting a second mismatch error) —
// confirm that double-reporting/fallthrough is intended.
func expectedPort(t *testing.T, expected string, lsn transport.Listener) {
	_, port, err := net.SplitHostPort(lsn.Addr())
	if err != nil {
		t.Errorf("Expected address to be `%s`, got error: %v", expected, err)
	}
	if port != expected {
		lsn.Close()
		t.Errorf("Expected address to be `%s`, got `%s`", expected, port)
	}
}
func TestGRPCTransportPortRange(t *testing.T) {
tp := NewTransport()
lsn1, err := tp.Listen(":44444-44448")
if err != nil {
t.Errorf("Did not expect an error, got %s", err)
}
expectedPort(t, "44444", lsn1)
lsn2, err := tp.Listen(":44444-44448")
if err != nil {
t.Errorf("Did not expect an error, got %s", err)
}
expectedPort(t, "44445", lsn2)
lsn, err := tp.Listen(":0")
if err != nil {
t.Errorf("Did not expect an error, got %s", err)
}
lsn.Close()
lsn1.Close()
lsn2.Close()
}
func TestGRPCTransportCommunication(t *testing.T) {
tr := NewTransport()
l, err := tr.Listen(":0")
if err != nil {
t.Errorf("Unexpected listen err: %v", err)
}
defer l.Close()
fn := func(sock transport.Socket) {
defer sock.Close()
for {
var m transport.Message
if err := sock.Recv(&m); err != nil {
return
}
if err := sock.Send(&m); err != nil {
return
}
}
}
done := make(chan bool)
go func() {
if err := l.Accept(fn); err != nil {
select {
case <-done:
default:
t.Errorf("Unexpected accept err: %v", err)
}
}
}()
c, err := tr.Dial(l.Addr())
if err != nil {
t.Errorf("Unexpected dial err: %v", err)
}<|fim▁hole|> defer c.Close()
m := transport.Message{
Header: map[string]string{
"X-Content-Type": "application/json",
},
Body: []byte(`{"message": "Hello World"}`),
}
if err := c.Send(&m); err != nil {
t.Errorf("Unexpected send err: %v", err)
}
var rm transport.Message
if err := c.Recv(&rm); err != nil {
t.Errorf("Unexpected recv err: %v", err)
}
if string(rm.Body) != string(m.Body) {
t.Errorf("Expected %v, got %v", m.Body, rm.Body)
}
close(done)
}<|fim▁end|> | |
<|file_name|>load-ili-kinds.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
import sqlite3, sys
# It takes one argument: the name of the new database
if (len(sys.argv) < 1):
sys.stderr.write('You need to give the name of the ILI DB\n')
sys.exit(1)
else:
dbfile = sys.argv[1]
################################################################
# CONNECT TO DB<|fim▁hole|>con = sqlite3.connect(dbfile)
c = con.cursor()
################################################################
# USER
################################################################
u = "ili_load-kinds.py"
################################################################
# INSERT POS DATA (CODES AND NAMES)
################################################################
c.execute("""INSERT INTO kind (id, kind, u)
VALUES (?,?,?)""", [1,'concept',u])
c.execute("""INSERT INTO kind (id, kind, u)
VALUES (?,?,?)""", [2,'instance',u])
con.commit()
con.close()
sys.stderr.write('Loaded KIND data in (%s)\n' % (dbfile))<|fim▁end|> | ################################################################ |
<|file_name|>chickenEggs.js<|end_file_name|><|fim▁begin|>import UnitConverter from '../../../UnitConverter'
import { Fraction, mul, div } from '../../../../numbers'
import { Mass } from '../../domains'
import { UnitedStates } from '../../authorities'
import { OunceConverter } from '../../us/customary/mass'
import { ChickenEggs } from '../../systems'
import { Jumbo, VeryLarge, Large, Medium, Small, Peewee } from '../../units'
const jumboScalar = new Fraction(5, 2)
export const JumboConverter =
new UnitConverter(
Mass,
UnitedStates,
ChickenEggs,
Jumbo,
OunceConverter,
value => mul(value, jumboScalar),
value => div(value, jumboScalar))
const veryLargeScalar = new Fraction(9, 4)
export const VeryLargeConverter =
new UnitConverter(
Mass,
UnitedStates,
ChickenEggs,
VeryLarge,
OunceConverter,
value => mul(value, veryLargeScalar),
value => div(value, veryLargeScalar))
const largeScalar = 2
export const LargeConverter =<|fim▁hole|> Large,
OunceConverter,
value => mul(value, largeScalar),
value => div(value, largeScalar))
const mediumScalar = new Fraction(7, 4)
export const MediumConverter =
new UnitConverter(
Mass,
UnitedStates,
ChickenEggs,
Medium,
OunceConverter,
value => mul(value, mediumScalar),
value => div(value, mediumScalar))
const smallScalar = new Fraction(3, 2)
export const SmallConverter =
new UnitConverter(
Mass,
UnitedStates,
ChickenEggs,
Small,
OunceConverter,
value => mul(value, smallScalar),
value => div(value, smallScalar))
const peeweeScalar = new Fraction(5, 4);
export const PeeweeConverter =
new UnitConverter(
Mass,
UnitedStates,
ChickenEggs,
Peewee,
OunceConverter,
value => mul(value, peeweeScalar),
value => div(value, peeweeScalar))
export function collectUnitConverters() {
return [
JumboConverter,
VeryLargeConverter,
LargeConverter,
MediumConverter,
SmallConverter,
PeeweeConverter
]
}<|fim▁end|> | new UnitConverter(
Mass,
UnitedStates,
ChickenEggs, |
<|file_name|>kaiseki.js<|end_file_name|><|fim▁begin|>/*!
* Kaiseki
* Copyright(c) 2012 BJ Basañes / Shiki ([email protected])
* MIT Licensed
*
* See the README.md file for documentation.
*/
var request = require('request');
var _ = require('underscore');
var Kaiseki = function(options) {
if (!_.isObject(options)) {
// Original signature
this.applicationId = arguments[0];
this.restAPIKey = arguments[1];
this.masterKey = null;
this.sessionToken = arguments[2] || null;
this.request = request;
this.baseURL = 'https://api.parse.com';
} else {
// New interface to allow masterKey and custom request function
options = options || {};
this.applicationId = options.applicationId;
this.restAPIKey = options.restAPIKey;
this.masterKey = options.masterKey || null;
this.sessionToken = options.sessionToken || null;
this.request = options.request || request;
this.baseURL = options.serverURL;
}
};
Kaiseki.prototype = {
applicationId: null,
restAPIKey: null,
masterKey: null, // required for deleting files
sessionToken: null,
createUser: function(data, callback) {
this._jsonRequest({
method: 'POST',
url: '/1/users',
params: data,
callback: function(err, res, body, success) {
if (!err && success)
body = _.extend({}, data, body);
callback(err, res, body, success);
}
});
},
getUser: function(objectId, params, callback) {
this._jsonRequest({
url: '/1/users/' + objectId,
params: _.isFunction(params) ? null : params,
callback: _.isFunction(params) ? params : callback
});
},
// Also used for validating a session token
// https://parse.com/docs/rest#users-validating
getCurrentUser: function(sessionToken, callback) {
if (_.isFunction(sessionToken)) {
callback = sessionToken;
sessionToken = undefined;
}
this._jsonRequest({
url: '/1/users/me',
sessionToken: sessionToken,
callback: callback
});
},
loginFacebookUser: function(facebookAuthData, callback) {
this._socialLogin({facebook: facebookAuthData}, callback);
},
loginTwitterUser: function(twitterAuthData, callback) {
this._socialLogin({twitter: twitterAuthData}, callback);
},
loginUser: function(username, password, callback) {
this._jsonRequest({
url: '/1/login',
params: {
username: username,
password: password
},
callback: callback
});
},
updateUser: function(objectId, data, callback) {
this._jsonRequest({
method: 'PUT',
url: '/1/users/' + objectId,
params: data,
callback: callback
});
},
deleteUser: function(objectId, callback) {
this._jsonRequest({
method: 'DELETE',
url: '/1/users/' + objectId,
callback: callback
});
},
getUsers: function(params, callback) {
this._jsonRequest({
url: '/1/users',
params: _.isFunction(params) ? null : params,
callback: _.isFunction(params) ? params : callback
});
},
requestPasswordReset: function(email, callback) {
this._jsonRequest({
method: 'POST',
url: '/1/requestPasswordReset',
params: {'email': email},
callback: callback
});
},
createObjects: function(className, data, callback) {
var requests = [];
for (var i = 0; i < data.length; i++) {
requests.push({
'method': 'POST',
'path': '/1/classes/' + className,
'body': data[i]
});
}
this._jsonRequest({
method: 'POST',
url: '/1/batch/',
params: {
requests: requests
},
callback: function(err, res, body, success) {
if (!err && success)
body = _.extend({}, data, body);
callback(err, res, body, success);
}
});
},
createObject: function(className, data, callback) {
this._jsonRequest({
method: 'POST',
url: '/1/classes/' + className,
params: data,
callback: function(err, res, body, success) {
if (!err && success)
body = _.extend({}, data, body);
callback(err, res, body, success);
}
});
},
getObject: function(className, objectId, params, callback) {
this._jsonRequest({
url: '/1/classes/' + className + '/' + objectId,
params: _.isFunction(params) ? null : params,
callback: _.isFunction(params) ? params : callback
});
},
updateObjects: function(className, updates, callback) {
var requests = [],
update = null;
for (var i = 0; i < updates.length; i++) {
update = updates[i];
requests.push({
'method': 'PUT',
'path': '/1/classes/' + className + '/' + update.objectId,
'body': update.data
});
}
this._jsonRequest({
method: 'POST',
url: '/1/batch/',
params: {
requests: requests
},
callback: callback
});
},
updateObject: function(className, objectId, data, callback) {
this._jsonRequest({
method: 'PUT',
url: '/1/classes/' + className + '/' + objectId,
params: data,
callback: callback
});
},
deleteObject: function(className, objectId, callback) {
this._jsonRequest({
method: 'DELETE',
url: '/1/classes/' + className + '/' + objectId,
callback: callback
});
},
getObjects: function(className, params, callback) {
this._jsonRequest({
url: '/1/classes/' + className,
params: _.isFunction(params) ? null : params,
callback: _.isFunction(params) ? params : callback
});
},
countObjects: function(className, params, callback) {
var paramsMod = params;
if (_.isFunction(params)) {
paramsMod = {};
paramsMod['count'] = 1;
paramsMod['limit'] = 0;
} else {
paramsMod['count'] = 1;
paramsMod['limit'] = 0;
}
this._jsonRequest({
url: '/1/classes/' + className,
params: paramsMod,
callback: _.isFunction(params) ? params : callback
});
},
createRole: function(data, callback) {
this._jsonRequest({
method: 'POST',
url: '/1/roles',
params: data,
callback: function(err, res, body, success) {
if (!err && success)
body = _.extend({}, data, body);
callback(err, res, body, success);
}
});
},
getRole: function(objectId, params, callback) {
this._jsonRequest({
url: '/1/roles/' + objectId,
params: _.isFunction(params) ? null : params,
callback: _.isFunction(params) ? params : callback
});
},
updateRole: function(objectId, data, callback) {
this._jsonRequest({
method: 'PUT',
url: '/1/roles/' + objectId,
params: data,
callback: callback
});
},
deleteRole: function(objectId, callback) {
this._jsonRequest({
method: 'DELETE',
url: '/1/roles/' + objectId,
callback: callback
});
},
getRoles: function(params, callback) {
this._jsonRequest({
url: '/1/roles',
params: _.isFunction(params) ? null : params,
callback: _.isFunction(params) ? params : callback
});
},
uploadFile: function(filePath, fileName, callback) {
if (_.isFunction(fileName)) {
callback = fileName;
fileName = null;
}
var contentType = require('mime').lookup(filePath);
if (!fileName)
fileName = filePath.replace(/^.*[\\\/]/, ''); // http://stackoverflow.com/a/423385/246142
var buffer = require('fs').readFileSync(filePath);
this.uploadFileBuffer(buffer, contentType, fileName, callback);
},
uploadFileBuffer: function(buffer, contentType, fileName, callback) {
this._jsonRequest({
method: 'POST',
url: '/1/files/' + fileName,
body: buffer,
headers: { 'Content-type': contentType },
callback: callback
});
},
deleteFile: function(name, callback) {
this._jsonRequest({
method: 'DELETE',
url: '/1/files/' + name,
callback: callback
});
},
sendPushNotification: function(data, callback) {
this._jsonRequest({
method: 'POST',
url: '/1/push',
params: data,
callback: function(err, res, body, success) {
if (!err && success)
body = _.extend({}, data, body);
callback.apply(this, arguments);
}
});
},
sendAnalyticsEvent: function(eventName, dimensionsOrCallback, callback) {
this._jsonRequest({
method: 'POST',
url: '/1/events/' + eventName,
params: _.isFunction(dimensionsOrCallback) ? {} : dimensionsOrCallback,
callback: _.isFunction(dimensionsOrCallback) ? dimensionsOrCallback : callback
});
},
stringifyParamValues: function(params) {
if (!params || _.isEmpty(params))
return null;
var values = _(params).map(function(value, key) {
if (_.isObject(value) || _.isArray(value))
return JSON.stringify(value);
else
return value;
});
var keys = _(params).keys();
var ret = {};<|fim▁hole|> return ret;
},
_socialLogin: function(authData, callback) {
this._jsonRequest({
method: 'POST',
url: '/1/users',
params: {
authData: authData
},
callback: callback
});
},
_jsonRequest: function(opts) {
var sessionToken = opts.sessionToken || this.sessionToken;
opts = _.omit(opts, 'sessionToken');
opts = _.extend({
method: 'GET',
url: null,
params: null,
body: null,
headers: null,
callback: null
}, opts);
var reqOpts = {
method: opts.method,
headers: {
'X-Parse-Application-Id': this.applicationId,
'X-Parse-REST-API-Key': this.restAPIKey
}
};
if (sessionToken)
reqOpts.headers['X-Parse-Session-Token'] = sessionToken;
if (this.masterKey)
reqOpts.headers['X-Parse-Master-Key'] = this.masterKey;
if (opts.headers)
_.extend(reqOpts.headers, opts.headers);
if (opts.params) {
if (opts.method == 'GET')
opts.params = this.stringifyParamValues(opts.params);
var key = 'qs';
if (opts.method === 'POST' || opts.method === 'PUT')
key = 'json';
reqOpts[key] = opts.params;
} else if (opts.body) {
reqOpts.body = opts.body;
}
this.request(this.baseURL + opts.url, reqOpts, function(err, res, body) {
var isCountRequest = opts.params && !_.isUndefined(opts.params['count']) && !!opts.params.count;
var success = !err && res && (res.statusCode === 200 || res.statusCode === 201);
if (res && res.headers['content-type'] &&
res.headers['content-type'].toLowerCase().indexOf('application/json') >= 0) {
if (body != null && !_.isObject(body) && !_.isArray(body)) // just in case it's been parsed already
body = JSON.parse(body);
if (body != null) {
if (body.error) {
success = false;
} else if (body.results && _.isArray(body.results) && !isCountRequest) {
// If this is a "count" request. Don't touch the body/result.
body = body.results;
}
}
}
opts.callback(err, res, body, success);
});
}
};
module.exports = Kaiseki;<|fim▁end|> | for (var i = 0; i < keys.length; i++)
ret[keys[i]] = values[i]; |
<|file_name|>AtomPub10.java<|end_file_name|><|fim▁begin|>/*
Copyright 2019 Nationale-Nederlanden
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
<|fim▁hole|> distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.extensions.cmis.servlets;
import nl.nn.adapterframework.lifecycle.IbisInitializer;
@IbisInitializer
public class AtomPub10 extends AtomPubServletBase {
private static final long serialVersionUID = 1L;
@Override
public String getUrlMapping() {
return "/cmis/atompub10/*";
}
@Override
protected String getCmisVersionStr() {
return "1.0";
}
}<|fim▁end|> | Unless required by applicable law or agreed to in writing, software |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from rest_framework import serializers
def serializer_factory(model, serializer_class=serializers.ModelSerializer, attrs=None, meta=None):
"""
Generate a simple serializer for the given model class.
:param model: Model class
:param serializer_class: Serializer base class
:param attrs: Serializer class attrs
:param meta: Serializer Meta class attrs
:return: a Serializer class
"""<|fim▁hole|> return type(str("%sSerializer" % model.__name__), (serializer_class,), attrs)<|fim▁end|> | attrs = attrs or {}
meta = meta or {}
meta.setdefault("model", model)
attrs.setdefault("Meta", type(str("Meta"), (object,), meta)) |
<|file_name|>JaxRsClientProxyFactorySupportTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015 the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.jmnarloch.spring.jaxrs.client.support;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import javax.ws.rs.ext.Provider;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
/**
* Tests the {@link JaxRsClientProxyFactorySupport} class.
*
* @author Jakub Narloch
*/
public class JaxRsClientProxyFactorySupportTest {
/**
* The instance of the tested class.
*/
private JaxRsClientProxyFactorySupport instance;
/**
* Sets up the test environment.
*
* @throws Exception if any error occurs
*/
@Before
public void setUp() throws Exception {
instance = new MockJaxRsClientProxyFactorySupport();
}
@Test
public void shouldRetrieveProviders() {
// given
final List<JaxRsClientConfigurer> configurers = Arrays.asList(
mock(JaxRsClientConfigurer.class),
mock(JaxRsClientConfigurer.class)
);
for(JaxRsClientConfigurer conf : configurers) {
doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
((ProviderRegistry)invocation.getArguments()[0]).addProvider(SimpleProvider.class);
return null;
}
}).when(conf).registerProviders(any(ProviderRegistry.class));
}
instance.setConfigurers(configurers);
// when
Class<?>[] providers = instance.getProviders();
// then<|fim▁hole|> assertNotNull(providers);
assertEquals(2, providers.length);
}
private static class MockJaxRsClientProxyFactorySupport extends JaxRsClientProxyFactorySupport {
@Override
public <T> T createClientProxy(Class<T> serviceClass, String serviceUrl) {
return null;
}
}
/**
* A simple provider class used for testing.
*
* @author Jakub Narloch
*/
@Provider
private static class SimpleProvider {
}
}<|fim▁end|> | |
<|file_name|>coinched.rs<|end_file_name|><|fim▁begin|>extern crate coinched;
extern crate clap;
extern crate env_logger;
#[macro_use]
extern crate log;
use std::str::FromStr;
use clap::{Arg, App};
fn main() {
env_logger::init().unwrap();
let matches = App::new("coinched")
.version(env!("CARGO_PKG_VERSION"))
.author("Alexandre Bury <[email protected]>")
.about("A coinche server")
.arg(Arg::with_name("PORT")
.help("Port to listen to (defaults to 3000)")
.short("p")
.long("port")
.takes_value(true))
.get_matches();<|fim▁hole|> Ok(port) => port,
Err(err) => {
println!("Invalid port: `{}` ({})", port, err);
std::process::exit(1);
}
}
} else {
3000
};
let server = coinched::server::http::Server::new(port);
server.run();
}<|fim▁end|> |
let port = if let Some(port) = matches.value_of("PORT") {
match u16::from_str(port) { |
<|file_name|>DayFlagsTest.java<|end_file_name|><|fim▁begin|>package com.getroadmap.r2rlib.models;
import org.junit.Test;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
* Created by jan on 28/08/2017.
* test dayflag bitwise and operator
*/
public class DayFlagsTest {
@Test
public void isSunday() throws Exception {
DayFlags dayFlags = new DayFlags(DayFlags.Companion.getSUNDAY());
assertThat(dayFlags.isDay(DayFlags.Companion.getSUNDAY()), is(true));
}
@Test
public void isWeekday() throws Exception {
DayFlags dayFlags = new DayFlags(DayFlags.Companion.getWEEKDAYS());
assertThat(dayFlags.isDay(DayFlags.Companion.getFRIDAY()), is(true));
}
@Test
public void isNotWeekday() throws Exception {
DayFlags dayFlags = new DayFlags(DayFlags.Companion.getWEEKDAYS());
assertThat(dayFlags.isDay(DayFlags.Companion.getSUNDAY()), is(false));
}
@Test
public void isWeekend() throws Exception {
DayFlags dayFlags = new DayFlags(DayFlags.Companion.getWEEKENDS());
assertThat(dayFlags.isDay(DayFlags.Companion.getSATURDAY()), is(true));
}
@Test
public void isNotWeekend() throws Exception {
DayFlags dayFlags = new DayFlags(DayFlags.Companion.getWEEKENDS());
assertThat(dayFlags.isDay(DayFlags.Companion.getTHURSDAY()), is(false));
}
@Test
public void isAlways() throws Exception {
DayFlags dayFlags = new DayFlags(DayFlags.Companion.getEVERYDAY());
<|fim▁hole|>
@Test
public void isAlwaysWeekend() throws Exception {
DayFlags dayFlags = new DayFlags(DayFlags.Companion.getEVERYDAY());
assertThat(dayFlags.isDay(DayFlags.Companion.getWEEKENDS()), is(true));
}
@Test
public void isNever() throws Exception {
DayFlags dayFlags = new DayFlags(DayFlags.Companion.getNEVER());
assertThat(dayFlags.isDay(DayFlags.Companion.getSATURDAY()), is(false));
}
}<|fim▁end|> | assertThat(dayFlags.isDay(DayFlags.Companion.getSATURDAY()), is(true));
} |
<|file_name|>test_behave.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python -tt
from behave import *
import os
import subprocess
import glob
import re
import shutil
DNF_FLAGS = ['-y', '--disablerepo=*', '--nogpgcheck']
RPM_INSTALL_FLAGS = ['-Uvh']
RPM_ERASE_FLAGS = ['-e']
def _left_decorator(item):
""" Removed packages """
return u'-' + item
def _right_decorator(item):
""" Installed packages """
return u'+' + item
def find_pkg(pkg):
""" Find the package file in the repository """
candidates = glob.glob('/repo/' + pkg + '*.rpm')
if len(candidates) == 0:
print("No candidates for: '{0}'".format(pkg))
assert len(candidates) == 1
return candidates[0]
def decorate_rpm_packages(pkgs):
""" Converts package names like TestA, TestB into absolute paths """
return [find_pkg(p) for p in pkgs]
def get_rpm_package_list():
""" Gets all installed packages in the system """
pkgstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}\n'])
return pkgstr.splitlines()
def get_rpm_package_version_list():
""" Gets all installed packages in the system with version"""
pkgverstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}-%{VERSION}-%{RELEASE}\n'])
return pkgverstr.splitlines()
def get_dnf_package_version_list():
""" Gets all installed packages in the system with version to check that dnf has same data about installed packages"""
pkgverstr = subprocess.check_output(['dnf', 'repoquery', '--installed', '-Cq', '--queryformat', '%{name}.%{version}.%{release}\n'])
pkgverstr = pkgverstr.splitlines()
return pkgverstr
def diff_package_lists(a, b):
""" Computes both left/right diff between lists `a` and `b` """
sa, sb = set(a), set(b)
return (map(_left_decorator, list(sa - sb)),
map(_right_decorator, list(sb - sa)))
def package_version_lists(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if x.startswith(pkg)]
assert len(found_pkgs) == 1
return str(found_pkgs[0])
def package_absence(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if re.search('^' + pkg, x)]
assert len(found_pkgs) == 0
return None
def execute_dnf_command(cmd, reponame):
""" Execute DNF command with default flags and the specified `reponame` enabled """
flags = DNF_FLAGS + ['--enablerepo={0}'.format(reponame)]
return subprocess.check_call(['dnf'] + flags + cmd, stdout=subprocess.PIPE)
def execute_rpm_command(pkg, action):
""" Execute given action over specified pkg(s) """
if not isinstance(pkg, list):
pkg = [pkg]
if action == "remove":
rpm_command = RPM_ERASE_FLAGS
elif action == "install":
rpm_command = RPM_INSTALL_FLAGS
pkg = decorate_rpm_packages(pkg)
return subprocess.check_call(['rpm'] + rpm_command + pkg, stdout=subprocess.PIPE)
def piecewise_compare(a, b):
""" Check if the two sequences are identical regardless of ordering """
return sorted(a) == sorted(b)
def split(pkgs):
return [p.strip() for p in pkgs.split(',')]
@given('I use the repository "{repo}"')
def given_repo_condition(context, repo):
""" :type context: behave.runner.Context """
assert repo<|fim▁hole|> assert os.path.exists('/var/www/html/repo/' + repo)
for root, dirs, files in os.walk('/repo'):
for f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
subprocess.check_call(['cp -rs /var/www/html/repo/' + repo + '/* /repo/'], shell=True)
with open('/etc/yum.repos.d/' + repo + '.repo', 'w') as f:
f.write('[' + repo + ']\nname=' + repo + '\nbaseurl=http://127.0.0.1/repo/' + repo + '\nenabled=1\ngpgcheck=0')
@when('I "{action}" a package "{pkgs}" with "{manager}"')
def when_action_package(context, action, pkgs, manager):
assert pkgs
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
if manager == 'rpm':
if action in ["install", "remove"]:
execute_rpm_command(split(pkgs), action)
else:
raise AssertionError('The action {} is not allowed parameter with rpm manager'.format(action))
elif manager == 'dnf':
if action == 'upgrade':
if pkgs == 'all':
execute_dnf_command([action], context.repo)
else:
execute_dnf_command([action] + split(pkgs), context.repo)
elif action == 'autoremove':
subprocess.check_call(['dnf', '-y', action], stdout=subprocess.PIPE)
elif action in ["install", "remove", "downgrade", "upgrade-to"]:
execute_dnf_command([action] + split(pkgs), context.repo)
else:
raise AssertionError('The action {} is not allowed parameter with dnf manager'.format(action))
else:
raise AssertionError('The manager {} is not allowed parameter'.format(manager))
@when('I execute command "{command}" with "{result}"')
def when_action_command(context, command, result):
assert command
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
cmd_output = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
context.cmd_rc = cmd_output.returncode
if result == "success":
assert context.cmd_rc == 0
elif result == "fail":
assert context.cmd_rc != 0
else:
raise AssertionError('The option {} is not allowed option for expected result of command. '
'Allowed options are "success" and "fail"'.format(result))
@then('package "{pkgs}" should be "{state}"')
def then_package_state(context, pkgs, state):
assert pkgs
pkgs_rpm = get_rpm_package_list()
pkgs_rpm_ver = get_rpm_package_version_list()
pkgs_dnf_ver = get_dnf_package_version_list()
assert pkgs_rpm
assert context.pre_rpm_packages
removed, installed = diff_package_lists(context.pre_rpm_packages, pkgs_rpm)
assert removed is not None and installed is not None
for n in split(pkgs):
if state == 'installed':
assert ('+' + n) in installed
installed.remove('+' + n)
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'removed':
assert ('-' + n) in removed
removed.remove('-' + n)
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'absent':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'upgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver > pre_rpm_ver
elif state == 'unupgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver == pre_rpm_ver
elif state == 'downgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver < pre_rpm_ver
elif state == 'present':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'upgraded-to':
assert n in package_version_lists(n, pkgs_rpm_ver)
else:
raise AssertionError('The state {} is not allowed option for Then statement'.format(state))
""" This checks that installations/removals are always fully specified,
so that we always cover the requirements/expecations entirely """
if state in ["installed", "removed"]:
assert not installed and not removed
@then('exit code of command should be equal to "{exit_code}"')
def then_package_state(context, exit_code):
exit_code = int(exit_code)
assert context.cmd_rc == exit_code<|fim▁end|> | context.repo = repo |
<|file_name|>GenerateFeaturesXmlMojoTest.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.felix.karaf.tooling.features;<|fim▁hole|>import org.easymock.EasyMock;
import static org.easymock.EasyMock.*;
import junit.framework.TestCase;
/**
* Test cases for {@link GenerateFeaturesXmlMojo}
*/
public class GenerateFeaturesXmlMojoTest extends TestCase {
public void testToString() throws Exception {
Artifact artifact = EasyMock.createMock(Artifact.class);
expect(artifact.getGroupId()).andReturn("org.apache.felix.karaf.test");
expect(artifact.getArtifactId()).andReturn("test-artifact");
expect(artifact.getVersion()).andReturn("1.2.3");
replay(artifact);
assertEquals("org.apache.felix.karaf.test/test-artifact/1.2.3", GenerateFeaturesXmlMojo.toString(artifact));
}
}<|fim▁end|> |
import org.apache.maven.artifact.Artifact; |
<|file_name|>test_lws_logger.py<|end_file_name|><|fim▁begin|>"""Test cases for JSON lws_logger module, assumes Pytest."""
<|fim▁hole|>from jsonutils.lws import lws_logger
class TestDictToTreeHelpers:
"""Test the helper functions for dict_to_tree."""
def test_flatten_list(self):
"""Test flattening of nested lists."""
f = lws_logger.flatten_list
nested = [1, [2, 3, [[4], 5]]]
assert list(f(nested)) == [1, 2, 3, 4, 5]
nested = [[[1]]]
assert list(f(nested)) == [1]
flat = [1, 2]
assert list(f(flat)) == [1, 2]
def test_filter_errors(self):
"""Test error filtering (helper function to filter_keys)."""
f = lws_logger.filter_errors
errors = {'key': 99,
'key_str': 'key error',
'val': -99,
'val_str': 'val error'}
seq = [100, 99, 99, 99]
assert f(seq, errors) == [100]
seq = [99]
assert f(seq, errors) == ['key error']
seq = [-99, -99, 100]
assert f(seq, errors) == [100]
seq = [-99, -99]
assert f(seq, errors) == ['val error']
def test_filter_errors_single(self):
"""Test list error term filtering, single error."""
f = lws_logger.filter_keys
errors = {'key': 99,
'key_str': 'key error',
'val': -99,
'val_str': 'val error'}
pairs = [('a', 'hi'), ('a', 99), ('b', 'hi')]
filtered = [('a', 'hi'), ('b', 'hi')]
assert f(pairs, errors) == filtered
def test_filter_errors_multiple(self):
"""Test list error term filtering, multiple errors."""
f = lws_logger.filter_keys
errors = {'key': 99,
'key_str': 'key error',
'val': -99,
'val_str': 'val error'}
pairs = [('a', 'hi'), ('a', 99), ('a', 99),
('b', 'hi'), ('b', -99)]
filtered = [('a', 'hi'), ('b', 'hi')]
assert f(pairs, errors) == filtered
def test_filter_errors_only(self):
"""Test list error term filtering, only errors."""
f = lws_logger.filter_keys
errors = {'key': 99,
'key_str': 'key error',
'val': -99,
'val_str': 'val error'}
pairs = [('a', 99), ('b', -99)]
filtered = [('a', 'key error'), ('b', 'val error')]
assert f(pairs, errors) == filtered
class TestLoggerHelpers:
"""Test the helper functions for logger."""
def test_dict_to_tree_simple(self):
"""Test dict_to_tree simple dicts."""
f = lws_logger.dict_to_tree
simple_d = {'root': ['a', 'b']}
flat_list = [('root', 0), [('a', 1)], [('b', 1)]]
assert f(simple_d, 'root', [('root', 0)]) == flat_list
nested_d = {'root': ['a', 'b'], 'a': ['one', 'two']}
nested_list = [('root', 0), [('a', 1), [('one', 2)], [('two', 2)]],
[('b', 1)]]
assert f(nested_d, 'root', [('root', 0)]) == nested_list
def test_parse_errors_one(self):
"""Test scenario with one type of error."""
f = lws_logger.parse_errors
errors = {'key_str': 'key error',
'val_str': 'val error'}
nodes = [('one', 'key error'), ('two', 3), ('three', 3)]
output = 'Key Errors:\t1\nValue Errors:\t0'
assert f(nodes, errors) == (1, 0, output)
def test_parse_errors_both(self):
"""Test scenario with two types of errors."""
f = lws_logger.parse_errors
errors = {'key_str': 'key error',
'val_str': 'val error'}
nodes = [('one', 'key error'), ('two', 3), ('three', 3),
('four', 'val error')]
output = 'Key Errors:\t1\nValue Errors:\t1'
assert f(nodes, errors) == (1, 1, output)
def test_format_node(self):
"""Test node to string function."""
f = lws_logger.format_node
assert f('a', '----', 1) == '|----a'
assert f('a', '----', 2) == ' |----a'<|fim▁end|> | |
<|file_name|>LiteralExpressionTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.api.definition.model;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.dmn.api.definition.HasTypeRef;
import org.kie.workbench.common.dmn.api.property.dmn.Description;
import org.kie.workbench.common.dmn.api.property.dmn.ExpressionLanguage;
import org.kie.workbench.common.dmn.api.property.dmn.Id;
import org.kie.workbench.common.dmn.api.property.dmn.Text;
import org.kie.workbench.common.dmn.api.property.dmn.types.BuiltInType;
import org.mockito.runners.MockitoJUnitRunner;
import static java.util.Collections.singletonList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
@RunWith(MockitoJUnitRunner.class)
public class LiteralExpressionTest {
private static final String LITERAL_ID = "LITERAL-ID";
private static final String DESCRIPTION = "DESCRIPTION";
private static final String TEXT = "TEXT";
private static final String EXPRESSION_LANGUAGE = "EXPRESSION-LANGUAGE";
private LiteralExpression literalExpression;
@Before
public void setup() {
this.literalExpression = new LiteralExpression();
}
@Test
public void testGetHasTypeRefs() {
final java.util.List<HasTypeRef> actualHasTypeRefs = literalExpression.getHasTypeRefs();
final java.util.List<HasTypeRef> expectedHasTypeRefs = singletonList(literalExpression);
assertEquals(expectedHasTypeRefs, actualHasTypeRefs);
}
@Test
public void testComponentWidths() {
assertEquals(literalExpression.getRequiredComponentWidthCount(),
literalExpression.getComponentWidths().size());
literalExpression.getComponentWidths().forEach(Assert::assertNull);
}
@Test
public void testCopy() {
final LiteralExpression source = new LiteralExpression(
new Id(LITERAL_ID),
new Description(DESCRIPTION),
BuiltInType.BOOLEAN.asQName(),
new Text(TEXT),
null,
new ExpressionLanguage(EXPRESSION_LANGUAGE)
);
final LiteralExpression target = source.copy();
assertNotNull(target);
assertNotEquals(LITERAL_ID, target.getId());
assertEquals(DESCRIPTION, target.getDescription().getValue());
assertEquals(BuiltInType.BOOLEAN.asQName(), target.getTypeRef());
assertEquals(TEXT, target.getText().getValue());<|fim▁hole|><|fim▁end|> | assertNull(target.getImportedValues());
assertEquals(EXPRESSION_LANGUAGE, target.getExpressionLanguage().getValue());
}
} |
<|file_name|>events_receiver.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 Matthias Klumpp <[email protected]>
#
# Licensed under the GNU Lesser General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the license, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import os
import zmq
import json
import logging as log
from zmq.eventloop import ioloop, zmqstream
from laniakea.msgstream import verify_event_message, event_message_is_valid_and_signed
class EventsReceiver:
'''
Lighthouse module handling event stream submissions,
registering them and publishing them to the world.
'''
def __init__(self, endpoint, pub_queue):
from glob import glob
from laniakea.localconfig import LocalConfig
from laniakea.msgstream import keyfile_read_verify_key
self._socket = None
self._ctx = zmq.Context.instance()
self._pub_queue = pub_queue
self._endpoint = endpoint
self._trusted_keys = {}
# TODO: Implement auto-reloading of valid keys list if directory changes
for keyfname in glob(os.path.join(LocalConfig().trusted_curve_keys_dir, '*')):
signer_id, verify_key = keyfile_read_verify_key(keyfname)
if signer_id and verify_key:
self._trusted_keys[signer_id] = verify_key
def _event_message_received(self, socket, msg):
data = str(msg[1], 'utf-8', 'replace')
try:
event = json.loads(data)
except json.JSONDecodeError as e:
# we ignore invalid requests<|fim▁hole|> return
# check if the message is actually valid and can be processed
if not event_message_is_valid_and_signed(event):
# we currently just silently ignore invalid submissions
return
signatures = event.get('signatures')
signature_checked = False
for signer in signatures.keys():
key = self._trusted_keys.get(signer)
if not key:
continue
try:
verify_event_message(signer, event, key, assume_valid=True)
except Exception as e:
log.info('Invalid signature on event ({}): {}'.format(str(e), str(event)))
return
# if we are here, we verified a signature without issues, which means
# the message is legit and we can sign it ourselves and publish it
signature_checked = True
if not signature_checked:
log.info('Unable to verify signature on event: {}'.format(str(event)))
return
# now publish the event to the world
self._pub_queue.put([bytes(event['tag'], 'utf-8'),
bytes(data, 'utf-8')])
def run(self):
if self._socket:
log.warning('Tried to run an already running event receiver again.')
return
self._socket = self._ctx.socket(zmq.ROUTER)
self._socket.bind(self._endpoint)
server_stream = zmqstream.ZMQStream(self._socket)
server_stream.on_recv_stream(self._event_message_received)
ioloop.IOLoop.instance().start()<|fim▁end|> | log.info('Received invalid JSON message from sender: %s (%s)', data if len(data) > 1 else msg, str(e)) |
<|file_name|>alert.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
import { NavController, AlertController } from 'ionic-angular';
@Component({
templateUrl: 'build/pages/alert/alert.html',
})
export class AlertPage {
user: string = 'nome';
constructor(public navCtrl: NavController, public alertCtrl: AlertController) { }
showAlert() {
let alert = this.alertCtrl.create({
title: 'Alert Test',
message: "Digite seu nome",
inputs: [
{
name: 'nome',
placeholder: 'Seu nome'
}
],
buttons: [
{
text: 'Cancelar'
},
{
text: 'Ok',
handler: (data) => {
this.user = data.nome;
}
}
]
});
<|fim▁hole|> }
}<|fim▁end|> | alert.present(); |
<|file_name|>AuiTabs.java<|end_file_name|><|fim▁begin|>package com.atlassian.pageobjects.components.aui;
import com.atlassian.pageobjects.PageBinder;
import com.atlassian.pageobjects.binder.Init;
import com.atlassian.pageobjects.binder.InvalidPageStateException;
import com.atlassian.pageobjects.components.TabbedComponent;
import com.atlassian.pageobjects.elements.PageElement;
import com.atlassian.pageobjects.elements.PageElementFinder;
import com.atlassian.pageobjects.elements.query.Poller;
import org.openqa.selenium.By;
import javax.inject.Inject;
import java.util.List;
/**
* Represents a tabbed content area created via AUI.
*
* This is an example of a reusable components.
*/
public class AuiTabs implements TabbedComponent
{
@Inject
protected PageBinder pageBinder;
@Inject
protected PageElementFinder elementFinder;
private final By rootLocator;
private PageElement rootElement;
public AuiTabs(By locator)
{
this.rootLocator = locator;
}
@Init
public void initialize()
{
this.rootElement = elementFinder.find(rootLocator);
}
public PageElement selectedTab()
{
List<PageElement> items = rootElement.find(By.className("tabs-menu")).findAll(By.tagName("li"));
for(int i = 0; i < items.size(); i++)
{
PageElement tab = items.get(i);
if(tab.hasClass("active-tab"))
{
return tab;
}
}
throw new InvalidPageStateException("A tab must be active.", this);
}
public PageElement selectedView()
{
List<PageElement> panes = rootElement.findAll(By.className("tabs-pane"));
for(int i = 0; i < panes.size(); i++)
{
PageElement pane = panes.get(i);
if(pane.hasClass("active-pane"))
{
return pane;
}
}
throw new InvalidPageStateException("A pane must be active", this);
}
public List<PageElement> tabs()
{
return rootElement.find(By.className("tabs-menu")).findAll(By.tagName("li"));
}
public PageElement openTab(String tabText)
{
List<PageElement> tabs = rootElement.find(By.className("tabs-menu")).findAll(By.tagName("a"));
for(int i = 0; i < tabs.size(); i++)
{
if(tabs.get(i).getText().equals(tabText))
{
PageElement listItem = tabs.get(i);
listItem.click();
// find the pane and wait until it has class "active-pane"<|fim▁hole|> String tabViewClassName = tabViewHref.substring(tabViewHref.indexOf('#') + 1);
PageElement pane = rootElement.find(By.id(tabViewClassName));
Poller.waitUntilTrue(pane.timed().hasClass("active-pane"));
return pane;
}
}
throw new InvalidPageStateException("Tab not found", this);
}
public PageElement openTab(PageElement tab)
{
String tabIdentifier = tab.getText();
return openTab(tabIdentifier);
}
}<|fim▁end|> | String tabViewHref = listItem.getAttribute("href"); |
<|file_name|>ListMainInfo.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'
const invariant = require('invariant');
import {ListMainInfoLeft} from "./ListMainInfoLeft";
import {ListMainInfoDescription} from "./ListMainInfoDescription";
import {ListAdditionalInfo} from "./ListAdditionalInfo";
import {ListAdditionalInfoStacked} from "./ListAdditionalInfoStacked";
interface ItemsDto {
left?: React.ReactElement<any>
description?: React.ReactElement<any>
additional: React.ReactElement<any>[]
}
export class ListMainInfo extends React.Component<any, any> {
static get Left(): typeof ListMainInfoLeft {
return ListMainInfoLeft;
}
static get Description(): typeof ListMainInfoDescription {
return ListMainInfoDescription;
}
static get Additional(): typeof ListAdditionalInfo {
return ListAdditionalInfo;
}
static get AdditionalStacked(): typeof ListAdditionalInfoStacked {
return ListAdditionalInfoStacked;
}
private findItems(children: React.ReactNode): ItemsDto {
let left = null;
let description = null;
let additional = [];
const allChildren = React.Children.toArray(children);
allChildren.forEach(child => {
if(React.isValidElement(child)) {
switch (child.type) {
case ListMainInfoLeft:
invariant(left == null, 'Only one Left element allowed.');
left = child;
break;
case ListMainInfoDescription:
invariant(description == null, 'Only one Description element allowed.');
description = child;
break;
case ListAdditionalInfo:
additional.push(child);
break;<|fim▁hole|> break;
default:
invariant(false, 'Only Left, Description and Additional elements allowed as child for ListMainInfo')
}
}
});
return {left, description, additional}
}
render(): React.ReactElement<any> {
const items = this.findItems(this.props.children);
return <div className="list-view-pf-main-info">
{items.left}
<div className="list-view-pf-body">
{items.description}
<div className="list-view-pf-additional-info">
{items.additional}
</div>
</div>
</div>
}
}<|fim▁end|> | case ListAdditionalInfoStacked:
additional.push(child); |
<|file_name|>agent.go<|end_file_name|><|fim▁begin|>/*
gopm (Go Package Manager)
Copyright (c) 2012 cailei ([email protected])
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package main
import (
"fmt"
"github.com/cailei/gopm_index/gopm/index"
"github.com/hailiang/gosocks"
"io"
"io/ioutil"
"log"
"net/http"
"net/url"
"os"
)
type Agent struct {
client *http.Client
}
func newAgent() *Agent {
client := http.DefaultClient
// check if using a proxy
proxy_addr := os.Getenv("GOPM_PROXY")
if proxy_addr != "" {
fmt.Printf("NOTE: Using socks5 proxy: %v\n", proxy_addr)
proxy := socks.DialSocksProxy(socks.SOCKS5, proxy_addr)
transport := &http.Transport{Dial: proxy}
client = &http.Client{Transport: transport}
}
return &Agent{client}
}
func (agent *Agent) getFullIndexReader() io.Reader {
request := remote_db_host + "/all"
return agent._get_body_reader(request)
}
func (agent *Agent) uploadPackage(meta index.PackageMeta) {
request := fmt.Sprintf("%v/publish", remote_db_host)
// marshal PackageMeta to json
json, err := meta.ToJson()
if err != nil {
log.Fatalln(err)
}
// create a POST request
response, err := http.PostForm(request, url.Values{"pkg": {string(json)}})
if err != nil {
log.Fatalln(err)
}
body, err := ioutil.ReadAll(response.Body)
defer response.Body.Close()
if err != nil {
log.Fatalln(err)
}
if len(body) > 0 {
fmt.Println(string(body))
}
// check response
if response.StatusCode != 200 {
log.Fatalln(response.Status)<|fim▁hole|>}
func (agent *Agent) _get_body_reader(request string) io.ReadCloser {
// GET the index content
response, err := agent.client.Get(request)
if err != nil {
log.Fatalln(err)
}
// check response
if response.StatusCode != 200 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
log.Fatalln(err)
}
if len(body) > 0 {
fmt.Println(string(body))
}
log.Fatalln(response.Status)
}
return response.Body
}<|fim▁end|> | } |
<|file_name|>Status.java<|end_file_name|><|fim▁begin|>package org.myrobotlab.framework;
import static org.myrobotlab.framework.StatusLevel.DEBUG;
import static org.myrobotlab.framework.StatusLevel.ERROR;
import static org.myrobotlab.framework.StatusLevel.INFO;
import static org.myrobotlab.framework.StatusLevel.SUCCESS;
import static org.myrobotlab.framework.StatusLevel.WARN;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringWriter;
import java.util.Objects;
import org.myrobotlab.codec.CodecUtils;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.slf4j.Logger;
/**
* Goal is to have a very simple Pojo with only a few (native Java helper
* methods) WARNING !!! - this class used to extend Exception or Throwable - but
* the gson serializer would stack overflow with self reference issue
*
* TODO - allow radix tree searches for "keys" ???
*
*/
public class Status implements Serializable {// extends Exception {
private static final long serialVersionUID = 1L;
public final static Logger log = LoggerFactory.getLogger(Status.class);
public String name; // service name ???
/**
* FIXME - should probably be an enum now that serialization mostly works now
* with enums [debug|info|warn|error|success] - yes the last part is different
* than "logging" but could still be a status...
*
*/
public String level;
<|fim▁hole|> * key, whilst the "detail" is for "changing" detail. This becomes important
* when Stati are aggregated - and humans are interested in "high" counts of
* specific Status while the details are not important unless diagnosing one.
*
* Violating Servo limits is a good example - "key" can be "Outside servo
* limits". The key can contain spaces and punctuation - the important part is
* that it is STATIC.
*
* "details" contain dynamic specifics - for example: "key":"Outside servo
* limits", "detail":"servo01 moveTo(75) limit is greater than 100"
*/
public String key;
/**
* Dynamic of verbose explanation of the status. e.g. "detail":"servo01
* moveTo(75) limit is greater than 100" or complete stack trace from an
* exception
*/
public String detail;
/**
* optional source of status
*/
public Object source;
// --- static creation of typed Status objects ----
public static Status debug(String format, Object... args) {
Status status = new Status(String.format(format, args));
status.level = DEBUG;
return status;
}
public static Status error(Exception e) {
Status s = new Status(e);
s.level = ERROR;
return s;
}
public static Status error(String msg) {
Status s = new Status(msg);
s.level = ERROR;
return s;
}
public static Status error(String format, Object... args) {
Status status = new Status(String.format(format, args));
status.level = ERROR;
return status;
}
public static Status warn(String msg) {
Status s = new Status(msg);
s.level = ERROR;
return s;
}
public static Status warn(String format, Object... args) {
Status status = new Status(String.format(format, args));
status.level = WARN;
return status;
}
public static Status info(String msg) {
Status s = new Status(msg);
s.level = INFO;
return s;
}
public static Status info(String format, Object... args) {
String formattedInfo = String.format(format, args);
Status status = new Status(formattedInfo);
status.level = INFO;
return status;
}
public final static String stackToString(final Throwable e) {
StringWriter sw;
try {
sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
} catch (Exception e2) {
return "bad stackToString";
}
return "------\r\n" + sw.toString() + "------\r\n";
}
public Status(Exception e) {
this.level = ERROR;
StringWriter sw;
try {
sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
detail = sw.toString();
} catch (Exception e2) {
}
this.key = String.format("%s - %s", e.getClass().getSimpleName(), e.getMessage());
}
public Status(Status s) {
if (s == null) {
return;
}
this.name = s.name;
this.level = s.level;
this.key = s.key;
this.detail = s.detail;
}
/**
* for minimal amount of information error is assumed, and info is detail of
* an ERROR
*
* @param detail
* d
*/
public Status(String detail) {
this.level = ERROR;
this.detail = detail;
}
public Status(String name, String level, String key, String detail) {
this.name = name;
this.level = level;
this.key = key;
this.detail = detail;
}
public boolean isDebug() {
return DEBUG.equals(level);
}
public boolean isError() {
return ERROR.equals(level);
}
public boolean isInfo() {
return INFO.equals(level);
}
public boolean isWarn() {
return WARN.equals(level);
}
@Override
public String toString() {
StringBuffer sb = new StringBuffer();
if (name != null) {
sb.append(name);
sb.append(" ");
}
if (level != null) {
sb.append(level);
sb.append(" ");
}
if (key != null) {
sb.append(key);
sb.append(" ");
}
if (detail != null) {
sb.append(detail);
}
return sb.toString();
}
static public final Status newInstance(String name, String level, String key, String detail) {
Status s = new Status(name, level, key, detail);
return s;
}
@Override
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof Status)) {
return false;
}
Status status = (Status) o;
return Objects.equals(name, status.name) && Objects.equals(level, status.level) && Objects.equals(key, status.key) && Objects.equals(detail, status.detail);
}
@Override
public int hashCode() {
return Objects.hash(name, level, key, detail);
}
public static void main(String[] args) throws IOException, InterruptedException {
LoggingFactory.init(Level.INFO);
Status test = new Status("i am pessimistic");
// Status subTest = new Status("i am sub pessimistic");
// test.add(subTest);
String json = CodecUtils.toJson(test);
Status z = CodecUtils.fromJson(json, Status.class);
log.info(json);
log.info(z.toString());
}
public static Status success() {
Status s = new Status(SUCCESS);
s.level = SUCCESS;
return s;
}
public boolean isSuccess() {
return SUCCESS.equals(level);
}
public static Status success(String detail) {
Status s = new Status(SUCCESS);
s.level = SUCCESS;
s.detail = detail;
return s;
}
}<|fim▁end|> | /**
* The key is the non changing part and good identifier of what went on... For
* Exceptions I would recommend the Exception.class.getSimpleName() for the
|
<|file_name|>foreign-dupe.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// calling pin_thread and that's having weird side-effects.
// pretty-expanded FIXME #23616
#![feature(libc)]
mod rustrt1 {
extern crate libc;
#[link(name = "rust_test_helpers")]
extern {
pub fn rust_get_test_int() -> libc::intptr_t;
}
}
mod rustrt2 {
extern crate libc;
extern {
pub fn rust_get_test_int() -> libc::intptr_t;
}
}
pub fn main() {
unsafe {
rustrt1::rust_get_test_int();
rustrt2::rust_get_test_int();
}
}<|fim▁end|> | |
<|file_name|>XmlInterrogator.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013 Red Hat, Inc. and/or its affiliates.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Brad Davis - [email protected] - Initial API and implementation
*/
package org.jboss.windup.interrogator.impl;
<|fim▁hole|>import java.io.File;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jboss.windup.metadata.decoration.Summary;
import org.jboss.windup.metadata.decoration.AbstractDecoration.NotificationLevel;
import org.jboss.windup.metadata.type.FileMetadata;
import org.jboss.windup.metadata.type.XmlMetadata;
import org.jboss.windup.metadata.type.ZipEntryMetadata;
import org.w3c.dom.Document;
/**
* Interrogates XML files. Extracts the XML, and creates an XmlMetadata object, which is passed down
* the decorator pipeline.
*
* @author bdavis
*
*/
public class XmlInterrogator extends ExtensionInterrogator<XmlMetadata> {
private static final Log LOG = LogFactory.getLog(XmlInterrogator.class);
@Override
public void processMeta(XmlMetadata fileMeta) {
Document document = fileMeta.getParsedDocument();
if (document == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Document was null. Problem parsing: " + fileMeta.getFilePointer().getAbsolutePath());
}
// attach the bad file so we see it in the reports...
fileMeta.getArchiveMeta().getEntries().add(fileMeta);
return;
}
super.processMeta(fileMeta);
}
@Override
public boolean isOfInterest(XmlMetadata fileMeta) {
return true;
}
@Override
public XmlMetadata archiveEntryToMeta(ZipEntryMetadata archiveEntry) {
File file = archiveEntry.getFilePointer();
LOG.debug("Processing XML: " + file.getAbsolutePath());
FileMetadata meta = null;
if (file.length() > 1048576L * 1) {
LOG.warn("XML larger than 1 MB: " + file.getAbsolutePath() + "; Skipping processing.");
meta = new FileMetadata();
meta.setArchiveMeta(archiveEntry.getArchiveMeta());
meta.setFilePointer(file);
Summary sr = new Summary();
sr.setDescription("File is too large; skipped.");
sr.setLevel(NotificationLevel.WARNING);
meta.getDecorations().add(sr);
}
else {
XmlMetadata xmlMeta = new XmlMetadata();
xmlMeta.setArchiveMeta(archiveEntry.getArchiveMeta());
xmlMeta.setFilePointer(file);
meta = xmlMeta;
return xmlMeta;
}
return null;
}
@Override
public XmlMetadata fileEntryToMeta(FileMetadata entry) {
File file = entry.getFilePointer();
LOG.debug("Processing XML: " + file.getAbsolutePath());
FileMetadata meta = null;
if (file.length() > 1048576L * 1) {
LOG.warn("XML larger than 1 MB: " + file.getAbsolutePath() + "; Skipping processing.");
meta = new FileMetadata();
//meta.setArchiveMeta(archiveEntry.getArchiveMeta());
meta.setFilePointer(file);
meta.setArchiveMeta(entry.getArchiveMeta());
Summary sr = new Summary();
sr.setDescription("File is too large; skipped.");
sr.setLevel(NotificationLevel.WARNING);
meta.getDecorations().add(sr);
}
else {
XmlMetadata xmlMeta = new XmlMetadata();
xmlMeta.setArchiveMeta(entry.getArchiveMeta());
xmlMeta.setFilePointer(file);
meta = xmlMeta;
return xmlMeta;
}
return null;
}
}<|fim▁end|> | |
<|file_name|>formButtons.directive.js<|end_file_name|><|fim▁begin|><|fim▁hole|> return {
restrict: 'AE',
replace: true,
scope: {
submitClick: '&submitClick',
cancelClick: '&cancelClick'
},
templateUrl: '/src/utils/views/formButtons.tmpl.html',
link: function (scope, elem) {
angular.element(elem[0].getElementsByClassName('form-button-submit')).on('click', function () {
scope.submitClick();
});
angular.element(elem[0].getElementsByClassName('form-button-cancel')).on('click', function () {
scope.cancelClick();
});
}
};
}<|fim▁end|> | module.exports = FormButtonsDirective;
function FormButtonsDirective () { |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.