prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---
<|file_name|>test.js<|end_file_name|><|fim▁begin|>/**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var tape = require( 'tape' );
var isnan = require( '@stdlib/math/base/assert/is-nan' );
var isPositiveZero = require( '@stdlib/math/base/assert/is-positive-zero' );
var PINF = require( '@stdlib/constants/float64/pinf' );
var NINF = require( '@stdlib/constants/float64/ninf' );
var Float64Array = require( '@stdlib/array/float64' );
var minmaxabs = require( './../lib' );
// TESTS //
tape( 'main export is a function', function test( t ) {
t.ok( true, __filename );
t.strictEqual( typeof minmaxabs, 'function', 'main export is a function' );
t.end();
});
tape( 'the function returns `NaN` for both the minimum and maximum absolute value if provided a `NaN`', function test( t ) {
var v;
v = minmaxabs( NaN, 3.14 );
t.strictEqual( isnan( v[ 0 ] ), true, 'returns NaN' );
t.strictEqual( isnan( v[ 1 ] ), true, 'returns NaN' );
v = minmaxabs( 3.14, NaN );
t.strictEqual( isnan( v[ 0 ] ), true, 'returns NaN' );
t.strictEqual( isnan( v[ 1 ] ), true, 'returns NaN' );
v = minmaxabs( NaN, NaN );
t.strictEqual( isnan( v[ 0 ] ), true, 'returns NaN' );
t.strictEqual( isnan( v[ 1 ] ), true, 'returns NaN' );
v = minmaxabs( NaN );
t.strictEqual( isnan( v[ 0 ] ), true, 'returns NaN' );
t.strictEqual( isnan( v[ 1 ] ), true, 'returns NaN' );
v = minmaxabs( 3.14, 4.2, NaN );
t.strictEqual( isnan( v[ 0 ] ), true, 'returns NaN' );
t.strictEqual( isnan( v[ 1 ] ), true, 'returns NaN' );
v = minmaxabs( NaN, 4.2, NaN );
t.strictEqual( isnan( v[ 0 ] ), true, 'returns NaN' );
t.strictEqual( isnan( v[ 1 ] ), true, 'returns NaN' );
v = minmaxabs( NaN, NaN, NaN );
t.strictEqual( isnan( v[ 0 ] ), true, 'returns NaN' );
t.strictEqual( isnan( v[ 1 ] ), true, 'returns NaN' );
t.end();
});
tape( 'the function returns `Infinity` as the maximum value if provided `-Infinity`', function test( t ) {
var v;
v = minmaxabs( NINF, 3.14 );
t.strictEqual( v[ 0 ], 3.14, 'returns expected value' );
t.strictEqual( v[ 1 ], PINF, 'returns expected value' );
v = minmaxabs( 3.14, NINF );
t.strictEqual( v[ 0 ], 3.14, 'returns expected value' );
t.strictEqual( v[ 1 ], PINF, 'returns expected value' );
v = minmaxabs( NINF );
t.strictEqual( v[ 0 ], PINF, 'returns expected value' );
t.strictEqual( v[ 1 ], PINF, 'returns expected value' );
v = minmaxabs( 3.14, 4.2, NINF );
t.strictEqual( v[ 0 ], 3.14, 'returns expected value' );
t.strictEqual( v[ 1 ], PINF, 'returns expected value' );
t.end();
});
tape( 'the function returns `+Infinity` as the maximum value if provided `+Infinity`', function test( t ) {
var v;
v = minmaxabs( PINF, 3.14 );
t.strictEqual( v[ 0 ], 3.14, 'returns expected value' );
t.strictEqual( v[ 1 ], PINF, 'returns expected value' );
v = minmaxabs( 3.14, PINF );
t.strictEqual( v[ 0 ], 3.14, 'returns expected value' );
t.strictEqual( v[ 1 ], PINF, 'returns expected value' );
v = minmaxabs( PINF );
t.strictEqual( v[ 0 ], PINF, 'returns expected value' );
t.strictEqual( v[ 1 ], PINF, 'returns expected value' );
v = minmaxabs( 3.14, 4.2, PINF );
t.strictEqual( v[ 0 ], 3.14, 'returns expected value' );
t.strictEqual( v[ 1 ], PINF, 'returns expected value' );
t.end();
});
tape( 'the function returns correctly signed zeros', function test( t ) {
var v;
v = minmaxabs( +0.0, -0.0 );
t.strictEqual( isPositiveZero( v[ 0 ] ), true, 'returns +0' );
t.strictEqual( isPositiveZero( v[ 1 ] ), true, 'returns +0' );
v = minmaxabs( -0.0, +0.0 );
t.strictEqual( isPositiveZero( v[ 0 ] ), true, 'returns +0' );
t.strictEqual( isPositiveZero( v[ 1 ] ), true, 'returns +0' );
v = minmaxabs( -0.0, -0.0 );
t.strictEqual( isPositiveZero( v[ 0 ] ), true, 'returns +0' );
t.strictEqual( isPositiveZero( v[ 1 ] ), true, 'returns +0' );
v = minmaxabs( +0.0, +0.0 );
t.strictEqual( isPositiveZero( v[ 0 ] ), true, 'returns +0' );
t.strictEqual( isPositiveZero( v[ 1 ] ), true, 'returns +0' );
v = minmaxabs( -0.0 );
t.strictEqual( isPositiveZero( v[ 0 ] ), true, 'returns +0' );
t.strictEqual( isPositiveZero( v[ 1 ] ), true, 'returns +0' );
v = minmaxabs( +0.0 );
t.strictEqual( isPositiveZero( v[ 0 ] ), true, 'returns +0' );
t.strictEqual( isPositiveZero( v[ 1 ] ), true, 'returns +0' );
v = minmaxabs( +0.0, -0.0, +0.0 );
t.strictEqual( isPositiveZero( v[ 0 ] ), true, 'returns +0' );
t.strictEqual( isPositiveZero( v[ 1 ] ), true, 'returns +0' );
t.end();
});
tape( 'the function returns the minimum and maximum absolute values', function test( t ) {
var v;
v = minmaxabs( 4.2, 3.14 );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
v = minmaxabs( -4.2, 3.14 );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
v = minmaxabs( 3.14 );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 3.14, 'returns max value' );
v = minmaxabs( PINF );
t.strictEqual( v[ 0 ], PINF, 'returns min value' );
t.strictEqual( v[ 1 ], PINF, 'returns max value' );
v = minmaxabs( 4.2, 3.14, -1.0 );
t.strictEqual( v[ 0 ], 1.0, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
v = minmaxabs( 4.2, 3.14, -1.0, -6.14 );
t.strictEqual( v[ 0 ], 1.0, 'returns min value' );
t.strictEqual( v[ 1 ], 6.14, 'returns max value' );
t.end();
});
tape( 'the function supports providing an output object (array)', function test( t ) {
var out;
var v;
out = [ 0.0, 0.0 ];
v = minmaxabs( out, 4.2, 3.14 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
out = [ 0.0, 0.0 ];
v = minmaxabs( out, -4.2, -3.14 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
out = [ 0.0, 0.0 ];
v = minmaxabs( out, 3.14 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 3.14, 'returns max value' );
out = [ 0.0, 0.0 ];
v = minmaxabs( out, PINF );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], PINF, 'returns min value' );
t.strictEqual( v[ 1 ], PINF, 'returns max value' );
out = [ 0.0, 0.0 ];
v = minmaxabs( out, 4.2, 3.14, -1.0 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 1.0, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
<|fim▁hole|> t.strictEqual( v[ 1 ], 6.14, 'returns max value' );
t.end();
});
tape( 'the function supports providing an output object (typed array)', function test( t ) {
var out;
var v;
out = new Float64Array( 2 );
v = minmaxabs( out, 4.2, 3.14 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
out = new Float64Array( 2 );
v = minmaxabs( out, -4.2, 3.14 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
out = new Float64Array( 2 );
v = minmaxabs( out, 3.14 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 3.14, 'returns min value' );
t.strictEqual( v[ 1 ], 3.14, 'returns max value' );
out = new Float64Array( 2 );
v = minmaxabs( out, PINF );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], PINF, 'returns min value' );
t.strictEqual( v[ 1 ], PINF, 'returns max value' );
out = new Float64Array( 2 );
v = minmaxabs( out, 4.2, 3.14, -1.0 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 1.0, 'returns min value' );
t.strictEqual( v[ 1 ], 4.2, 'returns max value' );
out = new Float64Array( 2 );
v = minmaxabs( out, 4.2, 3.14, -1.0, -6.14 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 1.0, 'returns min value' );
t.strictEqual( v[ 1 ], 6.14, 'returns max value' );
t.end();
});<|fim▁end|>
|
out = [ 0.0, 0.0 ];
v = minmaxabs( out, 4.2, 3.14, -1.0, -6.14 );
t.strictEqual( v, out, 'returns output array' );
t.strictEqual( v[ 0 ], 1.0, 'returns min value' );
|
<|file_name|>issue-54239-private-type-triggers-lint.rs<|end_file_name|><|fim▁begin|>// Regression test for #54239, shouldn't trigger lint.<|fim▁hole|>
#![deny(missing_debug_implementations)]
struct DontLookAtMe(i32);
async fn secret() -> DontLookAtMe {
DontLookAtMe(41)
}
pub async fn looking() -> i32 { // Shouldn't trigger lint here.
secret().await.0
}
fn main() {}<|fim▁end|>
|
// check-pass
// edition:2018
|
<|file_name|>AreaMemoryManager.cpp<|end_file_name|><|fim▁begin|>/**
@file AreaMemoryManager.cpp
@maintainer Morgan McGuire, http://graphics.cs.williams.edu
@created 2009-01-20
@edited 2009-01-20
Copyright 2000-2009, Morgan McGuire.
All rights reserved.
*/
#include "G3D/AreaMemoryManager.h"
#include "G3D/System.h"
namespace G3D {
AreaMemoryManager::Buffer::Buffer(size_t size) : m_size(size), m_used(0) {
// Allocate space for a lot of buffers.
m_first = (uint8*)::malloc(m_size);
}
AreaMemoryManager::Buffer::~Buffer() {
::free(m_first);
}
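// Bump-pointer allocation: hand out the next s bytes of this buffer, or
// return NULL if the buffer cannot satisfy the request.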
void* AreaMemoryManager::Buffer::alloc(size_t s) {
if (s + m_used > m_size) {
return NULL;
} else {
void* old = m_first + m_used;
m_used += s;
<|fim▁hole|>
bool AreaMemoryManager::isThreadsafe() const {
return false;
}
AreaMemoryManager::Ref AreaMemoryManager::create(size_t sizeHint) {
return new AreaMemoryManager(sizeHint);
}
AreaMemoryManager::AreaMemoryManager(size_t sizeHint) : m_sizeHint(sizeHint) {
debugAssert(sizeHint > 0);
}
AreaMemoryManager::~AreaMemoryManager() {
deallocateAll();
}
size_t AreaMemoryManager::bytesAllocated() const {
return m_sizeHint * m_bufferArray.size();
}
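// Try the most recently added buffer first; if it is full (or none exists yet),
// append a fresh buffer of at least m_sizeHint bytes and allocate from that.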
void* AreaMemoryManager::alloc(size_t s) {
void* n = (m_bufferArray.size() > 0) ? m_bufferArray.last()->alloc(s) : NULL;
if (n == NULL) {
// This buffer is full
m_bufferArray.append(new Buffer(max(s, m_sizeHint)));
return m_bufferArray.last()->alloc(s);
} else {
return n;
}
}
void AreaMemoryManager::free(void* x) {
// Intentionally empty; we block deallocate
}
void AreaMemoryManager::deallocateAll() {
m_bufferArray.deleteAll();
m_bufferArray.clear();
}
}<|fim▁end|>
|
return old;
}
}
|
<|file_name|>miner.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Copyright (c) 2013 The NovaCoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "txdb.h"
#include "miner.h"
#include "kernel.h"
using namespace std;
//////////////////////////////////////////////////////////////////////////////
//
// BitcoinMiner
//
extern unsigned int nMinerSleep;
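// Pad a buffer out to a whole number of 64-byte SHA-256 blocks: append the
// 0x80 terminator byte, zero-fill, and store the message length in bits in
// the final bytes, as the SHA-256 padding rule requires. Returns the number
// of 64-byte blocks.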
int static FormatHashBlocks(void* pbuffer, unsigned int len)
{
unsigned char* pdata = (unsigned char*)pbuffer;
unsigned int blocks = 1 + ((len + 8) / 64);
unsigned char* pend = pdata + 64 * blocks;
memset(pdata + len, 0, 64 * blocks - len);
pdata[len] = 0x80;
unsigned int bits = len * 8;
pend[-1] = (bits >> 0) & 0xff;
pend[-2] = (bits >> 8) & 0xff;
pend[-3] = (bits >> 16) & 0xff;
pend[-4] = (bits >> 24) & 0xff;
return blocks;
}
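// SHA-256 initial hash values (the standard IV from FIPS 180-4).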
static const unsigned int pSHA256InitState[8] =
{0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19};
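// Run the SHA-256 compression function over a single 64-byte chunk, starting
// from the state in pinit rather than the standard IV; this is what lets the
// miner precompute a constant "midstate" for the block header.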
void SHA256Transform(void* pstate, void* pinput, const void* pinit)
{
SHA256_CTX ctx;
unsigned char data[64];
SHA256_Init(&ctx);
for (int i = 0; i < 16; i++)
((uint32_t*)data)[i] = ByteReverse(((uint32_t*)pinput)[i]);
for (int i = 0; i < 8; i++)
ctx.h[i] = ((uint32_t*)pinit)[i];
SHA256_Update(&ctx, data, sizeof(data));
for (int i = 0; i < 8; i++)
((uint32_t*)pstate)[i] = ctx.h[i];
}
// COrphan tracks a mempool transaction that spends outputs of other
// transactions which are themselves still in the mempool. setDependsOn holds
// the hashes of those unconfirmed parents; the transaction is only moved into
// the priority queue once every parent has been added to the block being
// assembled.
class COrphan
{
public:
CTransaction* ptx;
set<uint256> setDependsOn;
double dPriority;
double dFeePerKb;
COrphan(CTransaction* ptxIn)
{
ptx = ptxIn;
dPriority = dFeePerKb = 0;
}
void print() const
{
printf("COrphan(hash=%s, dPriority=%.1f, dFeePerKb=%.1f)\n",
ptx->GetHash().ToString().substr(0,10).c_str(), dPriority, dFeePerKb);
BOOST_FOREACH(uint256 hash, setDependsOn)
printf(" setDependsOn %s\n", hash.ToString().substr(0,10).c_str());
}
};
uint64_t nLastBlockTx = 0;
uint64_t nLastBlockSize = 0;
int64_t nLastCoinStakeSearchInterval = 0;
// We want to sort transactions by priority and fee, so:
typedef boost::tuple<double, double, CTransaction*> TxPriority;
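// Orders ascending under the chosen key (fee-per-kb or priority, with the
// other as tie-breaker), so the std::make_heap/pop_heap calls below keep the
// most attractive transaction at the front of the queue.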
class TxPriorityCompare
{
bool byFee;
public:
TxPriorityCompare(bool _byFee) : byFee(_byFee) { }
bool operator()(const TxPriority& a, const TxPriority& b)
{
if (byFee)
{
if (a.get<1>() == b.get<1>())
return a.get<0>() < b.get<0>();
return a.get<1>() < b.get<1>();
}
else
{
if (a.get<0>() == b.get<0>())
return a.get<1>() < b.get<1>();
return a.get<0>() < b.get<0>();
}
}
};
// CreateNewBlock: create new block (without proof-of-work/proof-of-stake)
CBlock* CreateNewBlock(CWallet* pwallet, bool fProofOfStake, int64_t* pFees)
{
// Create new block
auto_ptr<CBlock> pblock(new CBlock());
if (!pblock.get())
return NULL;
CBlockIndex* pindexPrev = pindexBest;
// Create coinbase tx
CTransaction txNew;
txNew.vin.resize(1);
txNew.vin[0].prevout.SetNull();
txNew.vout.resize(1);
if (!fProofOfStake)
{
CReserveKey reservekey(pwallet);
CPubKey pubkey;
if (!reservekey.GetReservedKey(pubkey))
return NULL;
txNew.vout[0].scriptPubKey.SetDestination(pubkey.GetID());
}
else
{
// Height first in coinbase required for block.version=2
txNew.vin[0].scriptSig = (CScript() << pindexPrev->nHeight+1) + COINBASE_FLAGS;
assert(txNew.vin[0].scriptSig.size() <= 100);
txNew.vout[0].SetEmpty();
}
// Add our coinbase tx as first transaction
pblock->vtx.push_back(txNew);
// Largest block you're willing to create:
unsigned int nBlockMaxSize = GetArg("-blockmaxsize", MAX_BLOCK_SIZE_GEN/2);
// Limit to between 1K and MAX_BLOCK_SIZE-1K for sanity:
nBlockMaxSize = std::max((unsigned int)1000, std::min((unsigned int)(MAX_BLOCK_SIZE-1000), nBlockMaxSize));
// How much of the block should be dedicated to high-priority transactions,
// included regardless of the fees they pay
unsigned int nBlockPrioritySize = GetArg("-blockprioritysize", 27000);
nBlockPrioritySize = std::min(nBlockMaxSize, nBlockPrioritySize);
// Minimum block size you want to create; block will be filled with free transactions
// until there are no more or the block reaches this size:
unsigned int nBlockMinSize = GetArg("-blockminsize", 0);
nBlockMinSize = std::min(nBlockMaxSize, nBlockMinSize);
// Fee-per-kilobyte amount considered the same as "free"
// Be careful setting this: if you set it to zero then
// a transaction spammer can cheaply fill blocks using
// 1-satoshi-fee transactions. It should be set above the real
// cost to you of processing a transaction.
int64_t nMinTxFee = MIN_TX_FEE;
if (mapArgs.count("-mintxfee"))
ParseMoney(mapArgs["-mintxfee"], nMinTxFee);
pblock->nBits = GetNextTargetRequired(pindexPrev, fProofOfStake);
// Collect memory pool transactions into the block
int64_t nFees = 0;
{
LOCK2(cs_main, mempool.cs);
CTxDB txdb("r");
// Priority order to process transactions
list<COrphan> vOrphan; // list memory doesn't move
map<uint256, vector<COrphan*> > mapDependers;
// This vector will be sorted into a priority queue:
vector<TxPriority> vecPriority;
vecPriority.reserve(mempool.mapTx.size());
for (map<uint256, CTransaction>::iterator mi = mempool.mapTx.begin(); mi != mempool.mapTx.end(); ++mi)
{
CTransaction& tx = (*mi).second;
if (tx.IsCoinBase() || tx.IsCoinStake() || !IsFinalTx(tx, pindexPrev->nHeight + 1))
continue;
COrphan* porphan = NULL;
double dPriority = 0;
int64_t nTotalIn = 0;
bool fMissingInputs = false;
BOOST_FOREACH(const CTxIn& txin, tx.vin)
{
// Read prev transaction
CTransaction txPrev;
CTxIndex txindex;
if (!txPrev.ReadFromDisk(txdb, txin.prevout, txindex))
{
// This should never happen; all transactions in the memory
// pool should connect to either transactions in the chain
// or other transactions in the memory pool.
if (!mempool.mapTx.count(txin.prevout.hash))
{
printf("ERROR: mempool transaction missing input\n");
if (fDebug) assert("mempool transaction missing input" == 0);
fMissingInputs = true;
if (porphan)
vOrphan.pop_back();
break;
}
// Has to wait for dependencies
if (!porphan)
{
// Use list for automatic deletion
vOrphan.push_back(COrphan(&tx));
porphan = &vOrphan.back();
}
mapDependers[txin.prevout.hash].push_back(porphan);
porphan->setDependsOn.insert(txin.prevout.hash);
nTotalIn += mempool.mapTx[txin.prevout.hash].vout[txin.prevout.n].nValue;
continue;
}
int64_t nValueIn = txPrev.vout[txin.prevout.n].nValue;
nTotalIn += nValueIn;
int nConf = txindex.GetDepthInMainChain();
dPriority += (double)nValueIn * nConf;
}
if (fMissingInputs) continue;
// Priority is sum(valuein * age) / txsize
unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
dPriority /= nTxSize;
// This is a more accurate fee-per-kilobyte than is used by the client code, because the
// client code rounds up the size to the nearest 1K. That's good, because it gives an
// incentive to create smaller transactions.
double dFeePerKb = double(nTotalIn-tx.GetValueOut()) / (double(nTxSize)/1000.0);
if (porphan)
{
porphan->dPriority = dPriority;
porphan->dFeePerKb = dFeePerKb;
}
else
vecPriority.push_back(TxPriority(dPriority, dFeePerKb, &(*mi).second));
}
// Collect transactions into block
map<uint256, CTxIndex> mapTestPool;
uint64_t nBlockSize = 1000;
uint64_t nBlockTx = 0;
int nBlockSigOps = 100;
bool fSortedByFee = (nBlockPrioritySize <= 0);
TxPriorityCompare comparer(fSortedByFee);
std::make_heap(vecPriority.begin(), vecPriority.end(), comparer);
while (!vecPriority.empty())
{
// Take highest priority transaction off the priority queue:
double dPriority = vecPriority.front().get<0>();
double dFeePerKb = vecPriority.front().get<1>();
CTransaction& tx = *(vecPriority.front().get<2>());
std::pop_heap(vecPriority.begin(), vecPriority.end(), comparer);
vecPriority.pop_back();
// Size limits
unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
if (nBlockSize + nTxSize >= nBlockMaxSize)
continue;
// Legacy limits on sigOps:
unsigned int nTxSigOps = tx.GetLegacySigOpCount();
if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS)
continue;
// Timestamp limit
if (tx.nTime > GetAdjustedTime() || (fProofOfStake && tx.nTime > pblock->vtx[0].nTime))
continue;
// Transaction fee
int64_t nMinFee = tx.GetMinFee(nBlockSize, GMF_BLOCK);
// Skip free transactions if we're past the minimum block size:
if (fSortedByFee && (dFeePerKb < nMinTxFee) && (nBlockSize + nTxSize >= nBlockMinSize))
continue;
// Prioritize by fee once past the priority size or we run out of high-priority
// transactions:
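// (COIN * 144 / 250 is the classic free-transaction threshold: one coin
// aged one day -- 144 blocks -- in a 250-byte transaction.)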
if (!fSortedByFee &&
((nBlockSize + nTxSize >= nBlockPrioritySize) || (dPriority < COIN * 144 / 250)))
{
fSortedByFee = true;
comparer = TxPriorityCompare(fSortedByFee);
std::make_heap(vecPriority.begin(), vecPriority.end(), comparer);
}
// Connecting shouldn't fail due to dependency on other memory pool transactions
// because we're already processing them in order of dependency
map<uint256, CTxIndex> mapTestPoolTmp(mapTestPool);
MapPrevTx mapInputs;
bool fInvalid;
if (!tx.FetchInputs(txdb, mapTestPoolTmp, false, true, mapInputs, fInvalid))
continue;
int64_t nTxFees = tx.GetValueIn(mapInputs)-tx.GetValueOut();
if (nTxFees < nMinFee)
continue;
nTxSigOps += tx.GetP2SHSigOpCount(mapInputs);
if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS)
continue;
if (!tx.ConnectInputs(txdb, mapInputs, mapTestPoolTmp, CDiskTxPos(1,1,1), pindexPrev, false, true))
continue;
mapTestPoolTmp[tx.GetHash()] = CTxIndex(CDiskTxPos(1,1,1), tx.vout.size());
swap(mapTestPool, mapTestPoolTmp);
// Added
pblock->vtx.push_back(tx);
nBlockSize += nTxSize;
++nBlockTx;
nBlockSigOps += nTxSigOps;
nFees += nTxFees;
if (fDebug && GetBoolArg("-printpriority"))
{
printf("priority %.1f feeperkb %.1f txid %s\n",
dPriority, dFeePerKb, tx.GetHash().ToString().c_str());
}
// Add transactions that depend on this one to the priority queue
uint256 hash = tx.GetHash();
if (mapDependers.count(hash))
{
BOOST_FOREACH(COrphan* porphan, mapDependers[hash])
{
if (!porphan->setDependsOn.empty())
{
porphan->setDependsOn.erase(hash);
if (porphan->setDependsOn.empty())
{
vecPriority.push_back(TxPriority(porphan->dPriority, porphan->dFeePerKb, porphan->ptx));
std::push_heap(vecPriority.begin(), vecPriority.end(), comparer);
}
}
}
}
}
nLastBlockTx = nBlockTx;
nLastBlockSize = nBlockSize;
if (fDebug && GetBoolArg("-printpriority"))
printf("CreateNewBlock(): total size %"PRIu64"\n", nBlockSize);
if (!fProofOfStake)
pblock->vtx[0].vout[0].nValue = GetProofOfWorkReward(nFees);
if (pFees)
*pFees = nFees;
// Fill in header
pblock->hashPrevBlock = pindexPrev->GetBlockHash();
pblock->nTime = max(pindexPrev->GetPastTimeLimit()+1, pblock->GetMaxTransactionTime());
pblock->nTime = max(pblock->GetBlockTime(), PastDrift(pindexPrev->GetBlockTime()));
if (!fProofOfStake)
pblock->UpdateTime(pindexPrev);
pblock->nNonce = 0;
}
return pblock.release();
}
void IncrementExtraNonce(CBlock* pblock, CBlockIndex* pindexPrev, unsigned int& nExtraNonce)
{
// Update nExtraNonce
static uint256 hashPrevBlock;
if (hashPrevBlock != pblock->hashPrevBlock)
{
nExtraNonce = 0;
hashPrevBlock = pblock->hashPrevBlock;
}
++nExtraNonce;
unsigned int nHeight = pindexPrev->nHeight+1; // Height first in coinbase required for block.version=2
pblock->vtx[0].vin[0].scriptSig = (CScript() << nHeight << CBigNum(nExtraNonce)) + COINBASE_FLAGS;
assert(pblock->vtx[0].vin[0].scriptSig.size() <= 100);
pblock->hashMerkleRoot = pblock->BuildMerkleTree();
}
void FormatHashBuffers(CBlock* pblock, char* pmidstate, char* pdata, char* phash1)
{
//
// Pre-build hash buffers
//
struct
{
struct unnamed2
{
int nVersion;
uint256 hashPrevBlock;
uint256 hashMerkleRoot;
unsigned int nTime;
unsigned int nBits;
unsigned int nNonce;
}
block;
unsigned char pchPadding0[64];
uint256 hash1;
unsigned char pchPadding1[64];
}
tmp;
memset(&tmp, 0, sizeof(tmp));
tmp.block.nVersion = pblock->nVersion;
tmp.block.hashPrevBlock = pblock->hashPrevBlock;
tmp.block.hashMerkleRoot = pblock->hashMerkleRoot;
tmp.block.nTime = pblock->nTime;
tmp.block.nBits = pblock->nBits;
tmp.block.nNonce = pblock->nNonce;
FormatHashBlocks(&tmp.block, sizeof(tmp.block));
FormatHashBlocks(&tmp.hash1, sizeof(tmp.hash1));
// Byte swap all the input buffer
for (unsigned int i = 0; i < sizeof(tmp)/4; i++)
((unsigned int*)&tmp)[i] = ByteReverse(((unsigned int*)&tmp)[i]);
// Precalc the first half of the first hash, which stays constant
SHA256Transform(pmidstate, &tmp.block, pSHA256InitState);
memcpy(pdata, &tmp.block, 128);
memcpy(phash1, &tmp.hash1, 64);
}
bool CheckWork(CBlock* pblock, CWallet& wallet, CReserveKey& reservekey)
{
uint256 hashBlock = pblock->GetHash();
uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
if(!pblock->IsProofOfWork())
return error("CheckWork() : %s is not a proof-of-work block", hashBlock.GetHex().c_str());
if (hashBlock > hashTarget)
return error("CheckWork() : proof-of-work not meeting target");
//// debug print
printf("CheckWork() : new proof-of-work block found \n hash: %s \ntarget: %s\n", hashBlock.GetHex().c_str(), hashTarget.GetHex().c_str());
pblock->print();
printf("generated %s\n", FormatMoney(pblock->vtx[0].vout[0].nValue).c_str());
// Found a solution
{
LOCK(cs_main);
if (pblock->hashPrevBlock != hashBestChain)
return error("CheckWork() : generated block is stale");
// Remove key from key pool
reservekey.KeepKey();
// Track how many getdata requests this block gets
{
LOCK(wallet.cs_wallet);
wallet.mapRequestCount[hashBlock] = 0;
}
// Process this block the same as if we had received it from another node
if (!ProcessBlock(NULL, pblock))
return error("CheckWork() : ProcessBlock, block not accepted");
}
return true;
}
bool CheckStake(CBlock* pblock, CWallet& wallet)
{
uint256 proofHash = 0, hashTarget = 0;
uint256 hashBlock = pblock->GetHash();
if(!pblock->IsProofOfStake())
return error("CheckStake() : %s is not a proof-of-stake block", hashBlock.GetHex().c_str());
// verify hash target and signature of coinstake tx
if (!CheckProofOfStake(pblock->vtx[1], pblock->nBits, proofHash, hashTarget))
return error("CheckStake() : proof-of-stake checking failed");
//// debug print
printf("CheckStake() : new proof-of-stake block found \n hash: %s \nproofhash: %s \ntarget: %s\n", hashBlock.GetHex().c_str(), proofHash.GetHex().c_str(), hashTarget.GetHex().c_str());
pblock->print();
printf("out %s\n", FormatMoney(pblock->vtx[1].GetValueOut()).c_str());
// Found a solution
{
LOCK(cs_main);
if (pblock->hashPrevBlock != hashBestChain)
return error("CheckStake() : generated block is stale");
// Track how many getdata requests this block gets
{
LOCK(wallet.cs_wallet);
wallet.mapRequestCount[hashBlock] = 0;
}
// Process this block the same as if we had received it from another node
if (!ProcessBlock(NULL, pblock))
return error("CheckStake() : ProcessBlock, block not accepted");
}
return true;
}
void StakeMiner(CWallet *pwallet)
{
SetThreadPriority(THREAD_PRIORITY_LOWEST);
// Make this thread recognisable as the mining thread
RenameThread("EddieCoin-miner");
bool fTryToSync = true;
while (true)
{
if (fShutdown)
return;
while (pwallet->IsLocked())
{
nLastCoinStakeSearchInterval = 0;
MilliSleep(1000);
if (fShutdown)
return;
}
while (vNodes.empty() || IsInitialBlockDownload())
{
nLastCoinStakeSearchInterval = 0;
fTryToSync = true;
MilliSleep(1000);
if (fShutdown)
return;
}
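// After (re)gaining peers, wait until we are connected to a few nodes and
// have caught up with the network before trying to stake.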
if (fTryToSync)
{
fTryToSync = false;
if (vNodes.size() < 3 || nBestHeight < GetNumBlocksOfPeers())
{
MilliSleep(60000);
continue;
}
}
//
// Create new block
//
int64_t nFees;
auto_ptr<CBlock> pblock(CreateNewBlock(pwallet, true, &nFees));
if (!pblock.get())
return;
// Trying to sign a block
if (pblock->SignBlock(*pwallet, nFees))
{
SetThreadPriority(THREAD_PRIORITY_NORMAL);<|fim▁hole|> SetThreadPriority(THREAD_PRIORITY_LOWEST);
MilliSleep(500);
}
else
MilliSleep(nMinerSleep);
}
}<|fim▁end|>
|
CheckStake(pblock.get(), *pwallet);
|
<|file_name|>EnumButton.py<|end_file_name|><|fim▁begin|>from src.tools.enum import enum
import pyxbmct.addonwindow as pyxbmct
from src.tools.dialog import dialog
EnumMode = enum(SELECT=0, ROTATE=1)
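# SELECT opens a selection dialog on click; ROTATE advances to the next value
# on each click, wrapping around after the last one.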
class EnumButton(object):
def __init__(self, label, values, current, default, changeCallback=None, saveCallback=None, customLabels=None, mode=EnumMode.SELECT, returnValue=False, alignment=pyxbmct.ALIGN_CENTER):
self.label = label
self.values = values
self.customLabels = customLabels
self.mode = mode
self.returnValue = returnValue
self.changeCallback = changeCallback
self.saveCallback = saveCallback
self.currentValue = current
self.defaultValue = default
self.currentIndex = None
self.defaultIndex = None
self.assignedValue = False
if saveCallback is None:
self.onSave = None
if customLabels:
self._findCurrentIndex()
label = str(customLabels[self.currentIndex])
else:
label = str(current)
if alignment is not None:
self.button = pyxbmct.Button(label, alignment=alignment)
else:
self.button = pyxbmct.Button(label)
def update(self, value):
if self.currentValue != value:
self.currentValue = value
if self.customLabels:
self._findCurrentIndex()
label = str(self.customLabels[self.currentIndex])
else:
self.currentIndex = None
label = str(value)
self.button.setLabel(label)
self.assignedValue = True
def onClick(self):
if self.mode == EnumMode.SELECT:
if self.customLabels:
values = self.customLabels
else:
values = self.values
selectedIndex = dialog.select(self.label, list((str(value) for value in values)))
if selectedIndex == -1:
return
index = selectedIndex
else:
if self.currentIndex is None:
self._findCurrentIndex()
if self.currentIndex == len(self.values) - 1:
index = 0
else:
index = self.currentIndex + 1
self.assign(index)
def onDefault(self):
if self.defaultIndex is None:
self._findDefaultIndex()
self.assign(self.defaultIndex)
def onSave(self):
if self.assignedValue:
if self.returnValue:
self.saveCallback(self.currentValue)
else:
self.saveCallback(self.currentIndex)
def assign(self, index):
value = self.values[index]
self.currentIndex = index
self.currentValue = value
if self.customLabels:
label = str(self.customLabels[index])
else:
label = str(value)
self.button.setLabel(label)
self.assignedValue = True
if self.changeCallback:
if self.returnValue:
self.changeCallback(value)
else:
self.changeCallback(index)
def _findDefaultIndex(self):
for i in range(0, len(self.values)):
value = self.values[i]
if value == self.defaultValue:
self.defaultIndex = i
if self.defaultIndex is None:
raise ValueError ('Default value not found in value list')
<|fim▁hole|> def _findCurrentIndex(self):
for i in range(0, len(self.values)):
value = self.values[i]
if value == self.currentValue:
self.currentIndex = i
if self.currentIndex is None:
raise ValueError ('Current value not found in value list')<|fim▁end|>
| |
<|file_name|>test_filter2d.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# ______________________________________________________________________
'''test_filter2d
Test the filter2d() example from the PyCon'12 slide deck.
'''
# ______________________________________________________________________
import numpy
from numba import *
from numba.decorators import jit
import sys
import unittest
# ______________________________________________________________________
def filter2d(image, filt):
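    '''Naive 2D convolution: the filter is flipped in both axes (true
    convolution rather than correlation), and border pixels closer than half
    the filter size to an edge are left at zero.'''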
M, N = image.shape
Mf, Nf = filt.shape
Mf2 = Mf // 2
Nf2 = Nf // 2
result = numpy.zeros_like(image)
for i in range(Mf2, M - Mf2):
for j in range(Nf2, N - Nf2):
num = 0.0
for ii in range(Mf):
for jj in range(Nf):
num += (filt[Mf-1-ii, Nf-1-jj] * image[i-Mf2+ii, j-Nf2+jj])
result[i, j] = num
return result
# ______________________________________________________________________
class TestFilter2d(unittest.TestCase):<|fim▁hole|> ufilter2d = jit(argtypes=[double[:,:], double[:,:]],
restype=double[:,:])(filter2d)
image = numpy.random.random((50, 50))
filt = numpy.random.random((5, 5))
filt /= filt.sum()
plain_old_result = filter2d(image, filt)
hot_new_result = ufilter2d(image, filt)
self.assertTrue((abs(plain_old_result - hot_new_result) < 1e-9).all())
# ______________________________________________________________________
@autojit
def func():
return numpy.empty(10)
if __name__ == "__main__":
# func()
# TestFilter2d('test_vectorized_filter2d').debug()
unittest.main(*sys.argv[1:])
# ______________________________________________________________________
# End of test_filter2d.py<|fim▁end|>
|
def test_vectorized_filter2d(self):
|
<|file_name|>basic-types.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-win32 Broken because of LLVM bug: http://llvm.org/bugs/show_bug.cgi?id=16249
// Caveats - gdb prints any 8-bit value (meaning rust i8 and u8 values)
// as its numerical value along with its associated ASCII char, there
// doesn't seem to be any way around this. Also, gdb doesn't know
// about UTF-32 character encoding and will print a rust char as only
// its numerical value.
// compile-flags:-Z extra-debug-info
// debugger:break _zzz
// debugger:run
// debugger:finish
// debugger:print b
// check:$1 = false
// debugger:print i
// check:$2 = -1
// debugger:print c
// check:$3 = 97
// debugger:print/d i8
// check:$4 = 68
// debugger:print i16<|fim▁hole|>// debugger:print i64
// check:$7 = -64
// debugger:print u
// check:$8 = 1
// debugger:print/d u8
// check:$9 = 100
// debugger:print u16
// check:$10 = 16
// debugger:print u32
// check:$11 = 32
// debugger:print u64
// check:$12 = 64
// debugger:print f
// check:$13 = 1.5
// debugger:print f32
// check:$14 = 2.5
// debugger:print f64
// check:$15 = 3.5
fn main() {
let b: bool = false;
let i: int = -1;
let c: char = 'a';
let i8: i8 = 68;
let i16: i16 = -16;
let i32: i32 = -32;
let i64: i64 = -64;
let u: uint = 1;
let u8: u8 = 100;
let u16: u16 = 16;
let u32: u32 = 32;
let u64: u64 = 64;
let f: float = 1.5;
let f32: f32 = 2.5;
let f64: f64 = 3.5;
_zzz();
}
fn _zzz() {()}<|fim▁end|>
|
// check:$5 = -16
// debugger:print i32
// check:$6 = -32
|
<|file_name|>utilities_test.ts<|end_file_name|><|fim▁begin|>/*
Copyright 2021 The CloudEvents Authors
SPDX-License-Identifier: Apache-2.0
*/
import "mocha";
import { expect } from "chai";
import { isStringOrThrow, equalsOrThrow, isBase64, asData } from "../../src/event/validation";
describe("Utilities", () => {
describe("isStringOrThrow", () => {
it("should throw when is not a string", () => {
expect(isStringOrThrow.bind({}, 3.6, new Error("works!"))).to.throw("works!");
});
it("should return true when is a string", () => {
expect(isStringOrThrow("cool", new Error("not throws!"))).to.equal(true);
});
});
describe("equalsOrThrow", () => {
it("should throw when they are not equals", () => {
expect(equalsOrThrow.bind({}, "z", "a", new Error("works!"))).to.throw("works!");
});
it("should return true when they are equals", () => {
expect(equalsOrThrow("z", "z", new Error())).to.equal(true);
});
});
describe("isBase64", () => {
it("should return false when is not base64 string", () => {
const actual = isBase64("non base 64");
expect(actual).to.equal(false);
});
it("should return true when is a base64 string", () => {
const actual = isBase64("Y2xvdWRldmVudHMK");
expect(actual).to.equal(true);
});
});<|fim▁hole|>
describe("asData", () => {
it("should throw error when data is not a valid json", () => {
const data = "not a json";
expect(asData.bind({}, data, "application/json")).to.throw();
});
it("should parse string content type as string", () => {
const expected = "a string";
const actual = asData(expected, "text/plain");
expect(typeof actual).to.equal("string");
expect(actual).to.equal(expected);
});
it("should parse 'application/json' as json object", () => {
const expected = {
much: "wow",
myext: {
ext: "x04",
},
};
const actual = asData(JSON.stringify(expected), "application/json");
expect(typeof actual).to.equal("object");
expect(actual).to.deep.equal(expected);
});
it("should parse 'application/cloudevents+json' as json object", () => {
const expected = {
much: "wow",
myext: {
ext: "x04",
},
};
const actual = asData(JSON.stringify(expected), "application/cloudevents+json");
expect(typeof actual).to.equal("object");
expect(actual).to.deep.equal(expected);
});
it("should parse 'text/json' as json object", () => {
const expected = {
much: "wow",
myext: {
ext: "x04",
},
};
const actual = asData(JSON.stringify(expected), "text/json");
expect(typeof actual).to.equal("object");
expect(actual).to.deep.equal(expected);
});
});
});<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011-2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from datetime import datetime
import email
import mock
import time
from swift.common import swob
from swift.common.middleware.s3api.s3api import filter_factory
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import Config
from test.unit import debug_logger
from test.unit.common.middleware.s3api.helpers import FakeSwift
class FakeApp(object):
def __init__(self):
self.swift = FakeSwift()
def _update_s3_path_info(self, env):
"""
For S3 requests, Swift auth middleware replaces a user name in
env['PATH_INFO'] with a valid tenant id.
E.g. '/v1/test:tester/bucket/object' will become
'/v1/AUTH_test/bucket/object'. This method emulates the behavior.
"""
tenant_user = env['s3api.auth_details']['access_key']
tenant, user = tenant_user.rsplit(':', 1)
path = env['PATH_INFO']
env['PATH_INFO'] = path.replace(tenant_user, 'AUTH_' + tenant)
def __call__(self, env, start_response):
if 's3api.auth_details' in env:
self._update_s3_path_info(env)
if env['REQUEST_METHOD'] == 'TEST':
def authorize_cb(req):
# Assume swift owner, if not yet set
req.environ.setdefault('REMOTE_USER', 'authorized')
req.environ.setdefault('swift_owner', True)
# But then default to blocking authz, to ensure we've replaced
# the default auth system
return swob.HTTPForbidden(request=req)
env['swift.authorize'] = authorize_cb
return self.swift(env, start_response)
class S3ApiTestCase(unittest.TestCase):
def __init__(self, name):
unittest.TestCase.__init__(self, name)
def setUp(self):
# setup default config
self.conf = Config({
'allow_no_owner': False,
'location': 'us-east-1',
'dns_compliant_bucket_names': True,
'max_bucket_listing': 1000,
'max_parts_listing': 1000,
'max_multi_delete_objects': 1000,
's3_acl': False,
'storage_domain': 'localhost',
'auth_pipeline_check': True,
'max_upload_part_num': 1000,
'check_bucket_owner': False,
'force_swift_request_proxy_log': False,
'allow_multipart_uploads': True,
'min_segment_size': 5242880,
})
# these two settings existed in the original test setup
self.conf.log_level = 'debug'
self.app = FakeApp()
self.swift = self.app.swift
self.s3api = filter_factory({}, **self.conf)(self.app)
self.logger = self.s3api.logger = self.swift.logger = debug_logger()
self.swift.register('HEAD', '/v1/AUTH_test',
swob.HTTPOk, {}, None)
self.swift.register('HEAD', '/v1/AUTH_test/bucket',
swob.HTTPNoContent, {}, None)
self.swift.register('PUT', '/v1/AUTH_test/bucket',
swob.HTTPCreated, {}, None)
self.swift.register('POST', '/v1/AUTH_test/bucket',
swob.HTTPNoContent, {}, None)
self.swift.register('DELETE', '/v1/AUTH_test/bucket',
swob.HTTPNoContent, {}, None)
self.swift.register('GET', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, {'etag': 'object etag'}, "")
self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
swob.HTTPCreated, {'etag': 'object etag'}, None)
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
swob.HTTPNoContent, {}, None)
self.mock_get_swift_info_result = {'object_versioning': {}}
for s3api_path in (
'controllers.obj',
'controllers.bucket',
'controllers.multi_delete',
'controllers.versioning',
):
patcher = mock.patch(
'swift.common.middleware.s3api.%s.get_swift_info' % s3api_path,
return_value=self.mock_get_swift_info_result)
patcher.start()
self.addCleanup(patcher.stop)
def _get_error_code(self, body):
elem = fromstring(body, 'Error')
return elem.find('./Code').text
def _get_error_message(self, body):
elem = fromstring(body, 'Error')
return elem.find('./Message').text
def _test_method_error(self, method, path, response_class, headers={},
env={}, expected_xml_tags=None):
if not path.startswith('/'):
path = '/' + path # add a missing slash before the path
uri = '/v1/AUTH_test'
if path != '/':
uri += path
self.swift.register(method, uri, response_class, headers, None)
headers.update({'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
env.update({'REQUEST_METHOD': method})
req = swob.Request.blank(path, environ=env, headers=headers)
status, headers, body = self.call_s3api(req)
if expected_xml_tags is not None:
elem = fromstring(body, 'Error')
self.assertEqual(set(expected_xml_tags),
{x.tag for x in elem})
return self._get_error_code(body)
def get_date_header(self):
# email.utils.formatdate returns utc timestamp in default
return email.utils.formatdate(time.time())
def get_v4_amz_date_header(self, when=None):
if when is None:
when = datetime.utcnow()
return when.strftime('%Y%m%dT%H%M%SZ')
<|fim▁hole|> if app is None:
app = self.app
req.headers.setdefault("User-Agent", "Mozzarella Foxfire")
status = [None]
headers = [None]
def start_response(s, h, ei=None):
status[0] = s
headers[0] = swob.HeaderKeyDict(h)
body_iter = app(req.environ, start_response)
body = b''
caught_exc = None
try:
for chunk in body_iter:
body += chunk
except Exception as exc:
if expect_exception:
caught_exc = exc
else:
raise
if expect_exception:
return status[0], headers[0], body, caught_exc
else:
return status[0], headers[0], body
def call_s3api(self, req, **kwargs):
return self.call_app(req, app=self.s3api, **kwargs)<|fim▁end|>
|
def call_app(self, req, app=None, expect_exception=False):
|
<|file_name|>125.valid_palindrome.java<|end_file_name|><|fim▁begin|>/* 125.valid_palindrome
*/
public class Solution {
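    // Brute force: test every pair of indices; O(n^2) time, O(1) extra space.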
public int[] twoSum(int[] nums, int target) {
for (int i = 0; i < nums.length; i++) {
int ni = nums[i];
for (int j = i + 1; j < nums.length; j++) {
int nj = nums[j];
if (ni + nj == target) {
return new int[] {i, j};
}
}
}
<|fim▁hole|> }
}<|fim▁end|>
|
throw new IllegalArgumentException("No two sum solution");
|
<|file_name|>sendemail.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright (C) 2014 Stefano Guglielmetti
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import smtplib, os, sys
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.MIMEText import MIMEText
from email.Utils import COMMASPACE, formatdate
from email import Encoders
#From address, to address, subject and message body
from_address = 'EMAIL_FROM_ADDRESS'
to_address = ['EMAIL_TO_ADDRESS']
email_subject = 'Alert!!! Zombies!!! Ahead!!!'
email_body = 'An intruder has been detected and needs to be eliminated!'
# Credentials (if needed)
username = 'YOUR_EMAIL_USERNAME'
password = 'YOUR_EMAIL_PASSWORD'
# The actual mail send
server = 'smtp.gmail.com:587'
def send_mail(send_from, send_to, subject, text, files=[], server="localhost"):
assert type(send_to)==list
assert type(files)==list
msg = MIMEMultipart()
msg['From'] = send_from
msg['To'] = COMMASPACE.join(send_to)
msg['Date'] = formatdate(localtime=True)
msg['Subject'] = subject
msg.attach( MIMEText(text) )
for f in files:
part = MIMEBase('application', "octet-stream")
part.set_payload( open(f,"rb").read() )
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(f))
msg.attach(part)
smtp = smtplib.SMTP(server)
smtp.starttls()<|fim▁hole|> smtp.close()
send_mail(from_address, to_address, email_subject, email_body, [sys.argv[1]], server) #the first command line argument will be used as the image file name<|fim▁end|>
|
smtp.login(username,password)
smtp.sendmail(send_from, send_to, msg.as_string())
|
<|file_name|>GridCacheBinaryTransactionalEntryProcessorDeploymentSelfTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.binary;
import org.apache.ignite.cache.CacheAtomicityMode;
/**
* Cache EntryProcessor + Deployment for transactional cache.
*/
public class GridCacheBinaryTransactionalEntryProcessorDeploymentSelfTest extends
GridCacheBinaryAtomicEntryProcessorDeploymentSelfTest {
/** {@inheritDoc} */
@Override protected CacheAtomicityMode atomicityMode() {
return CacheAtomicityMode.TRANSACTIONAL;<|fim▁hole|>}<|fim▁end|>
|
}
|
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include <QtGui/QApplication><|fim▁hole|>#include "dialog.h"
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
Dialog w;
w.show();
return a.exec();
}<|fim▁end|>
| |
<|file_name|>securitygrouprule_fitask.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by ""fitask" -type=SecurityGroupRule"; DO NOT EDIT
package awstasks
import (
"encoding/json"
"k8s.io/kops/upup/pkg/fi"
)
// SecurityGroupRule
// JSON marshalling boilerplate
type realSecurityGroupRule SecurityGroupRule
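// The real* alias has the same fields but none of the methods, so decoding
// into it below avoids infinite recursion back into this UnmarshalJSON.
// A bare JSON string is accepted as shorthand for the name-only form.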
func (o *SecurityGroupRule) UnmarshalJSON(data []byte) error {
var jsonName string
if err := json.Unmarshal(data, &jsonName); err == nil {
o.Name = &jsonName
return nil
}
var r realSecurityGroupRule
if err := json.Unmarshal(data, &r); err != nil {
return err
}
*o = SecurityGroupRule(r)<|fim▁hole|>
func (e *SecurityGroupRule) GetName() *string {
return e.Name
}
func (e *SecurityGroupRule) SetName(name string) {
e.Name = &name
}
func (e *SecurityGroupRule) String() string {
return fi.TaskAsString(e)
}<|fim▁end|>
|
return nil
}
var _ fi.HasName = &SecurityGroupRule{}
|
<|file_name|>cube-portfolio-2-ns.js<|end_file_name|><|fim▁begin|>(function($, window, document, undefined) {
<|fim▁hole|> wrap, filtersCallback;
/*********************************
init cubeportfolio
*********************************/
gridContainer.cubeportfolio({
layoutMode: 'grid',
rewindNav: true,
scrollByPage: false,
defaultFilter: '*',
animationType: 'slideLeft',
gapHorizontal: 0,
gapVertical: 0,
gridAdjustment: 'responsive',
mediaQueries: [{
width: 800,
cols: 2
}, {
width: 500,
cols: 2
}, {
width: 320,
cols: 1
}],
caption: 'zoom',
displayType: 'lazyLoading',
displayTypeSpeed: 100
});
/*********************************
add listener for filters
*********************************/
if (filtersContainer.hasClass('cbp-l-filters-dropdown')) {
wrap = filtersContainer.find('.cbp-l-filters-dropdownWrap');
wrap.on({
'mouseover.cbp': function() {
wrap.addClass('cbp-l-filters-dropdownWrap-open');
},
'mouseleave.cbp': function() {
wrap.removeClass('cbp-l-filters-dropdownWrap-open');
}
});
filtersCallback = function(me) {
wrap.find('.cbp-filter-item').removeClass('cbp-filter-item-active');
wrap.find('.cbp-l-filters-dropdownHeader').text(me.text());
me.addClass('cbp-filter-item-active');
wrap.trigger('mouseleave.cbp');
};
} else {
filtersCallback = function(me) {
me.addClass('cbp-filter-item-active').siblings().removeClass('cbp-filter-item-active');
};
}
filtersContainer.on('click.cbp', '.cbp-filter-item', function() {
var me = $(this);
if (me.hasClass('cbp-filter-item-active')) {
return;
}
// get cubeportfolio data and check if is still animating (reposition) the items.
if (!$.data(gridContainer[0], 'cubeportfolio').isAnimating) {
filtersCallback.call(null, me);
}
// filter the items
gridContainer.cubeportfolio('filter', me.data('filter'), function() {});
});
/*********************************
activate counter for filters
*********************************/
gridContainer.cubeportfolio('showCounter', filtersContainer.find('.cbp-filter-item'), function() {
// read from url and change filter active
var match = /#cbpf=(.*?)([#|?&]|$)/gi.exec(location.href),
item;
if (match !== null) {
item = filtersContainer.find('.cbp-filter-item').filter('[data-filter="' + match[1] + '"]');
if (item.length) {
filtersCallback.call(null, item);
}
}
});
})(jQuery, window, document);<|fim▁end|>
|
'use strict';
var gridContainer = $('#grid-container'),
filtersContainer = $('#filters-container'),
|
<|file_name|>defines.js<|end_file_name|><|fim▁begin|>// -------------------------------------------------------------------------------
// OEA Original colors
// --------------------------------------------------------------------------------
export default {
blue: "#3299BB",
lightBlue: "#88D4E1",
lighterBlue: "#D6EBF1",
white: "#fff",<|fim▁hole|> backgroundGreen:"#d1f1eb",
darkGreen: "#1abc9c",
backgroundRed: "#f7c9c4",
red: "#E74C3C",
textGrey: "#333333",
buttonFace: "buttonface",
textColor: "#555",
linkColor: "#1abc9c",
// Colors
almost_black: "#424242",
firm: "#1abc9c",
gray: "#BCBCBC",
lightergrey: "#F3F3F3",
lightgray: "#E9E9E9",
inverse: "#fff",
// Kate Colors
kateDarkOrange: "rgb(25,153,0)",
kateOrange: "rgb(255,153,0)",
kateLightOrange: "rgb(255,173,51)",
kateLightBlue: "#A6F2FF",
// MIT OCW colors
mitocwRed: "#D20035",
mitocwDarkBrown: "#574B49",
mitocwMediumBrown: "#775C57",
mitocwLightBrown: "#E7E0DB",
mitocwDarkGray: "#363636",
mitocwLightGray: "#999",
// pony
ponyLightGray: "#EDEDED",
ponyDarkGray: "#BFBFBF",
ponyLightBlue: "#8FF1FF",
ponyLightPurple: "#770073",
ponyPink: "#FF002C",
ponyOrange: "#FF6728",
ponyYellow: "#FFF8B0",
ponyLightGreen: "#82D771",
ponyGreen: "#44C635",
ponyBlue: "#008DCF",
ponyPurple: "#A756A3",
// Lumen Learing
lumenSeafoam: "#108043",
lumenDarkGreen: "#003136",
lumenBlue: "#1e74d1",
lumenRed: "#ad4646",
// Twitter bootstrap overrides
navbarInverseBg: "#424242",
navbarInverseLinkColor: "#E9E9E9",
navbarInverseLinkHoverColor: "rgb(255,173,51)",
navbarInverseLinkHoverBg: "#424242",
navbarInverseLinkActiveColor: "rgb(255,173,51)",
navbarInverseLinkActiveBg: "#E9E9E9",
inputColor: "#4d4d4d",
}<|fim▁end|>
|
black: "#000",
backgroundGray: "#E9E9E9",
|
<|file_name|>_version.py<|end_file_name|><|fim▁begin|># Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package version for dm_memorytasks.
Kept in separate file so it can be used during installation.
"""
__version__ = '1.0.3' # https://www.python.org/dev/peps/pep-0440/<|fim▁end|>
|
#
# Unless required by applicable law or agreed to in writing, software
|
<|file_name|>jquery.booleanEditor.js<|end_file_name|><|fim▁begin|>// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* @package enrol_attributes
* @author Julien Furrer <[email protected]>
* @copyright 2012-2015 Université de Lausanne (@link http://www.unil.ch}
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
;
(function ($) {
$.booleanEditor = {
defaults: {
rules: [],
change: null
},
paramList: M.enrol_attributes.paramList,
operatorList: [
{label: " = ", value: "=="},
// {label: "=/=", value: "!="},
// {label: "contains", value: "contains"},
]
};
$.fn.extend({
booleanEditor: function (options) {
var isMethodCall = (typeof options == 'string'), // is it a method call or booleanEditor instantiation ?
args = Array.prototype.slice.call(arguments, 1);
if (isMethodCall) switch (options) {
case 'serialize':
var mode = ( args[0] ) ? args[0].mode : '',
ser_obj = serialize(this);
switch (mode) {
case 'json':
return $.toJSON(ser_obj);
break;
case 'object':
default:
return ser_obj;
}
break;
case 'getExpression':
return getBooleanExpression(this);
break;
default:
return;
}
settings = $.extend({}, $.booleanEditor.defaults, options);
return this.each(function () {
if (settings.change) {
$(this).data('change', settings.change)
}
$(this)
.addClass("enrol-attributes-boolean-editor")
.append(createRuleList($('<ul></ul>'), settings.rules));
changed(this);
});
}
});
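    // Walk the rule tree depth-first and build a nested object: each group
    // contributes {cond_op, rules: [...]}, each leaf condition contributes
    // {param, comp_op, value} (plus cond_op when it is not the first rule).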
function serialize(root_elem) {
var ser_obj = {rules: []};
var group_c_op = $("select:first[name='cond-operator']", root_elem).val();
if (group_c_op)
ser_obj.cond_op = group_c_op;
$("ul:first > li", root_elem).each(function () {
r = $(this);
if (r.hasClass('group')) {
ser_obj['rules'].push(serialize(this));
}
else {
var cond_obj = {
param: $("select[name='comparison-param'] option:selected", r).val(),
comp_op: $("select[name='comparison-operator']", r).val(),
value: $("input[name='value']", r).val()
};
var cond_op = $("select[name='cond-operator']", r).val();
if (cond_op)
cond_obj.cond_op = cond_op;
ser_obj['rules'].push(cond_obj);
}
});
return ser_obj;
}
function getBooleanExpression(editor) {
var expression = "";
$("ul:first > li", editor).each(function () {
r = $(this);
var c_op = $("select[name='cond-operator']", r).val();
if (c_op != undefined) c_op = '<span class="cond-op"> ' + c_op + ' </span>';
if (r.hasClass('group')) {
expression += c_op + '<span class="group-op group-group">(</span>' + getBooleanExpression(this) + '<span class="group-op group-group">)</span>';
}
else {
expression += [
c_op,
'<span class="group-op group-cond">(</span>',
'<span class="comp-param">' + $("select[name='comparison-param'] option:selected", r).text() + '</span>',
'<span class="comp-op"> ' + $("select[name='comparison-operator']", r).val() + ' </span>',
'<span class="comp-val">' + '\'' + $("input[name='value']", r).val() + '\'' + '</span>',
'<span class="group-op group-cond">)</span>'
].join("");
}
});
return expression;
}
function changed(o) {
$o = $(o);
if (!$o.hasClass('enrol-attributes-boolean-editor')) {
$o = $o.parents('.enrol-attributes-boolean-editor').eq(0);
}
if ($o.data('change')) {
$o.data('change').apply($o.get(0));
}
}
function createRuleList(list_elem, rules) {
//var list_elem = $(list_elem);
if (list_elem.parent("li").eq(0).hasClass("group")) {
console.log("inside a group");
return;
}
if (rules.length == 0) {
// No rules, create a new one
list_elem.append(getRuleConditionElement({first: true}));
} else {
// Read all rules
for (var r_idx = 0; r_idx < rules.length; r_idx++) {
var r = rules[r_idx];
r['first'] = (r_idx == 0);
// If the rule is an array, create a group of rules
if (r.rules && (typeof r.rules[0] == 'object')) {
r.group = true;
var rg = getRuleConditionElement(r);
list_elem.append(rg);
createRuleList($("ul:first", rg), r.rules);
}
else {
list_elem.append(getRuleConditionElement(r));
}
}
}
return list_elem;
};
<|fim▁hole|> function getRuleConditionElement(config) {
config = $.extend({},
{
first: false,
group: false,
cond_op: null,
param: null,
comp_op: null,
value: ''
},
config
);
// If group flag is set, wrap content with <ul></ul>, content is obtained by a recursive call
// to the function, passing a copy of config with flag group set to false
var cond_block_content = $('<div class="sre-condition-box"></div>');
if (config.group) {
cond_block_content.append('<ul></ul>');
} else {
cond_block_content
.append(makeSelectList({ // The list of parameters to be compared
name: 'comparison-param',
params: $.booleanEditor.paramList,
selected_value: config.param
}).addClass("comp-param"))
.append($('<span>').addClass("comp-op").text('='))
// .append( makeSelectList({ // The comparison operator
// name: 'comparison-operator',
// params: $.booleanEditor.operatorList,
// selected_value: config.comp_op
// }).addClass("comp-op"))
.append($('<input type="text" name="value"/>')
    .val(config.value) // set via .val() so quotes in the value cannot break the markup
    .change(function () {
        changed(this);
    })
); // The value of the comparison
}
var ruleConditionElement = $('<li></li>')
.addClass((config.group) ? 'group' : 'rule')
.append(createRuleOperatorSelect(config))
.append(cond_block_content)
.append(createButtonPannel());
return ruleConditionElement;
}
function createRuleOperatorSelect(config) {
return (config.first) ? '' :
makeSelectList({
'name': 'cond-operator',
params: [
{label: 'AND', value: 'and'},
{label: 'OR', value: 'or'}
],
selected_value: config.cond_op
}).addClass('sre-condition-rule-operator');
}
function createButtonPannel() {
var buttonPannel = $('<div class="button-pannel"></div>')
.append($('<button type="button" class="button-add-cond">'+ M.util.get_string('addcondition', 'enrol_attributes') +'</button>')
.click(function () {
addNewConditionAfter($(this).parents('li').get(0));
})
)
.append($('<button type="button" class="button-add-group">'+ M.util.get_string('addgroup', 'enrol_attributes') +'</button>')
.click(function () {
addNewGroupAfter($(this).parents('li').get(0));
})
)
.append($('<button type="button" class="button-del-cond">'+ M.util.get_string('deletecondition', 'enrol_attributes') +'</button>')
.click(function () {
deleteCondition($(this).parents('li').eq(0));
})
);
$('button', buttonPannel).each(function () {
$(this)
.focus(function () {
this.blur();
})
.attr("title", $(this).text())
.wrapInner('<span/>');
});
return buttonPannel;
}
function makeSelectList(config) {
config = $.extend({},
{
name: 'list_name',
params: [{label: 'label', value: 'value'}],
selected_value: null
},
config);
var selectList = $('<select name="' + config.name + '"></select>')
.change(function () {
changed(this);
});
$.each(config.params, function (i, p) {
var p_obj = $('<option></option>')
.attr({label: p.label, value: p.value})
.text(p.label);
if (p.value == config.selected_value) {
p_obj.attr("selected", "selected");
}
p_obj.appendTo(selectList);
});
return selectList;
}
//
// -->> Conditions manipulation <<--
//
function addNewConditionAfter(elem, config) {
getRuleConditionElement(config)
.hide()
.insertAfter(elem)
.fadeIn("normal", function () {
changed(elem);
});
}
function addNewGroupAfter(elem, config) {
getRuleConditionElement({group: true})
.hide()
.insertAfter(elem)
.find("ul:first")
.append(getRuleConditionElement($.extend({}, config, {first: true})))
.end()
.fadeIn("normal", function () {
changed(elem);
});
}
/*
 *
 * Delete a condition: also remove the parent group if this was its last
 * child; update the parent in every case.
 *
 */
function deleteCondition(elem) {
if (elem.parent().parent().hasClass('enrol-attributes-boolean-editor')) {
// Level 1
if (elem.siblings().length == 0) {
return;
}
} else {
// Higher level
if (elem.siblings().length == 0) {
// The last cond of the group, target the group itself, to be removed
elem = elem.parents('li').eq(0);
}
}
var p = elem.parent();
elem.fadeOut("normal", function () {
$(this).remove();
$("li:first .sre-condition-rule-operator", ".enrol-attributes-boolean-editor ul").remove();
changed(p);
});
}
})(jQuery);<|fim▁end|>
|
/**
* Build the HTML code for editing a rule condition.
* A rule is composed of one or more rule conditions linked by boolean operators
*/
|
<|file_name|>books.js<|end_file_name|><|fim▁begin|>"use strict";
module.exports = function (app) {
app.route('/book')
.get(function (req, res) {
res.send('Get a random book');<|fim▁hole|> res.send('Add a book');
})
.put(function (req, res) {
res.send('Update the book');
});
}<|fim▁end|>
|
})
.post(function (req, res) {
|
<|file_name|>tensor.rs<|end_file_name|><|fim▁begin|>use std::ffi::CString;
use std::fmt;
use std::ptr::{self, NonNull};
use dynet_sys;
use super::{ApiResult, Dim, Result, Wrap};
/// A struct to represent a tensor.
///
/// # Examples
///
/// ```
/// # use dynet::{DynetParams, ParameterCollection, ParameterInitGlorot};
/// dynet::initialize(&mut DynetParams::from_args(false));
///
/// let mut m = ParameterCollection::new();
///
/// let initializer = ParameterInitGlorot::default();
/// let mut p_W = m.add_parameters([8, 2], &initializer);
/// let t_W = p_W.values();
/// println!("parameter W: dim={}, values=\n[\n{}\n]", t_W.dim(), t_W);
/// let v_W = t_W.as_vector();
/// ```
#[derive(Debug)]
pub struct Tensor {
inner: NonNull<dynet_sys::dynetTensor_t>,
owned: bool,
}
impl_wrap!(Tensor, dynetTensor_t);
impl_drop!(Tensor, dynetDeleteTensor);
impl Tensor {
/// Returns the dim of the tensor.
pub fn dim(&self) -> Dim {
unsafe {
let mut dim_ptr: *mut dynet_sys::dynetDim_t = ptr::null_mut();
check_api_status!(dynet_sys::dynetGetTensorDim(self.as_ptr(), &mut dim_ptr));
Dim::from_raw(dim_ptr, true)
}
}
/// Retrieves one internal value in the tensor.
///
/// # Panics
///
/// Panics if the tensor has more than one element.
pub fn as_scalar(&self) -> f32 {
unsafe {
let mut retval: f32 = 0.0;
check_api_status!(dynet_sys::dynetEvaluateTensorAsScalar(
self.as_ptr(),
&mut retval,
));
retval
}
}
/// Retrieves internal values in the tensor as a vector.
///
/// For higher order tensors this returns the flattened value.
pub fn as_vector(&self) -> Vec<f32> {
unsafe {
let mut size: usize = 0;
check_api_status!(dynet_sys::dynetEvaluateTensorAsArray(
self.as_ptr(),
ptr::null_mut(),
&mut size,
));
let mut retval = vec![0f32; size];
check_api_status!(dynet_sys::dynetEvaluateTensorAsArray(<|fim▁hole|> self.as_ptr(),
retval.as_mut_ptr(),
&mut size,
));
retval
}
}
}
impl fmt::Display for Tensor {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
unsafe {
let mut size: usize = 0;
check_api_status!(dynet_sys::dynetRepresentTensorAsString(
self.as_ptr(),
ptr::null_mut(),
&mut size,
));
let buffer = CString::new(vec![b'0'; size]).unwrap().into_raw();
check_api_status!(dynet_sys::dynetRepresentTensorAsString(
self.as_ptr(),
buffer,
&mut size,
));
f.write_str(CString::from_raw(buffer).to_str().unwrap())
}
}
}<|fim▁end|>
| |
<|file_name|>add-contributor.js<|end_file_name|><|fim▁begin|>import path from 'path'
import inquirer from 'inquirer'
import downloadTwitterPhoto from './utils/download-twitter-photo'
inquirer.prompt([<|fim▁hole|> type: 'input',
message: 'Twitter handle?',
},
]).then(({twitter}) => {
const destinationPath = path.join(process.cwd(), 'data/contributors')
downloadTwitterPhoto(twitter, destinationPath)
})<|fim▁end|>
|
{
name: 'twitter',
|
<|file_name|>confirmations.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Types used in Confirmations queue (Trusted Signer)
use std::fmt;
use serde::{Serialize, Serializer};
use v1::types::{U256, TransactionRequest, RichRawTransaction, H160, H256, H520, Bytes};
use v1::helpers;
/// Confirmation waiting in a queue
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize)]
pub struct ConfirmationRequest {
/// Id of this confirmation
pub id: U256,
/// Payload
pub payload: ConfirmationPayload,
}
impl From<helpers::ConfirmationRequest> for ConfirmationRequest {
fn from(c: helpers::ConfirmationRequest) -> Self {
ConfirmationRequest {
id: c.id.into(),
payload: c.payload.into(),
}
}
}
/// Sign request
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize)]
pub struct SignRequest {
/// Address
pub address: H160,
/// Hash to sign
pub hash: H256,
}
impl From<(H160, H256)> for SignRequest {
fn from(tuple: (H160, H256)) -> Self {
SignRequest {
address: tuple.0,
hash: tuple.1,
}
}
}
/// Decrypt request
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize)]
pub struct DecryptRequest {
/// Address
pub address: H160,
/// Message to decrypt
pub msg: Bytes,
}
impl From<(H160, Bytes)> for DecryptRequest {
fn from(tuple: (H160, Bytes)) -> Self {
DecryptRequest {
address: tuple.0,
msg: tuple.1,
}
}
}
/// Confirmation response for particular payload<|fim▁hole|> /// Transaction Hash
SendTransaction(H256),
/// Transaction RLP
SignTransaction(RichRawTransaction),
/// Signature
Signature(H520),
/// Decrypted data
Decrypt(Bytes),
}
impl Serialize for ConfirmationResponse {
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
where S: Serializer
{
match *self {
ConfirmationResponse::SendTransaction(ref hash) => hash.serialize(serializer),
ConfirmationResponse::SignTransaction(ref rlp) => rlp.serialize(serializer),
ConfirmationResponse::Signature(ref signature) => signature.serialize(serializer),
ConfirmationResponse::Decrypt(ref data) => data.serialize(serializer),
}
}
}
/// Confirmation payload, i.e. the thing to be confirmed
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize)]
pub enum ConfirmationPayload {
/// Send Transaction
#[serde(rename="transaction")]
SendTransaction(TransactionRequest),
/// Sign Transaction
#[serde(rename="transaction")]
SignTransaction(TransactionRequest),
/// Signature
#[serde(rename="sign")]
Signature(SignRequest),
/// Decryption
#[serde(rename="decrypt")]
Decrypt(DecryptRequest),
}
impl From<helpers::ConfirmationPayload> for ConfirmationPayload {
fn from(c: helpers::ConfirmationPayload) -> Self {
match c {
helpers::ConfirmationPayload::SendTransaction(t) => ConfirmationPayload::SendTransaction(t.into()),
helpers::ConfirmationPayload::SignTransaction(t) => ConfirmationPayload::SignTransaction(t.into()),
helpers::ConfirmationPayload::Signature(address, hash) => ConfirmationPayload::Signature(SignRequest {
address: address.into(),
hash: hash.into(),
}),
helpers::ConfirmationPayload::Decrypt(address, msg) => ConfirmationPayload::Decrypt(DecryptRequest {
address: address.into(),
msg: msg.into(),
}),
}
}
}
/// Possible modifications to the confirmed transaction sent by `Trusted Signer`
#[derive(Debug, PartialEq, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct TransactionModification {
/// Modified gas price
#[serde(rename="gasPrice")]
pub gas_price: Option<U256>,
}
/// Represents two possible return values.
#[derive(Debug, Clone)]
pub enum Either<A, B> where
A: fmt::Debug + Clone,
B: fmt::Debug + Clone,
{
/// Primary value
Either(A),
/// Secondary value
Or(B),
}
impl<A, B> From<A> for Either<A, B> where
A: fmt::Debug + Clone,
B: fmt::Debug + Clone,
{
fn from(a: A) -> Self {
Either::Either(a)
}
}
impl<A, B> Serialize for Either<A, B> where
A: Serialize + fmt::Debug + Clone,
B: Serialize + fmt::Debug + Clone,
{
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
where S: Serializer
{
match *self {
Either::Either(ref a) => a.serialize(serializer),
Either::Or(ref b) => b.serialize(serializer),
}
}
}
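// Example (illustrative types): because `From<A>` is implemented,
// `Either::<H256, Bytes>::from(hash)` works, and it serializes exactly like a
// bare `H256`; callers see one of the two payload shapes, never a wrapper.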
#[cfg(test)]
mod tests {
use std::str::FromStr;
use serde_json;
use v1::types::U256;
use v1::helpers;
use super::*;
#[test]
fn should_serialize_sign_confirmation() {
// given
let request = helpers::ConfirmationRequest {
id: 15.into(),
payload: helpers::ConfirmationPayload::Signature(1.into(), 5.into()),
};
// when
let res = serde_json::to_string(&ConfirmationRequest::from(request));
let expected = r#"{"id":"0xf","payload":{"sign":{"address":"0x0000000000000000000000000000000000000001","hash":"0x0000000000000000000000000000000000000000000000000000000000000005"}}}"#;
// then
assert_eq!(res.unwrap(), expected.to_owned());
}
#[test]
fn should_serialize_transaction_confirmation() {
// given
let request = helpers::ConfirmationRequest {
id: 15.into(),
payload: helpers::ConfirmationPayload::SendTransaction(helpers::FilledTransactionRequest {
from: 0.into(),
to: None,
gas: 15_000.into(),
gas_price: 10_000.into(),
value: 100_000.into(),
data: vec![1, 2, 3],
nonce: Some(1.into()),
}),
};
// when
let res = serde_json::to_string(&ConfirmationRequest::from(request));
let expected = r#"{"id":"0xf","payload":{"transaction":{"from":"0x0000000000000000000000000000000000000000","to":null,"gasPrice":"0x2710","gas":"0x3a98","value":"0x186a0","data":"0x010203","nonce":"0x1"}}}"#;
// then
assert_eq!(res.unwrap(), expected.to_owned());
}
#[test]
fn should_deserialize_modification() {
// given
let s1 = r#"{
"gasPrice":"0xba43b7400"
}"#;
let s2 = r#"{}"#;
// when
let res1: TransactionModification = serde_json::from_str(s1).unwrap();
let res2: TransactionModification = serde_json::from_str(s2).unwrap();
// then
assert_eq!(res1, TransactionModification {
gas_price: Some(U256::from_str("0ba43b7400").unwrap()),
});
assert_eq!(res2, TransactionModification {
gas_price: None,
});
}
}<|fim▁end|>
|
#[derive(Debug, Clone, PartialEq)]
pub enum ConfirmationResponse {
|
<|file_name|>sparse_nndescent.py<|end_file_name|><|fim▁begin|># Author: Leland McInnes <[email protected]>
# Enough simple sparse operations in numba to enable sparse UMAP
#
# License: BSD 3 clause
from __future__ import print_function
import locale
import numpy as np
import numba
from pynndescent.utils import (
tau_rand_int,
make_heap,
new_build_candidates,
deheap_sort,
checked_flagged_heap_push,
apply_graph_updates_high_memory,
apply_graph_updates_low_memory,
)
from pynndescent.sparse import sparse_euclidean
locale.setlocale(locale.LC_NUMERIC, "C")
EMPTY_GRAPH = make_heap(1, 1)
@numba.njit(parallel=True, cache=True)
def generate_leaf_updates(leaf_block, dist_thresholds, inds, indptr, data, dist):
updates = [[(-1, -1, np.inf)] for i in range(leaf_block.shape[0])]
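    # The (-1, -1, inf) sentinel pins the element type of each inner list for
    # numba's type inference; consumers skip entries whose index is -1.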
for n in numba.prange(leaf_block.shape[0]):
for i in range(leaf_block.shape[1]):
p = leaf_block[n, i]
if p < 0:
break
for j in range(i + 1, leaf_block.shape[1]):
q = leaf_block[n, j]
if q < 0:
break
from_inds = inds[indptr[p] : indptr[p + 1]]
from_data = data[indptr[p] : indptr[p + 1]]
to_inds = inds[indptr[q] : indptr[q + 1]]
to_data = data[indptr[q] : indptr[q + 1]]
d = dist(from_inds, from_data, to_inds, to_data)
if d < dist_thresholds[p] or d < dist_thresholds[q]:
updates[n].append((p, q, d))
return updates
@numba.njit(locals={"d": numba.float32, "p": numba.int32, "q": numba.int32}, cache=True)
def init_rp_tree(inds, indptr, data, dist, current_graph, leaf_array):
n_leaves = leaf_array.shape[0]
block_size = 65536
n_blocks = n_leaves // block_size
for i in range(n_blocks + 1):
block_start = i * block_size
block_end = min(n_leaves, (i + 1) * block_size)
leaf_block = leaf_array[block_start:block_end]
dist_thresholds = current_graph[1][:, 0]
updates = generate_leaf_updates(
leaf_block, dist_thresholds, inds, indptr, data, dist
)
for j in range(len(updates)):
for k in range(len(updates[j])):
p, q, d = updates[j][k]
if p == -1 or q == -1:
continue
checked_flagged_heap_push(
current_graph[1][p],
current_graph[0][p],
current_graph[2][p],
d,
q,
np.uint8(1),
)
checked_flagged_heap_push(
current_graph[1][q],
current_graph[0][q],
current_graph[2][q],
d,
p,
np.uint8(1),
)
@numba.njit(
fastmath=True,
locals={"d": numba.float32, "i": numba.int32, "idx": numba.int32},
cache=True,
)
def init_random(n_neighbors, inds, indptr, data, heap, dist, rng_state):
n_samples = indptr.shape[0] - 1
for i in range(n_samples):
if heap[0][i, 0] < 0.0:
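            # This node still has unfilled heap slots: pad with randomly drawn
            # candidates so every node starts with n_neighbors entries.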
for j in range(n_neighbors - np.sum(heap[0][i] >= 0.0)):
idx = np.abs(tau_rand_int(rng_state)) % n_samples
from_inds = inds[indptr[idx] : indptr[idx + 1]]
from_data = data[indptr[idx] : indptr[idx + 1]]
to_inds = inds[indptr[i] : indptr[i + 1]]
to_data = data[indptr[i] : indptr[i + 1]]
d = dist(from_inds, from_data, to_inds, to_data)
checked_flagged_heap_push(
heap[1][i], heap[0][i], heap[2][i], d, idx, np.uint8(1)
)
return
@numba.njit(parallel=True, cache=True)
def generate_graph_updates(
new_candidate_block, old_candidate_block, dist_thresholds, inds, indptr, data, dist
):
block_size = new_candidate_block.shape[0]
updates = [[(-1, -1, np.inf)] for i in range(block_size)]
max_candidates = new_candidate_block.shape[1]
for i in numba.prange(block_size):
for j in range(max_candidates):
p = int(new_candidate_block[i, j])
if p < 0:
continue
for k in range(j, max_candidates):
q = int(new_candidate_block[i, k])
if q < 0:
continue
from_inds = inds[indptr[p] : indptr[p + 1]]
from_data = data[indptr[p] : indptr[p + 1]]
to_inds = inds[indptr[q] : indptr[q + 1]]
to_data = data[indptr[q] : indptr[q + 1]]
d = dist(from_inds, from_data, to_inds, to_data)
if d <= dist_thresholds[p] or d <= dist_thresholds[q]:
updates[i].append((p, q, d))
for k in range(max_candidates):
q = int(old_candidate_block[i, k])
if q < 0:
continue
from_inds = inds[indptr[p] : indptr[p + 1]]
from_data = data[indptr[p] : indptr[p + 1]]
to_inds = inds[indptr[q] : indptr[q + 1]]
to_data = data[indptr[q] : indptr[q + 1]]
d = dist(from_inds, from_data, to_inds, to_data)
if d <= dist_thresholds[p] or d <= dist_thresholds[q]:
updates[i].append((p, q, d))
return updates
@numba.njit()
def nn_descent_internal_low_memory_parallel(
current_graph,
inds,
indptr,
data,
n_neighbors,
rng_state,
max_candidates=50,
dist=sparse_euclidean,
n_iters=10,
delta=0.001,
verbose=False,
):
n_vertices = indptr.shape[0] - 1
block_size = 16384
n_blocks = n_vertices // block_size
n_threads = numba.get_num_threads()
for n in range(n_iters):
if verbose:
print("\t", n + 1, " / ", n_iters)
(new_candidate_neighbors, old_candidate_neighbors) = new_build_candidates(
current_graph, max_candidates, rng_state, n_threads
)
c = 0
for i in range(n_blocks + 1):
block_start = i * block_size
block_end = min(n_vertices, (i + 1) * block_size)
new_candidate_block = new_candidate_neighbors[block_start:block_end]
old_candidate_block = old_candidate_neighbors[block_start:block_end]
dist_thresholds = current_graph[1][:, 0]
updates = generate_graph_updates(
new_candidate_block,
old_candidate_block,
dist_thresholds,
inds,
indptr,
data,
dist,
)
c += apply_graph_updates_low_memory(current_graph, updates, n_threads)
if c <= delta * n_neighbors * n_vertices:
if verbose:
print("\tStopping threshold met -- exiting after", n + 1, "iterations")
return
@numba.njit()
def nn_descent_internal_high_memory_parallel(
current_graph,
inds,
indptr,
data,
n_neighbors,
rng_state,
max_candidates=50,
dist=sparse_euclidean,
n_iters=10,
delta=0.001,
verbose=False,
):
n_vertices = indptr.shape[0] - 1
block_size = 16384
n_blocks = n_vertices // block_size
n_threads = numba.get_num_threads()<|fim▁hole|> in_graph = [
set(current_graph[0][i].astype(np.int64))
for i in range(current_graph[0].shape[0])
]
for n in range(n_iters):
if verbose:
print("\t", n + 1, " / ", n_iters)
(new_candidate_neighbors, old_candidate_neighbors) = new_build_candidates(
current_graph, max_candidates, rng_state, n_threads
)
c = 0
for i in range(n_blocks + 1):
block_start = i * block_size
block_end = min(n_vertices, (i + 1) * block_size)
new_candidate_block = new_candidate_neighbors[block_start:block_end]
old_candidate_block = old_candidate_neighbors[block_start:block_end]
dist_thresholds = current_graph[1][:, 0]
updates = generate_graph_updates(
new_candidate_block,
old_candidate_block,
dist_thresholds,
inds,
indptr,
data,
dist,
)
c += apply_graph_updates_high_memory(current_graph, updates, in_graph)
if c <= delta * n_neighbors * n_vertices:
if verbose:
print("\tStopping threshold met -- exiting after", n + 1, "iterations")
return
@numba.njit()
def nn_descent(
inds,
indptr,
data,
n_neighbors,
rng_state,
max_candidates=50,
dist=sparse_euclidean,
n_iters=10,
delta=0.001,
init_graph=EMPTY_GRAPH,
rp_tree_init=True,
leaf_array=None,
low_memory=False,
verbose=False,
):
n_samples = indptr.shape[0] - 1
if init_graph[0].shape[0] == 1: # EMPTY_GRAPH
current_graph = make_heap(n_samples, n_neighbors)
if rp_tree_init:
init_rp_tree(inds, indptr, data, dist, current_graph, leaf_array)
init_random(n_neighbors, inds, indptr, data, current_graph, dist, rng_state)
elif init_graph[0].shape[0] == n_samples and init_graph[0].shape[1] == n_neighbors:
current_graph = init_graph
else:
raise ValueError("Invalid initial graph specified!")
if low_memory:
nn_descent_internal_low_memory_parallel(
current_graph,
inds,
indptr,
data,
n_neighbors,
rng_state,
max_candidates=max_candidates,
dist=dist,
n_iters=n_iters,
delta=delta,
verbose=verbose,
)
else:
nn_descent_internal_high_memory_parallel(
current_graph,
inds,
indptr,
data,
n_neighbors,
rng_state,
max_candidates=max_candidates,
dist=dist,
n_iters=n_iters,
delta=delta,
verbose=verbose,
)
return deheap_sort(current_graph[0], current_graph[1])<|fim▁end|>
| |
<|file_name|>isolatedconnectedimagefilter.cpp<|end_file_name|><|fim▁begin|>/***********************************************************************************
* *
* Voreen - The Volume Rendering Engine *
* *
* Copyright (C) 2005-2013 University of Muenster, Germany. *
* Visualization and Computer Graphics Group <http://viscg.uni-muenster.de> *
* For a list of authors please refer to the file "CREDITS.txt". *
* *
* This file is part of the Voreen software package. Voreen is free software: *
* you can redistribute it and/or modify it under the terms of the GNU General *
* Public License version 2 as published by the Free Software Foundation. *
* *
* Voreen is distributed in the hope that it will be useful, but WITHOUT ANY *
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR *
* A PARTICULAR PURPOSE. See the GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License in the file *
* "LICENSE.txt" along with this file. If not, see <http://www.gnu.org/licenses/>. *
* *
* For non-commercial academic use see the license exception specified in the file *
* "LICENSE-academic.txt". To get information about commercial licensing please *
* contact the authors. *<|fim▁hole|> ***********************************************************************************/
#include "isolatedconnectedimagefilter.h"
#include "voreen/core/datastructures/volume/volumeram.h"
#include "voreen/core/datastructures/volume/volume.h"
#include "voreen/core/datastructures/volume/volumeatomic.h"
#include "voreen/core/ports/conditions/portconditionvolumetype.h"
#include "modules/itk/utils/itkwrapper.h"
#include "voreen/core/datastructures/volume/operators/volumeoperatorconvert.h"
#include "itkImage.h"
#include "itkIsolatedConnectedImageFilter.h"
#include <iostream>
namespace voreen {
const std::string IsolatedConnectedImageFilterITK::loggerCat_("voreen.IsolatedConnectedImageFilterITK");
IsolatedConnectedImageFilterITK::IsolatedConnectedImageFilterITK()
: ITKProcessor(),
inport1_(Port::INPORT, "InputImage"),
outport1_(Port::OUTPORT, "OutputImage"),
seedPointPort1_(Port::INPORT, "seedPointInput1"),
seedPointPort2_(Port::INPORT, "seedPointInput2"),
enableProcessing_("enabled", "Enable", false),
replaceValue_("replaceValue", "ReplaceValue"),
isolatedValueTolerance_("isolatedValueTolerance", "IsolatedValueTolerance"),
upper_("upper", "Upper"),
lower_("lower", "Lower"),
findUpperThreshold_("findUpperThreshold", "FindUpperThreshold", false)
{
addPort(inport1_);
PortConditionLogicalOr* orCondition1 = new PortConditionLogicalOr();
orCondition1->addLinkedCondition(new PortConditionVolumeTypeUInt8());
orCondition1->addLinkedCondition(new PortConditionVolumeTypeInt8());
orCondition1->addLinkedCondition(new PortConditionVolumeTypeUInt16());
orCondition1->addLinkedCondition(new PortConditionVolumeTypeInt16());
orCondition1->addLinkedCondition(new PortConditionVolumeTypeUInt32());
orCondition1->addLinkedCondition(new PortConditionVolumeTypeInt32());
orCondition1->addLinkedCondition(new PortConditionVolumeTypeFloat());
orCondition1->addLinkedCondition(new PortConditionVolumeTypeDouble());
inport1_.addCondition(orCondition1);
addPort(outport1_);
addPort(seedPointPort1_);
addPort(seedPointPort2_);
addProperty(enableProcessing_);
addProperty(replaceValue_);
addProperty(isolatedValueTolerance_);
addProperty(upper_);
addProperty(lower_);
addProperty(findUpperThreshold_);
}
Processor* IsolatedConnectedImageFilterITK::create() const {
return new IsolatedConnectedImageFilterITK();
}
template<class T>
void IsolatedConnectedImageFilterITK::isolatedConnectedImageFilterITK() {
replaceValue_.setVolume(inport1_.getData());
isolatedValueTolerance_.setVolume(inport1_.getData());
upper_.setVolume(inport1_.getData());
lower_.setVolume(inport1_.getData());
if (!enableProcessing_.get()) {
outport1_.setData(inport1_.getData(), false);
return;
}
typedef itk::Image<T, 3> InputImageType1;
typedef itk::Image<T, 3> OutputImageType1;
typename InputImageType1::Pointer p1 = voreenToITK<T>(inport1_.getData());
//Filter define
typedef itk::IsolatedConnectedImageFilter<InputImageType1, OutputImageType1> FilterType;
typename FilterType::Pointer filter = FilterType::New();
filter->SetInput(p1);
if (seedPointPort1_.hasChanged()) {
const PointListGeometry<tgt::vec3>* pointList1 = dynamic_cast< const PointListGeometry<tgt::vec3>* >(seedPointPort1_.getData());
if (pointList1) {
seedPoints1 = pointList1->getData();
}
}
filter->ClearSeeds1();
typename InputImageType1::IndexType seed1;
for (size_t i = 0; i < seedPoints1.size(); i++) {
seed1[0] = seedPoints1[i].x;
seed1[1] = seedPoints1[i].y;
seed1[2] = seedPoints1[i].z;
filter->AddSeed1(seed1);
}
if (seedPointPort2_.hasChanged()) {
const PointListGeometry<tgt::vec3>* pointList2 = dynamic_cast< const PointListGeometry<tgt::vec3>* >(seedPointPort2_.getData());
if (pointList2) {
seedPoints2 = pointList2->getData();
}
}
filter->ClearSeeds2();
typename InputImageType1::IndexType seed2;
for (size_t i = 0; i < seedPoints2.size(); i++) {
seed2[0] = seedPoints2[i].x;
seed2[1] = seedPoints2[i].y;
seed2[2] = seedPoints2[i].z;
filter->AddSeed2(seed2);
}
filter->SetReplaceValue(replaceValue_.getValue<T>());
filter->SetIsolatedValueTolerance(isolatedValueTolerance_.getValue<T>());
filter->SetUpper(upper_.getValue<T>());
filter->SetLower(lower_.getValue<T>());
filter->SetFindUpperThreshold(findUpperThreshold_.get());
observe(filter.GetPointer());
try
{
filter->Update();
}
catch (itk::ExceptionObject &e)
{
LERROR(e);
}
Volume* outputVolume1 = 0;
outputVolume1 = ITKToVoreenCopy<T>(filter->GetOutput());
if (outputVolume1) {
transferRWM(inport1_.getData(), outputVolume1);
transferTransformation(inport1_.getData(), outputVolume1);
outport1_.setData(outputVolume1);
} else
outport1_.setData(0);
}
void IsolatedConnectedImageFilterITK::process() {
const VolumeBase* inputHandle1 = inport1_.getData();
const VolumeRAM* inputVolume1 = inputHandle1->getRepresentation<VolumeRAM>();
if (dynamic_cast<const VolumeRAM_UInt8*>(inputVolume1)) {
isolatedConnectedImageFilterITK<uint8_t>();
}
else if (dynamic_cast<const VolumeRAM_Int8*>(inputVolume1)) {
isolatedConnectedImageFilterITK<int8_t>();
}
else if (dynamic_cast<const VolumeRAM_UInt16*>(inputVolume1)) {
isolatedConnectedImageFilterITK<uint16_t>();
}
else if (dynamic_cast<const VolumeRAM_Int16*>(inputVolume1)) {
isolatedConnectedImageFilterITK<int16_t>();
}
else if (dynamic_cast<const VolumeRAM_UInt32*>(inputVolume1)) {
isolatedConnectedImageFilterITK<uint32_t>();
}
else if (dynamic_cast<const VolumeRAM_Int32*>(inputVolume1)) {
isolatedConnectedImageFilterITK<int32_t>();
}
else if (dynamic_cast<const VolumeRAM_Float*>(inputVolume1)) {
isolatedConnectedImageFilterITK<float>();
}
else if (dynamic_cast<const VolumeRAM_Double*>(inputVolume1)) {
isolatedConnectedImageFilterITK<double>();
}
else {
LERROR("Inputformat of Volume 1 is not supported!");
}
}
} // namespace<|fim▁end|>
|
* *
|
<|file_name|>client.js<|end_file_name|><|fim▁begin|>var PROPS = injectProps;
var hookOnReload = injectOnReload;
const io = require('socket.io-client');
const socket = io(`http://127.0.0.1:${PROPS.port}`);
const { id: ext_id } = chrome.runtime;
const onReload = (query = {}, cb) => {
query = Object.assign({
url: [`chrome-extension://${ext_id}/*`]
}, query);
chrome.tabs.query(query, (tabs) => {
cb(tabs);
hookOnReload();
chrome.runtime.reload();
});
};
const onReopen = (tabs = [], cb) => {
tabs.forEach((tab) => {
var { windowId, index, url, active, pinned, openerTabId } = tab;
var new_tab = { windowId, index, url, active, pinned, openerTabId };<|fim▁hole|>}
socket.on('reload', onReload);
socket.on('reopen', onReopen);<|fim▁end|>
|
chrome.tabs.create(new_tab);
});
cb(tabs);
|
<|file_name|>logged-in-user.ts<|end_file_name|><|fim▁begin|>import { autoinject } from 'aurelia-framework';
import { EventAggregator } from 'aurelia-event-aggregator';
import { AuthService } from 'aurelia-authentication';
import { UserEntity } from './user-entity';
@autoinject()
export class LoggedInUser {
isLoggedIn: boolean = false;
user: UserEntity;
constructor(private eventAggregator: EventAggregator,
private authService: AuthService) {
this.isLoggedIn = this.authService.isAuthenticated();
this.eventAggregator.subscribe('authentication-change', this.authStateChanged.bind(this));
// Fetch logged in user data on object construction,
// user may already be logged in but browser refresh
// cleared up all data. Normally, this is set from
// sign-in.js view model.
if (this.isLoggedIn === true) {
this.authService.getMe()
.then((user: UserEntity) => {
this.user = user;
})
.catch((error: any) => {
// TODO: add logger
});
}
}
authStateChanged(authenticated: boolean): void {
this.isLoggedIn = authenticated;
if (authenticated === false) {
this.user = null;
}
}<|fim▁hole|>}<|fim▁end|>
| |
<|file_name|>WrapTextTwoTone.js<|end_file_name|><|fim▁begin|>import React from 'react';
import createSvgIcon from './utils/createSvgIcon';<|fim▁hole|> <path d="M4 17h6v2H4zm13-6H4v2h13.25c1.1 0 2 .9 2 2s-.9 2-2 2H15v-2l-3 3 3 3v-2h2c2.21 0 4-1.79 4-4s-1.79-4-4-4zM4 5h16v2H4z" />
, 'WrapTextTwoTone');<|fim▁end|>
|
export default createSvgIcon(
|
<|file_name|>EncodingUtil.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vfs.encoding;
import com.intellij.AppTopics;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileDocumentManagerAdapter;
import com.intellij.openapi.fileEditor.impl.LoadTextUtil;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectLocator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.messages.MessageBusConnection;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
<|fim▁hole|>public class EncodingUtil {
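  // Rough semantics (see isSafeToReloadIn / isSafeToConvertTo below):
  // ABSOLUTELY = fully lossless, WELL_IF_YOU_INSIST = round-trips with some
  // observable change, NO_WAY = would corrupt or misrepresent the file.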
enum Magic8 {
ABSOLUTELY,
WELL_IF_YOU_INSIST,
NO_WAY
}
// check if file can be loaded in the encoding correctly:
// returns true if bytes on disk, converted to text with the charset, converted back to bytes matched
static Magic8 isSafeToReloadIn(@NotNull VirtualFile virtualFile, @NotNull String text, @NotNull byte[] bytes, @NotNull Charset charset) {
// file has BOM but the charset hasn't
byte[] bom = virtualFile.getBOM();
if (bom != null && !CharsetToolkit.canHaveBom(charset, bom)) return Magic8.NO_WAY;
// the charset has mandatory BOM (e.g. UTF-xx) but the file hasn't or has wrong
byte[] mandatoryBom = CharsetToolkit.getMandatoryBom(charset);
if (mandatoryBom != null && !ArrayUtil.startsWith(bytes, mandatoryBom)) return Magic8.NO_WAY;
String loaded = LoadTextUtil.getTextByBinaryPresentation(bytes, charset).toString();
String separator = FileDocumentManager.getInstance().getLineSeparator(virtualFile, null);
String toSave = StringUtil.convertLineSeparators(loaded, separator);
String failReason = LoadTextUtil.wasCharsetDetectedFromBytes(virtualFile);
if (failReason != null && CharsetToolkit.UTF8_CHARSET.equals(virtualFile.getCharset()) && !CharsetToolkit.UTF8_CHARSET.equals(charset)) {
return Magic8.NO_WAY; // can't reload utf8-autodetected file in another charset
}
byte[] bytesToSave;
try {
bytesToSave = toSave.getBytes(charset);
}
catch (UnsupportedOperationException e) {
return Magic8.NO_WAY;
}
if (bom != null && !ArrayUtil.startsWith(bytesToSave, bom)) {
bytesToSave = ArrayUtil.mergeArrays(bom, bytesToSave); // for 2-byte encodings String.getBytes(Charset) adds BOM automatically
}
return !Arrays.equals(bytesToSave, bytes) ? Magic8.NO_WAY : loaded.equals(text) ? Magic8.ABSOLUTELY : Magic8.WELL_IF_YOU_INSIST;
}
static Magic8 isSafeToConvertTo(@NotNull VirtualFile virtualFile, @NotNull String text, @NotNull byte[] bytesOnDisk, @NotNull Charset charset) {
try {
String lineSeparator = FileDocumentManager.getInstance().getLineSeparator(virtualFile, null);
String textToSave = lineSeparator.equals("\n") ? text : StringUtil.convertLineSeparators(text, lineSeparator);
Pair<Charset, byte[]> chosen = LoadTextUtil.chooseMostlyHarmlessCharset(virtualFile.getCharset(), charset, textToSave);
byte[] saved = chosen.second;
CharSequence textLoadedBack = LoadTextUtil.getTextByBinaryPresentation(saved, charset);
return !text.equals(textLoadedBack.toString()) ? Magic8.NO_WAY : Arrays.equals(saved, bytesOnDisk) ? Magic8.ABSOLUTELY : Magic8.WELL_IF_YOU_INSIST;
}
catch (UnsupportedOperationException e) { // unsupported encoding
return Magic8.NO_WAY;
}
}
static void saveIn(@NotNull final Document document,
final Editor editor,
@NotNull final VirtualFile virtualFile,
@NotNull final Charset charset) {
FileDocumentManager documentManager = FileDocumentManager.getInstance();
documentManager.saveDocument(document);
final Project project = ProjectLocator.getInstance().guessProjectForFile(virtualFile);
boolean writable = project == null ? virtualFile.isWritable() : ReadonlyStatusHandler.ensureFilesWritable(project, virtualFile);
if (!writable) {
CommonRefactoringUtil.showErrorHint(project, editor, "Cannot save the file " + virtualFile.getPresentableUrl(), "Unable to Save", null);
return;
}
// first, save the file in the new charset and then mark the file as having the correct encoding
try {
ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<Object, IOException>() {
@Override
public Object compute() throws IOException {
virtualFile.setCharset(charset);
LoadTextUtil.write(project, virtualFile, virtualFile, document.getText(), document.getModificationStamp());
return null;
}
});
}
catch (IOException io) {
Messages.showErrorDialog(project, io.getMessage(), "Error Writing File");
}
EncodingProjectManagerImpl.suppressReloadDuring(() -> EncodingManager.getInstance().setEncoding(virtualFile, charset));
}
static void reloadIn(@NotNull final VirtualFile virtualFile, @NotNull final Charset charset) {
final FileDocumentManager documentManager = FileDocumentManager.getInstance();
//Project project = ProjectLocator.getInstance().guessProjectForFile(myFile);
//if (documentManager.isFileModified(myFile)) {
// int result = Messages.showDialog(project, "File is modified. Reload file anyway?", "File is Modified", new String[]{"Reload", "Cancel"}, 0, AllIcons.General.WarningDialog);
// if (result != 0) return;
//}
if (documentManager.getCachedDocument(virtualFile) == null) {
// no need to reload document
EncodingManager.getInstance().setEncoding(virtualFile, charset);
return;
}
final Disposable disposable = Disposer.newDisposable();
MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect(disposable);
connection.subscribe(AppTopics.FILE_DOCUMENT_SYNC, new FileDocumentManagerAdapter() {
@Override
public void beforeFileContentReload(VirtualFile file, @NotNull Document document) {
if (!file.equals(virtualFile)) return;
Disposer.dispose(disposable); // disconnect
EncodingManager.getInstance().setEncoding(file, charset);
LoadTextUtil.setCharsetWasDetectedFromBytes(file, null);
}
});
// if file was modified, the user will be asked here
try {
EncodingProjectManagerImpl.suppressReloadDuring(() -> ((VirtualFileListener)documentManager).contentsChanged(
new VirtualFileEvent(null, virtualFile, virtualFile.getName(), virtualFile.getParent())));
}
finally {
Disposer.dispose(disposable);
}
}
// returns (hardcoded charset from the file type, explanation) or (null, null) if file type does not restrict encoding
@NotNull
private static Pair<Charset, String> checkHardcodedCharsetFileType(@NotNull VirtualFile virtualFile) {
FileType fileType = virtualFile.getFileType();
if (fileType.isBinary()) return Pair.create(null, "binary file");
// in lesser IDEs all special file types are plain text so check for that first
if (fileType == FileTypes.PLAIN_TEXT) return Pair.create(null, null);
if (fileType == StdFileTypes.GUI_DESIGNER_FORM) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA GUI Designer form");
if (fileType == StdFileTypes.IDEA_MODULE) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA module file");
if (fileType == StdFileTypes.IDEA_PROJECT) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA project file");
if (fileType == StdFileTypes.IDEA_WORKSPACE) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA workspace file");
if (fileType == StdFileTypes.PROPERTIES) return Pair.create(virtualFile.getCharset(), ".properties file");
if (fileType == StdFileTypes.XML || fileType == StdFileTypes.JSPX) {
return Pair.create(virtualFile.getCharset(), "XML file");
}
return Pair.create(null, null);
}
@NotNull
// returns pair (existing charset (null means N/A); failReason: null means enabled, notnull means disabled and contains error message)
public static Pair<Charset, String> checkCanReload(@NotNull VirtualFile virtualFile) {
if (virtualFile.isDirectory()) {
return Pair.create(null, "file is a directory");
}
FileDocumentManager documentManager = FileDocumentManager.getInstance();
Document document = documentManager.getDocument(virtualFile);
if (document == null) return Pair.create(null, "binary file");
Charset charsetFromContent = ((EncodingManagerImpl)EncodingManager.getInstance()).computeCharsetFromContent(virtualFile);
Charset existing = charsetFromContent;
String failReason = LoadTextUtil.wasCharsetDetectedFromBytes(virtualFile);
if (failReason != null) {
// no point changing encoding if it was auto-detected
existing = virtualFile.getCharset();
}
else if (charsetFromContent != null) {
failReason = "hard coded in text";
}
else {
Pair<Charset, String> fileTypeCheck = checkHardcodedCharsetFileType(virtualFile);
if (fileTypeCheck.second != null) {
failReason = fileTypeCheck.second;
existing = fileTypeCheck.first;
}
}
if (failReason != null) {
return Pair.create(existing, failReason);
}
return Pair.create(virtualFile.getCharset(), null);
}
@Nullable("null means enabled, notnull means disabled and contains error message")
static String checkCanConvert(@NotNull VirtualFile virtualFile) {
if (virtualFile.isDirectory()) {
return "file is a directory";
}
String failReason = null;
Charset charsetFromContent = ((EncodingManagerImpl)EncodingManager.getInstance()).computeCharsetFromContent(virtualFile);
if (charsetFromContent != null) {
failReason = "Encoding is hard-coded in the text";
}
else {
Pair<Charset, String> check = checkHardcodedCharsetFileType(virtualFile);
if (check.second != null) {
failReason = check.second;
}
}
if (failReason != null) {
return failReason;
}
return null;
}
// null means enabled, (current charset, error description) otherwise
@Nullable
public static Pair<Charset, String> checkSomeActionEnabled(@NotNull VirtualFile selectedFile) {
String saveError = checkCanConvert(selectedFile);
if (saveError == null) return null;
Pair<Charset, String> reloadError = checkCanReload(selectedFile);
if (reloadError.second == null) return null;
return Pair.create(reloadError.first, saveError);
}
}<|fim▁end|>
| |
<|file_name|>logical_geometry.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Geometry in flow-relative space.
use euclid::{Point2D, Rect, Size2D, SideOffsets2D};
use euclid::num::Zero;
use properties::style_structs;
use std::cmp::{max, min};
use std::fmt::{self, Debug, Error, Formatter};
use std::ops::{Add, Sub};
use unicode_bidi as bidi;
pub enum BlockFlowDirection {
TopToBottom,
RightToLeft,
LeftToRight
}
pub enum InlineBaseDirection {
LeftToRight,
RightToLeft
}
// TODO: improve the readability of the WritingMode serialization, refer to the Debug:fmt()
bitflags!(
#[cfg_attr(feature = "servo", derive(MallocSizeOf, Serialize))]
pub struct WritingMode: u8 {
const RTL = 1 << 0;
const VERTICAL = 1 << 1;
const VERTICAL_LR = 1 << 2;
/// For vertical writing modes only. When set, line-over/line-under
/// sides are inverted from block-start/block-end. This flag is
/// set when sideways-lr is used.
const LINE_INVERTED = 1 << 3;
const SIDEWAYS = 1 << 4;
const UPRIGHT = 1 << 5;
}
);
impl WritingMode {
/// Return a WritingMode bitflags from the relevant CSS properties.
pub fn new(inheritedbox_style: &style_structs::InheritedBox) -> Self {
use properties::longhands::direction::computed_value::T as direction;
use properties::longhands::writing_mode::computed_value::T as writing_mode;
let mut flags = WritingMode::empty();
match inheritedbox_style.clone_direction() {
direction::ltr => {},
direction::rtl => {
flags.insert(WritingMode::RTL);
},
}
match inheritedbox_style.clone_writing_mode() {
writing_mode::horizontal_tb => {},
writing_mode::vertical_rl => {
flags.insert(WritingMode::VERTICAL);
},
writing_mode::vertical_lr => {
flags.insert(WritingMode::VERTICAL);
flags.insert(WritingMode::VERTICAL_LR);
},
#[cfg(feature = "gecko")]
writing_mode::sideways_rl => {
flags.insert(WritingMode::VERTICAL);
flags.insert(WritingMode::SIDEWAYS);
},
#[cfg(feature = "gecko")]
writing_mode::sideways_lr => {
flags.insert(WritingMode::VERTICAL);
flags.insert(WritingMode::VERTICAL_LR);
flags.insert(WritingMode::LINE_INVERTED);
flags.insert(WritingMode::SIDEWAYS);
},
}
#[cfg(feature = "gecko")]
{
use properties::longhands::text_orientation::computed_value::T as text_orientation;
// If FLAG_SIDEWAYS is already set, this means writing-mode is
// either sideways-rl or sideways-lr, and for both of these values,
// text-orientation has no effect.
if !flags.intersects(WritingMode::SIDEWAYS) {
match inheritedbox_style.clone_text_orientation() {
text_orientation::mixed => {},
text_orientation::upright => {
flags.insert(WritingMode::UPRIGHT);
},
text_orientation::sideways => {
flags.insert(WritingMode::SIDEWAYS);
},
}
}
}
flags
}
#[inline]
pub fn is_vertical(&self) -> bool {
self.intersects(WritingMode::VERTICAL)<|fim▁hole|> pub fn is_vertical_lr(&self) -> bool {
self.intersects(WritingMode::VERTICAL_LR)
}
/// Assuming .is_vertical(), does the inline direction go top to bottom?
#[inline]
pub fn is_inline_tb(&self) -> bool {
// https://drafts.csswg.org/css-writing-modes-3/#logical-to-physical
self.intersects(WritingMode::RTL) == self.intersects(WritingMode::LINE_INVERTED)
}
#[inline]
pub fn is_bidi_ltr(&self) -> bool {
!self.intersects(WritingMode::RTL)
}
#[inline]
pub fn is_sideways(&self) -> bool {
self.intersects(WritingMode::SIDEWAYS)
}
#[inline]
pub fn is_upright(&self) -> bool {
self.intersects(WritingMode::UPRIGHT)
}
#[inline]
pub fn inline_start_physical_side(&self) -> PhysicalSide {
match (self.is_vertical(), self.is_inline_tb(), self.is_bidi_ltr()) {
(false, _, true) => PhysicalSide::Left,
(false, _, false) => PhysicalSide::Right,
(true, true, _) => PhysicalSide::Top,
(true, false, _) => PhysicalSide::Bottom,
}
}
#[inline]
pub fn inline_end_physical_side(&self) -> PhysicalSide {
match (self.is_vertical(), self.is_inline_tb(), self.is_bidi_ltr()) {
(false, _, true) => PhysicalSide::Right,
(false, _, false) => PhysicalSide::Left,
(true, true, _) => PhysicalSide::Bottom,
(true, false, _) => PhysicalSide::Top,
}
}
#[inline]
pub fn block_start_physical_side(&self) -> PhysicalSide {
match (self.is_vertical(), self.is_vertical_lr()) {
(false, _) => PhysicalSide::Top,
(true, true) => PhysicalSide::Left,
(true, false) => PhysicalSide::Right,
}
}
#[inline]
pub fn block_end_physical_side(&self) -> PhysicalSide {
match (self.is_vertical(), self.is_vertical_lr()) {
(false, _) => PhysicalSide::Bottom,
(true, true) => PhysicalSide::Right,
(true, false) => PhysicalSide::Left,
}
}
#[inline]
pub fn block_flow_direction(&self) -> BlockFlowDirection {
match (self.is_vertical(), self.is_vertical_lr()) {
(false, _) => BlockFlowDirection::TopToBottom,
(true, true) => BlockFlowDirection::LeftToRight,
(true, false) => BlockFlowDirection::RightToLeft,
}
}
#[inline]
pub fn inline_base_direction(&self) -> InlineBaseDirection {
if self.intersects(WritingMode::RTL) {
InlineBaseDirection::RightToLeft
} else {
InlineBaseDirection::LeftToRight
}
}
#[inline]
/// The default bidirectional embedding level for this writing mode.
///
/// Returns bidi level 0 if the mode is LTR, or 1 otherwise.
pub fn to_bidi_level(&self) -> bidi::Level {
if self.is_bidi_ltr() {
bidi::Level::ltr()
} else {
bidi::Level::rtl()
}
}
}
impl fmt::Display for WritingMode {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
if self.is_vertical() {
write!(formatter, "V")?;
if self.is_vertical_lr() {
write!(formatter, " LR")?;
} else {
write!(formatter, " RL")?;
}
if self.intersects(WritingMode::SIDEWAYS) {
write!(formatter, " Sideways")?;
}
if self.intersects(WritingMode::LINE_INVERTED) {
write!(formatter, " Inverted")?;
}
} else {
write!(formatter, "H")?;
}
if self.is_bidi_ltr() {
write!(formatter, " LTR")
} else {
write!(formatter, " RTL")
}
}
}
/// Wherever logical geometry is used, the writing mode is known based on context:
/// every method takes a `mode` parameter.
/// However, this context is easy to get wrong.
/// In debug builds only, logical geometry objects store their writing mode
/// (in addition to taking it as a parameter to methods) and check it.
/// In non-debug builds, make this storage zero-size and the checks no-ops.
#[cfg(not(debug_assertions))]
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "servo", derive(Serialize))]
struct DebugWritingMode;
#[cfg(debug_assertions)]
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "servo", derive(Serialize))]
struct DebugWritingMode {
mode: WritingMode
}
#[cfg(not(debug_assertions))]
impl DebugWritingMode {
#[inline]
fn check(&self, _other: WritingMode) {}
#[inline]
fn check_debug(&self, _other: DebugWritingMode) {}
#[inline]
fn new(_mode: WritingMode) -> DebugWritingMode {
DebugWritingMode
}
}
#[cfg(debug_assertions)]
impl DebugWritingMode {
#[inline]
fn check(&self, other: WritingMode) {
assert!(self.mode == other)
}
#[inline]
fn check_debug(&self, other: DebugWritingMode) {
assert!(self.mode == other.mode)
}
#[inline]
fn new(mode: WritingMode) -> DebugWritingMode {
DebugWritingMode { mode: mode }
}
}
impl Debug for DebugWritingMode {
#[cfg(not(debug_assertions))]
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(formatter, "?")
}
#[cfg(debug_assertions)]
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(formatter, "{}", self.mode)
}
}
// Used to specify the logical direction.
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub enum Direction {
Inline,
Block
}
/// A 2D size in flow-relative dimensions
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub struct LogicalSize<T> {
pub inline: T, // inline-size, a.k.a. logical width, a.k.a. measure
pub block: T, // block-size, a.k.a. logical height, a.k.a. extent
debug_writing_mode: DebugWritingMode,
}
impl<T: Debug> Debug for LogicalSize<T> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(formatter, "LogicalSize({:?}, i{:?}×b{:?})",
self.debug_writing_mode, self.inline, self.block)
}
}
// Can not implement the Zero trait: its zero() method does not have the `mode` parameter.
impl<T: Zero> LogicalSize<T> {
#[inline]
pub fn zero(mode: WritingMode) -> LogicalSize<T> {
LogicalSize {
inline: Zero::zero(),
block: Zero::zero(),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy> LogicalSize<T> {
#[inline]
pub fn new(mode: WritingMode, inline: T, block: T) -> LogicalSize<T> {
LogicalSize {
inline: inline,
block: block,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
#[inline]
pub fn from_physical(mode: WritingMode, size: Size2D<T>) -> LogicalSize<T> {
if mode.is_vertical() {
LogicalSize::new(mode, size.height, size.width)
} else {
LogicalSize::new(mode, size.width, size.height)
}
}
#[inline]
pub fn width(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.block
} else {
self.inline
}
}
#[inline]
pub fn set_width(&mut self, mode: WritingMode, width: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.block = width
} else {
self.inline = width
}
}
#[inline]
pub fn height(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.inline
} else {
self.block
}
}
#[inline]
pub fn set_height(&mut self, mode: WritingMode, height: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.inline = height
} else {
self.block = height
}
}
#[inline]
pub fn to_physical(&self, mode: WritingMode) -> Size2D<T> {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
Size2D::new(self.block, self.inline)
} else {
Size2D::new(self.inline, self.block)
}
}
#[inline]
pub fn convert(&self, mode_from: WritingMode, mode_to: WritingMode) -> LogicalSize<T> {
if mode_from == mode_to {
self.debug_writing_mode.check(mode_from);
*self
} else {
LogicalSize::from_physical(mode_to, self.to_physical(mode_from))
}
}
}
impl<T: Add<T, Output=T>> Add for LogicalSize<T> {
type Output = LogicalSize<T>;
#[inline]
fn add(self, other: LogicalSize<T>) -> LogicalSize<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalSize {
debug_writing_mode: self.debug_writing_mode,
inline: self.inline + other.inline,
block: self.block + other.block,
}
}
}
impl<T: Sub<T, Output=T>> Sub for LogicalSize<T> {
type Output = LogicalSize<T>;
#[inline]
fn sub(self, other: LogicalSize<T>) -> LogicalSize<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalSize {
debug_writing_mode: self.debug_writing_mode,
inline: self.inline - other.inline,
block: self.block - other.block,
}
}
}
/// A 2D point in flow-relative dimensions
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub struct LogicalPoint<T> {
/// inline-axis coordinate
pub i: T,
/// block-axis coordinate
pub b: T,
debug_writing_mode: DebugWritingMode,
}
impl<T: Debug> Debug for LogicalPoint<T> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(formatter, "LogicalPoint({:?} (i{:?}, b{:?}))",
self.debug_writing_mode, self.i, self.b)
}
}
// Can not implement the Zero trait: its zero() method does not have the `mode` parameter.
impl<T: Zero> LogicalPoint<T> {
#[inline]
pub fn zero(mode: WritingMode) -> LogicalPoint<T> {
LogicalPoint {
i: Zero::zero(),
b: Zero::zero(),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy> LogicalPoint<T> {
#[inline]
pub fn new(mode: WritingMode, i: T, b: T) -> LogicalPoint<T> {
LogicalPoint {
i: i,
b: b,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy + Sub<T, Output=T>> LogicalPoint<T> {
#[inline]
pub fn from_physical(mode: WritingMode, point: Point2D<T>, container_size: Size2D<T>)
-> LogicalPoint<T> {
if mode.is_vertical() {
LogicalPoint {
i: if mode.is_inline_tb() { point.y } else { container_size.height - point.y },
b: if mode.is_vertical_lr() { point.x } else { container_size.width - point.x },
debug_writing_mode: DebugWritingMode::new(mode),
}
} else {
LogicalPoint {
i: if mode.is_bidi_ltr() { point.x } else { container_size.width - point.x },
b: point.y,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
#[inline]
pub fn x(&self, mode: WritingMode, container_size: Size2D<T>) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.b } else { container_size.width - self.b }
} else {
if mode.is_bidi_ltr() { self.i } else { container_size.width - self.i }
}
}
#[inline]
pub fn set_x(&mut self, mode: WritingMode, x: T, container_size: Size2D<T>) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.b = if mode.is_vertical_lr() { x } else { container_size.width - x }
} else {
self.i = if mode.is_bidi_ltr() { x } else { container_size.width - x }
}
}
#[inline]
pub fn y(&self, mode: WritingMode, container_size: Size2D<T>) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.i } else { container_size.height - self.i }
} else {
self.b
}
}
#[inline]
pub fn set_y(&mut self, mode: WritingMode, y: T, container_size: Size2D<T>) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.i = if mode.is_inline_tb() { y } else { container_size.height - y }
} else {
self.b = y
}
}
#[inline]
pub fn to_physical(&self, mode: WritingMode, container_size: Size2D<T>) -> Point2D<T> {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
Point2D::new(
if mode.is_vertical_lr() { self.b } else { container_size.width - self.b },
if mode.is_inline_tb() { self.i } else { container_size.height - self.i })
} else {
Point2D::new(
if mode.is_bidi_ltr() { self.i } else { container_size.width - self.i },
self.b)
}
}
#[inline]
pub fn convert(&self, mode_from: WritingMode, mode_to: WritingMode, container_size: Size2D<T>)
-> LogicalPoint<T> {
if mode_from == mode_to {
self.debug_writing_mode.check(mode_from);
*self
} else {
LogicalPoint::from_physical(
mode_to, self.to_physical(mode_from, container_size), container_size)
}
}
}
impl<T: Copy + Add<T, Output=T>> LogicalPoint<T> {
/// This doesn’t really makes sense,
/// but happens when dealing with multiple origins.
#[inline]
pub fn add_point(&self, other: &LogicalPoint<T>) -> LogicalPoint<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalPoint {
debug_writing_mode: self.debug_writing_mode,
i: self.i + other.i,
b: self.b + other.b,
}
}
}
impl<T: Copy + Add<T, Output=T>> Add<LogicalSize<T>> for LogicalPoint<T> {
type Output = LogicalPoint<T>;
#[inline]
fn add(self, other: LogicalSize<T>) -> LogicalPoint<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalPoint {
debug_writing_mode: self.debug_writing_mode,
i: self.i + other.inline,
b: self.b + other.block,
}
}
}
impl<T: Copy + Sub<T, Output=T>> Sub<LogicalSize<T>> for LogicalPoint<T> {
type Output = LogicalPoint<T>;
#[inline]
fn sub(self, other: LogicalSize<T>) -> LogicalPoint<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalPoint {
debug_writing_mode: self.debug_writing_mode,
i: self.i - other.inline,
b: self.b - other.block,
}
}
}
/// A "margin" in flow-relative dimensions
/// Represents the four sides of the margins, borders, or padding of a CSS box,
/// or a combination of those.
/// A positive "margin" can be added to a rectangle to obtain a bigger rectangle.
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub struct LogicalMargin<T> {
pub block_start: T,
pub inline_end: T,
pub block_end: T,
pub inline_start: T,
debug_writing_mode: DebugWritingMode,
}
impl<T: Debug> Debug for LogicalMargin<T> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
let writing_mode_string = if cfg!(debug_assertions) {
format!("{:?}, ", self.debug_writing_mode)
} else {
"".to_owned()
};
write!(formatter, "LogicalMargin({}i:{:?}..{:?} b:{:?}..{:?})",
writing_mode_string,
self.inline_start,
self.inline_end,
self.block_start,
self.block_end)
}
}
impl<T: Zero> LogicalMargin<T> {
#[inline]
pub fn zero(mode: WritingMode) -> LogicalMargin<T> {
LogicalMargin {
block_start: Zero::zero(),
inline_end: Zero::zero(),
block_end: Zero::zero(),
inline_start: Zero::zero(),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy> LogicalMargin<T> {
#[inline]
pub fn new(mode: WritingMode, block_start: T, inline_end: T, block_end: T, inline_start: T)
-> LogicalMargin<T> {
LogicalMargin {
block_start: block_start,
inline_end: inline_end,
block_end: block_end,
inline_start: inline_start,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
#[inline]
pub fn new_all_same(mode: WritingMode, value: T) -> LogicalMargin<T> {
LogicalMargin::new(mode, value, value, value, value)
}
#[inline]
pub fn from_physical(mode: WritingMode, offsets: SideOffsets2D<T>) -> LogicalMargin<T> {
let block_start;
let inline_end;
let block_end;
let inline_start;
if mode.is_vertical() {
if mode.is_vertical_lr() {
block_start = offsets.left;
block_end = offsets.right;
} else {
block_start = offsets.right;
block_end = offsets.left;
}
if mode.is_inline_tb() {
inline_start = offsets.top;
inline_end = offsets.bottom;
} else {
inline_start = offsets.bottom;
inline_end = offsets.top;
}
} else {
block_start = offsets.top;
block_end = offsets.bottom;
if mode.is_bidi_ltr() {
inline_start = offsets.left;
inline_end = offsets.right;
} else {
inline_start = offsets.right;
inline_end = offsets.left;
}
}
LogicalMargin::new(mode, block_start, inline_end, block_end, inline_start)
}
#[inline]
pub fn top(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.inline_start } else { self.inline_end }
} else {
self.block_start
}
}
#[inline]
pub fn set_top(&mut self, mode: WritingMode, top: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.inline_start = top } else { self.inline_end = top }
} else {
self.block_start = top
}
}
#[inline]
pub fn right(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.block_end } else { self.block_start }
} else {
if mode.is_bidi_ltr() { self.inline_end } else { self.inline_start }
}
}
#[inline]
pub fn set_right(&mut self, mode: WritingMode, right: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.block_end = right } else { self.block_start = right }
} else {
if mode.is_bidi_ltr() { self.inline_end = right } else { self.inline_start = right }
}
}
#[inline]
pub fn bottom(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.inline_end } else { self.inline_start }
} else {
self.block_end
}
}
#[inline]
pub fn set_bottom(&mut self, mode: WritingMode, bottom: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.inline_end = bottom } else { self.inline_start = bottom }
} else {
self.block_end = bottom
}
}
#[inline]
pub fn left(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.block_start } else { self.block_end }
} else {
if mode.is_bidi_ltr() { self.inline_start } else { self.inline_end }
}
}
#[inline]
pub fn set_left(&mut self, mode: WritingMode, left: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.block_start = left } else { self.block_end = left }
} else {
if mode.is_bidi_ltr() { self.inline_start = left } else { self.inline_end = left }
}
}
#[inline]
pub fn to_physical(&self, mode: WritingMode) -> SideOffsets2D<T> {
self.debug_writing_mode.check(mode);
let top;
let right;
let bottom;
let left;
if mode.is_vertical() {
if mode.is_vertical_lr() {
left = self.block_start;
right = self.block_end;
} else {
right = self.block_start;
left = self.block_end;
}
if mode.is_inline_tb() {
top = self.inline_start;
bottom = self.inline_end;
} else {
bottom = self.inline_start;
top = self.inline_end;
}
} else {
top = self.block_start;
bottom = self.block_end;
if mode.is_bidi_ltr() {
left = self.inline_start;
right = self.inline_end;
} else {
right = self.inline_start;
left = self.inline_end;
}
}
SideOffsets2D::new(top, right, bottom, left)
}
#[inline]
pub fn convert(&self, mode_from: WritingMode, mode_to: WritingMode) -> LogicalMargin<T> {
if mode_from == mode_to {
self.debug_writing_mode.check(mode_from);
*self
} else {
LogicalMargin::from_physical(mode_to, self.to_physical(mode_from))
}
}
}
impl<T: PartialEq + Zero> LogicalMargin<T> {
#[inline]
pub fn is_zero(&self) -> bool {
self.block_start == Zero::zero() && self.inline_end == Zero::zero() &&
self.block_end == Zero::zero() && self.inline_start == Zero::zero()
}
}
impl<T: Copy + Add<T, Output=T>> LogicalMargin<T> {
#[inline]
pub fn inline_start_end(&self) -> T {
self.inline_start + self.inline_end
}
#[inline]
pub fn block_start_end(&self) -> T {
self.block_start + self.block_end
}
#[inline]
pub fn start_end(&self, direction: Direction) -> T {
match direction {
Direction::Inline =>
self.inline_start + self.inline_end,
Direction::Block =>
self.block_start + self.block_end
}
}
#[inline]
pub fn top_bottom(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.inline_start_end()
} else {
self.block_start_end()
}
}
#[inline]
pub fn left_right(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.block_start_end()
} else {
self.inline_start_end()
}
}
}
impl<T: Add<T, Output=T>> Add for LogicalMargin<T> {
type Output = LogicalMargin<T>;
#[inline]
fn add(self, other: LogicalMargin<T>) -> LogicalMargin<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalMargin {
debug_writing_mode: self.debug_writing_mode,
block_start: self.block_start + other.block_start,
inline_end: self.inline_end + other.inline_end,
block_end: self.block_end + other.block_end,
inline_start: self.inline_start + other.inline_start,
}
}
}
impl<T: Sub<T, Output=T>> Sub for LogicalMargin<T> {
type Output = LogicalMargin<T>;
#[inline]
fn sub(self, other: LogicalMargin<T>) -> LogicalMargin<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalMargin {
debug_writing_mode: self.debug_writing_mode,
block_start: self.block_start - other.block_start,
inline_end: self.inline_end - other.inline_end,
block_end: self.block_end - other.block_end,
inline_start: self.inline_start - other.inline_start,
}
}
}
/// A rectangle in flow-relative dimensions
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub struct LogicalRect<T> {
pub start: LogicalPoint<T>,
pub size: LogicalSize<T>,
debug_writing_mode: DebugWritingMode,
}
impl<T: Debug> Debug for LogicalRect<T> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
let writing_mode_string = if cfg!(debug_assertions) {
format!("{:?}, ", self.debug_writing_mode)
} else {
"".to_owned()
};
write!(formatter, "LogicalRect({}i{:?}×b{:?}, @ (i{:?},b{:?}))",
writing_mode_string,
self.size.inline,
self.size.block,
self.start.i,
self.start.b)
}
}
impl<T: Zero> LogicalRect<T> {
#[inline]
pub fn zero(mode: WritingMode) -> LogicalRect<T> {
LogicalRect {
start: LogicalPoint::zero(mode),
size: LogicalSize::zero(mode),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy> LogicalRect<T> {
#[inline]
pub fn new(mode: WritingMode, inline_start: T, block_start: T, inline: T, block: T)
-> LogicalRect<T> {
LogicalRect {
start: LogicalPoint::new(mode, inline_start, block_start),
size: LogicalSize::new(mode, inline, block),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
#[inline]
pub fn from_point_size(mode: WritingMode, start: LogicalPoint<T>, size: LogicalSize<T>)
-> LogicalRect<T> {
start.debug_writing_mode.check(mode);
size.debug_writing_mode.check(mode);
LogicalRect {
start: start,
size: size,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy + Add<T, Output=T> + Sub<T, Output=T>> LogicalRect<T> {
#[inline]
pub fn from_physical(mode: WritingMode, rect: Rect<T>, container_size: Size2D<T>)
-> LogicalRect<T> {
let inline_start;
let block_start;
let inline;
let block;
if mode.is_vertical() {
inline = rect.size.height;
block = rect.size.width;
if mode.is_vertical_lr() {
block_start = rect.origin.x;
} else {
block_start = container_size.width - (rect.origin.x + rect.size.width);
}
if mode.is_inline_tb() {
inline_start = rect.origin.y;
} else {
inline_start = container_size.height - (rect.origin.y + rect.size.height);
}
} else {
inline = rect.size.width;
block = rect.size.height;
block_start = rect.origin.y;
if mode.is_bidi_ltr() {
inline_start = rect.origin.x;
} else {
inline_start = container_size.width - (rect.origin.x + rect.size.width);
}
}
LogicalRect {
start: LogicalPoint::new(mode, inline_start, block_start),
size: LogicalSize::new(mode, inline, block),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
#[inline]
pub fn inline_end(&self) -> T {
self.start.i + self.size.inline
}
#[inline]
pub fn block_end(&self) -> T {
self.start.b + self.size.block
}
#[inline]
pub fn to_physical(&self, mode: WritingMode, container_size: Size2D<T>) -> Rect<T> {
self.debug_writing_mode.check(mode);
let x;
let y;
let width;
let height;
if mode.is_vertical() {
width = self.size.block;
height = self.size.inline;
if mode.is_vertical_lr() {
x = self.start.b;
} else {
x = container_size.width - self.block_end();
}
if mode.is_inline_tb() {
y = self.start.i;
} else {
y = container_size.height - self.inline_end();
}
} else {
width = self.size.inline;
height = self.size.block;
y = self.start.b;
if mode.is_bidi_ltr() {
x = self.start.i;
} else {
x = container_size.width - self.inline_end();
}
}
Rect {
origin: Point2D::new(x, y),
size: Size2D::new(width, height),
}
}
#[inline]
pub fn convert(&self, mode_from: WritingMode, mode_to: WritingMode, container_size: Size2D<T>)
-> LogicalRect<T> {
if mode_from == mode_to {
self.debug_writing_mode.check(mode_from);
*self
} else {
LogicalRect::from_physical(
mode_to, self.to_physical(mode_from, container_size), container_size)
}
}
pub fn translate_by_size(&self, offset: LogicalSize<T>) -> LogicalRect<T> {
LogicalRect {
start: self.start + offset,
..*self
}
}
pub fn translate(&self, offset: &LogicalPoint<T>) -> LogicalRect<T> {
LogicalRect {
start: self.start + LogicalSize {
inline: offset.i,
block: offset.b,
debug_writing_mode: offset.debug_writing_mode,
},
size: self.size,
debug_writing_mode: self.debug_writing_mode,
}
}
}
impl<T: Copy + Ord + Add<T, Output=T> + Sub<T, Output=T>> LogicalRect<T> {
#[inline]
pub fn union(&self, other: &LogicalRect<T>) -> LogicalRect<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
let inline_start = min(self.start.i, other.start.i);
let block_start = min(self.start.b, other.start.b);
LogicalRect {
start: LogicalPoint {
i: inline_start,
b: block_start,
debug_writing_mode: self.debug_writing_mode,
},
size: LogicalSize {
inline: max(self.inline_end(), other.inline_end()) - inline_start,
block: max(self.block_end(), other.block_end()) - block_start,
debug_writing_mode: self.debug_writing_mode,
},
debug_writing_mode: self.debug_writing_mode,
}
}
}
impl<T: Copy + Add<T, Output=T> + Sub<T, Output=T>> Add<LogicalMargin<T>> for LogicalRect<T> {
type Output = LogicalRect<T>;
#[inline]
fn add(self, other: LogicalMargin<T>) -> LogicalRect<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalRect {
start: LogicalPoint {
                // Growing a rectangle on the start side means pushing its
                // start point in the negative direction.
i: self.start.i - other.inline_start,
b: self.start.b - other.block_start,
debug_writing_mode: self.debug_writing_mode,
},
size: LogicalSize {
inline: self.size.inline + other.inline_start_end(),
block: self.size.block + other.block_start_end(),
debug_writing_mode: self.debug_writing_mode,
},
debug_writing_mode: self.debug_writing_mode,
}
}
}
impl<T: Copy + Add<T, Output=T> + Sub<T, Output=T>> Sub<LogicalMargin<T>> for LogicalRect<T> {
type Output = LogicalRect<T>;
#[inline]
fn sub(self, other: LogicalMargin<T>) -> LogicalRect<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalRect {
start: LogicalPoint {
                // Shrinking a rectangle on the start side means pushing its
                // start point in the positive direction.
i: self.start.i + other.inline_start,
b: self.start.b + other.block_start,
debug_writing_mode: self.debug_writing_mode,
},
size: LogicalSize {
inline: self.size.inline - other.inline_start_end(),
block: self.size.block - other.block_start_end(),
debug_writing_mode: self.debug_writing_mode,
},
debug_writing_mode: self.debug_writing_mode,
}
}
}
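// A hypothetical usage sketch of the margin arithmetic above (assuming a
// horizontal LTR `mode` and an `Au`-like length type; not from the original
// source):
//
//     let rect = LogicalRect::new(mode, Au(10), Au(10), Au(100), Au(50));
//     let margin = LogicalMargin::new_all_same(mode, Au(5));
//     let grown = rect + margin;         // start (5, 5), size 110x60
//     assert_eq!(grown - margin, rect);  // adding then subtracting round-trips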
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PhysicalSide {
Top,
Right,
Bottom,
Left,
}<|fim▁end|>
|
}
/// Assuming .is_vertical(), does the block direction go left to right?
#[inline]
|
<|file_name|>narcissistic.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
# Here's a simple script (feels like a program, though) that prints out
# the first n narcissistic numbers, where n is provided on the command line.
import sys
def numDigits(num):
"""Returns the number of digits making
up a number, not counting leading zeroes,
except for the number 0 itself."""
    if num == 0:
        return 1
digitCount = 0
    while num > 0:
        digitCount += 1
        num //= 10
return digitCount
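# Worked example: numDigits(153) == 3 and 1**3 + 5**3 + 3**3 == 1 + 125 + 27 == 153,
# so 153 is narcissistic.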
def isNarcissistic(num):
"""Returns True if and only if the number is a narcissistic number.""" <|fim▁hole|> total = 0
exp = numDigits(num)
    while num > 0:
        digits = num % 10
        total += digits ** exp
        num //= 10
return total == originalNum
def listNarcissisticNumbers(numNeeded):
"""Searches for an prints out the first 'numNeeded' narcissistic numbers."""
numFound = 0
numToConsider = 0
print ("here are the first ", numNeeded, " narcissistic numbers.")
    while numFound < numNeeded:
        if isNarcissistic(numToConsider):
            numFound += 1
            print("Found a narcissistic number:", numToConsider)
        numToConsider += 1
print("Done!")
def getNumberNeeded():
    numNeeded = 9  # this is the default number
if len(sys.argv) > 1:
try:
numNeeded = int(sys.argv[1])
except ValueError:
print ("Non-integral argument encountered... using default.")
return numNeeded
listNarcissisticNumbers(getNumberNeeded())<|fim▁end|>
|
originalNum = num
|
<|file_name|>Consumer.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import cn.aezo.demo.rabbitmq.util.RabbitmqU;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.DefaultConsumer;
import com.rabbitmq.client.Envelope;
import java.io.IOException;
/**
 * Tests the "topic" exchange message model.
*
 * Sample output:
 * consumer1 received message: [aezo.order.vip] This is message 0
 * consumer2 received message: [smalle.vip] This is message 2
 * consumer1 received message: [aezo.user] This is message 1
 * consumer1 received message: [smalle.vip] This is message 2
 * consumer1 received message: [aezo.order.vip] This is message 3
 * consumer1 received message: [aezo.user] This is message 4
*
* @author smalle
* @date 2020-08-29 16:31
*/
public class Consumer {
private static final String EXCHANGE_NAME = "topic_logs";
public static void main(String[] args) throws IOException {
consumer1();
consumer2();
}
public static void consumer1() throws IOException {
Connection connection = RabbitmqU.getConnection();
Channel channel = connection.createChannel();
channel.exchangeDeclare(EXCHANGE_NAME, "topic");
        // Get a temporary queue. In the management console the queue's Features column will show the "AD" (autoDelete) and "Excl" (exclusive) flags
String queueName = channel.queueDeclare().getQueue();
        // Bind the temporary queue to the exchange and subscribe to certain message types
channel.queueBind(queueName, EXCHANGE_NAME, "aezo.#"); // *匹配一个单词,#匹配多个单词
channel.queueBind(queueName, EXCHANGE_NAME, "*.vip");
channel.basicConsume(queueName, true, new DefaultConsumer(channel) {
@Override
public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException {
System.out.println("consumer1收到消息:" + new String(body, "UTF-8"));
}
});
}
public static void consumer2() throws IOException {
Connection connection = RabbitmqU.getConnection();
Channel channel = connection.createChannel();
channel.exchangeDeclare(EXCHANGE_NAME, "topic");
String queueName = channel.queueDeclare().getQueue();
        // * stands for exactly one word, so this binding cannot match aezo.order.vip, aezo.vip.hello, etc.
channel.queueBind(queueName, EXCHANGE_NAME, "*.vip");
channel.basicConsume(queueName, true, new DefaultConsumer(channel) {
@Override
public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException {
System.out.println("consumer2收到消息:" + new String(body, "UTF-8"));
}
});
}
}<|fim▁end|>
|
package cn.aezo.demo.rabbitmq.c05_model_topic;
|
<|file_name|>Guard.cpp<|end_file_name|><|fim▁begin|>/* -*-coding: mule-utf-8-unix; fill-column: 58; -*-
Copyright (C) 2009, 2013 Sergei Lodyagin
This file is part of the Cohors Concurro library.
This library is free software: you can redistribute it
and/or modify it under the terms of the GNU Lesser
General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your
option) any later version.
This library is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General
Public License along with this program. If not, see
<http://www.gnu.org/licenses/>.
*/
/**
* @file
*
* @author Sergei Lodyagin
*/
#include "Guard.h"
namespace curr {
DEFINE_AXIS(
NReaders1WriterAxis,<|fim▁hole|> "reader_entering",
"readers_entered", // read operations on an object in
// progress
"reader_exiting",
"writer_entered" // write operation on an object
// in progress
},
{
{ "free", "reader_entering" },
{ "reader_entering", "readers_entered" },
// more readers
{ "readers_entered", "reader_entering" },
{ "readers_entered", "reader_exiting" },
{ "reader_exiting", "readers_entered" },
{ "reader_exiting", "free" },
// <NB> only one writer
{ "free", "writer_entered" },
{ "writer_entered", "free" }
});
}<|fim▁end|>
|
{
"free",
|
<|file_name|>zilla-shortcodes-lib.js<|end_file_name|><|fim▁begin|>jQuery(document).ready(function($) {
$(".zilla-tabs").tabs();<|fim▁hole|> var $this = $(this);
if( $this.attr('data-id') == 'closed' ) {
$this.accordion({ header: '.zilla-toggle-title', collapsible: true, active: false });
} else {
$this.accordion({ header: '.zilla-toggle-title', collapsible: true});
}
$this.on('accordionactivate', function( e, ui ) {
$this.accordion('refresh');
});
$(window).on('resize', function() {
$this.accordion('refresh');
});
});
});<|fim▁end|>
|
$(".zilla-toggle").each( function () {
|
<|file_name|>iteratorSpreadInCall6.ts<|end_file_name|><|fim▁begin|>//@target: ES6
function foo(...s: (symbol | number)[]) { }
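// Presumably a negative test: foo only accepts (symbol | number)[], so spreading
// the string-yielding iterator below should produce a type error.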
class SymbolIterator {
next() {
return {
value: Symbol(),
done: false
};
}
[Symbol.iterator]() {
return this;
}
}
<|fim▁hole|> return {
value: "",
done: false
};
}
[Symbol.iterator]() {
return this;
}
}
foo(...new SymbolIterator, ...new _StringIterator);<|fim▁end|>
|
class _StringIterator {
next() {
|
<|file_name|>elasticache_subnet_group.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License<|fim▁hole|># along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: elasticache_subnet_group
version_added: "2.0"
short_description: manage Elasticache subnet groups
description:
- Creates, modifies, and deletes Elasticache subnet groups. This module has a dependency on python-boto >= 2.5.
options:
state:
description:
- Specifies whether the subnet should be present or absent.
required: true
choices: [ 'present' , 'absent' ]
name:
description:
- Database subnet group identifier.
required: true
description:
description:
- Elasticache subnet group description. Only set when a new group is added.
required: false
default: null
subnets:
description:
- List of subnet IDs that make up the Elasticache subnet group.
required: false
default: null
author: "Tim Mahoney (@timmahoney)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Add or change a subnet group
- elasticache_subnet_group:
state: present
name: norwegian-blue
description: My Fancy Ex Parrot Subnet Group
subnets:
- subnet-aaaaaaaa
- subnet-bbbbbbbb
# Remove a subnet group
- elasticache_subnet_group:
state: absent
name: norwegian-blue
'''
try:
import boto
from boto.elasticache.layer1 import ElastiCacheConnection
from boto.regioninfo import RegionInfo
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state = dict(required=True, choices=['present', 'absent']),
name = dict(required=True),
description = dict(required=False),
subnets = dict(required=False, type='list'),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
group_name = module.params.get('name').lower()
group_description = module.params.get('description')
group_subnets = module.params.get('subnets') or {}
if state == 'present':
for required in ['name', 'description', 'subnets']:
if not module.params.get(required):
module.fail_json(msg = str("Parameter %s required for state='present'" % required))
else:
for not_allowed in ['description', 'subnets']:
if module.params.get(not_allowed):
module.fail_json(msg = str("Parameter %s not allowed for state='absent'" % not_allowed))
# Retrieve any AWS settings from the environment.
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
if not region:
module.fail_json(msg = str("Either region or AWS_REGION or EC2_REGION environment variable or boto config aws_region or ec2_region must be set."))
"""Get an elasticache connection"""
try:
endpoint = "elasticache.%s.amazonaws.com" % region
connect_region = RegionInfo(name=region, endpoint=endpoint)
conn = ElastiCacheConnection(region=connect_region, **aws_connect_kwargs)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=e.message)
try:
changed = False
exists = False
try:
matching_groups = conn.describe_cache_subnet_groups(group_name, max_records=100)
exists = len(matching_groups) > 0
except BotoServerError as e:
if e.error_code != 'CacheSubnetGroupNotFoundFault':
module.fail_json(msg = e.error_message)
if state == 'absent':
if exists:
conn.delete_cache_subnet_group(group_name)
changed = True
else:
if not exists:
new_group = conn.create_cache_subnet_group(group_name, cache_subnet_group_description=group_description, subnet_ids=group_subnets)
changed = True
else:
changed_group = conn.modify_cache_subnet_group(group_name, cache_subnet_group_description=group_description, subnet_ids=group_subnets)
changed = True
except BotoServerError as e:
if e.error_message != 'No modifications were requested.':
module.fail_json(msg = e.error_message)
else:
changed = False
module.exit_json(changed=changed)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>shell_pre.js<|end_file_name|><|fim▁begin|>// eSpeak and other code here are under the GNU GPL.
function generateSpeech(text, args) {<|fim▁hole|> var self = { text: text, args: args, ret: null };
(function() {<|fim▁end|>
| |
<|file_name|>protocol_final.py<|end_file_name|><|fim▁begin|>from tasty.types.driver import TestDriver
__params__ = {'la': 32, 'lb': 32}
driver = TestDriver()
<|fim▁hole|> client.b = Unsigned(bitlen=lb).input(src=driver, desc='b')
client.ga = Garbled(val=client.a)
client.gb = Garbled(val=client.b)
client.gc = client.ga + client.gb
server.gc <<= client.gc
server.c = Unsigned(val=server.gc)
server.c.output(dest=driver, desc='c')<|fim▁end|>
|
def protocol(client, server, params):
la = params['la']
lb = params['lb']
client.a = Unsigned(bitlen=la).input(src=driver, desc='a')
|
<|file_name|>tools.py<|end_file_name|><|fim▁begin|>"""Module containing tools that are useful when benchmarking algorithms
"""
from math import hypot, sqrt
from functools import wraps
from itertools import repeat
try:
import numpy
except ImportError:
numpy = False
class translate(object):
"""Decorator for evaluation functions, it translates the objective
function by *vector* which should be the same length as the individual
size. When called the decorated function should take as first argument the
individual to be evaluated. The inverse translation vector is actually
applied to the individual and the resulting list is given to the
evaluation function. Thus, the evaluation function shall not be expecting
an individual as it will receive a plain list.
This decorator adds a :func:`translate` method to the decorated function.
"""
def __init__(self, vector):
self.vector = vector
def __call__(self, func):
# wraps is used to combine stacked decorators that would add functions
@wraps(func)
def wrapper(individual, *args, **kargs):
# A subtraction is applied since the translation is applied to the
# individual and not the function
return func([v - t for v, t in zip(individual, self.vector)],
*args, **kargs)
wrapper.translate = self.translate
return wrapper
def translate(self, vector):
"""Set the current translation to *vector*. After decorating the
evaluation function, this function will be available directly from
the function object. ::
@translate([0.25, 0.5, ..., 0.1])
def evaluate(individual):
return sum(individual),
# This will cancel the translation
evaluate.translate([0.0, 0.0, ..., 0.0])
"""
self.vector = vector
class rotate(object):
"""Decorator for evaluation functions, it rotates the objective function
by *matrix* which should be a valid orthogonal NxN rotation matrix, with N
the length of an individual. When called the decorated function should
take as first argument the individual to be evaluated. The inverse
rotation matrix is actually applied to the individual and the resulting
list is given to the evaluation function. Thus, the evaluation function
shall not be expecting an individual as it will receive a plain list
(numpy.array). The multiplication is done using numpy.
This decorator adds a :func:`rotate` method to the decorated function.
.. note::
A random orthogonal matrix Q can be created via QR decomposition. ::
A = numpy.random.random((n,n))
Q, _ = numpy.linalg.qr(A)
"""
def __init__(self, matrix):
if not numpy:
raise RuntimeError("Numpy is required for using the rotation "
"decorator")
# The inverse is taken since the rotation is applied to the individual
# and not the function which is the inverse
self.matrix = numpy.linalg.inv(matrix)
def __call__(self, func):
# wraps is used to combine stacked decorators that would add functions
@wraps(func)
def wrapper(individual, *args, **kargs):
return func(numpy.dot(self.matrix, individual), *args, **kargs)
wrapper.rotate = self.rotate
return wrapper
def rotate(self, matrix):
"""Set the current rotation to *matrix*. After decorating the
evaluation function, this function will be available directly from
the function object. ::
# Create a random orthogonal matrix
A = numpy.random.random((n,n))
Q, _ = numpy.linalg.qr(A)
@rotate(Q)
def evaluate(individual):
return sum(individual),
# This will reset rotation to identity
evaluate.rotate(numpy.identity(n))
"""
self.matrix = numpy.linalg.inv(matrix)
class noise(object):
"""Decorator for evaluation functions, it evaluates the objective function
and adds noise by calling the function(s) provided in the *noise*
argument. The noise functions are called without any argument, consider
using the :class:`~deap.base.Toolbox` or Python's
:func:`functools.partial` to provide any required argument. If a single
function is provided it is applied to all objectives of the evaluation
function. If a list of noise functions is provided, it must be of length
equal to the number of objectives. The noise argument also accept
:obj:`None`, which will leave the objective without noise.
This decorator adds a :func:`noise` method to the decorated
function.
"""
def __init__(self, noise):
try:
self.rand_funcs = tuple(noise)
except TypeError:
self.rand_funcs = repeat(noise)
def __call__(self, func):
# wraps is used to combine stacked decorators that would add functions
@wraps(func)
def wrapper(individual, *args, **kargs):
result = func(individual, *args, **kargs)
noisy = list()
for r, f in zip(result, self.rand_funcs):
if f is None:
noisy.append(r)
else:
noisy.append(r + f())
return tuple(noisy)
wrapper.noise = self.noise
return wrapper
def noise(self, noise):
"""Set the current noise to *noise*. After decorating the
evaluation function, this function will be available directly from
the function object. ::
prand = functools.partial(random.gauss, mu=0.0, sigma=1.0)
@noise(prand)
def evaluate(individual):
return sum(individual),
# This will remove noise from the evaluation function
evaluate.noise(None)
"""
try:
self.rand_funcs = tuple(noise)
except TypeError:
self.rand_funcs = repeat(noise)
class scale(object):
"""Decorator for evaluation functions, it scales the objective function by
*factor* which should be the same length as the individual size. When
called the decorated function should take as first argument the individual
to be evaluated. The inverse factor vector is actually applied to the
individual and the resulting list is given to the evaluation function.
Thus, the evaluation function shall not be expecting an individual as it
will receive a plain list.
This decorator adds a :func:`scale` method to the decorated function.
"""
def __init__(self, factor):
        # Factor is inverted since it is applied to the individual and not the
# objective function
self.factor = tuple(1.0/f for f in factor)
def __call__(self, func):
# wraps is used to combine stacked decorators that would add functions
@wraps(func)
def wrapper(individual, *args, **kargs):
return func([v * f for v, f in zip(individual, self.factor)],
*args, **kargs)
wrapper.scale = self.scale
return wrapper
def scale(self, factor):
"""Set the current scale to *factor*. After decorating the
evaluation function, this function will be available directly from
the function object. ::
@scale([0.25, 2.0, ..., 0.1])
def evaluate(individual):
return sum(individual),
# This will cancel the scaling
evaluate.scale([1.0, 1.0, ..., 1.0])
"""
        # Factor is inverted since it is applied to the individual and not the
# objective function
self.factor = tuple(1.0/f for f in factor)
class bound(object):
"""Decorator for crossover and mutation functions, it changes the
    individuals after the modification is done to bring them back into the allowed
    *bounds*. The *bounds* are functions taking an individual and returning
    whether or not the variable is allowed. You can provide one or multiple such
functions. In the former case, the function is used on all dimensions and
in the latter case, the number of functions must be greater or equal to
the number of dimension of the individuals.
The *type* determines how the attributes are brought back into the valid
    range.
This decorator adds a :func:`bound` method to the decorated function.
"""
def _clip(self, individual):
return individual
def _wrap(self, individual):
return individual
<|fim▁hole|> def _mirror(self, individual):
return individual
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kargs):
individuals = func(*args, **kargs)
return self.bound(individuals)
wrapper.bound = self.bound
return wrapper
def __init__(self, bounds, type):
try:
self.bounds = tuple(bounds)
except TypeError:
            self.bounds = repeat(bounds)
if type == "mirror":
self.bound = self._mirror
elif type == "wrap":
self.bound = self._wrap
elif type == "clip":
self.bound = self._clip
def diversity(first_front, first, last):
"""Given a Pareto front `first_front` and the two extreme points of the
optimal Pareto front, this function returns a metric of the diversity
of the front as explained in the original NSGA-II article by K. Deb.
The smaller the value is, the better the front is.
"""
df = hypot(first_front[0].fitness.values[0] - first[0],
first_front[0].fitness.values[1] - first[1])
dl = hypot(first_front[-1].fitness.values[0] - last[0],
first_front[-1].fitness.values[1] - last[1])
dt = [hypot(first.fitness.values[0] - second.fitness.values[0],
first.fitness.values[1] - second.fitness.values[1])
for first, second in zip(first_front[:-1], first_front[1:])]
if len(first_front) == 1:
return df + dl
dm = sum(dt)/len(dt)
di = sum(abs(d_i - dm) for d_i in dt)
    delta = (df + dl + di) / (df + dl + len(dt) * dm)
return delta
def convergence(first_front, optimal_front):
"""Given a Pareto front `first_front` and the optimal Pareto front,
this function returns a metric of convergence
of the front as explained in the original NSGA-II article by K. Deb.
The smaller the value is, the closer the front is to the optimal one.
"""
distances = []
for ind in first_front:
distances.append(float("inf"))
for opt_ind in optimal_front:
dist = 0.
for i in xrange(len(opt_ind)):
dist += (ind.fitness.values[i] - opt_ind[i])**2
if dist < distances[-1]:
distances[-1] = dist
distances[-1] = sqrt(distances[-1])
return sum(distances) / len(distances)<|fim▁end|>
| |
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Support for HomematicIP Cloud switches."""
import logging
<|fim▁hole|>from homematicip.aio.group import AsyncSwitchingGroup
from homeassistant.components.switch import SwitchDevice
from . import DOMAIN as HMIPC_DOMAIN, HMIPC_HAPID, HomematicipGenericDevice
from .device import ATTR_GROUP_MEMBER_UNREACHABLE
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the HomematicIP Cloud switch devices."""
pass
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the HomematicIP switch from a config entry."""
home = hass.data[HMIPC_DOMAIN][config_entry.data[HMIPC_HAPID]].home
devices = []
for device in home.devices:
if isinstance(device, AsyncBrandSwitchMeasuring):
# BrandSwitchMeasuring inherits PlugableSwitchMeasuring
# This device is implemented in the light platform and will
# not be added in the switch platform
pass
elif isinstance(device, (AsyncPlugableSwitchMeasuring,
AsyncFullFlushSwitchMeasuring)):
devices.append(HomematicipSwitchMeasuring(home, device))
elif isinstance(device, AsyncPlugableSwitch):
devices.append(HomematicipSwitch(home, device))
elif isinstance(device, AsyncOpenCollector8Module):
for channel in range(1, 9):
devices.append(HomematicipMultiSwitch(home, device, channel))
elif isinstance(device, AsyncMultiIOBox):
for channel in range(1, 3):
devices.append(HomematicipMultiSwitch(home, device, channel))
for group in home.groups:
if isinstance(group, AsyncSwitchingGroup):
devices.append(
HomematicipGroupSwitch(home, group))
if devices:
async_add_entities(devices)
class HomematicipSwitch(HomematicipGenericDevice, SwitchDevice):
"""representation of a HomematicIP Cloud switch device."""
def __init__(self, home, device):
"""Initialize the switch device."""
super().__init__(home, device)
@property
def is_on(self):
"""Return true if device is on."""
return self._device.on
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._device.turn_on()
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._device.turn_off()
class HomematicipGroupSwitch(HomematicipGenericDevice, SwitchDevice):
"""representation of a HomematicIP switching group."""
def __init__(self, home, device, post='Group'):
"""Initialize switching group."""
device.modelType = 'HmIP-{}'.format(post)
super().__init__(home, device, post)
@property
def is_on(self):
"""Return true if group is on."""
return self._device.on
@property
def available(self):
"""Switch-Group available."""
# A switch-group must be available, and should not be affected by the
# individual availability of group members.
# This allows switching even when individual group members
# are not available.
return True
@property
def device_state_attributes(self):
"""Return the state attributes of the switch-group."""
attr = {}
if self._device.unreach:
attr[ATTR_GROUP_MEMBER_UNREACHABLE] = True
return attr
async def async_turn_on(self, **kwargs):
"""Turn the group on."""
await self._device.turn_on()
async def async_turn_off(self, **kwargs):
"""Turn the group off."""
await self._device.turn_off()
class HomematicipSwitchMeasuring(HomematicipSwitch):
"""Representation of a HomematicIP measuring switch device."""
@property
def current_power_w(self):
"""Return the current power usage in W."""
return self._device.currentPowerConsumption
@property
def today_energy_kwh(self):
"""Return the today total energy usage in kWh."""
if self._device.energyCounter is None:
return 0
return round(self._device.energyCounter)
class HomematicipMultiSwitch(HomematicipGenericDevice, SwitchDevice):
"""Representation of a HomematicIP Cloud multi switch device."""
def __init__(self, home, device, channel):
"""Initialize the multi switch device."""
self.channel = channel
super().__init__(home, device, 'Channel{}'.format(channel))
@property
def unique_id(self):
"""Return a unique ID."""
return "{}_{}_{}".format(self.__class__.__name__,
self.post, self._device.id)
@property
def is_on(self):
"""Return true if device is on."""
return self._device.functionalChannels[self.channel].on
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._device.turn_on(self.channel)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._device.turn_off(self.channel)<|fim▁end|>
|
from homematicip.aio.device import (
AsyncBrandSwitchMeasuring, AsyncFullFlushSwitchMeasuring, AsyncMultiIOBox,
AsyncOpenCollector8Module, AsyncPlugableSwitch,
AsyncPlugableSwitchMeasuring)
|
<|file_name|>checked_sub_mul.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::basic::signeds::PrimitiveSigned;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base_test_util::bench::bucketers::triple_max_bit_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::{signed_triple_gen, unsigned_triple_gen_var_19};
use malachite_base_test_util::runner::Runner;
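// `x.checked_sub_mul(y, z)` presumably computes `x - y * z`, yielding `None` on
// overflow; the demos below print that `Option` for sampled inputs.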
pub(crate) fn register(runner: &mut Runner) {
register_unsigned_demos!(runner, demo_checked_sub_mul_unsigned);
register_signed_demos!(runner, demo_checked_sub_mul_signed);
register_unsigned_benches!(runner, benchmark_checked_sub_mul_unsigned);
register_signed_benches!(runner, benchmark_checked_sub_mul_signed);
}
fn demo_checked_sub_mul_unsigned<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
) {
for (x, y, z) in unsigned_triple_gen_var_19::<T>()<|fim▁hole|> "{}.checked_sub_mul({}, {}) = {:?}",
x,
y,
z,
x.checked_sub_mul(y, z)
);
}
}
fn demo_checked_sub_mul_signed<T: PrimitiveSigned>(gm: GenMode, config: GenConfig, limit: usize) {
for (x, y, z) in signed_triple_gen::<T>().get(gm, &config).take(limit) {
println!(
"({}).checked_sub_mul({}, {}) = {:?}",
x,
y,
z,
x.checked_sub_mul(y, z)
);
}
}
fn benchmark_checked_sub_mul_unsigned<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.checked_sub_mul({}, {})", T::NAME, T::NAME, T::NAME),
BenchmarkType::Single,
unsigned_triple_gen_var_19::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_max_bit_bucketer("x", "y", "z"),
&mut [("Malachite", &mut |(x, y, z)| {
no_out!(x.checked_sub_mul(y, z))
})],
);
}
fn benchmark_checked_sub_mul_signed<T: PrimitiveSigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.checked_sub_mul({}, {})", T::NAME, T::NAME, T::NAME),
BenchmarkType::Single,
signed_triple_gen::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_max_bit_bucketer("x", "y", "z"),
&mut [("Malachite", &mut |(x, y, z)| {
no_out!(x.checked_sub_mul(y, z))
})],
);
}<|fim▁end|>
|
.get(gm, &config)
.take(limit)
{
println!(
|
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|># coding: utf-8
'''Common test fixtures
@author: Jesse Schwartzentruber (:truber)
@license:
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''
import logging
import os
import shutil
import subprocess
import tempfile
import pytest
from django.contrib.auth.models import User, Permission
from django.contrib.contenttypes.models import ContentType
from covmanager.models import Collection, CollectionFile, Repository
from crashmanager.models import Client, Tool, User as cmUser
LOG = logging.getLogger("fm.covmanager.tests")
def _check_git():
try:
proc = subprocess.Popen(["git"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = proc.communicate()
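        # A bare `git` invocation prints usage and exits with status 1, so an
        # exit code of 1 means the binary exists and ran.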
if output and proc.wait() == 1:
return True
except OSError: # FileNotFoundError
pass
return False
def _check_hg():
try:
proc = subprocess.Popen(["hg"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = proc.communicate()
if output and proc.wait() == 0:
return True
except OSError: # FileNotFoundError
pass
return False
HAVE_GIT = _check_git()
HAVE_HG = _check_hg()
@pytest.fixture
def covmanager_test(db): # pylint: disable=invalid-name,unused-argument
"""Common setup/teardown tasks for all server unittests"""
user = User.objects.create_user('test', '[email protected]', 'test')
user.user_permissions.clear()
content_type = ContentType.objects.get_for_model(cmUser)
perm = Permission.objects.get(content_type=content_type, codename='view_covmanager')
user.user_permissions.add(perm)
user_np = User.objects.create_user('test-noperm', '[email protected]', 'test')
user_np.user_permissions.clear()
@pytest.fixture
def cm(request, settings, tmpdir):
class _result(object):
have_git = HAVE_GIT
have_hg = HAVE_HG
@classmethod
def create_repository(cls, repotype, name="testrepo"):
location = tempfile.mkdtemp(prefix='testrepo', dir=os.path.dirname(__file__))
request.addfinalizer(lambda: shutil.rmtree(location))
if repotype == "git":
if not HAVE_GIT:
pytest.skip("git is not available")
classname = "GITSourceCodeProvider"
elif repotype == "hg":
if not HAVE_HG:
pytest.skip("hg is not available")
classname = "HGSourceCodeProvider"<|fim▁hole|> raise Exception("unknown repository type: %s (expecting git or hg)" % repotype)
result = Repository.objects.create(classname=classname, name=name, location=location)
LOG.debug("Created Repository pk=%d", result.pk)
if repotype == "git":
cls.git(result, "init")
elif repotype == "hg":
cls.hg(result, "init")
return result
@staticmethod
def create_collection_file(data):
# Use a specific temporary directory to upload covmanager files
# This is required as Django now needs a path relative to that folder in FileField
location = str(tmpdir)
CollectionFile.file.field.storage.location = location
tmp_fd, path = tempfile.mkstemp(suffix=".data", dir=location)
os.close(tmp_fd)
with open(path, "w") as fp:
fp.write(data)
result = CollectionFile.objects.create(file=os.path.basename(path))
LOG.debug("Created CollectionFile pk=%d", result.pk)
return result
@classmethod
def create_collection(cls,
created=None,
description="",
repository=None,
revision="",
branch="",
tools=("testtool",),
client="testclient",
coverage='{"linesTotal":0,'
'"name":null,'
'"coveragePercent":0.0,'
'"children":{},'
'"linesMissed":0,'
'"linesCovered":0}'):
# create collectionfile
coverage = cls.create_collection_file(coverage)
# create client
client, created = Client.objects.get_or_create(name=client)
if created:
LOG.debug("Created Client pk=%d", client.pk)
# create repository
if repository is None:
repository = cls.create_repository("git")
result = Collection.objects.create(description=description,
repository=repository,
revision=revision,
branch=branch,
client=client,
coverage=coverage)
LOG.debug("Created Collection pk=%d", result.pk)
# create tools
for tool in tools:
tool, created = Tool.objects.get_or_create(name=tool)
if created:
LOG.debug("Created Tool pk=%d", tool.pk)
result.tools.add(tool)
return result
@staticmethod
def git(repo, *args):
path = os.getcwd()
try:
os.chdir(repo.location)
return subprocess.check_output(["git"] + list(args)).decode("utf-8")
finally:
os.chdir(path)
@staticmethod
def hg(repo, *args):
path = os.getcwd()
try:
os.chdir(repo.location)
return subprocess.check_output(["hg"] + list(args)).decode("utf-8")
finally:
os.chdir(path)
return _result()<|fim▁end|>
|
else:
|
<|file_name|>omitBy.js<|end_file_name|><|fim▁begin|>import baseIteratee from './_baseIteratee';
import basePickBy from './_basePickBy';
/**
* The opposite of `_.pickBy`; this method creates an object composed of
* the own and inherited enumerable properties of `object` that `predicate`
* doesn't return truthy for. The predicate is invoked with two arguments:
* (value, key).
*
* @static
* @memberOf _
* @category Object
* @param {Object} object The source object.
* @param {Function|Object|string} [predicate=_.identity] The function invoked per property.<|fim▁hole|> * var object = { 'a': 1, 'b': '2', 'c': 3 };
*
* _.omitBy(object, _.isNumber);
* // => { 'b': '2' }
*/
function omitBy(object, predicate) {
predicate = baseIteratee(predicate);
return basePickBy(object, function(value, key) {
return !predicate(value, key);
});
}
export default omitBy;<|fim▁end|>
|
* @returns {Object} Returns the new object.
* @example
*
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustdoc"]
#![unstable(feature = "rustdoc", issue = "27812")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(dynamic_lib)]
#![feature(libc)]
#![feature(path_ext)]
#![feature(path_relative_from)]
#![feature(rustc_private)]
#![feature(set_stdio)]
#![feature(slice_patterns)]
#![feature(staged_api)]
#![feature(test)]
#![feature(unicode)]
#![feature(vec_push_all)]
extern crate arena;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate rustc_trans;
extern crate rustc_driver;
extern crate rustc_resolve;
extern crate rustc_lint;
extern crate rustc_back;
extern crate serialize;
extern crate syntax;
extern crate test as testing;
extern crate rustc_unicode;
#[macro_use] extern crate log;
extern crate serialize as rustc_serialize; // used by deriving
use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::process;
use std::rc::Rc;
use std::sync::mpsc::channel;
use externalfiles::ExternalHtml;
use serialize::Decodable;
use serialize::json::{self, Json};
use rustc::session::search_paths::SearchPaths;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
#[macro_use]
pub mod externalfiles;
pub mod clean;
pub mod core;
pub mod doctree;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
const PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
const DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
thread_local!(pub static ANALYSISKEY: Rc<RefCell<Option<core::CrateAnalysis>>> = {
Rc::new(RefCell::new(None))
});
struct Output {
krate: clean::Crate,
json_plugins: Vec<plugins::PluginJson>,
passes: Vec<String>,
}
pub fn main() {
const STACK_SIZE: usize = 32000000; // 32MB
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
}).unwrap().join().unwrap_or(101);
process::exit(res as i32);
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflag("V", "version", "print rustdoc's version"),
optflag("v", "verbose", "use verbose output"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optopt("", "crate-name", "specify the name of this crate", "NAME"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "list of passes to also run, you might want \
to pass it multiple times; a value of `list` \
will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optopt("", "target", "target triple to document", "TRIPLE"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL"),
optflag("", "markdown-no-toc", "don't include table of contents")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(&format!("{} [options] <input>", argv0),
&opts()));
}
pub fn main_args(args: &[String]) -> isize {
let matches = match getopts::getopts(&args[1..], &opts()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(&args[0]);
return 0;
} else if matches.opt_present("version") {
rustc_driver::version("rustdoc", &matches);
return 0;
}
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES {
println!("{:>20} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
}
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
    }
    if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = &matches.free[0];
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
libs.add_path(s);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
Err(err) => {
println!("{}", err);
return 1;
}
};
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.split_whitespace())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| PathBuf::from(&s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
&matches.opt_strs("html-in-header"),
&matches.opt_strs("html-before-content"),
&matches.opt_strs("html-after-content")) {
Some(eh) => eh,
None => return 3
};
let crate_name = matches.opt_str("crate-name");
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, externs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name)
}
(false, true) => return markdown::render(input,
output.unwrap_or(PathBuf::from("doc")),
&matches, &external_html,
!matches.opt_present("markdown-no-toc")),
(false, false) => {}
}
let out = match acquire_input(input, externs, &matches) {
Ok(out) => out,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
let Output { krate, json_plugins, passes, } = out;
info!("going to format");
match matches.opt_str("w").as_ref().map(|s| &**s) {
Some("html") | None => {
match html::render::run(krate, &external_html,
output.unwrap_or(PathBuf::from("doc")),
passes.into_iter().collect()) {
Ok(()) => {}
Err(e) => panic!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, json_plugins,
output.unwrap_or(PathBuf::from("doc.json"))) {
Ok(()) => {}
Err(e) => panic!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
externs: core::Externs,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| &**s) {
Some("rust") => Ok(rust_input(input, externs, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, externs, matches))
}
}
}
}
/// Extracts `--extern CRATE=PATH` arguments from `matches` and
/// returns a `HashMap` mapping crate names to their paths or else an
/// error message.
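/// For example (hypothetical invocation), `--extern foo=/a/libfoo.rlib
/// --extern foo=/b/libfoo.rlib` maps `"foo"` to both paths, since repeated
/// crate names accumulate into one `Vec`.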
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> {
let mut externs = HashMap::new();
for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(2, '=');
let name = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must not be empty".to_string());
}
};
let location = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must be of the format `foo=bar`".to_string());
}
};
let name = name.to_string();
externs.entry(name).or_insert(vec![]).push(location.to_string());
}
Ok(externs)
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matches) -> Output {
let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let mut paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
paths.add_path(s);
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
let (tx, rx) = channel();
rustc_driver::monitor(move || {
use rustc::session::config::Input;
tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
triple)).unwrap();
});
let (mut krate, analysis) = rx.recv().unwrap();
info!("finished with rustc");
let mut analysis = Some(analysis);
ANALYSISKEY.with(|s| {
*s.borrow_mut() = analysis.take();
});
match matches.opt_str("crate-name") {
Some(name) => krate.name = name,
None => {}
}
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.as_ref().unwrap().doc_list() {
Some(nested) => {
for inner in nested {
match *inner {
clean::Word(ref x)
if "no_default_passes" == *x => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == *x => {
for pass in value.split_whitespace() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == *x => {
for p in value.split_whitespace() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.insert(0, name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(PathBuf::from(path));
for pass in &passes {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == *pass
}) {
Some(i) => PASSES[i].1,
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
let (krate, json) = pm.run_plugins(krate);
return Output { krate: krate, json_plugins: json, passes: passes, };
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut bytes = Vec::new();
match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) {
Ok(_) => {}
Err(e) => return Err(format!("couldn't open {}: {}", input, e)),
};
match json::from_reader(&mut &bytes[..]) {
Err(s) => Err(format!("{:?}", s)),
Ok(Json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.remove(&"schema".to_string()) {<|fim▁hole|> if version != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.remove(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok(Output { krate: krate, json_plugins: plugin_output, passes: Vec::new(), })
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson> ,
dst: PathBuf) -> io::Result<()> {
// {
// "schema": version,
// "crate": { parsed crate ... },
// "plugins": { output of plugins ... }
// }
let mut json = std::collections::BTreeMap::new();
json.insert("schema".to_string(), Json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.into_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
}
}
}).collect();
// FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode
// straight to the Rust JSON representation.
let crate_json_str = format!("{}", json::as_json(&krate));
let crate_json = match json::from_str(&crate_json_str) {
Ok(j) => j,
Err(e) => panic!("Rust generated JSON is invalid: {:?}", e)
};
json.insert("crate".to_string(), crate_json);
json.insert("plugins".to_string(), Json::Object(plugins_json));
let mut file = try!(File::create(&dst));
write!(&mut file, "{}", Json::Object(json))
}<|fim▁end|>
|
Some(Json::String(version)) => {
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/*
* Routes handlers
*/
var exec = require('child_process').exec,
child_process = require('child_process'),
fs = require('fs'),
child_processes = [];
// Basic routing
module.exports = function(app) {
app.get("/", getHomePage);
app.post("/add", postAddScraper);
}
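// A minimal sketch of the daemon protocol this module relies on (message
// names are taken from the handlers in postAddScraper below; the payload
// shapes are illustrative assumptions, not the daemon's full contract):
//
//   child.send({ action: "start", url: "http://example.com", depth: 2 });
//   child.on("message", function(data) {
//     // data.message is one of: "auth-required", "general-stats",
//     // "done-crawling", "stop-crawling", "recrawl", "sitemap-created"
//   });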
function getHomePage(req, res) {
var port = res.app.settings.config.server.port;
res.render('index', {
port: port
});
}
function postAddScraper(req, res) {
var url = req.body.url,
auth_user = req.body.auth_user,
auth_pass = req.body.auth_pass,
depth = parseInt(req.body.create_crawler_depth),
create_sitemap = req.body.create_crawler_sitemap == 1,
clean = req.body.clean_crawl == 1,
config = res.app.settings.config;<|fim▁hole|> var child = child_process.fork("crawling-daemon.js");
// setup config
child.send({
action: "setConfig",
config: config
});
    if (auth_user !== "" && auth_pass !== "") {
child.send({
action: "setAuth",
auth_user: auth_user,
auth_pass: auth_pass
});
}
child.send({
action: "start",
url: url,
clean: clean,
depth: depth
});
child.on("message", function(data) {
switch (data.message) {
case "auth-required":
data.row_id = data.host.replace(/\./g,"");
res.render("partials/scraper-stats-row", {data: data, layout: false}, function(err, html) {
if (err != null) {
return;
}
data.html = html;
io.sockets.emit('auth-required', data);
});
break;
case "general-stats":
data.row_id = data.host.replace(/\./g,"");
res.render("partials/scraper-stats-row", {data: data, layout: false}, function(err, html) {
if (err != null) {
return;
}
data.html = html;
io.sockets.emit('general-stats', data);
});
break;
case "done-crawling": case "stop-crawling":
if (create_sitemap) {
child.send({ action: "createSitemap" });
} else {
child.kill(); // Terminate crawling daemon
}
io.sockets.emit(data.message, data); // done-crawling | stop-crawling
break;
// @TODO: Implement
case "recrawl":
break;
case "sitemap-created":
var sitemap_path = "public/sitemaps/";
fs.exists(sitemap_path, function(exists) {
if (!exists) {
fs.mkdir(sitemap_path, writeSitemap);
} else {
writeSitemap();
}
// Terminate crawling daemon
child.kill();
});
function writeSitemap() {
sitemap_path += "sitemap_"+ data.host +".xml";
fs.writeFile(sitemap_path, data.content, function(err) {
if(err) {
console.log(err);
}
else {
io.sockets.emit('sitemap-ready', { host: data.host, path: sitemap_path.replace("public/", "") })
}
});
}
break;
default:
io.sockets.emit(data.message, data);
break;
}
})
res.redirect("/");
}<|fim▁end|>
| |
<|file_name|>nttcis.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
NTTCIS Common Components
"""
import xml.etree.ElementTree as etree
import re
from functools import wraps
from copy import deepcopy
from base64 import b64encode
from time import sleep
from io import BytesIO
try:
from collections.abc import MutableSequence, Mapping
except ImportError:
from collections import MutableSequence, Mapping
# TODO: use distutils.version when Travis CI has fixed the pylint issue with version
# from distutils.version import LooseVersion
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import b
from libcloud.common.base import ConnectionUserAndKey, XmlResponse, RawResponse
from libcloud.compute.base import Node
from libcloud.utils.py3 import basestring
from libcloud.utils.xml import findtext
from libcloud.compute.types import LibcloudError, InvalidCredsError
# Roadmap / TODO:
#
# 1.0 - Copied from OpSource API, named provider details.
# setup a few variables to represent all of the NTTC-CIS cloud namespaces
NAMESPACE_BASE = "http://oec.api.opsource.net/schemas"
ORGANIZATION_NS = NAMESPACE_BASE + "/organization"
SERVER_NS = NAMESPACE_BASE + "/server"
NETWORK_NS = NAMESPACE_BASE + "/network"
DIRECTORY_NS = NAMESPACE_BASE + "/directory"
GENERAL_NS = NAMESPACE_BASE + "/general"
BACKUP_NS = NAMESPACE_BASE + "/backup"
# API 2.0 Namespaces and URNs
TYPES_URN = "urn:didata.com:api:cloud:types"
# API end-points
API_ENDPOINTS = {
'na': {
'name': 'North America (NA)',
'host': 'api-na.dimensiondata.com',
'vendor': 'NTTC-CIS'
},
'eu': {
'name': 'Europe (EU)',
'host': 'api-eu.dimensiondata.com',
'vendor': 'NTTC-CIS'
},
'au': {
'name': 'Australia (AU)',
'host': 'api-au.dimensiondata.com',
'vendor': 'NTTC-CIS'
},
'au-gov': {
'name': 'Australia Canberra ACT (AU)',
'host': 'api-canberra.dimensiondata.com',
'vendor': 'NTTC-CIS'
},
'af': {
'name': 'Africa (AF)',
'host': 'api-mea.dimensiondata.com',
'vendor': 'NTTC-CIS'
},
'ap': {
'name': 'Asia Pacific (AP)',
'host': 'api-ap.dimensiondata.com',
'vendor': 'NTTC-CIS'
},
'ca': {
'name': 'Canada (CA)',
'host': 'api-canada.dimensiondata.com',
'vendor': 'NTTC-CIS'
},
'is-na': {
'name': 'North America (NA)',
'host': 'usapi.cloud.is.co.za',
'vendor': 'InternetSolutions'
},
'is-eu': {
'name': 'Europe (EU)',
'host': 'euapi.cloud.is.co.za',
'vendor': 'InternetSolutions'
},
'is-au': {
'name': 'Australia (AU)',
'host': 'auapi.cloud.is.co.za',
'vendor': 'InternetSolutions'
},
'is-af': {
'name': 'Africa (AF)',
'host': 'meaapi.cloud.is.co.za',
'vendor': 'InternetSolutions'
},
'is-ap': {
'name': 'Asia Pacific (AP)',
'host': 'apapi.cloud.is.co.za',
'vendor': 'InternetSolutions'
},
'is-latam': {
'name': 'South America (LATAM)',
'host': 'latamapi.cloud.is.co.za',
'vendor': 'InternetSolutions'
},
'is-canada': {
'name': 'Canada (CA)',
'host': 'canadaapi.cloud.is.co.za',
'vendor': 'InternetSolutions'
},
'ntta-na': {
'name': 'North America (NA)',
'host': 'cloudapi.nttamerica.com',
'vendor': 'NTTNorthAmerica'
},
'ntta-eu': {
'name': 'Europe (EU)',
'host': 'eucloudapi.nttamerica.com',
'vendor': 'NTTNorthAmerica'
},
'ntta-au': {
'name': 'Australia (AU)',
'host': 'aucloudapi.nttamerica.com',
'vendor': 'NTTNorthAmerica'
},
'ntta-af': {
'name': 'Africa (AF)',
'host': 'sacloudapi.nttamerica.com',
'vendor': 'NTTNorthAmerica'
},
'ntta-ap': {
'name': 'Asia Pacific (AP)',
'host': 'hkcloudapi.nttamerica.com',
'vendor': 'NTTNorthAmerica'
},
'cisco-na': {
'name': 'North America (NA)',
'host': 'iaas-api-na.cisco-ccs.com',
'vendor': 'Cisco'
},
'cisco-eu': {
'name': 'Europe (EU)',
'host': 'iaas-api-eu.cisco-ccs.com',
'vendor': 'Cisco'
},
'cisco-au': {
'name': 'Australia (AU)',
'host': 'iaas-api-au.cisco-ccs.com',
'vendor': 'Cisco'
},
'cisco-af': {
'name': 'Africa (AF)',
'host': 'iaas-api-mea.cisco-ccs.com',
'vendor': 'Cisco'
},
'cisco-ap': {
'name': 'Asia Pacific (AP)',
'host': 'iaas-api-ap.cisco-ccs.com',
'vendor': 'Cisco'
},
'cisco-latam': {
'name': 'South America (LATAM)',
'host': 'iaas-api-sa.cisco-ccs.com',
'vendor': 'Cisco'
},
'cisco-canada': {
'name': 'Canada (CA)',
'host': 'iaas-api-ca.cisco-ccs.com',
'vendor': 'Cisco'
},
'med1-il': {
'name': 'Israel (IL)',
'host': 'api.cloud.med-1.com',
'vendor': 'Med-1'
},
'med1-na': {
'name': 'North America (NA)',
'host': 'api-na.cloud.med-1.com',
'vendor': 'Med-1'
},
'med1-eu': {
'name': 'Europe (EU)',
'host': 'api-eu.cloud.med-1.com',
'vendor': 'Med-1'
},
'med1-au': {
'name': 'Australia (AU)',
'host': 'api-au.cloud.med-1.com',
'vendor': 'Med-1'
},
'med1-af': {
'name': 'Africa (AF)',
'host': 'api-af.cloud.med-1.com',
'vendor': 'Med-1'
},
'med1-ap': {
'name': 'Asia Pacific (AP)',
'host': 'api-ap.cloud.med-1.com',
'vendor': 'Med-1'
},
'med1-latam': {
'name': 'South America (LATAM)',
'host': 'api-sa.cloud.med-1.com',
'vendor': 'Med-1'
},
'med1-canada': {
'name': 'Canada (CA)',
'host': 'api-ca.cloud.med-1.com',
'vendor': 'Med-1'
},
'indosat-id': {
'name': 'Indonesia (ID)',
'host': 'iaas-api.indosat.com',
'vendor': 'Indosat'
},
'indosat-na': {
'name': 'North America (NA)',
'host': 'iaas-usapi.indosat.com',
'vendor': 'Indosat'
},
'indosat-eu': {
'name': 'Europe (EU)',
'host': 'iaas-euapi.indosat.com',
'vendor': 'Indosat'
},
'indosat-au': {
'name': 'Australia (AU)',
'host': 'iaas-auapi.indosat.com',
'vendor': 'Indosat'
},
'indosat-af': {
'name': 'Africa (AF)',
'host': 'iaas-afapi.indosat.com',
'vendor': 'Indosat'
},
'bsnl-in': {
'name': 'India (IN)',
'host': 'api.bsnlcloud.com',
'vendor': 'BSNL'
},
'bsnl-na': {
'name': 'North America (NA)',
'host': 'usapi.bsnlcloud.com',
'vendor': 'BSNL'
},
'bsnl-eu': {
'name': 'Europe (EU)',
'host': 'euapi.bsnlcloud.com',
'vendor': 'BSNL'
},
'bsnl-au': {
'name': 'Australia (AU)',
'host': 'auapi.bsnlcloud.com',
'vendor': 'BSNL'
},
'bsnl-af': {
'name': 'Africa (AF)',
'host': 'afapi.bsnlcloud.com',
'vendor': 'BSNL'
}
}
# Default API end-point for the base connection class.
DEFAULT_REGION = 'na'
BAD_CODE_XML_ELEMENTS = (
('responseCode', SERVER_NS),
('responseCode', TYPES_URN),
('result', GENERAL_NS)
)
BAD_MESSAGE_XML_ELEMENTS = (
('message', SERVER_NS),
('message', TYPES_URN),
('resultDetail', GENERAL_NS)
)
def get_params(func):
@wraps(func)
def paramed(*args, **kwargs):
if kwargs:
for k, v in kwargs.items():
old_key = k
matches = re.findall(r'_(\w)', k)
for match in matches:
k = k.replace('_' + match, match.upper())
del kwargs[old_key]<|fim▁hole|> result = func(args[0])
return result
return paramed
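# A hedged sketch of what the decorator's key rewriting does: snake_case
# keyword names become the camelCase query parameters the API expects.
# For example (illustrative kwargs):
#
#     {'network_domain_id': 'x', 'ip_version': 'IPV4'}
#     -> {'networkDomainId': 'x', 'ipVersion': 'IPV4'}
#
# since each character following an underscore is upper-cased and the
# underscore removed by the re.findall/replace loop above.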
def dd_object_to_id(obj, obj_type, id_value='id'):
"""
    Takes in a DD object or string and returns its id.
    This is a helper method: many of our functions can take either an object
    or a string, and we need an easy way of converting between them.
    :param obj: The object (or string id) to get the id from
    :type obj: ``object`` or ``str``
    :param obj_type: The expected class of ``obj``
    :type obj_type: ``type``
    :param id_value: The name of the attribute holding the id
    :type id_value: ``str``
    :rtype: ``str``
"""
if isinstance(obj, obj_type):
return getattr(obj, id_value)
elif isinstance(obj, (basestring)):
return obj
else:
raise TypeError(
"Invalid type %s looking for basestring or %s"
% (type(obj).__name__, obj_type.__name__)
)
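# Example usage (assuming `vlan` is an NttCisVlan instance, defined below):
#
#     dd_object_to_id(vlan, NttCisVlan)         # -> vlan.id
#     dd_object_to_id('8839-abc', NttCisVlan)   # -> '8839-abc', unchanged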
# TODO: use distutils.version when Travis CI has fixed the pylint issue with version
# This is a temporary workaround.
def LooseVersion(version):
return float(version)
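# Note: float-based comparison is only safe for the single-dotted versions
# used in this module (2.2 .. 2.7); a two-dotted version such as '2.10'
# would parse as the float 2.1 and compare incorrectly against '2.9'.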
class NetworkDomainServicePlan(object):
ESSENTIALS = "ESSENTIALS"
ADVANCED = "ADVANCED"
class NttCisRawResponse(RawResponse):
pass
class NttCisResponse(XmlResponse):
def parse_error(self):
if self.status == httplib.UNAUTHORIZED:
raise InvalidCredsError(self.body)
elif self.status == httplib.FORBIDDEN:
raise InvalidCredsError(self.body)
body = self.parse_body()
if self.status == httplib.BAD_REQUEST:
for response_code in BAD_CODE_XML_ELEMENTS:
code = findtext(body, response_code[0], response_code[1])
if code is not None:
break
for message in BAD_MESSAGE_XML_ELEMENTS:
message = findtext(body, message[0], message[1])
if message is not None:
break
raise NttCisAPIException(code=code,
msg=message,
driver=self.connection.driver)
if self.status is not httplib.OK:
raise NttCisAPIException(code=self.status,
msg=body,
driver=self.connection.driver)
return self.body
class NttCisAPIException(LibcloudError):
def __init__(self, code, msg, driver):
self.code = code
self.msg = msg
self.driver = driver
def __str__(self):
return "%s: %s" % (self.code, self.msg)
def __repr__(self):
return ("<NttCisAPIException: code='%s', msg='%s'>" %
(self.code, self.msg))
class NttCisConnection(ConnectionUserAndKey):
"""
Connection class for the NttCis driver
"""
api_path_version_1 = '/oec'
api_path_version_2 = '/caas'
api_version_1 = 0.9
# Earliest version supported
oldest_api_version = '2.2'
# Latest version supported
latest_api_version = '2.7'
# Default api version
active_api_version = '2.7'
_orgId = None
responseCls = NttCisResponse
rawResponseCls = NttCisRawResponse
allow_insecure = False
def __init__(self, user_id, key, secure=True, host=None, port=None,
url=None, timeout=None, proxy_url=None,
api_version=None, **conn_kwargs):
super(NttCisConnection, self).__init__(
user_id=user_id,
key=key,
secure=secure,
host=host, port=port,
url=url, timeout=timeout,
proxy_url=proxy_url)
if conn_kwargs['region']:
self.host = conn_kwargs['region']['host']
if api_version:
if LooseVersion(api_version) < LooseVersion(
self.oldest_api_version):
msg = 'API Version specified is too old. No longer ' \
'supported. Please upgrade to the latest version {}' \
.format(self.active_api_version)
raise NttCisAPIException(code=None,
msg=msg,
driver=self.driver)
elif LooseVersion(api_version) > LooseVersion(
self.latest_api_version):
msg = 'Unsupported API Version. The version specified is ' \
'not release yet. Please use the latest supported ' \
'version {}' \
.format(self.active_api_version)
raise NttCisAPIException(code=None,
msg=msg,
driver=self.driver)
else:
# Overwrite default version using the version user specified
self.active_api_version = api_version
def add_default_headers(self, headers):
headers['Authorization'] = \
('Basic %s' % b64encode(b('%s:%s' % (self.user_id,
self.key))).decode('utf-8'))
headers['Content-Type'] = 'application/xml'
return headers
def request_api_1(self, action, params=None, data='',
headers=None, method='GET'):
action = "%s/%s/%s" % (self.api_path_version_1,
self.api_version_1, action)
return super(NttCisConnection, self).request(
action=action,
params=params, data=data,
method=method, headers=headers)
def request_api_2(self, path, action, params=None, data='',
headers=None, method='GET'):
action = "%s/%s/%s/%s" % (self.api_path_version_2,
self.active_api_version, path, action)
return super(NttCisConnection, self).request(
action=action,
params=params, data=data,
method=method, headers=headers)
def raw_request_with_orgId_api_1(self, action, params=None, data='',
headers=None, method='GET'):
action = "%s/%s" % (self.get_resource_path_api_1(), action)
return super(NttCisConnection, self).request(
action=action,
params=params, data=data,
method=method, headers=headers, raw=True)
def request_with_orgId_api_1(self, action, params=None, data='',
headers=None, method='GET'):
action = "%s/%s" % (self.get_resource_path_api_1(), action)
return super(NttCisConnection, self).request(
action=action,
params=params, data=data,
method=method, headers=headers)
def request_with_orgId_api_2(self, action, params=None, data='',
headers=None, method='GET'):
action = "%s/%s" % (self.get_resource_path_api_2(), action)
return super(NttCisConnection, self).request(
action=action,
params=params, data=data,
method=method, headers=headers)
def paginated_request_with_orgId_api_2(self, action, params=None, data='',
headers=None, method='GET',
page_size=250):
"""
A paginated request to the MCP2.0 API
This essentially calls out to request_with_orgId_api_2 for each page
and yields the response to make a generator
This generator can be looped through to grab all the pages.
:param action: The resource to access (i.e. 'network/vlan')
:type action: ``str``
:param params: Parameters to give to the action
:type params: ``dict`` or ``None``
:param data: The data payload to be added to the request
:type data: ``str``
:param headers: Additional header to be added to the request
:type headers: ``str`` or ``dict`` or ``None``
:param method: HTTP Method for the request (i.e. 'GET', 'POST')
:type method: ``str``
:param page_size: The size of each page to be returned
Note: Max page size in MCP2.0 is currently 250
:type page_size: ``int``
"""
if params is None:
params = {}
params['pageSize'] = page_size
resp = self.request_with_orgId_api_2(action, params,
data, headers,
method).object
yield resp
if len(resp) <= 0:
return
pcount = resp.get('pageCount') # pylint: disable=no-member
psize = resp.get('pageSize') # pylint: disable=no-member
pnumber = resp.get('pageNumber') # pylint: disable=no-member
while int(pcount) >= int(psize):
params['pageNumber'] = int(pnumber) + 1
resp = self.request_with_orgId_api_2(action, params,
data, headers,
method).object
pcount = resp.get('pageCount') # pylint: disable=no-member
psize = resp.get('pageSize') # pylint: disable=no-member
pnumber = resp.get('pageNumber') # pylint: disable=no-member
yield resp
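    # A minimal usage sketch (assuming an authenticated connection; the
    # action string and element handling are illustrative):
    #
    #     for page in conn.paginated_request_with_orgId_api_2('server/server'):
    #         for element in page:
    #             ...  # process each parsed XML element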
def get_resource_path_api_1(self):
"""
This method returns a resource path which is necessary for referencing
resources that require a full path instead of just an ID, such as
networks, and customer snapshots.
"""
return ("%s/%s/%s" % (self.api_path_version_1, self.api_version_1,
self._get_orgId()))
def get_resource_path_api_2(self):
"""
This method returns a resource path which is necessary for referencing
resources that require a full path instead of just an ID, such as
networks, and customer snapshots.
"""
return ("%s/%s/%s" % (self.api_path_version_2, self.active_api_version,
self._get_orgId()))
def wait_for_state(self, state, func, poll_interval=2, timeout=60, *args,
**kwargs):
"""
        Wait for a function which returns an instance with a status/state
        field to match the desired state.
        Keep polling ``func`` until one of the desired states is matched.
:param state: Either the desired state (`str`) or a `list` of states
:type state: ``str`` or ``list``
:param func: The function to call, e.g. ex_get_vlan. Note: This
function needs to return an object which has ``status``
attribute.
:type func: ``function``
:param poll_interval: The number of seconds to wait between checks
:type poll_interval: `int`
:param timeout: The total number of seconds to wait to reach a state
:type timeout: `int`
:param args: The arguments for func
:type args: Positional arguments
:param kwargs: The arguments for func
:type kwargs: Keyword arguments
:return: Result from the calling function.
"""
cnt = 0
result = None
object_state = None
        if isinstance(state, basestring):
            state = state.lower()
        else:
            state = [st.lower() for st in state]
while cnt < timeout / poll_interval:
result = func(*args, **kwargs)
if isinstance(result, Node):
object_state = result.state.lower()
else:
                # Use result.status.lower(); otherwise states supplied in
                # lower case would never match.
                object_state = result.status.lower()
            if object_state == state or object_state in state:
return result
sleep(poll_interval)
cnt += 1
msg = 'Status check for object %s timed out' % (result)
raise NttCisAPIException(code=object_state,
msg=msg,
driver=self.driver)
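    # Example call (hypothetical driver function; the trailing positional
    # argument is forwarded to `func` via *args):
    #
    #     conn.wait_for_state('normal', driver.ex_get_network_domain,
    #                         2, 60, domain_id)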
def _get_orgId(self):
"""
Send the /myaccount API request to NTTC-CIS cloud and parse the
'orgId' from the XML response object. We need the orgId to use most
of the other API functions
"""
if self._orgId is None:
body = self.request_api_1('myaccount').object
self._orgId = findtext(body, 'orgId', DIRECTORY_NS)
return self._orgId
def get_account_details(self):
"""
Get the details of this account
:rtype: :class:`DimensionDataAccountDetails`
"""
body = self.request_api_1('myaccount').object
return NttCisAccountDetails(
user_name=findtext(body, 'userName', DIRECTORY_NS),
full_name=findtext(body, 'fullName', DIRECTORY_NS),
first_name=findtext(body, 'firstName', DIRECTORY_NS),
last_name=findtext(body, 'lastName', DIRECTORY_NS),
email=findtext(body, 'emailAddress', DIRECTORY_NS))
class NttCisAccountDetails(object):
"""
NTTCIS account class details
"""
def __init__(self, user_name, full_name, first_name, last_name, email):
self.user_name = user_name
self.full_name = full_name
self.first_name = first_name
self.last_name = last_name
self.email = email
class NttCisStatus(object):
"""
    NTTCIS API pending operation status class.
    Fields: action, request_time, user_name, number_of_steps, update_time,
    step.name, step.number, step.percent_complete, failure_reason
"""
def __init__(self, action=None, request_time=None, user_name=None,
number_of_steps=None, update_time=None, step_name=None,
step_number=None, step_percent_complete=None,
failure_reason=None):
self.action = action
self.request_time = request_time
self.user_name = user_name
self.number_of_steps = number_of_steps
self.update_time = update_time
self.step_name = step_name
self.step_number = step_number
self.step_percent_complete = step_percent_complete
self.failure_reason = failure_reason
def __repr__(self):
return (('<NttCisStatus: action=%s, request_time=%s, '
'user_name=%s, number_of_steps=%s, update_time=%s, '
'step_name=%s, step_number=%s, '
'step_percent_complete=%s, failure_reason=%s>')
% (self.action, self.request_time, self.user_name,
self.number_of_steps, self.update_time, self.step_name,
self.step_number, self.step_percent_complete,
self.failure_reason))
class NttCisNetwork(object):
"""
NTTCIS network with location.
"""
def __init__(self, id, name, description, location, private_net,
multicast, status):
self.id = str(id)
self.name = name
self.description = description
self.location = location
self.private_net = private_net
self.multicast = multicast
self.status = status
def __repr__(self):
return (('<NttCisNetwork: id=%s, name=%s, description=%s, '
'location=%s, private_net=%s, multicast=%s>')
% (self.id, self.name, self.description, self.location,
self.private_net, self.multicast))
class NttCisNetworkDomain(object):
"""
NttCis network domain with location.
"""
def __init__(self, id, name, description, location, status, plan):
self.id = str(id)
self.name = name
self.description = description
self.location = location
self.status = status
self.plan = plan
def __repr__(self):
return (('<NttCisNetworkDomain: id=%s, name=%s, '
'description=%s, location=%s, status=%s, plan=%s>')
% (self.id, self.name, self.description, self.location,
self.status, self.plan))
class NttCisPublicIpBlock(object):
"""
NTTCIS Public IP Block with location.
"""
def __init__(self, id, base_ip, size, location, network_domain,
status):
self.id = str(id)
self.base_ip = base_ip
self.size = size
self.location = location
self.network_domain = network_domain
self.status = status
def __repr__(self):
        return (('<NttCisPublicIpBlock: id=%s, base_ip=%s, '
'size=%s, location=%s, status=%s>')
% (self.id, self.base_ip, self.size, self.location,
self.status))
class NttCisServerCpuSpecification(object):
"""
A class that represents the specification of the CPU(s) for a
node
"""
def __init__(self, cpu_count, cores_per_socket, performance):
"""
Instantiate a new :class:`NttCisServerCpuSpecification`
:param cpu_count: The number of CPUs
:type cpu_count: ``int``
:param cores_per_socket: The number of cores per socket, the
recommendation is 1
:type cores_per_socket: ``int``
:param performance: The performance type, e.g. HIGHPERFORMANCE
:type performance: ``str``
"""
self.cpu_count = cpu_count
self.cores_per_socket = cores_per_socket
self.performance = performance
def __repr__(self):
return (('<NttCisServerCpuSpecification: '
'cpu_count=%s, cores_per_socket=%s, '
'performance=%s>')
% (self.cpu_count, self.cores_per_socket, self.performance))
class NttCisServerDisk(object):
"""
A class that represents the disk on a server
"""
def __init__(self, id=None, scsi_id=None, size_gb=None, speed=None,
state=None):
"""
        Instantiate a new :class:`NttCisServerDisk`
:param id: The id of the disk
:type id: ``str``
:param scsi_id: Representation for scsi
:type scsi_id: ``int``
:param size_gb: Size of the disk
:type size_gb: ``int``
:param speed: Speed of the disk (i.e. STANDARD)
:type speed: ``str``
:param state: State of the disk (i.e. PENDING)
:type state: ``str``
"""
self.id = id
self.scsi_id = scsi_id
self.size_gb = size_gb
self.speed = speed
self.state = state
def __repr__(self):
return (('<NttCisServerDisk: '
                 'id=%s, size_gb=%s>')
% (self.id, self.size_gb))
class NttCisScsiController(object):
"""
A class that represents the disk on a server
"""
def __init__(self, id, adapter_type, bus_number, state):
"""
        Instantiate a new :class:`NttCisScsiController`
        :param id: The id of the controller
        :type id: ``str``
        :param adapter_type: The 'brand' of adapter
        :type adapter_type: ``str``
        :param bus_number: The bus number occupied on the virtual hardware
        :type bus_number: ``str``
        :param state: Current state (i.e. NORMAL)
        :type state: ``str``
"""
self.id = id
self.adapter_type = adapter_type
self.bus_number = bus_number
self.state = state
def __repr__(self):
return (('<NttCisScsiController: '
                 'id=%s, adapter_type=%s, bus_number=%s, state=%s>')
% (self.id, self.adapter_type, self.bus_number, self.state))
class NttCisServerVMWareTools(object):
"""
A class that represents the VMWareTools for a node
"""
def __init__(self, status, version_status, api_version):
"""
Instantiate a new :class:`NttCisServerVMWareTools` object
:param status: The status of VMWare Tools
:type status: ``str``
:param version_status: The status for the version of VMWare Tools
(i.e NEEDS_UPGRADE)
:type version_status: ``str``
:param api_version: The API version of VMWare Tools
:type api_version: ``str``
"""
self.status = status
self.version_status = version_status
self.api_version = api_version
def __repr__(self):
return (('<NttCisServerVMWareTools '
'status=%s, version_status=%s, '
'api_version=%s>')
% (self.status, self.version_status, self.api_version))
class NttCisSnapshot(object):
"""
NTTCIS Class representing server snapshots
"""
def __init__(self, server_id, service_plan, id=None, window_id=None,
start_time=None, state=None, end_time=None,
type=None, expiry_time=None, action=None):
self.server_id = server_id
self.service_plan = service_plan
self.id = id
self.window_id = window_id
self.start_time = start_time
        self.end_time = end_time
        self.state = state
self.type = type
self.expiry_time = expiry_time
self.action = action
def __repr__(self):
        return (('<NttCisSnapshot: '
                 'id=%s, start_time=%s, '
                 'end_time=%s, type=%s, '
                 'expiry_time=%s, state=%s>')
% (self.id, self.start_time, self.end_time,
self.type, self.expiry_time, self.state))
class NttCisReservedIpAddress(object):
"""
    NTTCIS Reserved IPv4 address
"""
def __init__(self, datacenter_id, exclusive, vlan_id, ip,
description=None):
self.datacenter_id = datacenter_id
self.exclusive = exclusive
self.vlan_id = vlan_id
self.ip = ip
self.description = description
def __repr__(self):
return (('<NttCisReservedIpAddress '
                 'datacenterId=%s, exclusive=%s, vlanId=%s, ipAddress=%s,'
                 ' description=%s>') % (self.datacenter_id, self.exclusive,
self.vlan_id, self.ip,
self.description))
class NttCisFirewallRule(object):
"""
NTTCIS Firewall Rule for a network domain
"""
def __init__(self, id, name, action, location, network_domain,
status, ip_version, protocol, source, destination,
enabled):
self.id = str(id)
self.name = name
self.action = action
self.location = location
self.network_domain = network_domain
self.status = status
self.ip_version = ip_version
self.protocol = protocol
self.source = source
self.destination = destination
self.enabled = enabled
def __repr__(self):
return (('<NttCisFirewallRule: id=%s, name=%s, '
'action=%s, location=%s, network_domain=%s, '
'status=%s, ip_version=%s, protocol=%s, source=%s, '
'destination=%s, enabled=%s>')
% (self.id, self.name, self.action, self.location,
self.network_domain, self.status, self.ip_version,
self.protocol, self.source, self.destination,
self.enabled))
"""
class NttCisFirewallAddress(object):
The source or destination model in a firewall rule
def __init__(self, any_ip, ip_address, ip_prefix_size,
port_begin, port_end, address_list_id,
port_list_id):
self.any_ip = any_ip
self.ip_address = ip_address
self.ip_prefix_size = ip_prefix_size
self.port_list_id = port_list_id
self.port_begin = port_begin
self.port_end = port_end
self.address_list_id = address_list_id
self.port_list_id = port_list_id
def __repr__(self):
return (
'<NttCisFirewallAddress: any_ip=%s, ip_address=%s, '
'ip_prefix_size=%s, port_begin=%s, port_end=%s, '
'address_list_id=%s, port_list_id=%s>'
% (self.any_ip, self.ip_address, self.ip_prefix_size,
self.port_begin, self.port_end, self.address_list_id,
self.port_list_id))
"""
class NttCisFirewallAddress(object):
"""
The source or destination model in a firewall rule
    9/4/18: Editing class to use with the ex_create_firewall_rule method.
    Will have to circle back and test for any other uses.
"""
def __init__(self, any_ip=None, ip_address=None, ip_prefix_size=None,
port_begin=None, port_end=None, address_list_id=None,
port_list_id=None):
"""
        :param any_ip: used to set the ip address to "ANY"
        :type any_ip: ``str``
        :param ip_address: Optional, an IP address in IPv4 decimal
                           notation or an IPv6 address
        :type ip_address: ``str``
        :param ip_prefix_size: An integer denoting the prefix size
        :type ip_prefix_size: ``int``
        :param port_begin: integer for an individual port or the start of a
                           list of ports if not using a port list
        :type port_begin: ``int``
        :param port_end: integer required if using a list of ports
                         (NOT a port list but a list starting with port begin)
        :type port_end: ``int``
        :param address_list_id: An id identifying an address list
        :type address_list_id: ``str``
        :param port_list_id: An id identifying a port list
        :type port_list_id: ``str``
"""
self.any_ip = any_ip
self.ip_address = ip_address
self.ip_prefix_size = ip_prefix_size
self.port_begin = port_begin
self.port_end = port_end
self.address_list_id = address_list_id
self.port_list_id = port_list_id
def __repr__(self):
return (
'<NttCisFirewallAddress: any_ip=%s, ip_address=%s, '
'ip_prefix_size=%s, port_begin=%s, port_end=%s, '
'address_list_id=%s, port_list_id=%s>'
% (self.any_ip, self.ip_address, self.ip_prefix_size,
self.port_begin, self.port_end, self.address_list_id,
self.port_list_id))
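# A hedged construction sketch for use with ex_create_firewall_rule
# (the addresses and ports are illustrative values):
#
#     source = NttCisFirewallAddress(any_ip='ANY')
#     dest = NttCisFirewallAddress(ip_address='10.1.1.0', ip_prefix_size=24,
#                                  port_begin=8000, port_end=8080)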
class NttCisNatRule(object):
"""
An IP NAT rule in a network domain
"""
def __init__(self, id, network_domain, internal_ip, external_ip, status):
self.id = id
self.network_domain = network_domain
self.internal_ip = internal_ip
self.external_ip = external_ip
self.status = status
def __repr__(self):
return (('<NttCisNatRule: id=%s, status=%s>')
% (self.id, self.status))
class NttCisAntiAffinityRule(object):
"""
Anti-Affinity rule for NTTCIS
An Anti-Affinity rule ensures that servers in the rule will
not reside on the same VMware ESX host.
"""
def __init__(self, id, node_list):
"""
        Instantiate a new :class:`NttCisAntiAffinityRule`
:param id: The ID of the Anti-Affinity rule
:type id: ``str``
:param node_list: List of node ids that belong in this rule
:type node_list: ``list`` of ``str``
"""
self.id = id
self.node_list = node_list
def __repr__(self):
return (('<NttCisAntiAffinityRule: id=%s>')
% (self.id))
class NttCisVlan(object):
"""
NTTCIS VLAN.
"""
def __init__(self, id, name, description, location, network_domain,
status, private_ipv4_range_address, private_ipv4_range_size,
ipv6_range_address, ipv6_range_size, ipv4_gateway,
ipv6_gateway):
"""
        Initialize an instance of ``NttCisVlan``
:param id: The ID of the VLAN
:type id: ``str``
:param name: The name of the VLAN
:type name: ``str``
        :param description: Plain text description of the VLAN
        :type description: ``str``
        :param location: The location (data center) of the VLAN
        :type location: ``NodeLocation``
        :param network_domain: The Network Domain that owns this VLAN
        :type network_domain: :class:`NttCisNetworkDomain`
        :param status: The status of the VLAN
        :type status: :class:`NttCisStatus`
:param private_ipv4_range_address: The host address of the VLAN
IP space
:type private_ipv4_range_address: ``str``
:param private_ipv4_range_size: The size (e.g. '24') of the VLAN
as a CIDR range size
:type private_ipv4_range_size: ``int``
:param ipv6_range_address: The host address of the VLAN
IP space
:type ipv6_range_address: ``str``
:param ipv6_range_size: The size (e.g. '32') of the VLAN
as a CIDR range size
:type ipv6_range_size: ``int``
:param ipv4_gateway: The IPv4 default gateway address
:type ipv4_gateway: ``str``
:param ipv6_gateway: The IPv6 default gateway address
:type ipv6_gateway: ``str``
"""
self.id = str(id)
self.name = name
self.location = location
self.description = description
self.network_domain = network_domain
self.status = status
self.private_ipv4_range_address = private_ipv4_range_address
self.private_ipv4_range_size = private_ipv4_range_size
self.ipv6_range_address = ipv6_range_address
self.ipv6_range_size = ipv6_range_size
self.ipv4_gateway = ipv4_gateway
self.ipv6_gateway = ipv6_gateway
def __repr__(self):
return (('<NttCisVlan: id=%s, name=%s, '
'description=%s, location=%s, status=%s>')
% (self.id, self.name, self.description,
self.location, self.status))
class NttCisPool(object):
"""
NttCis VIP Pool.
"""
def __init__(self, id, name, description, status, load_balance_method,
health_monitor_id, service_down_action, slow_ramp_time):
"""
Initialize an instance of ``NttCisPool``
:param id: The ID of the pool
:type id: ``str``
:param name: The name of the pool
:type name: ``str``
        :param description: Plain text description of the pool
        :type description: ``str``
        :param status: The status of the pool
        :type status: :class:`NttCisStatus`
:param load_balance_method: The load balancer method
:type load_balance_method: ``str``
:param health_monitor_id: The ID of the health monitor
:type health_monitor_id: ``str``
:param service_down_action: Action to take when pool is down
:type service_down_action: ``str``
:param slow_ramp_time: The ramp-up time for service recovery
:type slow_ramp_time: ``int``
"""
self.id = str(id)
self.name = name
self.description = description
self.status = status
self.load_balance_method = load_balance_method
self.health_monitor_id = health_monitor_id
self.service_down_action = service_down_action
self.slow_ramp_time = slow_ramp_time
def __repr__(self):
return (('<NttCisPool: id=%s, name=%s, '
'description=%s, status=%s>')
% (self.id, self.name, self.description,
self.status))
class NttCisPoolMember(object):
"""
NTTCIS VIP Pool Member.
"""
def __init__(self, id, name, status, ip, port, node_id):
"""
Initialize an instance of ``NttCisPoolMember``
:param id: The ID of the pool member
:type id: ``str``
:param name: The name of the pool member
:type name: ``str``
:param status: The status of the pool
:type status: :class:`NttCisStatus`
:param ip: The IP of the pool member
:type ip: ``str``
:param port: The port of the pool member
:type port: ``int``
:param node_id: The ID of the associated node
:type node_id: ``str``
"""
self.id = str(id)
self.name = name
self.status = status
self.ip = ip
self.port = port
self.node_id = node_id
def __repr__(self):
        return (('<NttCisPoolMember: id=%s, name=%s, '
'ip=%s, status=%s, port=%s, node_id=%s>')
% (self.id, self.name,
self.ip, self.status, self.port,
self.node_id))
class NttCisVIPNode(object):
def __init__(self, id, name, status, ip, connection_limit='10000',
connection_rate_limit='10000', health_monitor=None):
"""
Initialize an instance of :class:`NttCisVIPNode`
:param id: The ID of the node
:type id: ``str``
:param name: The name of the node
:type name: ``str``
:param status: The status of the node
:type status: :class:`NttCisStatus`
:param ip: The IP of the node
:type ip: ``str``
:param connection_limit: The total connection limit for the node
:type connection_limit: ``int``
:param connection_rate_limit: The rate limit for the node
:type connection_rate_limit: ``int``
"""
self.id = str(id)
self.name = name
self.status = status
self.ip = ip
self.connection_limit = connection_limit
self.connection_rate_limit = connection_rate_limit
if health_monitor is not None:
self.health_monitor_id = health_monitor
def __repr__(self):
return (('<NttCisVIPNode: id=%s, name=%s, '
'status=%s, ip=%s>')
% (self.id, self.name,
self.status, self.ip))
class NttCisVirtualListener(object):
"""
NTTCIS Virtual Listener.
"""
def __init__(self, id, name, status, ip):
"""
Initialize an instance of :class:`NttCisVirtualListener`
:param id: The ID of the listener
:type id: ``str``
:param name: The name of the listener
:type name: ``str``
:param status: The status of the listener
:type status: :class:`NttCisStatus`
:param ip: The IP of the listener
:type ip: ``str``
"""
self.id = str(id)
self.name = name
self.status = status
self.ip = ip
def __repr__(self):
return (('<NttCisVirtualListener: id=%s, name=%s, '
'status=%s, ip=%s>')
% (self.id, self.name,
self.status, self.ip))
class NttCisDefaultHealthMonitor(object):
"""
A default health monitor for a VIP (node, pool or listener)
"""
def __init__(self, id, name, node_compatible, pool_compatible):
"""
Initialize an instance of :class:`NttCisDefaultHealthMonitor`
:param id: The ID of the monitor
:type id: ``str``
:param name: The name of the monitor
:type name: ``str``
:param node_compatible: Is a monitor capable of monitoring nodes
:type node_compatible: ``bool``
:param pool_compatible: Is a monitor capable of monitoring pools
:type pool_compatible: ``bool``
"""
self.id = id
self.name = name
self.node_compatible = node_compatible
self.pool_compatible = pool_compatible
def __repr__(self):
return (('<NttCisDefaultHealthMonitor: id=%s, name=%s>')
% (self.id, self.name))
class NttCisPersistenceProfile(object):
"""
Each Persistence Profile declares the combination of Virtual Listener
type and protocol with which it is
compatible and whether or not it is compatible as a
Fallback Persistence Profile.
"""
def __init__(self, id, name, compatible_listeners, fallback_compatible):
"""
Initialize an instance of :class:`NttCisPersistenceProfile`
:param id: The ID of the profile
:type id: ``str``
:param name: The name of the profile
:type name: ``str``
:param compatible_listeners: List of compatible Virtual Listener types
:type compatible_listeners: ``list`` of
:class:`NttCisVirtualListenerCompatibility`
:param fallback_compatible: Is capable as a fallback profile
:type fallback_compatible: ``bool``
"""
self.id = id
self.name = name
self.compatible_listeners = compatible_listeners
self.fallback_compatible = fallback_compatible
def __repr__(self):
        return (('<NttCisPersistenceProfile: id=%s, name=%s>')
% (self.id, self.name))
class NttCisDefaultiRule(object):
"""
A default iRule for a network domain, can be applied to a listener
"""
def __init__(self, id, name, compatible_listeners):
"""
        Initialize an instance of :class:`NttCisDefaultiRule`
:param id: The ID of the iRule
:type id: ``str``
:param name: The name of the iRule
:type name: ``str``
:param compatible_listeners: List of compatible Virtual Listener types
:type compatible_listeners: ``list`` of
:class:`NttCisVirtualListenerCompatibility`
"""
self.id = id
self.name = name
self.compatible_listeners = compatible_listeners
def __repr__(self):
return (('<NttCisDefaultiRule: id=%s, name=%s>')
% (self.id, self.name))
class NttCisVirtualListenerCompatibility(object):
"""
A compatibility preference for a persistence profile or iRule
specifies which virtual listener types this profile or iRule can be
applied to.
"""
def __init__(self, type, protocol):
self.type = type
self.protocol = protocol
def __repr__(self):
return (('<NttCisVirtualListenerCompatibility: '
'type=%s, protocol=%s>')
% (self.type, self.protocol))
class NttCisBackupDetails(object):
"""
NTTCIS Backup Details represents information about
a targets backups configuration
"""
def __init__(self, asset_id, service_plan, status, clients=None):
"""
Initialize an instance of :class:`NttCisBackupDetails`
:param asset_id: Asset identification for backups
:type asset_id: ``str``
:param service_plan: The service plan for backups. i.e (Essentials)
:type service_plan: ``str``
:param status: The overall status this backup target.
i.e. (unregistered)
:type status: ``str``
:param clients: Backup clients attached to this target
:type clients: ``list`` of :class:`NttCisBackupClient`
"""
self.asset_id = asset_id
self.service_plan = service_plan
self.status = status
self.clients = clients
def __repr__(self):
return (('<NttCisBackupDetails: id=%s>')
% (self.asset_id))
class NttCisBackupClient(object):
"""
An object that represents a backup client
"""
def __init__(self, id, type, status,
schedule_policy, storage_policy, download_url,
alert=None, running_job=None):
"""
Initialize an instance of :class:`NttCisBackupClient`
:param id: Unique ID for the client
:type id: ``str``
:param type: The type of client that this client is
:type type: :class:`NttCisBackupClientType`
        :param status: The state of this particular backup client.
i.e. (Unregistered)
:type status: ``str``
:param schedule_policy: The schedule policy for this client
NOTE: NTTCIS only sends back the name
of the schedule policy, no further details
:type schedule_policy: ``str``
:param storage_policy: The storage policy for this client
NOTE: NTTCIS only sends back the name
of the storage policy, no further details
:type storage_policy: ``str``
:param download_url: The download url for this client
:type download_url: ``str``
:param alert: The alert configured for this backup client (optional)
:type alert: :class:`NttCisBackupClientAlert`
        :param running_job: The running job for the client (optional)
        :type running_job: :class:`NttCisBackupClientRunningJob`
"""
self.id = id
self.type = type
self.status = status
self.schedule_policy = schedule_policy
self.storage_policy = storage_policy
self.download_url = download_url
self.alert = alert
self.running_job = running_job
def __repr__(self):
return (('<NttCisBackupClient: id=%s>')
% (self.id))
class NttCisBackupClientAlert(object):
"""
An alert for a backup client
"""
    def __init__(self, trigger, notify_list=None):
"""
Initialize an instance of :class:`NttCisBackupClientAlert`
:param trigger: Trigger type for the client i.e. ON_FAILURE
:type trigger: ``str``
:param notify_list: List of email addresses that are notified
when the alert is fired
:type notify_list: ``list`` of ``str``
"""
self.trigger = trigger
        self.notify_list = notify_list or []
def __repr__(self):
return (('<NttCisBackupClientAlert: trigger=%s>')
% (self.trigger))
class NttCisBackupClientRunningJob(object):
"""
A running job for a given backup client
"""
def __init__(self, id, status, percentage=0):
"""
Initialize an instance of :class:`NttCisBackupClientRunningJob`
        :param id: The unique ID of the job
:type id: ``str``
:param status: The status of the job i.e. Waiting
:type status: ``str``
:param percentage: The percentage completion of the job
:type percentage: ``int``
"""
self.id = id
self.percentage = percentage
self.status = status
def __repr__(self):
return (('<NttCisBackupClientRunningJob: id=%s>')
% (self.id))
class NttCisBackupClientType(object):
"""
A client type object for backups
"""
def __init__(self, type, is_file_system, description):
"""
Initialize an instance of :class:`NttCisBackupClientType`
:param type: The type of client i.e. (FA.Linux, MySQL, ect.)
:type type: ``str``
        :param is_file_system: Whether this client type is a file system
                               client
:type is_file_system: ``bool``
:param description: Description of the client
:type description: ``str``
"""
self.type = type
self.is_file_system = is_file_system
self.description = description
def __repr__(self):
return (('<NttCisBackupClientType: type=%s>')
% (self.type))
class NttCisBackupStoragePolicy(object):
"""
A representation of a storage policy
"""
def __init__(self, name, retention_period, secondary_location):
"""
Initialize an instance of :class:`NttCisBackupStoragePolicy`
:param name: The name of the storage policy i.e. 14 Day Storage Policy
:type name: ``str``
:param retention_period: How long to keep the backup in days
:type retention_period: ``int``
:param secondary_location: The secondary location i.e. Primary
:type secondary_location: ``str``
"""
self.name = name
self.retention_period = retention_period
self.secondary_location = secondary_location
def __repr__(self):
return (('<NttCisBackupStoragePolicy: name=%s>')
% (self.name))
class NttCisBackupSchedulePolicy(object):
"""
A representation of a schedule policy
"""
def __init__(self, name, description):
"""
Initialize an instance of :class:`NttCisBackupSchedulePolicy`
:param name: The name of the policy i.e 12AM - 6AM
:type name: ``str``
:param description: Short summary of the details of the policy
:type description: ``str``
"""
self.name = name
self.description = description
def __repr__(self):
return (('<NttCisBackupSchedulePolicy: name=%s>')
% (self.name))
class NttCisTag(object):
"""
A representation of a Tag in NTTCIS
    A Tag first must have a Tag Key; an asset is then tagged with
    a key and an optional value. Tags can be queried later to filter assets
    and can also show up on usage reports if so desired.
"""
def __init__(self, asset_type, asset_id, asset_name,
datacenter, key, value):
"""
Initialize an instance of :class:`NttCisTag`
:param asset_type: The type of asset. Current asset types:
SERVER, VLAN, NETWORK_DOMAIN, CUSTOMER_IMAGE,
PUBLIC_IP_BLOCK, ACCOUNT
:type asset_type: ``str``
:param asset_id: The GUID of the asset that is tagged
:type asset_id: ``str``
:param asset_name: The name of the asset that is tagged
:type asset_name: ``str``
:param datacenter: The short datacenter name of the tagged asset
:type datacenter: ``str``
:param key: The tagged key
:type key: :class:`NttCisTagKey`
:param value: The tagged value
:type value: ``None`` or ``str``
"""
self.asset_type = asset_type
self.asset_id = asset_id
self.asset_name = asset_name
self.datacenter = datacenter
self.key = key
self.value = value
def __repr__(self):
return (('<NttCisTag: asset_name=%s, tag_name=%s, value=%s>')
% (self.asset_name, self.key.name, self.value))
class NttCisTagKey(object):
"""
A representation of a Tag Key in NTTCIS
A tag key is required to tag an asset
"""
def __init__(self, id, name, description,
value_required, display_on_report):
"""
Initialize an instance of :class:`NttCisTagKey`
:param id: GUID of the tag key
:type id: ``str``
:param name: Name of the tag key
:type name: ``str``
:param description: Description of the tag key
:type description: ``str``
:param value_required: If a value is required for this tag key
:type value_required: ``bool``
:param display_on_report: If this tag key should be displayed on
usage reports
:type display_on_report: ``bool``
"""
self.id = id
self.name = name
self.description = description
self.value_required = value_required
self.display_on_report = display_on_report
def __repr__(self):
        return (('<NttCisTagKey: id=%s, name=%s>')
% (self.id, self.name))
class NttCisIpAddressList(object):
"""
NttCis IP Address list
"""
def __init__(self, id, name, description, ip_version,
ip_address_collection,
state, create_time, child_ip_address_lists=None):
""""
Initialize an instance of :class:`NttCisIpAddressList`
:param id: GUID of the IP Address List key
:type id: ``str``
:param name: Name of the IP Address List
:type name: ``str``
:param description: Description of the IP Address List
:type description: ``str``
:param ip_version: IP version. E.g. IPV4, IPV6
:type ip_version: ``str``
:param ip_address_collection: Collection of NttCisIpAddress
:type ip_address_collection: ``List``
:param state: IP Address list state
:type state: ``str``
:param create_time: IP Address List created time
:type create_time: ``date time``
:param child_ip_address_lists: List of IP address list to be included
:type child_ip_address_lists: List
            of :class:`NttCisIpAddressList`
"""
self.id = id
self.name = name
self.description = description
self.ip_version = ip_version
self.ip_address_collection = ip_address_collection
self.state = state
self.create_time = create_time
self.child_ip_address_lists = child_ip_address_lists
def __repr__(self):
return ('<NttCisIpAddressList: id=%s, name=%s, description=%s, '
'ip_version=%s, ip_address_collection=%s, state=%s, '
'create_time=%s, child_ip_address_lists=%s>'
% (self.id, self.name, self.description, self.ip_version,
self.ip_address_collection, self.state, self.create_time,
self.child_ip_address_lists))
class NttCisChildIpAddressList(object):
"""
NttCis Child IP Address list
"""
def __init__(self, id, name):
""""
        Initialize an instance of :class:`NttCisChildIpAddressList`
:param id: GUID of the IP Address List key
:type id: ``str``
:param name: Name of the IP Address List
:type name: ``str``
"""
self.id = id
self.name = name
def __repr__(self):
return ('<NttCisChildIpAddressList: id=%s, name=%s>'
% (self.id, self.name))
class NttCisIpAddress(object):
"""
A representation of IP Address in NttCis
"""
def __init__(self, begin, end=None, prefix_size=None):
"""
Initialize an instance of :class:`NttCisIpAddress`
:param begin: IP Address Begin
:type begin: ``str``
:param end: IP Address end
:type end: ``str``
        :param prefix_size: IP Address prefix size
        :type prefix_size: ``int``
"""
self.begin = begin
self.end = end
self.prefix_size = prefix_size
def __repr__(self):
return ('<NttCisIpAddress: begin=%s, end=%s, prefix_size=%s>'
% (self.begin, self.end, self.prefix_size))
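# The three supported shapes, per the constructor above (values are
# illustrative):
#
#     NttCisIpAddress('10.0.0.4')                      # single address
#     NttCisIpAddress('10.0.0.10', end='10.0.0.20')    # explicit range
#     NttCisIpAddress('10.0.4.0', prefix_size=24)      # CIDR block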
class NttCisPortList(object):
"""
NttCis Port list
"""
def __init__(self, id, name, description, port_collection,
child_portlist_list,
state, create_time):
""""
        Initialize an instance of :class:`NttCisPortList`
:param id: GUID of the Port List key
:type id: ``str``
:param name: Name of the Port List
:type name: ``str``
:param description: Description of the Port List
:type description: ``str``
:param port_collection: Collection of NttCisPort
:type port_collection: ``List``
:param child_portlist_list: Collection of NttCisChildPort
:type child_portlist_list: ``List``
:param state: Port list state
:type state: ``str``
:param create_time: Port List created time
:type create_time: ``date time``
"""
self.id = id
self.name = name
self.description = description
self.port_collection = port_collection
self.child_portlist_list = child_portlist_list
self.state = state
self.create_time = create_time
def __repr__(self):
return (
"<NttCisPortList: id=%s, name=%s, description=%s, "
"port_collection=%s, child_portlist_list=%s, state=%s, "
"create_time=%s>"
% (self.id, self.name, self.description,
self.port_collection, self.child_portlist_list, self.state,
self.create_time))
class NttCisChildPortList(object):
"""
NttCis Child Port list
"""
def __init__(self, id, name):
""""
        Initialize an instance of :class:`NttCisChildPortList`
:param id: GUID of the child port list key
:type id: ``str``
:param name: Name of the child port List
:type name: ``str``
"""
self.id = id
self.name = name
def __repr__(self):
return ('<NttCisChildPortList: id=%s, name=%s>'
% (self.id, self.name))
class NttCisPort(object):
"""
A representation of Port in NTTCIS
"""
def __init__(self, begin, end=None):
"""
Initialize an instance of :class:`NttCisPort`
:param begin: Port Number Begin
:type begin: ``str``
:param end: Port Number end
:type end: ``str``
"""
self.begin = begin
self.end = end
def __repr__(self):
return ('<NttCisPort: begin=%s, end=%s>'
% (self.begin, self.end))
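# Analogous to NttCisIpAddress (values are illustrative):
#
#     NttCisPort('80')                # a single port
#     NttCisPort('8000', end='8080')  # an inclusive port range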
class NttCisNic(object):
"""
A representation of Network Adapter in NTTCIS
"""
def __init__(self, private_ip_v4=None, vlan=None,
network_adapter_name=None):
"""
Initialize an instance of :class:`NttCisNic`
:param private_ip_v4: IPv4
:type private_ip_v4: ``str``
:param vlan: Network VLAN
:type vlan: class: NttCisVlan or ``str``
:param network_adapter_name: Network Adapter Name
:type network_adapter_name: ``str``
"""
self.private_ip_v4 = private_ip_v4
self.vlan = vlan
self.network_adapter_name = network_adapter_name
def __repr__(self):
        return ('<NttCisNic: private_ip_v4=%s, vlan=%s, '
'network_adapter_name=%s>'
% (self.private_ip_v4, self.vlan, self.network_adapter_name))
# Dynamically create classes from returned XML. Leaves the API as the
# single authoritative source.
class ClassFactory(object):
pass
attrs = {}
def processor(mapping, name=None):
"""
Closure that keeps the deepcopy of the original dict
converted to XML current.
:param mapping: The converted XML to dict/lists
:type mapping: ``dict``
:param name: (Optional) what becomes the class name if provided
    :type name: ``str``
:return: Nothing
"""
mapping = mapping
# the map_copy will have keys deleted after the key and value are processed
map_copy = deepcopy(mapping)
def add_items(key, value, name=None):
"""
Add items to the global attr dict, then delete key, value from map copy
:param key: from the process function becomes the attribute name
:type key: ``str``
:param value: The value of the property and may be a dict
:type value: ``str``
        :param name: Name of the class, often the same as the key
        :type name: ``str``
"""
if name in attrs:
attrs[name].update({key: value})
elif name is not None:
attrs[name] = value
else:
attrs.update({key: value})
# trim the copy of the mapping
if key in map_copy:
del map_copy[key]
elif key in map_copy[name]:
del map_copy[name][key]
if len(map_copy[name]) == 0:
del map_copy[name]
def handle_map(map, name):
tmp = {}
types = [type(x) for x in map.values()]
if XmlListConfig not in types and \
XmlDictConfig not in types and dict not in types:
return map
elif XmlListConfig in types:
result = handle_seq(map, name)
return result
else:
for k, v in map.items():
if isinstance(v, str):
tmp.update({k: v})
if isinstance(v, dict):
cls = build_class(k.capitalize(), v)
tmp.update({k: cls})
elif isinstance(v, XmlDictConfig):
cls = build_class(k.capitalize(), v)
return (k, cls)
return tmp
def handle_seq(seq, name):
tmp = {}
if isinstance(seq, list):
tmp = []
for _ in seq:
cls = build_class(name.capitalize(), _)
tmp.append(cls)
return tmp
for k, v in seq.items():
if isinstance(v, MutableSequence):
for _ in v:
if isinstance(_, Mapping):
types = [type(x) for x in _.values()]
if XmlDictConfig in types:
result = handle_map(_, k)
if isinstance(result, tuple):
tmp.update({result[0]: result[1]})
else:
tmp.update({k: result})
else:
tmp_list = [build_class(k.capitalize(), i)
for i in v]
tmp[k] = tmp_list
elif isinstance(v, str):
tmp.update({k: v})
return tmp
def build_class(key, value):
klass = class_factory(key.capitalize(), value)
return klass(value)
def process(mapping):
"""
This function is recursive, creating attributes for the class factory
by taking apart the elements in the dictionary; hence the calls to
handle_seq and handle_map.
:param mapping: the dictionary converted from XML
:return: itself (recursive)
"""
for k1, v1 in mapping.items():
if isinstance(v1, Mapping):
types = [type(v) for v in v1.values()]
if MutableSequence not in types and dict not in types:
result = handle_map(v1, k1)
cls = build_class(k1.capitalize(), result)
add_items(k1, cls)
elif XmlListConfig in types:
result = handle_seq(v1, k1)
cls = build_class(list(v1)[0], result)
add_items(k1, cls)
elif dict in types:
result = handle_map(v1, k1)
cls = build_class(k1.capitalize(), result)
add_items(k1, cls, k1)
elif isinstance(v1, list):
tmp1 = {}
tmp2 = {}
tmp2[k1] = []
for i, j in enumerate(v1):
if isinstance(j, dict):
key = list(j)[0]
result = handle_map(j, key)
tmp1[k1 + str(i)] = build_class(k1, result)
tmp2[k1].append(tmp1[k1 + str(i)])
if tmp2:
add_items(k1, tmp2[k1], k1)
elif isinstance(v1, str):
add_items(k1, v1)
if len(map_copy) == 0:
return 1
return process(mapping)
def class_factory(cls_name, attrs):
"""
This function takes a name and a dictionary to create a class.
The class has an __init__ method, an __iter__ for retrieving properties,
and, finally, a __repr__ for displaying the instance.
:param cls_name: The name to be tacked onto the suffix NttCis
:type cls_name: ``str``
:param attrs: The attributes and values for an instance
:type attrs: ``dict``
:return: a class that inherits from ClassFactory
:rtype: ``ClassFactory``
"""
def __init__(self, *args, **kwargs):
for key in attrs:
setattr(self, key, attrs[key])
if cls_name == "NttCisServer":
self.state = self._get_state()
def __iter__(self):
for name in self.__dict__:
yield getattr(self, name)
def __repr__(self):
values = ', '.join('{}={!r}'.format(*i)
for i in zip(self.__dict__, self))
return '{}({})'.format(self.__class__.__name__, values)
cls_attrs = dict(
__init__=__init__,
__iter__=__iter__,
__repr__=__repr__)
return type("NttCis{}".format(cls_name), (ClassFactory,), cls_attrs)
class XmlListConfig(list):
"""
Creates a list-like class from XML elements that form a list. If the
list's XML elements have attributes, the attributes are passed to XmlDictConfig.
"""
def __init__(self, elem_list):
for element in elem_list:
if element is not None:
# treat like dict
if len(element) >= 0 or element[0].tag != element[1].tag:
self.append(XmlDictConfig(element))
# treat like list
elif element[0].tag == element[1].tag:
# property refers to an element used repeatedly
# in the XML for data centers only
if 'property' in element.tag:
self.append({element.attrib.get('name'):
element.attrib.get('value')})
else:
self.append(element.attrib)
elif element.text:
text = element.text.strip()
if text:
self.append(text)
class XmlDictConfig(dict):
"""
Inherits from dict. Looks for XML elements, such as attrib, that
can be converted to a dictionary. Any XML element that contains
other XML elements will be passed to XmlListConfig.
"""
def __init__(self, parent_element):
if parent_element.items():
if 'property' in parent_element.tag:
self.update({parent_element.attrib.get('name'):
parent_element.attrib.get('value')})
else:
self.update(dict(parent_element.items()))
for element in parent_element:
if len(element) > 0:
# treat like dict - we assume that if the first two tags
# in a series are different, then they are all different.
if len(element) == 1 or element[0].tag != element[1].tag:
elem_dict = XmlDictConfig(element)
# treat like list - we assume that if the first two tags
# in a series are the same, then the rest are the same.
else:
# here, we put the list in dictionary; the key is the
# tag name the list elements all share in common, and
# the value is the list itself
elem_dict = {element[0].tag.split('}')[1]:
XmlListConfig(element)}
# if the tag has attributes, add those to the dict
if element.items():
elem_dict.update(dict(element.items()))
self.update({element.tag.split('}')[1]: elem_dict})
# this assumes that if you've got an attribute in a tag,
# you won't be having any text. This may or may not be a
# good idea -- time will tell. It works for the way we are
# currently doing XML configuration files...
elif element.items():
# It is possible to have duplicate element tags.
# If so, convert to a dict of lists
if element.tag.split('}')[1] in self:
if isinstance(self[element.tag.split('}')[1]], list):
self[element.tag.split('}')[1]].\
append(dict(element.items()))
else:
tmp_list = list()
tmp_dict = dict()
for k, v in self[element.tag.split('}')[1]].items():
if isinstance(k, XmlListConfig):
tmp_list.append(k)
else:
tmp_dict.update({k: v})
tmp_list.append(tmp_dict)
tmp_list.append(dict(element.items()))
self[element.tag.split('}')[1]] = tmp_list
else:
self.update({element.tag.split('}')[1]:
dict(element.items())})
# finally, if there are no child tags and no attributes, extract
# the text
else:
self.update({element.tag.split('}')[1]: element.text})
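# Illustrative conversion (assumes namespace-qualified tags, which the
# tag.split('}') calls above rely on):
#
#   xml = b'<ns0:a xmlns:ns0="urn:x" id="1"><ns0:state>NORMAL</ns0:state></ns0:a>'
#   root = etree.parse(BytesIO(xml)).getroot()
#   XmlDictConfig(root)   # -> {'id': '1', 'state': 'NORMAL'}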
def process_xml(xml):
"""
Take the XML and put it into a dictionary, then process the dictionary
recursively. This returns a class based on the XML API. Thus, properties
will have the camel case found in the Java XML. This is a trade-off
to reduce the number of "static" classes that all have to be synchronized
with any changes in the API.
:param xml: The serialized version of the XML returned from Cloud Control
:return: a dynamic class that inherits from ClassFactory
:rtype: `ClassFactory`
"""
global attrs
tree = etree.parse(BytesIO(xml))
root = tree.getroot()
elem = root.tag.split('}')[1].capitalize()
items = dict(root.items())
if 'pageNumber' in items:
converted_xml = XmlListConfig(root)
processor(converted_xml[0])
else:
converted_xml = XmlDictConfig(root)
processor(converted_xml)
klass = class_factory(elem.capitalize(), attrs)
cls = klass(attrs)
attrs = {}
return cls<|fim▁end|>
|
kwargs[k] = v
params = kwargs
result = func(args[0], params)
else:
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup
import os.path
setup(
name='State Fragility',<|fim▁hole|> py_modules=['state_fragility'],
data_files=[('', [
"./state_fragility.db"
])]
)<|fim▁end|>
|
version='1',
|
<|file_name|>secrets.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
_READER = "role:reader"
_MEMBER = "role:member"
_ADMIN = "role:admin"
_PROJECT_MEMBER = f"{_MEMBER} and project_id:%(target.secret.project_id)s"
_PROJECT_ADMIN = f"{_ADMIN} and project_id:%(target.secret.project_id)s"
_SECRET_CREATOR = "user_id:%(target.secret.creator_id)s"
_SECRET_IS_NOT_PRIVATE = "True:%(target.secret.read_project_access)s"
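# For reference, the f-strings above expand to plain oslo.policy check
# strings, e.g. _PROJECT_MEMBER becomes:
#   "role:member and project_id:%(target.secret.project_id)s"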
rules = [
policy.DocumentedRuleDefault(
name='secret:decrypt',
check_str='rule:secret_decrypt_non_private_read or ' +
'rule:secret_project_creator or ' +
'rule:secret_project_admin or rule:secret_acl_read or ' +
f"({_PROJECT_MEMBER} and ({_SECRET_CREATOR} or " +
f"{_SECRET_IS_NOT_PRIVATE})) or {_PROJECT_ADMIN}",
scope_types=['project'],
description='Retrieve a secret\'s payload.',
operations=[
{
'path': '/v1/secrets/{uuid}/payload',
'method': 'GET'
}
]
),
policy.DocumentedRuleDefault(
name='secret:get',
check_str='rule:secret_non_private_read or ' +
'rule:secret_project_creator or ' +
'rule:secret_project_admin or rule:secret_acl_read or ' +
f"({_PROJECT_MEMBER} and ({_SECRET_CREATOR} or " +
f"{_SECRET_IS_NOT_PRIVATE})) or {_PROJECT_ADMIN}",
scope_types=['project'],
description='Retrieves a secret\'s metadata.',
operations=[
{
'path': '/v1/secrets/{secret-id}',
'method': 'GET"'
}
]
),
policy.DocumentedRuleDefault(
name='secret:put',
check_str='rule:admin_or_creator and rule:secret_project_match or ' +
f"({_PROJECT_MEMBER} and ({_SECRET_CREATOR} or " +
f"{_SECRET_IS_NOT_PRIVATE})) or {_PROJECT_ADMIN}",
scope_types=['project'],
description='Add the payload to an existing metadata-only secret.',
operations=[
{
'path': '/v1/secrets/{secret-id}',
'method': 'PUT'
}<|fim▁hole|> ),
policy.DocumentedRuleDefault(
name='secret:delete',
check_str='rule:secret_project_admin or ' +
'rule:secret_project_creator or ' +
'(rule:secret_project_creator_role and ' +
'not rule:secret_private_read) or ' +
f"({_PROJECT_MEMBER} and ({_SECRET_CREATOR} or " +
f"{_SECRET_IS_NOT_PRIVATE})) or {_PROJECT_ADMIN}",
scope_types=['project'],
description='Delete a secret by uuid.',
operations=[
{
'path': '/v1/secrets/{secret-id}',
'method': 'DELETE'
}
]
),
policy.DocumentedRuleDefault(
name='secrets:post',
check_str=f'rule:admin_or_creator or {_MEMBER}',
scope_types=['project'],
description='Creates a Secret entity.',
operations=[
{
'path': '/v1/secrets',
'method': 'POST'
}
]
),
policy.DocumentedRuleDefault(
name='secrets:get',
check_str=f'rule:all_but_audit or {_MEMBER}',
scope_types=['project'],
description='Lists a project\'s secrets.',
operations=[
{
'path': '/v1/secrets',
'method': 'GET'
}
]
)
]
def list_rules():
return rules<|fim▁end|>
|
]
|
<|file_name|>filters.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { AppliedFilter, DataTypes, GridFilters, numberWithCommas, ReactPowerTable, withInternalPaging, withInternalSorting } from '../../src/';
import { defaultColumns, partyList, sampledata } from './shared';
// //if coming in from DTO
// const availDTO = [
// { fieldName: 'number', dataType: 'int' },
// { fieldName: 'president', dataType: 'string' },
// { fieldName: 'birth_year', dataType: 'int' },
// { fieldName: 'death_year', dataType: 'int', canBeNull: true },
// { fieldName: 'took_office', dataType: 'date' },
// { fieldName: 'left_office', dataType: 'date', canBeNull: true },
// { fieldName: 'party', dataType: 'string' },
// ];
// const availableFiltersMap = createKeyedMap(availDTO.map(m => new DataTypes[m.dataType](m)), m=>m.fieldName);
//availableFilters.party = new DataTypes.list(availableFilters.party, partyList);
const partyListOptions = partyList.map(m => ({ label: m.label, value: m.label }));
//if building in JS
const availableFilters = [
new DataTypes.int('number'),
new DataTypes.string('president'),
new DataTypes.int('birth_year'),
new DataTypes.decimal({ fieldName: 'death_year', canBeNull: true }),
new DataTypes.date({ fieldName: 'took_office' }),
new DataTypes.date({ fieldName: 'left_office', canBeNull: true }),
new DataTypes.list('party', partyListOptions),
new DataTypes.boolean({ fieldName: 'assasinated', displayName: 'was assassinated' }),
new DataTypes.timespan({ fieldName: 'timeBorn', displayName: 'time born' }),
];
//const columns = [...defaultColumns, { field: m => m.timeBorn, width: 80, formatter: formatTimeValue }];
const assasinatedPresidents = [16, 20, 25, 35];
function padStart(str: string, targetLength: number, padString: string) {
// tslint:disable-next-line:no-bitwise
targetLength = targetLength >> 0; //truncate if number, or convert non-number to 0;
padString = String(typeof padString !== 'undefined' ? padString : ' ');
if (str.length >= targetLength) {
return String(str);
} else {
targetLength = targetLength - str.length;
if (targetLength > padString.length) {
padString += padString.repeat(targetLength / padString.length); //append to original to ensure we are longer than needed
}
return padString.slice(0, targetLength) + str;
}
}
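// e.g. padStart('7', 2, '0') === '07', while padStart('123', 2, '0') === '123'
// (inputs already at or beyond targetLength are returned unchanged).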
const data = sampledata.map(m => ({ ...m, assasinated: assasinatedPresidents.indexOf(m.number) > -1, timeBorn: padStart(Math.floor((Math.random() * 24)).toString(), 2, '0') + ':00' }));
//const availableFiltersMap = createKeyedMap(availableFilters, m => m.fieldName);
//availableFiltersMap.number.operations.gt.displayName = 'greater than TEST';
availableFilters.find(m => m.fieldName === 'death_year').operations['null'].displayName = 'still alive';
interface FiltersExampleState {
appliedFilters: Array<AppliedFilter<any>>;
}
<|fim▁hole|> constructor(props: never) {
super(props);
this.state = { appliedFilters: [] };
this.handleFiltersChange = this.handleFiltersChange.bind(this);
}
handleFiltersChange(newFilters: Array<AppliedFilter<any>>) {
console.log('onFiltersChange', newFilters);
this.setState({ appliedFilters: newFilters });
}
render() {
let filteredData = data;
this.state.appliedFilters.forEach(m => {
filteredData = m.filter.applyFilter(filteredData, m.operation, m.value);
});
return (
<div className="row">
<div className="col-md-3">
<div className="grid-filters">
<div className="small">
{numberWithCommas(filteredData.length) + ' Presidents'}
</div>
<div style={{ marginTop: 10 }} />
<GridFilters availableFilters={availableFilters} appliedFilters={this.state.appliedFilters} onFiltersChange={this.handleFiltersChange} />
</div>
</div>
<div className="col-md-9">
<Table columns={defaultColumns} keyColumn="number" rows={filteredData} sorting={{ column: 'number' }} />
</div>
</div>
);
}
}<|fim▁end|>
|
const Table = withInternalSorting(withInternalPaging(ReactPowerTable));
export class FiltersExample extends React.Component<never, FiltersExampleState> {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># flake8: noqa
from __future__ import absolute_import
from .filterset import FilterSet
from .filters import *
__version__ = '0.9.2'<|fim▁hole|>
def parse_version(version):
'''
'0.1.2-dev' -> (0, 1, 2, 'dev')
'0.1.2' -> (0, 1, 2)
'''
v = version.split('.')
v = v[:-1] + v[-1].split('-')
ret = []
for p in v:
if p.isdigit():
ret.append(int(p))
else:
ret.append(p)
return tuple(ret)
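# e.g. parse_version('0.9.2') -> (0, 9, 2)
#      parse_version('1.0.0-dev') -> (1, 0, 0, 'dev')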
VERSION = parse_version(__version__)<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict'
const isPlainObject = require('lodash.isplainobject')
const getPath = require('lodash.get')
const StaticComponent = require('./StaticComponent')
const DynamicComponent = require('./DynamicComponent')
const LinkedComponent = require('./LinkedComponent')
const ContainerComponent = require('./ContainerComponent')
const MissingComponent = require('./MissingComponent')
module.exports = {
coerce,
value: makeStaticComponent,
factory: makeDynamicComponent,
link: makeLinkedComponent,
container: makeContainerComponent,
missing: makeMissingComponent
}
function makeStaticComponent(value) {
return new StaticComponent(value)
}
makeDynamicComponent.predefinedFrom = PredefinedComponentBuilder
function makeDynamicComponent(factory, context) {
return new DynamicComponent(factory, context)
}
function PredefinedComponentBuilder(implementations) {
return function PredefinedComponent(implementationName, context) {
let implementation = getPath(implementations, implementationName)
return makeDynamicComponent(implementation, context)
}
}
makeLinkedComponent.boundTo = BoundLinkBuilder
function makeLinkedComponent(context, targetKey) {
return new LinkedComponent(context, targetKey)
}
function BoundLinkBuilder(container) {
return function BoundLink(targetKey) {
return makeLinkedComponent(container, targetKey)
}
}
function makeContainerComponent(container) {
return new ContainerComponent(container)
}
function makeMissingComponent(key) {
return new MissingComponent(key)
}
function coerce(component) {
switch (true) {
case _isComponent(component):
return component
case isPlainObject(component):
return makeContainerComponent(component)
default:
return makeStaticComponent(component)
}
}
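// Illustrative behaviour of coerce (all names from this module):
//   coerce(someComponent) -> returned unchanged
//   coerce({ a: 1 })      -> wrapped in a ContainerComponent
//   coerce(42)            -> wrapped in a StaticComponent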
function _isComponent(component) {<|fim▁hole|><|fim▁end|>
|
return component && component.instantiate && component.unwrap && true
}
|
<|file_name|>queue_get_config_request.py<|end_file_name|><|fim▁begin|>"""Query the switch for configured queues on a port."""
# System imports
# Third-party imports<|fim▁hole|>
# Local source tree imports
from pyof.foundation.base import GenericMessage
from pyof.foundation.basic_types import Pad, UBInt32
from pyof.v0x04.common.header import Header, Type
from pyof.v0x04.common.port import PortNo
__all__ = ('QueueGetConfigRequest',)
class QueueGetConfigRequest(GenericMessage):
"""Query structure for configured queues on a port."""
#: Openflow :class:`~pyof.v0x04.common.header.Header`.
header = Header(message_type=Type.OFPT_QUEUE_GET_CONFIG_REQUEST)
#: Port to be queried. Should refer to a valid physical port
#: (i.e. < OFPP_MAX), or OFPP_ANY to request all configured queues.
port = UBInt32(enum_ref=PortNo)
pad = Pad(4)
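# Illustrative usage (assumes pyof's PortNo.OFPP_ANY and the pack() method
# inherited from GenericMessage): query every configured queue on all ports.
#
#   msg = QueueGetConfigRequest(xid=1, port=PortNo.OFPP_ANY)
#   data = msg.pack()   # serialized OpenFlow 1.3 message bytes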
def __init__(self, xid=None, port=None):
"""Create a QueueGetConfigRequest with the optional parameters below.
Args:
xid (int): xid of OpenFlow header
port (:class:`~.common.port.PortNo`): Target port for the query.
"""
super().__init__(xid)
self.port = port<|fim▁end|>
| |
<|file_name|>TranslateComponent.spec.ts<|end_file_name|><|fim▁begin|>import {
provideTranslator,
TranslateComponent,
TranslateLogHandler,
Translator,
TranslatorConfig,
TranslatorContainer,
TranslatorModule,
} from "../index";
import {Component, ReflectiveInjector} from "@angular/core";
import {fakeAsync, flushMicrotasks, TestBed} from "@angular/core/testing";
import {JasmineHelper} from "./helper/JasmineHelper";
import {TranslateLogHandlerMock, TranslationLoaderMock} from "./helper/TranslatorMocks";
describe("TranslateComponent", () => {
describe("constructor", () => {
it("requires a Translator", () => {
let injector = ReflectiveInjector.resolveAndCreate([ TranslateComponent ]);
let action = () => {
try {
injector.get(TranslateComponent);
} catch (e) {
expect(e.message).toContain("No provider for Translator!");
throw e;
}
};
expect(action).toThrow();
});
it("requires a TranslateLogHandler", () => {
let translatorConfig: TranslatorConfig = new TranslatorConfig(new TranslateLogHandlerMock(), {
loader: TranslationLoaderMock,
providedLanguages: [ "en", "de" ],
});
let injector = ReflectiveInjector.resolveAndCreate([
TranslateComponent,
TranslatorContainer,
{ provide: TranslationLoaderMock, useValue: new TranslationLoaderMock() },
{ provide: TranslatorConfig, useValue: translatorConfig },
provideTranslator("test"),
]);
let action = () => {
try {
injector.get(TranslateComponent);
} catch (e) {
expect(e.message).toContain("No provider for TranslateLogHandler!");
throw e;
}
};
expect(action).toThrow();
});
it("subscribes on language changes", () => {
let translatorConfig: TranslatorConfig = new TranslatorConfig(new TranslateLogHandlerMock(), {
loader: TranslationLoaderMock,
providedLanguages: [ "en", "de" ],
});
let injector = ReflectiveInjector.resolveAndCreate([
TranslateComponent,
TranslatorContainer,
{ provide: TranslationLoaderMock, useValue: new TranslationLoaderMock() },
{ provide: TranslatorConfig, useValue: translatorConfig },
{ provide: TranslateLogHandler, useClass: TranslateLogHandlerMock },
provideTranslator("test"),
]);
let translator: Translator = injector.get(Translator);
spyOn(translator.languageChanged, "subscribe").and.callThrough();
injector.get(TranslateComponent);
expect(translator.languageChanged.subscribe).toHaveBeenCalled();
});
});
describe("instance", () => {
let translator: Translator;
let translatorConfig: TranslatorConfig;
let translateComponent: TranslateComponent;
let logHandler: TranslateLogHandler;
let translateContainer: TranslatorContainer;
beforeEach(() => {
translatorConfig = new TranslatorConfig(new TranslateLogHandlerMock(), {
loader: TranslationLoaderMock,
providedLanguages: [ "en", "de" ],
});
TestBed.configureTestingModule({
providers: [
{ provide: TranslatorConfig, useValue: translatorConfig},
{ provide: TranslationLoaderMock, useValue: new TranslationLoaderMock() },
{ provide: TranslateLogHandler, useClass: TranslateLogHandlerMock },
provideTranslator("test"),
TranslatorContainer,
TranslateComponent,
],
});
translator = TestBed.get(Translator);
translateComponent = TestBed.get(TranslateComponent);
logHandler = TestBed.get(TranslateLogHandler);
translateContainer = TestBed.get(TranslatorContainer);
spyOn(translator, "translate").and.returnValue(Promise.resolve("This is a text"));
spyOn(logHandler, "error");
});
it("starts translation when key got set", () => {
translateComponent.key = "TEXT";
expect(translator.translate).toHaveBeenCalledWith("TEXT", {});
});
it("starts translation when key is set and params got changed", () => {
translateComponent.key = "TEXT";
JasmineHelper.calls(translator.translate).reset();
translateComponent.params = { some: "value" };
expect(translator.translate).toHaveBeenCalledWith("TEXT", { some: "value" });
});
it("restarts translation when key got changed", () => {
translateComponent.key = "ANYTHING";
translateComponent.params = { some: "value" };
JasmineHelper.calls(translator.translate).reset();
translateComponent.key = "TEXT";
expect(translator.translate).toHaveBeenCalledWith("TEXT", { some: "value" });
});
it("does not translate when key got not set", () => {
translateComponent.params = { some: "value" };
expect(translator.translate).not.toHaveBeenCalled();
});
it("does not accept non-object params", () => {
translateComponent.key = "TEXT";
JasmineHelper.calls(translator.translate).reset();
translateComponent.params = "foo";
expect(translator.translate).not.toHaveBeenCalled();
});
it("stores translation when promise got resolved", fakeAsync(() => {
translateComponent.key = "TEXT";
flushMicrotasks();
expect(translateComponent.translation).toBe("This is a text");
}));
it("restarts translation when language got changed", () => {
translateComponent.key = "TEXT";
JasmineHelper.calls(translator.translate).reset();
translator.language = "de";
expect(translator.translate).toHaveBeenCalledWith("TEXT", {});
});
it("shows error if params are not object", () => {
translateComponent.params = "foo";
expect(logHandler.error).toHaveBeenCalledWith("Params have to be an object");
});
describe("translatorModule attribute", () => {
let anotherTranslator: Translator;
beforeEach(() => {
anotherTranslator = translateContainer.getTranslator("another");
spyOn(anotherTranslator, "translate").and.returnValue(Promise.resolve("This is a text"));
});
it("uses another module with translatorModule", () => {
spyOn(translateContainer, "getTranslator").and.callThrough();
translateComponent.module = "another";
expect(translateContainer.getTranslator).toHaveBeenCalledWith("another");
});
it("subscribes to the other language changed", () => {
spyOn(anotherTranslator.languageChanged, "subscribe").and.callThrough();
translateComponent.module = "another";
expect(anotherTranslator.languageChanged.subscribe).toHaveBeenCalled();
});
it("starts the translation after module is changed", () => {
translateComponent.key = "TEXT";
translateComponent.module = "another";
expect(anotherTranslator.translate).toHaveBeenCalledWith("TEXT", {});
});
it("does not react on language changes of original translator", () => {
translateComponent.key = "TEXT";
translateComponent.module = "another";
translator.language = "de";
expect(JasmineHelper.calls(anotherTranslator.translate).count()).toBe(1);
});
it("restarts translation on language changes", () => {
translateComponent.key = "TEXT";
translateComponent.module = "another";
JasmineHelper.calls(anotherTranslator.translate).reset();
anotherTranslator.language = "de";
expect(anotherTranslator.translate).toHaveBeenCalledWith("TEXT", {});
});
});
});
describe("within module", () => {
let translator: Translator;
@Component({
selector: "my-component",
template: `<p translate="TEXT" [translateParams]="{ some: 'value' }"></p>`,
})
class MyComponent {}
beforeEach(() => {
TestBed.configureTestingModule({
imports: [ TranslatorModule.forRoot() ],
declarations: [ MyComponent ],
});
translator = TestBed.get(Translator);
spyOn(translator, "translate").and.returnValue(Promise.resolve("some text"));
});
it("first resolves the parameters", () => {
let component = TestBed.createComponent(MyComponent);
<|fim▁hole|> component.detectChanges();
expect(translator.translate).toHaveBeenCalledWith("TEXT", { some: "value" });
expect(JasmineHelper.calls(translator.translate).count()).toBe(1);
});
});
});<|fim▁end|>
| |
<|file_name|>navbar.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { ROUTER_DIRECTIVES } from '@angular/router';
@Component({
selector: 'fc-navbar',
directives: [...ROUTER_DIRECTIVES],
styleUrls: ['./navbar.component.scss'],
template: `
<nav class="navbar navbar-inverse navbar-fixed-top">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" [routerLink]="['']">
<img alt="Brand" src="/img/fashioncloud_logo_small.png">
</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li *ngFor="let link of links" ><a [routerLink]="[link.link]">{{ link.name }}</a></li>
</ul>
</div><!--/.nav-collapse -->
</div>
</nav>
`
})
export class navbarComponent {
private userName: string;
private sub: any;
links: any;
constructor(private route: ActivatedRoute, private router: Router) {
this.links = [<|fim▁hole|> name: 'Users',
link: ''
},
{
name: 'Posts',
link: 'post'
},
{
name: 'Albums',
link: 'albums'
}
];
}
}<|fim▁end|>
|
{
|
<|file_name|>loss.py<|end_file_name|><|fim▁begin|>import numpy as np
import cudarray as ca
from .base import PickleMixin
_FLT_MIN = np.finfo(ca.float_).tiny
<|fim▁hole|> # elegant if Loss only provided loss() and grad(). However, where should
# we place the logic from fprop()?
@classmethod
def from_any(cls, arg):
if isinstance(arg, Loss):
return arg
elif isinstance(arg, str):
if arg == 'softmaxce':
return SoftmaxCrossEntropy()
elif arg == 'bce':
return BinaryCrossEntropy()
elif arg == 'mse':
return MeanSquaredError()
raise ValueError('Invalid constructor arguments: %s' % arg)
def _setup(self, x_shape):
pass
def fprop(self, x):
return x
def loss(self, target, x):
""" Returns the loss calculated from the target and the input. """
raise NotImplementedError()
def grad(self, target, x):
""" Returns the input gradient. """
raise NotImplementedError()
def y_shape(self, x_shape):
return x_shape
class SoftmaxCrossEntropy(Loss):
"""
Softmax + cross entropy (aka. multinomial logistic loss)
"""
def __init__(self):
self.name = 'softmaxce'
self._tmp_x = None
self._tmp_y = None
self._tmp_target = None
self._tmp_one_hot = None
self.n_classes = None
def _setup(self, x_shape):
self.n_classes = x_shape[1]
def _softmax(self, x):
# caching wrapper
if self._tmp_x is not x:
self._tmp_y = ca.nnet.softmax(x)
self._tmp_x = x
return self._tmp_y
def _one_hot(self, target):
# caching wrapper
if self._tmp_target is not target:
self._tmp_one_hot = ca.nnet.one_hot_encode(target, self.n_classes)
self._tmp_target = target
return self._tmp_one_hot
def fprop(self, x):
return ca.nnet.one_hot_decode(self._softmax(x))
def loss(self, target, x):
y = self._softmax(x)
target = self._one_hot(target)
return ca.nnet.categorical_cross_entropy(y_pred=y, y_true=target)
def grad(self, target, x):
y = self._softmax(x)
target = self._one_hot(target)
return -(target - y)
def y_shape(self, x_shape):
return (x_shape[0],)
class BinaryCrossEntropy(Loss):
def __init__(self):
self.name = 'bce'
def loss(self, y, y_pred):
y_pred = ca.maximum(y_pred, _FLT_MIN)
return -ca.mean(y*ca.log(y_pred) + (1 - y)*ca.log(1 - y_pred), axis=1)
def grad(self, y, y_pred):
y_pred = ca.maximum(y_pred, _FLT_MIN)
return -(y/y_pred - (1-y)/(1-y_pred))
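# Illustrative check (made-up values): with y = 1 and y_pred = 0.9,
#   loss ~= -log(0.9) ~= 0.105 per element (mean over axis 1)
#   grad  = -(1/0.9) ~= -1.111, pushing y_pred toward 1.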
class MeanSquaredError(Loss):
def __init__(self):
self.name = 'mse'
self.n_targets = None
def _setup(self, x_shape):
self.n_targets = x_shape[1]
def loss(self, y, y_pred):
return ca.mean((y-y_pred)**2, axis=1)
def grad(self, y, y_pred):
return 2.0 / self.n_targets * (y_pred - y)<|fim▁end|>
|
class Loss(PickleMixin):
# abll: I suspect that this interface is not ideal. It would be more
|
<|file_name|>generator_utils_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Unit tests for generator_utils.py
"""
import os
import unittest
import generator_utils
# Absolute path to chrome/src.
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "../../.."))
TESTS_DIR = os.path.join(SCRIPT_DIR, "test_data")
class ParserTest(unittest.TestCase):
TSV_CONTENTS = [[
u"unique_id_A", u"", u"sender_A", u"description_A", u"trigger_A",
u"data_A", u"destination_A", u"cookies_allowed_A", u"cookies_store_A",
u"settings_A", u"chrome_policy_A", u"", u"source_file_A",
u"id_hash_code_A", u"content_hash_code_A"],
[
u"unique_id_B", u"", u"sender_B", u"description_B", u"trigger_B",
u"data_B", u"destination_B", u"cookies_allowed_B", u"cookies_store_B",
u"settings_B", u"chrome_policy_B", u"", u"source_file_B",
u"id_hash_code_B", u"content_hash_code_B"],
[
u"unique_id_C", u"", u"sender_C", u"description_C", u"trigger_C",
u"data_C", u"destination_C", u"cookies_allowed_C", u"cookies_store_C",
u"settings_C", u"chrome_policy_C", u"", u"source_file_C",
u"id_hash_code_C", u"content_hash_code_C"]
]
ANNOTATIONS_MAPPING = {
"unique_id_A":
generator_utils.TrafficAnnotation(
**{
"unique_id": "unique_id_A",
"description": "description_A",
"trigger": "trigger_A",
"data": "data_A",
"settings": "settings_A",
"policy": "chrome_policy_A"
}),
"unique_id_B":
generator_utils.TrafficAnnotation(
**{
"unique_id": "unique_id_B",<|fim▁hole|> "settings": "settings_B",
"policy": "chrome_policy_B"
}),
"unique_id_C":
generator_utils.TrafficAnnotation(
**{
"unique_id": "unique_id_C",
"description": "description_C",
"trigger": "trigger_C",
"data": "data_C",
"settings": "settings_C",
"policy": "chrome_policy_C"
})
}
PLACEHOLDERS = [
{"type": generator_utils.Placeholder.GROUP, "name": "Group A"},
{"type": generator_utils.Placeholder.SENDER, "name": "Sender 1"},
{
"type": generator_utils.Placeholder.ANNOTATION,
"traffic_annotation": ANNOTATIONS_MAPPING["unique_id_A"]},
{"type": generator_utils.Placeholder.SENDER, "name": "Sender 2"},
{
"type": generator_utils.Placeholder.ANNOTATION,
"traffic_annotation": ANNOTATIONS_MAPPING["unique_id_B"]},
{"type": generator_utils.Placeholder.GROUP, "name": "Group C"},
{"type": generator_utils.Placeholder.SENDER, "name": "Sender 3"},
{
"type": generator_utils.Placeholder.ANNOTATION,
"traffic_annotation": ANNOTATIONS_MAPPING["unique_id_C"]}
]
# Document formatted according to fake_grouping.xml
DOC_JSON = generator_utils.extract_body(target="all",
json_file_path=os.path.join(
TESTS_DIR, "fake_doc.json"))
def test_load_tsv_file(self):
self.assertEqual(self.TSV_CONTENTS, generator_utils.load_tsv_file(
os.path.join(SRC_DIR,
"tools/traffic_annotation/scripts/test_data/fake_annotations.tsv"),
False))
def test_map_annotations(self):
self.assertEqual(self.ANNOTATIONS_MAPPING,
generator_utils.map_annotations(self.TSV_CONTENTS))
def test_xml_parser_build_placeholders(self):
xml_parser = generator_utils.XMLParser(
os.path.join(TESTS_DIR, "fake_grouping.xml"), self.ANNOTATIONS_MAPPING)
self.assertEqual(self.PLACEHOLDERS, xml_parser.build_placeholders())
def test_find_first_index(self):
first_index = generator_utils.find_first_index(self.DOC_JSON)
self.assertEqual(1822, first_index)
def test_find_last_index(self):
last_index = generator_utils.find_last_index(self.DOC_JSON)
self.assertEqual(2066, last_index)
def test_find_chrome_browser_version(self):
current_version = generator_utils.find_chrome_browser_version(self.DOC_JSON)
self.assertEqual("86.0.4187.0", current_version)
def test_find_bold_ranges(self):
expected_bold_ranges = [(1843, 1855), (1859, 1867), (1871, 1876),
(1880, 1889), (1893, 1900), (1918, 1930),
(1934, 1942), (1968, 1975), (1946, 1951),
(1955, 1964), (2001, 2013), (2017, 2025),
(2029, 2034), (2038, 2047), (2051, 2058)]
bold_ranges = generator_utils.find_bold_ranges(self.DOC_JSON)
self.assertItemsEqual(expected_bold_ranges, bold_ranges)
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
"description": "description_B",
"trigger": "trigger_B",
"data": "data_B",
|
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and<|fim▁hole|>limitations under the License.
*/
// This package is generated by client-gen with arguments: --clientset-name=clientset --input=[api/v1,apps/v1beta1,authentication/v1beta1,authorization/v1beta1,autoscaling/v1,batch/v1,batch/v2alpha1,certificates/v1alpha1,extensions/v1beta1,policy/v1beta1,rbac/v1beta1,rbac/v1alpha1,storage/v1beta1]
// This package has the automatically generated clientset.
package clientset<|fim▁end|>
| |
<|file_name|>o_auth_credential_create_request.go<|end_file_name|><|fim▁begin|>package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
strfmt "github.com/go-openapi/strfmt"
"github.com/go-openapi/errors"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
manifold "github.com/manifoldco/go-manifold"
)
// OAuthCredentialCreateRequest o auth credential create request
// swagger:model OAuthCredentialCreateRequest
type OAuthCredentialCreateRequest struct {
// A human readable description of this credential pair.
//
// Required: true
// Max Length: 256
// Min Length: 3<|fim▁hole|> Description *string `json:"description"`
// Product identifier to scope the credential to a single product.
//
ProductID *manifold.ID `json:"product_id,omitempty"`
// **ALPHA** Provider identifier to scope the credential to
// all products of a provider.
//
ProviderID *manifold.ID `json:"provider_id,omitempty"`
}
// Validate validates this o auth credential create request
func (m *OAuthCredentialCreateRequest) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateDescription(formats); err != nil {
// prop
res = append(res, err)
}
if err := m.validateProductID(formats); err != nil {
// prop
res = append(res, err)
}
if err := m.validateProviderID(formats); err != nil {
// prop
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *OAuthCredentialCreateRequest) validateDescription(formats strfmt.Registry) error {
if err := validate.Required("description", "body", m.Description); err != nil {
return err
}
if err := validate.MinLength("description", "body", string(*m.Description), 3); err != nil {
return err
}
if err := validate.MaxLength("description", "body", string(*m.Description), 256); err != nil {
return err
}
return nil
}
func (m *OAuthCredentialCreateRequest) validateProductID(formats strfmt.Registry) error {
if swag.IsZero(m.ProductID) { // not required
return nil
}
if m.ProductID != nil {
if err := m.ProductID.Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("product_id")
}
return err
}
}
return nil
}
func (m *OAuthCredentialCreateRequest) validateProviderID(formats strfmt.Registry) error {
if swag.IsZero(m.ProviderID) { // not required
return nil
}
if m.ProviderID != nil {
if err := m.ProviderID.Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("provider_id")
}
return err
}
}
return nil
}<|fim▁end|>
| |
<|file_name|>TimeSlot.java<|end_file_name|><|fim▁begin|>package doodle;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Comparator;
import java.util.Date;
/**
* A poll is made up of two or more time slots, which are voted on by poll
* invitees. A time slot is defined by a start datetime and optionally an end
* datetime.
*
* @author Jonas Michel
*
*/
public class TimeSlot implements Serializable {
private static final long serialVersionUID = -8690469227753138784L;
/** The time slot's start time. */
private Date start = null;
/** The time slot's end time (optional). */
private Date end = null;
public TimeSlot(Date start, Date end) {
this.start = start;
this.end = end;
}
<|fim▁hole|> public TimeSlot(Date start) {
this.start = start;
}
public TimeSlot(Date day, String timeStr) throws NumberFormatException {
if (timeStr.contains("-"))
initDoubleTime(day, timeStr);
else
initSingleTime(day, timeStr);
}
public Date getStart() {
return start;
}
public Date getEnd() {
return end;
}
private void initSingleTime(Date day, String timeStr)
throws NumberFormatException {
start = parseTimeString(day, timeStr);
}
private void initDoubleTime(Date day, String timeStr)
throws NumberFormatException {
String[] timeStrArr = timeStr.split("-");
start = parseTimeString(day, timeStrArr[0]);
end = parseTimeString(day, timeStrArr[1]);
}
private Date parseTimeString(Date day, String timeStr)
throws NumberFormatException {
int hour = 0, minute = 0;
if (timeStr.contains(":")) {
hour = Integer.parseInt(timeStr.split(":")[0]);
minute = Integer.parseInt(timeStr.split(":")[1]);
} else {
hour = Integer.parseInt(timeStr);
}
Calendar cal = Calendar.getInstance();
cal.setTime(day);
cal.add(Calendar.HOUR_OF_DAY, hour);
cal.add(Calendar.MINUTE, minute);
return cal.getTime();
}
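// Example (hypothetical): with day = 2024-01-05 00:00 and timeStr = "9:30",
// the returned Date is 2024-01-05 09:30; a bare hour string like "14"
// yields 14:00 on the same day.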
public String toDayString() {
SimpleDateFormat day = new SimpleDateFormat("MM/dd/yyyy");
return day.format(start);
}
public String toTimeString() {
SimpleDateFormat time = new SimpleDateFormat("HH:mm");
StringBuilder sb = new StringBuilder();
sb.append(time.format(start));
if (end == null)
return sb.toString();
sb.append("-" + time.format(end));
return sb.toString();
}
@Override
public String toString() {
return toDayString() + " " + toTimeString();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((end == null) ? 0 : end.hashCode());
result = prime * result + ((start == null) ? 0 : start.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (!(obj instanceof TimeSlot))
return false;
TimeSlot other = (TimeSlot) obj;
if (end == null) {
if (other.end != null)
return false;
} else if (!end.equals(other.end))
return false;
if (start == null) {
if (other.start != null)
return false;
} else if (!start.equals(other.start))
return false;
return true;
}
public static class TimeSlotComparator implements Comparator<TimeSlot> {
@Override
public int compare(TimeSlot ts1, TimeSlot ts2) {
if (ts1.getStart().before(ts2.getStart()))
return -1;
else if (ts1.getStart().after(ts2.getStart()))
return 1;
else
return 0;
}
}
}<|fim▁end|>
| |
<|file_name|>progress_test.py<|end_file_name|><|fim▁begin|># coding=utf8
import time
import random
import unittest
from qiniuManager.progress import *
class Pro(object):
def __init__(self):
self.progressed = 0
self.total = 100
self.title = 'test'
self.chunked = False
self.chunk_recved = 0
self.start = time.time()
@bar(100, '=')
def loader(self):
self._normal_loader()
self.title = "固定长度"
@bar(fill='x')
def loader_x(self):
self._normal_loader()
self.title = "x"
@bar()
def auto_loader(self):
self._normal_loader()
self.title = "长度占满宽度"
def _normal_loader(self):
time.sleep(0.01)
self.progressed += 1
def _chunked_loader(self):
self.chunked = True
time.sleep(0.01)
<|fim▁hole|> self.progressed = self.total
@bar()
def chunked_loader(self):
self._chunked_loader()
self.title = "full width"
@bar(100)
def fixed_chunked_loader(self):
self._chunked_loader()
self.title = "fixed width"
class ProgressTester(unittest.TestCase):
def test_100_progress(self):
print("进度条换行")
Pro().loader()
Pro().loader_x()
def test_auto_width_progress(self):
print("进度条换行")
Pro().auto_loader()
def test_disable_progress(self):
pro = Pro()
pro.disable_progress = True
pro.title = "无进度条,也就是说应该看不到这串字符才对"
pro.loader()
def test_chunked_progress(self):
print("进度条换行")
Pro().chunked_loader()
def test_fixed_chunked_progress(self):
print("进度条换行")
Pro().fixed_chunked_loader()
if __name__ == '__main__':
unittest.main(verbosity=2)<|fim▁end|>
|
self.chunk_recved += random.randrange(3, 1000000)
if time.time() - self.start > 5:
|
<|file_name|>page-not-found.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
@Component({
<|fim▁hole|> `
})
export class PageNotFoundComponent {}<|fim▁end|>
|
template: `
<h2>Page not found</h2>
|
<|file_name|>taskbar_ui.js<|end_file_name|><|fim▁begin|>(function ($) {
/**
* Move a block in the blocks table from one region to another via select list.
*
* This behavior is dependent on the tableDrag behavior, since it uses the
* objects initialized in that behavior to update the row.
*/
Drupal.behaviors.taskbarUIDrag = {
attach: function (context, settings) {
// tableDrag is required and we should be on the Taskbar UI admin page.
if (typeof Drupal.tableDrag == 'undefined' || typeof Drupal.tableDrag.items == 'undefined') {
return;
}
var table = $('table#items');
var tableDrag = Drupal.tableDrag.items; // Get the items tableDrag object.
// Add a handler for when a row is swapped, update empty regions.
tableDrag.row.prototype.onSwap = function(swappedRow) {
checkEmptyRegions(table, this);
};
// A custom message for the blocks page specifically.
Drupal.theme.tableDragChangedWarning = function () {
return '<div class="messages warning">' + Drupal.theme('tableDragChangedMarker') + ' ' + Drupal.t("The changes to these items will not be saved until the <em>Save items</em> button is clicked.") + '</div>';
};
// Add a handler so when a row is dropped, update fields dropped into new regions.
tableDrag.onDrop = function() {
dragObject = this;
var regionRow = $(dragObject.rowObject.element).prevAll('tr.region-message').get(0);
var regionName = regionRow.className.replace(/([^ ]+[ ]+)*region-([^ ]+)-message([ ]+[^ ]+)*/, '$2');
var regionField = $('select.item-region-select', dragObject.rowObject.element);
if ($('option[value=' + regionName + ']', regionField).length == 0) {
alert(Drupal.t('The item cannot be placed in this region.'));
// Simulate that there was a selected element change, so the row is put
// back to from where the user tried to drag it.
regionField.change();
}
else if ($(dragObject.rowObject.element).prev('tr').is('.region-message')) {
var weightField = $('select.item-weight', dragObject.rowObject.element);
var oldRegionName = weightField[0].className.replace(/([^ ]+[ ]+)*item-weight-([^ ]+)([ ]+[^ ]+)*/, '$2');
if (!regionField.is('.item-region-'+ regionName)) {
regionField.removeClass('item-region-' + oldRegionName).addClass('item-region-' + regionName);
weightField.removeClass('item-weight-' + oldRegionName).addClass('item-weight-' + regionName);
regionField.val(regionName);
}
}
};
// Add the behavior to each region select list.
$('select.item-region-select', context).once('item-region-select', function() {
$(this).change(function(event) {
// Make our new row and select field.
var row = $(this).parents('tr:first');
var select = $(this);
tableDrag.rowObject = new tableDrag.row(row);
// Find the correct region and insert the row as the first in the region.
$('tr.region-message', table).each(function() {
if ($(this).is('.region-' + select[0].value + '-message')) {
// Add the new row and remove the old one.
$(this).after(row);
// Manually update weights and restripe.
tableDrag.updateFields(row.get(0));
tableDrag.rowObject.changed = true;
if (tableDrag.oldRowElement) {
$(tableDrag.oldRowElement).removeClass('drag-previous');
}
tableDrag.oldRowElement = row.get(0);
tableDrag.restripeTable();
tableDrag.rowObject.markChanged();
tableDrag.oldRowElement = row;<|fim▁hole|>
// Modify empty regions with added or removed fields.
checkEmptyRegions(table, row);
// Remove focus from selectbox.
select.get(0).blur();
});
$(this).addClass('itemregionselect-processed');
});
var checkEmptyRegions = function(table, rowObject) {
$('tr.region-message', table).each(function() {
// If the dragged row is in this region, but above the message row, swap it down one space.
if ($(this).prev('tr').get(0) == rowObject.element) {
// Prevent a recursion problem when using the keyboard to move rows up.
if ((rowObject.method != 'keyboard' || rowObject.direction == 'down')) {
rowObject.swap('after', this);
}
}
// This region has become empty
if ($(this).next('tr').is(':not(.draggable)') || $(this).next('tr').size() == 0) {
$(this).removeClass('region-populated').addClass('region-empty');
}
// This region has become populated.
else if ($(this).is('.region-empty')) {
$(this).removeClass('region-empty').addClass('region-populated');
}
});
};
}
};
})(jQuery);<|fim▁end|>
|
$(row).addClass('drag-previous');
}
});
|
<|file_name|>RedirectToServiceActionTests.java<|end_file_name|><|fim▁begin|>package org.apereo.cas.web.flow.actions;
import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.authentication.principal.ResponseBuilderLocator;
import org.apereo.cas.authentication.principal.WebApplicationService;
import org.apereo.cas.authentication.principal.WebApplicationServiceResponseBuilder;
import org.apereo.cas.config.CasCoreServicesConfiguration;
import org.apereo.cas.config.CasCoreUtilConfiguration;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.web.flow.CasWebflowConstants;
import org.apereo.cas.web.support.WebUtils;
import lombok.val;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration;
import org.springframework.mock.web.MockHttpServletRequest;<|fim▁hole|>import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.test.MockRequestContext;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* This is {@link RedirectToServiceActionTests}.
*
* @author Misagh Moayyed
* @since 5.3.0
*/
@SpringBootTest(classes = {
RefreshAutoConfiguration.class,
CasCoreServicesConfiguration.class,
CasCoreUtilConfiguration.class
})
public class RedirectToServiceActionTests {
@ClassRule
public static final SpringClassRule SPRING_CLASS_RULE = new SpringClassRule();
@Rule
public final SpringMethodRule springMethodRule = new SpringMethodRule();
@Autowired
@Qualifier("servicesManager")
private ServicesManager servicesManager;
@Test
public void verifyAction() throws Exception {
val context = new MockRequestContext();
val request = new MockHttpServletRequest();
context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse()));
WebUtils.putAuthentication(CoreAuthenticationTestUtils.getAuthentication(), context);
WebUtils.putService(context, CoreAuthenticationTestUtils.getWebApplicationService());
val locator = mock(ResponseBuilderLocator.class);
when(locator.locate(any(WebApplicationService.class))).thenReturn(new WebApplicationServiceResponseBuilder(this.servicesManager));
val redirectToServiceAction = new RedirectToServiceAction(locator);
val event = redirectToServiceAction.execute(context);
assertEquals(CasWebflowConstants.TRANSITION_ID_REDIRECT, event.getId());
}
}<|fim▁end|>
|
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletContext;
import org.springframework.test.context.junit4.rules.SpringClassRule;
import org.springframework.test.context.junit4.rules.SpringMethodRule;
|
<|file_name|>_showtickprefix.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
<|fim▁hole|> def __init__(
self, plotly_name="showtickprefix", parent_name="layout.yaxis", **kwargs
):
super(ShowtickprefixValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "ticks"),
role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["all", "first", "last", "none"]),
**kwargs
)<|fim▁end|>
|
class ShowtickprefixValidator(_plotly_utils.basevalidators.EnumeratedValidator):
|
<|file_name|>CredentialBodyParser.java<|end_file_name|><|fim▁begin|>//
// ========================================================================
// Copyright (c) 1995-2015 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.spdy.parser;
import java.io.ByteArrayInputStream;
import java.nio.ByteBuffer;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.eclipse.jetty.spdy.SessionException;
import org.eclipse.jetty.spdy.api.SessionStatus;
import org.eclipse.jetty.spdy.frames.ControlFrameType;
import org.eclipse.jetty.spdy.frames.CredentialFrame;
public class CredentialBodyParser extends ControlFrameBodyParser
{
private final List<Certificate> certificates = new ArrayList<>();
private final ControlFrameParser controlFrameParser;
private State state = State.SLOT;
private int totalLength;
private int cursor;
private short slot;
private int proofLength;
private byte[] proof;
private int certificateLength;
private byte[] certificate;
public CredentialBodyParser(ControlFrameParser controlFrameParser)
{
this.controlFrameParser = controlFrameParser;
}
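// Body layout consumed by the state machine in parse() below:
// a 16-bit slot, a 32-bit proof length (top bit masked off), the proof
// bytes, then zero or more (32-bit certificate length, certificate) pairs
// until the control frame length is exhausted.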
@Override
public boolean parse(ByteBuffer buffer)
{
while (buffer.hasRemaining())
{
switch (state)
{
case SLOT:
{
if (buffer.remaining() >= 2)
{
slot = buffer.getShort();
checkSlotValid();
state = State.PROOF_LENGTH;
}
else
{
state = State.SLOT_BYTES;
cursor = 2;
}
break;
}
case SLOT_BYTES:
{
byte currByte = buffer.get();
--cursor;
slot += (currByte & 0xFF) << 8 * cursor;
if (cursor == 0)
{
checkSlotValid();
state = State.PROOF_LENGTH;
}
break;
}
case PROOF_LENGTH:
{
if (buffer.remaining() >= 4)
{
proofLength = buffer.getInt() & 0x7F_FF_FF_FF;
state = State.PROOF;
}
else
{
state = State.PROOF_LENGTH_BYTES;
cursor = 4;
}
break;
}
case PROOF_LENGTH_BYTES:
{<|fim▁hole|> if (cursor == 0)
{
proofLength &= 0x7F_FF_FF_FF;
state = State.PROOF;
}
break;
}
case PROOF:
{
totalLength = controlFrameParser.getLength() - 2 - 4 - proofLength;
proof = new byte[proofLength];
if (buffer.remaining() >= proofLength)
{
buffer.get(proof);
state = State.CERTIFICATE_LENGTH;
if (totalLength == 0)
{
onCredential();
return true;
}
}
else
{
state = State.PROOF_BYTES;
cursor = proofLength;
}
break;
}
case PROOF_BYTES:
{
proof[proofLength - cursor] = buffer.get();
--cursor;
if (cursor == 0)
{
state = State.CERTIFICATE_LENGTH;
if (totalLength == 0)
{
onCredential();
return true;
}
}
break;
}
case CERTIFICATE_LENGTH:
{
if (buffer.remaining() >= 4)
{
certificateLength = buffer.getInt() & 0x7F_FF_FF_FF;
state = State.CERTIFICATE;
}
else
{
state = State.CERTIFICATE_LENGTH_BYTES;
cursor = 4;
}
break;
}
case CERTIFICATE_LENGTH_BYTES:
{
byte currByte = buffer.get();
--cursor;
certificateLength += (currByte & 0xFF) << 8 * cursor;
if (cursor == 0)
{
certificateLength &= 0x7F_FF_FF_FF;
state = State.CERTIFICATE;
}
break;
}
case CERTIFICATE:
{
totalLength -= 4 + certificateLength;
certificate = new byte[certificateLength];
if (buffer.remaining() >= certificateLength)
{
buffer.get(certificate);
if (onCertificate())
return true;
}
else
{
state = State.CERTIFICATE_BYTES;
cursor = certificateLength;
}
break;
}
case CERTIFICATE_BYTES:
{
certificate[certificateLength - cursor] = buffer.get();
--cursor;
if (cursor == 0)
{
if (onCertificate())
return true;
}
break;
}
default:
{
throw new IllegalStateException();
}
}
}
return false;
}
private void checkSlotValid()
{
if (slot <= 0)
throw new SessionException(SessionStatus.PROTOCOL_ERROR,
"Invalid slot " + slot + " for " + ControlFrameType.CREDENTIAL + " frame");
}
private boolean onCertificate()
{
certificates.add(deserializeCertificate(certificate));
if (totalLength == 0)
{
onCredential();
return true;
}
else
{
certificateLength = 0;
state = State.CERTIFICATE_LENGTH;
}
return false;
}
private Certificate deserializeCertificate(byte[] bytes)
{
try
{
CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
return certificateFactory.generateCertificate(new ByteArrayInputStream(bytes));
}
catch (CertificateException x)
{
throw new SessionException(SessionStatus.PROTOCOL_ERROR, x);
}
}
private void onCredential()
{
CredentialFrame frame = new CredentialFrame(controlFrameParser.getVersion(), slot,
Arrays.copyOf(proof, proof.length), certificates.toArray(new Certificate[certificates.size()]));
controlFrameParser.onControlFrame(frame);
reset();
}
private void reset()
{
state = State.SLOT;
totalLength = 0;
cursor = 0;
slot = 0;
proofLength = 0;
proof = null;
certificateLength = 0;
certificate = null;
certificates.clear();
}
public enum State
{
SLOT, SLOT_BYTES, PROOF_LENGTH, PROOF_LENGTH_BYTES, PROOF, PROOF_BYTES,
CERTIFICATE_LENGTH, CERTIFICATE_LENGTH_BYTES, CERTIFICATE, CERTIFICATE_BYTES
}
}<|fim▁end|>
|
byte currByte = buffer.get();
--cursor;
proofLength += (currByte & 0xFF) << 8 * cursor;
|
<|file_name|>to_bits.rs<|end_file_name|><|fim▁begin|>use itertools::Itertools;
use malachite_base::num::logic::traits::{BitConvertible, BitIterable};
use malachite_base_test_util::bench::bucketers::vec_len_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::{bool_vec_gen, bool_vec_gen_var_5};
use malachite_base_test_util::num::logic::bit_convertible::{to_bits_asc_alt, to_bits_desc_alt};
use malachite_base_test_util::runner::Runner;
use malachite_nz::integer::logic::bit_convertible::{
bits_slice_to_twos_complement_bits_negative, bits_to_twos_complement_bits_non_negative,
bits_vec_to_twos_complement_bits_negative,
};
use malachite_nz_test_util::bench::bucketers::integer_bit_bucketer;
use malachite_nz_test_util::generators::integer_gen;
use malachite_nz_test_util::integer::logic::to_bits::{to_bits_asc_naive, to_bits_desc_naive};
pub(crate) fn register(runner: &mut Runner) {
register_demo!(runner, demo_bits_to_twos_complement_bits_non_negative);
register_demo!(runner, demo_bits_slice_to_twos_complement_bits_negative);
register_demo!(runner, demo_bits_vec_to_twos_complement_bits_negative);
register_demo!(runner, demo_integer_to_bits_asc);
register_demo!(runner, demo_integer_to_bits_desc);
register_bench!(runner, benchmark_bits_to_twos_complement_bits_non_negative);
register_bench!(
runner,
benchmark_bits_slice_to_twos_complement_bits_negative
);
register_bench!(runner, benchmark_bits_vec_to_twos_complement_bits_negative);
register_bench!(runner, benchmark_integer_to_bits_asc_evaluation_strategy);
register_bench!(runner, benchmark_integer_to_bits_asc_algorithms);
register_bench!(runner, benchmark_integer_to_bits_desc_evaluation_strategy);
register_bench!(runner, benchmark_integer_to_bits_desc_algorithms);
}
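// The demos below print sample invocations; the benchmarks compare Malachite's default implementations against the "alt" and "naive" variants.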
fn demo_bits_to_twos_complement_bits_non_negative(gm: GenMode, config: GenConfig, limit: usize) {
for mut bits in bool_vec_gen().get(gm, &config).take(limit) {
let old_bits = bits.clone();
bits_to_twos_complement_bits_non_negative(&mut bits);
println!(
"bits := {:?}; bits_to_twos_complement_bits_non_negative(&mut bits); bits = {:?}",
old_bits, bits
);
}
}
fn demo_bits_slice_to_twos_complement_bits_negative(gm: GenMode, config: GenConfig, limit: usize) {
for mut bits in bool_vec_gen().get(gm, &config).take(limit) {
let old_bits = bits.clone();
let carry = bits_slice_to_twos_complement_bits_negative(&mut bits);
println!(
"bits := {:?}; bits_slice_to_twos_complement_bits_negative(&mut bits) = {}; \
bits = {:?}",
old_bits, carry, bits
);
}
}
fn demo_bits_vec_to_twos_complement_bits_negative(gm: GenMode, config: GenConfig, limit: usize) {
for mut bits in bool_vec_gen_var_5().get(gm, &config).take(limit) {
let old_bits = bits.clone();
bits_vec_to_twos_complement_bits_negative(&mut bits);
println!(
"bits := {:?}; bits_vec_to_twos_complement_bits_negative(&mut bits); bits = {:?}",
old_bits, bits
);
}
}
fn demo_integer_to_bits_asc(gm: GenMode, config: GenConfig, limit: usize) {
for n in integer_gen().get(gm, &config).take(limit) {
println!("to_bits_asc({}) = {:?}", n, n.to_bits_asc());
}
}
fn demo_integer_to_bits_desc(gm: GenMode, config: GenConfig, limit: usize) {
for n in integer_gen().get(gm, &config).take(limit) {
println!("to_bits_desc({}) = {:?}", n, n.to_bits_desc());
}
}
fn benchmark_bits_to_twos_complement_bits_non_negative(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"bits_to_twos_complement_bits_non_negative(&mut [bool])",
BenchmarkType::Single,
bool_vec_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&vec_len_bucketer(),
&mut [("Malachite", &mut |mut bits| {
bits_to_twos_complement_bits_non_negative(&mut bits)
})],
);
}
fn benchmark_bits_slice_to_twos_complement_bits_negative(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"bits_slice_to_twos_complement_bits_negative(&mut [bool])",
BenchmarkType::Single,
bool_vec_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&vec_len_bucketer(),
&mut [("Malachite", &mut |mut bits| {
no_out!(bits_slice_to_twos_complement_bits_negative(&mut bits))
})],
);
}
fn benchmark_bits_vec_to_twos_complement_bits_negative(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"bits_vec_to_twos_complement_bits_negative(&mut [bool])",
BenchmarkType::Single,
bool_vec_gen_var_5().get(gm, &config),
gm.name(),
limit,
file_name,
&vec_len_bucketer(),
&mut [("Malachite", &mut |ref mut bits| {
bits_vec_to_twos_complement_bits_negative(bits)
})],
);
}
<|fim▁hole|>fn benchmark_integer_to_bits_asc_evaluation_strategy(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Integer.to_bits_asc()",
BenchmarkType::EvaluationStrategy,
integer_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&integer_bit_bucketer("n"),
&mut [
("Integer.to_bits_asc()", &mut |n| no_out!(n.to_bits_asc())),
("Integer.bits().collect_vec()", &mut |n| {
no_out!(n.bits().collect_vec())
}),
],
);
}
fn benchmark_integer_to_bits_asc_algorithms(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Integer.to_bits_asc()",
BenchmarkType::Algorithms,
integer_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&integer_bit_bucketer("n"),
&mut [
("default", &mut |n| no_out!(n.to_bits_asc())),
("alt", &mut |n| no_out!(to_bits_asc_alt(&n))),
("naive", &mut |n| no_out!(to_bits_asc_naive(&n))),
],
);
}
fn benchmark_integer_to_bits_desc_evaluation_strategy(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Integer.to_bits_desc()",
BenchmarkType::EvaluationStrategy,
integer_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&integer_bit_bucketer("n"),
&mut [
("Integer.to_bits_desc()", &mut |n| no_out!(n.to_bits_desc())),
("Integer.bits().rev().collect_vec()", &mut |n| {
no_out!(n.bits().rev().collect_vec())
}),
],
);
}
fn benchmark_integer_to_bits_desc_algorithms(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"Integer.to_bits_desc()",
BenchmarkType::Algorithms,
integer_gen().get(gm, &config),
gm.name(),
limit,
file_name,
&integer_bit_bucketer("n"),
&mut [
("default", &mut |n| no_out!(n.to_bits_desc())),
("alt", &mut |n| no_out!(to_bits_desc_alt(&n))),
("naive", &mut |n| no_out!(to_bits_desc_naive(&n))),
],
);
}<|fim▁end|>
| |
<|file_name|>LosslessJPEGCodec.java<|end_file_name|><|fim▁begin|>/*
* #%L
* BSD implementations of Bio-Formats readers and writers
* %%
* Copyright (C) 2005 - 2016 Open Microscopy Environment:
* - Board of Regents of the University of Wisconsin-Madison
* - Glencoe Software, Inc.
* - University of Dundee
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package loci.formats.codec;
import java.io.IOException;
import java.util.Vector;
import loci.common.ByteArrayHandle;
import loci.common.DataTools;
import loci.common.RandomAccessInputStream;
import loci.formats.FormatException;
import loci.formats.UnsupportedCompressionException;
/**
* Decompresses lossless JPEG images.
*
* @author Melissa Linkert melissa at glencoesoftware.com
*/
public class LosslessJPEGCodec extends BaseCodec {
// -- Constants --
// Start of Frame markers - non-differential, Huffman coding
private static final int SOF0 = 0xffc0; // baseline DCT
private static final int SOF1 = 0xffc1; // extended sequential DCT
private static final int SOF2 = 0xffc2; // progressive DCT
private static final int SOF3 = 0xffc3; // lossless (sequential)
// Start of Frame markers - differential, Huffman coding
private static final int SOF5 = 0xffc5; // differential sequential DCT
private static final int SOF6 = 0xffc6; // differential progressive DCT
private static final int SOF7 = 0xffc7; // differential lossless (sequential)
// Start of Frame markers - non-differential, arithmetic coding
private static final int JPG = 0xffc8; // reserved for JPEG extensions
private static final int SOF9 = 0xffc9; // extended sequential DCT
private static final int SOF10 = 0xffca; // progressive DCT
private static final int SOF11 = 0xffcb; // lossless (sequential)
// Start of Frame markers - differential, arithmetic coding
private static final int SOF13 = 0xffcd; // differential sequential DCT
private static final int SOF14 = 0xffce; // differential progressive DCT
private static final int SOF15 = 0xffcf; // differential lossless (sequential)
private static final int DHT = 0xffc4; // define Huffman table(s)
private static final int DAC = 0xffcc; // define arithmetic coding conditions
// Restart interval termination
private static final int RST_0 = 0xffd0;
private static final int RST_1 = 0xffd1;
private static final int RST_2 = 0xffd2;
private static final int RST_3 = 0xffd3;
private static final int RST_4 = 0xffd4;
private static final int RST_5 = 0xffd5;
private static final int RST_6 = 0xffd6;
private static final int RST_7 = 0xffd7;
private static final int SOI = 0xffd8; // start of image
private static final int EOI = 0xffd9; // end of image
private static final int SOS = 0xffda; // start of scan
private static final int DQT = 0xffdb; // define quantization table(s)
private static final int DNL = 0xffdc; // define number of lines
private static final int DRI = 0xffdd; // define restart interval
private static final int DHP = 0xffde; // define hierarchical progression
private static final int EXP = 0xffdf; // expand reference components
private static final int COM = 0xfffe; // comment
// -- Codec API methods --
/* @see Codec#compress(byte[], CodecOptions) */
@Override
public byte[] compress(byte[] data, CodecOptions options)
throws FormatException
{
throw new UnsupportedCompressionException(
"Lossless JPEG compression not supported");
}
/**
* The CodecOptions parameter should have the following fields set:
* {@link CodecOptions#interleaved interleaved}
* {@link CodecOptions#littleEndian littleEndian}
*
* @see Codec#decompress(RandomAccessInputStream, CodecOptions)
*/
@Override
public byte[] decompress(RandomAccessInputStream in, CodecOptions options)<|fim▁hole|> {
if (in == null)
throw new IllegalArgumentException("No data to decompress.");
if (options == null) options = CodecOptions.getDefaultOptions();
byte[] buf = new byte[0];
int width = 0, height = 0;
int bitsPerSample = 0, nComponents = 0, bytesPerSample = 0;
int[] horizontalSampling = null, verticalSampling = null;
int[] quantizationTable = null;
short[][] huffmanTables = null;
int startPredictor = 0, endPredictor = 0;
int pointTransform = 0;
int[] dcTable = null, acTable = null;
while (in.getFilePointer() < in.length() - 1) {
int code = in.readShort() & 0xffff;
int length = in.readShort() & 0xffff;
long fp = in.getFilePointer();
if (length > 0xff00) {
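// A value above 0xff00 is the next marker rather than a segment length; rewind and treat this marker as parameterless.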
length = 0;
in.seek(fp - 2);
}
else if (code == SOS) {
nComponents = in.read();
dcTable = new int[nComponents];
acTable = new int[nComponents];
for (int i=0; i<nComponents; i++) {
int componentSelector = in.read();
int tableSelector = in.read();
dcTable[i] = (tableSelector & 0xf0) >> 4;
acTable[i] = tableSelector & 0xf;
}
startPredictor = in.read();
endPredictor = in.read();
pointTransform = in.read() & 0xf;
// read image data
byte[] toDecode = new byte[(int) (in.length() - in.getFilePointer())];
in.read(toDecode);
// scrub out byte stuffing
ByteVector b = new ByteVector();
for (int i=0; i<toDecode.length; i++) {
byte val = toDecode[i];
if (val == (byte) 0xff) {
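// JPEG byte stuffing: 0xFF followed by 0x00 encodes a literal 0xFF, so keep the 0xFF and drop the stuffed 0x00.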
if (i + 1 < toDecode.length && toDecode[i + 1] == 0)
b.add(val);
i++;
} else {
b.add(val);
}
}
toDecode = b.toByteArray();
RandomAccessInputStream bb = new RandomAccessInputStream(
new ByteArrayHandle(toDecode));
HuffmanCodec huffman = new HuffmanCodec();
HuffmanCodecOptions huffmanOptions = new HuffmanCodecOptions();
huffmanOptions.bitsPerSample = bitsPerSample;
huffmanOptions.maxBytes = buf.length / nComponents;
int nextSample = 0;
while (nextSample < buf.length / nComponents) {
for (int i=0; i<nComponents; i++) {
int v = 0;
// Guard against a missing DHT segment before dereferencing the Huffman tables.
if (huffmanTables != null) {
huffmanOptions.table = huffmanTables[dcTable[i]];
v = huffman.getSample(bb, huffmanOptions);
if (nextSample == 0) {
v += (int) Math.pow(2, bitsPerSample - 1);
}
}
else {
throw new UnsupportedCompressionException(
"Arithmetic coding not supported");
}
// apply predictor to the sample
int predictor = startPredictor;
if (nextSample < width * bytesPerSample) predictor = 1;
else if ((nextSample % (width * bytesPerSample)) == 0) {
predictor = 2;
}
int componentOffset = i * (buf.length / nComponents);
int indexA = nextSample - bytesPerSample + componentOffset;
int indexB = nextSample - width * bytesPerSample + componentOffset;
int indexC = nextSample - (width + 1) * bytesPerSample +
componentOffset;
int sampleA = indexA < 0 ? 0 :
DataTools.bytesToInt(buf, indexA, bytesPerSample, false);
int sampleB = indexB < 0 ? 0 :
DataTools.bytesToInt(buf, indexB, bytesPerSample, false);
int sampleC = indexC < 0 ? 0 :
DataTools.bytesToInt(buf, indexC, bytesPerSample, false);
if (nextSample > 0) {
int pred = 0;
switch (predictor) {
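// Lossless JPEG predictors per ITU-T T.81, Table H.1 (Ra=left, Rb=above, Rc=upper-left):
// 1=Ra, 2=Rb, 3=Rc, 4=Ra+Rb-Rc, 5=Ra+(Rb-Rc)/2, 6=Rb+(Ra-Rc)/2, 7=(Ra+Rb)/2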
case 1:
pred = sampleA;
break;
case 2:
pred = sampleB;
break;
case 3:
pred = sampleC;
break;
case 4:
pred = sampleA + sampleB - sampleC;
break;
case 5:
pred = sampleA + ((sampleB - sampleC) / 2);
break;
case 6:
pred = sampleB + ((sampleA - sampleC) / 2);
break;
case 7:
pred = (sampleA + sampleB) / 2;
break;
}
v += pred;
}
int offset = componentOffset + nextSample;
DataTools.unpackBytes(v, buf, offset, bytesPerSample, false);
}
nextSample += bytesPerSample;
}
bb.close();
}
else {
length -= 2; // stored length includes length param
if (length == 0) continue;
if (code == EOI) { }
else if (code == SOF3) {
// lossless w/Huffman coding
bitsPerSample = in.read();
height = in.readShort();
width = in.readShort();
nComponents = in.read();
horizontalSampling = new int[nComponents];
verticalSampling = new int[nComponents];
quantizationTable = new int[nComponents];
for (int i=0; i<nComponents; i++) {
in.skipBytes(1);
int s = in.read();
horizontalSampling[i] = (s & 0xf0) >> 4;
verticalSampling[i] = s & 0x0f;
quantizationTable[i] = in.read();
}
bytesPerSample = bitsPerSample / 8;
if ((bitsPerSample % 8) != 0) bytesPerSample++;
buf = new byte[width * height * nComponents * bytesPerSample];
}
else if (code == SOF11) {
throw new UnsupportedCompressionException(
"Arithmetic coding is not yet supported");
}
else if (code == DHT) {
if (huffmanTables == null) {
huffmanTables = new short[4][];
}
int bytesRead = 0;
while (bytesRead < length) {
int s = in.read();
byte tableClass = (byte) ((s & 0xf0) >> 4);
byte destination = (byte) (s & 0xf);
int[] nCodes = new int[16];
Vector table = new Vector();
for (int i=0; i<nCodes.length; i++) {
nCodes[i] = in.read();
table.add(new Short((short) nCodes[i]));
}
for (int i=0; i<nCodes.length; i++) {
for (int j=0; j<nCodes[i]; j++) {
table.add(new Short((short) (in.read() & 0xff)));
}
}
huffmanTables[destination] = new short[table.size()];
for (int i=0; i<huffmanTables[destination].length; i++) {
huffmanTables[destination][i] = ((Short) table.get(i)).shortValue();
}
bytesRead += table.size() + 1;
}
}
in.seek(fp + length);
}
}
if (options.interleaved && nComponents > 1) {
// data is stored in planar (RRR...GGG...BBB...) order
byte[] newBuf = new byte[buf.length];
for (int i=0; i<buf.length; i+=nComponents*bytesPerSample) {
for (int c=0; c<nComponents; c++) {
int src = c * (buf.length / nComponents) + (i / nComponents);
int dst = i + c * bytesPerSample;
System.arraycopy(buf, src, newBuf, dst, bytesPerSample);
}
}
buf = newBuf;
}
if (options.littleEndian && bytesPerSample > 1) {
// data is stored in big endian order
// reverse the bytes in each sample
byte[] newBuf = new byte[buf.length];
for (int i=0; i<buf.length; i+=bytesPerSample) {
for (int q=0; q<bytesPerSample; q++) {
newBuf[i + bytesPerSample - q - 1] = buf[i + q];
}
}
buf = newBuf;
}
return buf;
}
}<|fim▁end|>
|
throws FormatException, IOException
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""Yummly data models.
"""
from inspect import getargspec
class Storage(dict):
"""An object that is like a dict except `obj.foo` can be used in addition
to `obj['foo']`.
Raises Attribute/Key errors for missing references.
>>> o = Storage(a=1, b=2)
>>> assert(o.a == o['a'])
>>> assert(o.b == o['b'])
>>> o.a = 2
>>> print o['a']
2
>>> x = o.copy()
>>> assert(x == o)
>>> del o.a
>>> print o.a
Traceback (most recent call last):
...
AttributeError: a
>>> print o['a']
Traceback (most recent call last):
...
KeyError: 'a'
>>> o._get_fields()
Traceback (most recent call last):
...
TypeError: ...
"""
def __getattr__(self, key):
if key in self:
return self[key]
else:
raise AttributeError(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
if key in self:
del self[key]
else:
raise AttributeError(key)
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
@classmethod
def _get_fields(cls):
"""Return class' __init__() args excluding `self`.
Assumes that the calling class has actually implemented __init__();
otherwise this will fail.
"""
# For classes, first element of args == self which we don't want.
return getargspec(cls.__init__).args[1:]
##################################################
# Get recipe related models
##################################################
class Recipe(Storage):
"""Recipe model."""
def __init__(self, **kargs):
self.id = kargs['id']
self.name = kargs['name']
self.rating = kargs.get('rating')
self.totalTime = kargs.get('totalTime') or 0
self.totalTimeInSeconds = kargs.get('totalTimeInSeconds') or 0
self.ingredientLines = kargs.get('ingredientLines') or []
self.numberOfServings = kargs.get('numberOfServings')
self.yields = kargs.get('yields')
self.attributes = kargs.get('attributes') or {}
self.source = RecipeSource(**(kargs.get('source') or {}))
self.attribution = Attribution(**(kargs.get('attribution') or {}))
# NOTE: For `flavors`, the keys are returned capitalized so normalize
# to lowercase since search results' flavor keys are lowercase.
flavors = kargs.get('flavors') or {}
self.flavors = Flavors(**{key.lower(): value
for key, value in flavors.iteritems()})
self.nutritionEstimates = [NutritionEstimate(**nute)
for nute in (kargs.get('nutritionEstimates')
or [])]
self.images = [RecipeImages(**imgs)
for imgs in (kargs.get('images') or [])]
class Flavors(Storage):
"""Flavors model."""
def __init__(self, **kargs):
self.salty = kargs.get('salty')
self.meaty = kargs.get('meaty')
self.piquant = kargs.get('piquant')
self.bitter = kargs.get('bitter')
self.sour = kargs.get('sour')
self.sweet = kargs.get('sweet')
<|fim▁hole|> def __init__(self, **kargs):
self.html = kargs.get('html')
self.url = kargs.get('url')
self.text = kargs.get('text')
self.logo = kargs.get('logo')
class NutritionEstimate(Storage):
"""Nutrition estimate model."""
def __init__(self, **kargs):
self.attribute = kargs.get('attribute')
self.description = kargs.get('description')
self.value = kargs.get('value')
self.unit = NutritionUnit(**(kargs.get('unit') or {}))
class NutritionUnit(Storage):
"""Nutrition unit model."""
def __init__(self, **kargs):
self.id = kargs['id']
self.abbreviation = kargs.get('abbreviation')
self.plural = kargs.get('plural')
self.pluralAbbreviation = kargs.get('pluralAbbreviation')
class RecipeImages(Storage):
"""Recipe images model."""
def __init__(self, **kargs):
self.hostedLargeUrl = kargs.get('hostedLargeUrl')
self.hostedSmallUrl = kargs.get('hostedSmallUrl')
class RecipeSource(Storage):
"""Recipe source model."""
def __init__(self, **kargs):
self.sourceRecipeUrl = kargs.get('sourceRecipeUrl')
self.sourceSiteUrl = kargs.get('sourceSiteUrl')
self.sourceDisplayName = kargs.get('sourceDisplayName')
##################################################
# Search related models
##################################################
class SearchResult(Storage):
"""Search result model."""
def __init__(self, **kargs):
self.totalMatchCount = kargs['totalMatchCount']
self.criteria = SearchCriteria(**kargs['criteria'])
self.facetCounts = kargs['facetCounts']
self.matches = [SearchMatch(**match) for match in kargs['matches']]
self.attribution = Attribution(**kargs['attribution'])
class SearchMatch(Storage):
"""Search match model."""
def __init__(self, **kargs):
self.id = kargs['id']
self.recipeName = kargs['recipeName']
self.rating = kargs.get('rating')
self.totalTimeInSeconds = kargs.get('totalTimeInSeconds', 0)
self.ingredients = kargs.get('ingredients')
self.flavors = Flavors(**(kargs.get('flavors') or {}))
self.smallImageUrls = kargs.get('smallImageUrls')
self.sourceDisplayName = kargs.get('sourceDisplayName', '')
self.attributes = kargs.get('attributes')
class SearchCriteria(Storage):
"""Search criteria model."""
def __init__(self, **kargs):
self.maxResults = kargs.get('maxResults')
self.resultsToSkip = kargs.get('resultsToSkip')
self.terms = kargs.get('terms')
self.requirePictures = kargs.get('requirePictures')
self.facetFields = kargs.get('facetFields')
self.allowedIngredients = kargs.get('allowedIngredients')
self.excludedIngredients = kargs.get('excludedIngredients')
self.attributeRanges = kargs.get('attributeRanges', {})
self.allowedAttributes = kargs.get('allowedAttributes', [])
self.excludedAttributes = kargs.get('excludedAttributes', [])
self.allowedDiets = kargs.get('allowedDiets', [])
self.nutritionRestrictions = kargs.get('nutritionRestrictions', {})
##################################################
# Metadata related models
##################################################
class MetaAttribute(Storage):
"""Base class for metadata attributes."""
def __init__(self, **kargs):
self.id = kargs['id']
self.description = kargs['description']
self.localesAvailableIn = kargs['localesAvailableIn']
self.name = kargs['name']
self.searchValue = kargs['searchValue']
self.type = kargs['type']
class MetaHoliday(MetaAttribute):
"""Holiday metadata model."""
pass
class MetaCuisine(MetaAttribute):
"""Cuisine metadata model."""
pass
class MetaCourse(MetaAttribute):
"""Course metadata model."""
pass
class MetaTechnique(MetaAttribute):
"""Technique metadata model."""
pass
class MetaSource(Storage):
"""Source metadata model."""
def __init__(self, **kargs):
self.faviconUrl = kargs['faviconUrl']
self.description = kargs['description']
self.searchValue = kargs['searchValue']
class MetaBrand(Storage):
"""Brand metadata model."""
def __init__(self, **kargs):
self.faviconUrl = kargs['faviconUrl']
self.description = kargs['description']
self.searchValue = kargs['searchValue']
class MetaDiet(Storage):
"""Diet metadata model."""
def __init__(self, **kargs):
self.id = kargs['id']
self.localesAvailableIn = kargs['localesAvailableIn']
self.longDescription = kargs['longDescription']
self.searchValue = kargs['searchValue']
self.shortDescription = kargs['shortDescription']
self.type = kargs['type']
class MetaAllergy(Storage):
"""Allergy metadata model."""
def __init__(self, **kargs):
self.id = kargs['id']
self.localesAvailableIn = kargs['localesAvailableIn']
self.longDescription = kargs['longDescription']
self.shortDescription = kargs['shortDescription']
self.searchValue = kargs['searchValue']
self.type = kargs['type']
class MetaIngredient(Storage):
"""Ingredient metadata model."""
def __init__(self, **kargs):
self.description = kargs['description']
self.term = kargs['term']
self.searchValue = kargs['searchValue']<|fim▁end|>
|
class Attribution(Storage):
"""Attribution model."""
|
<|file_name|>document.ts<|end_file_name|><|fim▁begin|>import 'css.escape'
import { createFootnote, FootnoteElements } from './footnote'
import { bindScrollHandler } from './scroll'
import { Adapter } from '../core'
import { addClass, removeClass, unmount } from './element'
export const CLASS_CONTENT = 'littlefoot__content'
export const CLASS_WRAPPER = 'littlefoot__wrapper'
export type HTMLAdapterSettings = Readonly<{
allowDuplicates: boolean
anchorParentSelector: string
anchorPattern: RegExp
buttonTemplate: string
contentTemplate: string
footnoteSelector: string
numberResetSelector: string
scope: string
}>
type TemplateData = Readonly<{
number: number
id: string
content: string
reference: string
}>
type Original = Readonly<{
reference: HTMLElement
referenceId: string
body: HTMLElement
}>
type OriginalData = Readonly<{
original: Original
data: TemplateData
}>
const CLASS_PRINT_ONLY = 'littlefoot--print'
const CLASS_HOST = 'littlefoot'
const setPrintOnly = (el: Element) => addClass(el, CLASS_PRINT_ONLY)
function queryAll<E extends Element>(
parent: ParentNode,
selector: string
): readonly E[] {
return Array.from(parent.querySelectorAll<E>(selector))
}
function getByClassName<E extends Element>(element: E, className: string): E {
return (
element.querySelector<E>('.' + className) ||
(element.firstElementChild as E | null) ||
element
)
}
function createElementFromHTML(html: string): HTMLElement {
const container = document.createElement('div')
container.innerHTML = html
return container.firstElementChild as HTMLElement
}
function children(element: Element, selector: string): readonly Element[] {
return Array.from(element.children).filter(
(child) => child.nodeType !== 8 && child.matches(selector)
)
}
function isDefined<T>(value?: T): value is T {
return value !== undefined
}
function findFootnoteLinks(
document: Document,
pattern: RegExp,
scope: string
): readonly HTMLAnchorElement[] {
return queryAll<HTMLAnchorElement>(document, scope + ' a[href*="#"]').filter(
(link) => (link.href + link.rel).match(pattern)
)
}
function findReference(
document: Document,
allowDuplicates: boolean,
anchorParentSelector: string,
footnoteSelector: string
) {
const processed: Element[] = []
return (link: HTMLAnchorElement): Original | undefined => {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const fragment = link.href.split('#')[1]!
const related = queryAll(document, '#' + CSS.escape(fragment)).find(
(footnote) => allowDuplicates || !processed.includes(footnote)
)
const body = related?.closest<HTMLElement>(footnoteSelector)
if (body) {
processed.push(body)
const reference = link.closest<HTMLElement>(anchorParentSelector) || link
const referenceId = reference.id || link.id
return { reference, referenceId, body }
}
}
}
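// Walks up the DOM, marking a container print-only once its only remaining visible children are <hr> separators.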
function recursiveHideFootnoteContainer(element: HTMLElement): void {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const container = element.parentElement!
const visibleElements = children(container, `:not(.${CLASS_PRINT_ONLY})`)
const visibleSeparators = visibleElements.filter((el) => el.tagName === 'HR')
if (visibleElements.length === visibleSeparators.length) {
visibleSeparators.concat(container).forEach(setPrintOnly)
recursiveHideFootnoteContainer(container)
}
}
function recursiveUnmount(element: HTMLElement) {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const parent = element.parentElement!
unmount(element)
  const html = parent.innerHTML.replace('[]', '').replace('&nbsp;', ' ').trim()
if (!html) {
recursiveUnmount(parent)
}
}
function prepareTemplateData(original: Original, idx: number): OriginalData {
const content = createElementFromHTML(original.body.outerHTML)
const backlinkSelector = '[href$="#' + original.referenceId + '"]'
queryAll<HTMLElement>(content, backlinkSelector).forEach(recursiveUnmount)
const html = content.innerHTML.trim()
return {
original,
data: {
id: String(idx + 1),
number: idx + 1,
reference: 'lf-' + original.referenceId,
content: html.startsWith('<') ? html : '<p>' + html + '</p>',
},
}
}
const resetNumbers = (resetSelector: string) => {
let current = 0
let previousParent: Element | null = null
return ({ original, data }: OriginalData): OriginalData => {
const parent = original.reference.closest(resetSelector)
current = previousParent === parent ? current + 1 : 1
previousParent = parent
return { original, data: { ...data, number: current } }
}
}
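// Tiny template renderer: fills <% key %> and <%= key %> placeholders with the matching TemplateData values.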
function interpolate(template: string) {
const pattern = /<%=?\s*(\w+?)\s*%>/g
return (replacement: TemplateData) =>
template.replace(pattern, (_, key: keyof TemplateData) =>
String(replacement[key] ?? '')
)
}
function createElements(buttonTemplate: string, popoverTemplate: string) {
const renderButton = interpolate(buttonTemplate)
const renderPopover = interpolate(popoverTemplate)
return ({
original,
data,
}: OriginalData): OriginalData & FootnoteElements => {
const id = data.id
const host = createElementFromHTML(
`<span class="${CLASS_HOST}">${renderButton(data)}</span>`
)
const button = host.firstElementChild as HTMLElement
button.setAttribute('aria-expanded', 'false')
button.dataset.footnoteButton = ''
button.dataset.footnoteId = id
const popover = createElementFromHTML(renderPopover(data))
popover.dataset.footnotePopover = ''
popover.dataset.footnoteId = id
const wrapper = getByClassName(popover, CLASS_WRAPPER)
const content = getByClassName(popover, CLASS_CONTENT)
bindScrollHandler(content, popover)
return { original, data, id, button, host, popover, content, wrapper }
}
}
function attachFootnote(reference: HTMLElement, host: HTMLElement): void {
reference.insertAdjacentElement('beforebegin', host)
}
export function setup({
allowDuplicates,
anchorParentSelector,
anchorPattern,
buttonTemplate,
contentTemplate,
footnoteSelector,
numberResetSelector,
scope,
}: HTMLAdapterSettings): Adapter<HTMLElement> {
const footnoteElements = findFootnoteLinks(document, anchorPattern, scope)<|fim▁hole|> .map(
findReference(
document,
allowDuplicates,
anchorParentSelector,
footnoteSelector
)
)
.filter(isDefined)
.map(prepareTemplateData)
.map(numberResetSelector ? resetNumbers(numberResetSelector) : (i) => i)
.map(createElements(buttonTemplate, contentTemplate))
footnoteElements.forEach(({ original, host }) => {
setPrintOnly(original.reference)
setPrintOnly(original.body)
recursiveHideFootnoteContainer(original.body)
attachFootnote(original.reference, host)
})
const footnotes = footnoteElements.map(createFootnote)
return {
footnotes,
unmount() {
footnotes.forEach((footnote) => footnote.destroy())
queryAll(document, '.' + CLASS_PRINT_ONLY).forEach((element) =>
removeClass(element, CLASS_PRINT_ONLY)
)
},
}
}<|fim▁end|>
| |
<|file_name|>pages.py<|end_file_name|><|fim▁begin|>import copy
import types
from django.core.urlresolvers import reverse
from django.db.models.query import QuerySet
registry = []
def register(*args):
"""
Register urls, views, model instances and QuerySets to be potential
pages for menu items.
Example::
import simplemenu
simplemenu.register(
'package.module.view',
('package.module.view','name'),
FlatPage.objects.all(),
(FlatPage.objects.all(),'attr_containing_name'),
Products.objects.get(pk=1),
)<|fim▁hole|>class PageWrapper(object):
"""
A helper-object to wrap the pages, which might be django models or
strings.
"""
def __init__(self, urlobj_or_str, name=None):
if isinstance(urlobj_or_str, types.StringTypes):
self.urlobj = None
self.urlstr = urlobj_or_str
else:
self.urlobj = urlobj_or_str
self.urlstr = str()
self._name = name
def name(self):
if self._name:
name = self._name
elif self.urlobj:
name = unicode(self.urlobj)
elif "/" in self.urlstr:
name = self.urlstr
else:
name = self.urlstr.rsplit('.', 1)[-1]
name = name.replace("_", " ").capitalize()
return name
def url(self):
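# A urlstr containing "/" is treated as a literal URL path; anything else is resolved as a named view via reverse().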
if self.urlobj:
url = self.urlobj.get_absolute_url()
elif "/" in self.urlstr:
url = self.urlstr
else:
url = reverse(self.urlstr)
return url
def strkey(self):
"""
Generate a reasonably unique string id for the wrapped object.
"""
if self.urlobj:
return "%s.%s.pk%s" % (self.urlobj.__module__,
self.urlobj.__class__.__name__,
self.urlobj.pk)
else:
return self.urlstr
def get_registered_pages():
"""
Returns all registered pages wrapped in PageWrapper helper-object
evaluating all QuerySets along the way.
"""
pages = []
for reg in map(copy.deepcopy, registry):
name = None
if isinstance(reg, types.TupleType):
reg, name = reg
if isinstance(reg, QuerySet):
# Name is the given attr if possible elsewise just use unicode(obj)
if not name:
f = lambda obj: PageWrapper(obj, unicode(obj))
else:
f = lambda obj: PageWrapper(obj, getattr(obj, name, unicode(obj)))
# evaluating QuerySet objects by iteration
pages.extend(map(f, reg))
else:
pages.append(PageWrapper(reg, name))
return pages<|fim▁end|>
|
"""
registry.extend(args)
|
<|file_name|>pep_bookmarks.py<|end_file_name|><|fim▁begin|><|fim▁hole|># This file is part of nbxmpp.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; If not, see <http://www.gnu.org/licenses/>.
from nbxmpp.namespaces import Namespace
from nbxmpp.protocol import NodeProcessed
from nbxmpp.structs import StanzaHandler
from nbxmpp.task import iq_request_task
from nbxmpp.errors import MalformedStanzaError
from nbxmpp.modules.base import BaseModule
from nbxmpp.modules.util import raise_if_error
from nbxmpp.modules.bookmarks.util import parse_bookmarks
from nbxmpp.modules.bookmarks.util import build_storage_node
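# Node options for storing bookmarks: persist items across sessions and restrict reads to the account owner (whitelist), as recommended by XEP-0223 for private data.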
BOOKMARK_OPTIONS = {
'pubsub#persist_items': 'true',
'pubsub#access_model': 'whitelist',
}
class PEPBookmarks(BaseModule):
_depends = {
'publish': 'PubSub',
'request_items': 'PubSub',
}
def __init__(self, client):
BaseModule.__init__(self, client)
self._client = client
self.handlers = [
StanzaHandler(name='message',
callback=self._process_pubsub_bookmarks,
ns=Namespace.PUBSUB_EVENT,
priority=16),
]
def _process_pubsub_bookmarks(self, _client, stanza, properties):
if not properties.is_pubsub_event:
return
if properties.pubsub_event.node != Namespace.BOOKMARKS:
return
item = properties.pubsub_event.item
if item is None:
# Retract, Deleted or Purged
return
try:
bookmarks = parse_bookmarks(item, self._log)
except MalformedStanzaError as error:
self._log.warning(error)
self._log.warning(stanza)
raise NodeProcessed
if not bookmarks:
self._log.info('Bookmarks removed')
return
pubsub_event = properties.pubsub_event._replace(data=bookmarks)
self._log.info('Received bookmarks from: %s', properties.jid)
for bookmark in bookmarks:
self._log.info(bookmark)
properties.pubsub_event = pubsub_event
@iq_request_task
def request_bookmarks(self):
_task = yield
items = yield self.request_items(Namespace.BOOKMARKS, max_items=1)
raise_if_error(items)
if not items:
yield []
bookmarks = parse_bookmarks(items[0], self._log)
for bookmark in bookmarks:
self._log.info(bookmark)
yield bookmarks
@iq_request_task
def store_bookmarks(self, bookmarks):
_task = yield
self._log.info('Store Bookmarks')
self.publish(Namespace.BOOKMARKS,
build_storage_node(bookmarks),
id_='current',
options=BOOKMARK_OPTIONS,
force_node_options=True)<|fim▁end|>
|
# Copyright (C) 2018 Philipp Hörist <philipp AT hoerist.com>
#
|
<|file_name|>GeneticAlgorithm.hpp<|end_file_name|><|fim▁begin|>/**
* @file GeneticAlgorithm.hpp
* @author Renato Oliveira ([email protected])
* @version 1.0
* @since GeneticNet 1.0
* @date 10/07/2012
*/
#ifndef GRUBI_GENETICNET_GENETICALGORITHM_HPP_INCLUDED
#define GRUBI_GENETICNET_GENETICALGORITHM_HPP_INCLUDED
#include <iostream>
#include <cstdlib>
#include <boost/archive/xml_iarchive.hpp>
#include <boost/archive/xml_oarchive.hpp>
#include <boost/serialization/serialization.hpp>
#include <boost/serialization/nvp.hpp>
#include <boost/serialization/string.hpp><|fim▁hole|>#include "SensorNetwork.hpp"
#include <GMPI/Logger.hpp>
namespace geneticnet {
namespace GA {
class Setup;
}
/**
* @brief Implements the canonical Genetic Algorithm.
*/
class GeneticAlgorithm : public AbstractMetaheuristic {
public:
GeneticAlgorithm();
GeneticAlgorithm(GA::Setup* gaSetup);
GeneticAlgorithm(boost::archive::xml_iarchive& input);
~GeneticAlgorithm();
virtual void execute(ResultManager * results = NULL);
virtual void save_setup_to_xml(boost::archive::xml_oarchive& output);
virtual void load_setup_from_xml(boost::archive::xml_iarchive& input);
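/// Tournament selection: samples tournmentSize individuals at random and returns the fittest (declaration only; the implementation lives elsewhere).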
Solution* tournment(Solution** population);
protected:
GA::Setup *setup;
};
/// The Genetic Algorithm specific structures.
namespace GA {
/**
* @brief This is a bean class for the Genetic Algorithm's execution parameters.
*/
class Setup {
friend class geneticnet::AbstractMetaheuristic;
friend class geneticnet::GeneticAlgorithm;
friend class boost::serialization::access;
public:
/// Constructs the setup and define default values for all parameters.
Setup() {
this->populationSize = 8;
this->maximumGenerations = 1000000;
this->maximumExecutionTime = 3600.0;
this->crossoverRate = 0.7;
this->mutationRate = 0.05;
this->elitism = true;
this->tournmentSize = 3;
this->instanceFilename = "";
}
/// Destroys the parameters, if necessary.
~Setup() {}
protected:
/// Number of individuals in the population.
int populationSize;
/// Maximum number of population's generations.
int maximumGenerations;
/// Maximum execution time (real time) for the Genetic Algorithm.
double maximumExecutionTime;
/// The rate to apply crossover to generate the new individual.
double crossoverRate;
/// The rate to apply mutation to the new generated individual.
double mutationRate;
/// Use elitism criteria?
bool elitism;
/// Number of individuals selected for the tournament.
int tournmentSize;
/// Instance (Sensor Network) configuration file path.
std::string instanceFilename;
/// Genetic Algorithm's setup serialization.
template<class Archive>
void serialize(Archive & archive, const unsigned int version) {
archive & BOOST_SERIALIZATION_NVP(populationSize);
archive & BOOST_SERIALIZATION_NVP(maximumGenerations);
archive & BOOST_SERIALIZATION_NVP(maximumExecutionTime);
archive & BOOST_SERIALIZATION_NVP(crossoverRate);
archive & BOOST_SERIALIZATION_NVP(mutationRate);
archive & BOOST_SERIALIZATION_NVP(elitism);
archive & BOOST_SERIALIZATION_NVP(tournmentSize);
archive & BOOST_SERIALIZATION_NVP(instanceFilename);
}
};
}
}
BOOST_CLASS_VERSION(geneticnet::GA::Setup, 1)
#endif<|fim▁end|>
|
#include "AbstractMetaheuristic.hpp"
#include "ResultManager.hpp"
#include "Solution.hpp"
|
<|file_name|>PasteEntryRepositoryAction.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2001-2017 by RapidMiner and the contributors
*
<|fim▁hole|> * This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.repository.gui.actions;
import com.rapidminer.repository.Entry;
import com.rapidminer.repository.gui.RepositoryTree;
import java.awt.event.ActionEvent;
import javax.swing.Action;
/**
* This action is the standard paste action.
*
* @author Simon Fischer
*/
public class PasteEntryRepositoryAction extends AbstractRepositoryAction<Entry> {
private static final long serialVersionUID = 1L;
public PasteEntryRepositoryAction(RepositoryTree tree) {
super(tree, Entry.class, false, "repository_paste");
putValue(ACTION_COMMAND_KEY, "paste");
}
@Override
public void actionPerformed(ActionEvent e) {
String action = e.getActionCommand();
Action a = tree.getActionMap().get(action);
if (a != null) {
a.actionPerformed(new ActionEvent(tree, ActionEvent.ACTION_PERFORMED, null));
}
}
@Override
public void actionPerformed(Entry cast) {
// not needed because we override actionPerformed(ActionEvent e) which is the only caller
}
}<|fim▁end|>
|
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
|
<|file_name|>inventory-filter.spec.ts<|end_file_name|><|fim▁begin|>import { TestData } from '../../test-data';
import { ViewController } from 'ionic-angular';
import {
NavParamsMock,
BrandsActionsMock,
CategoriesActionsMock,
ModelsActionsMock,
ItemsActionsMock,
BrandsServiceMock,
CategoriesServiceMock,
ModelsServiceMock,
ItemsServiceMock,
} from '../../mocks';
import { InventoryFilterPage } from './inventory-filter';
let instance: InventoryFilterPage = null;
describe('InventoryFilter Page', () => {
beforeEach(() => {
instance = new InventoryFilterPage(
<any> new ViewController,
<any> new NavParamsMock,
<any> new BrandsServiceMock,
<any> new BrandsActionsMock,
<any> new ModelsServiceMock,
<any> new ModelsActionsMock,
<any> new CategoriesServiceMock,
<any> new CategoriesActionsMock,
<any> new ItemsServiceMock,
<any> new ItemsActionsMock
);
});
it('is created', () => {
expect(instance).toBeTruthy();
});
it('gets ids from store', () => {
instance.ngOnInit();
expect(instance.selectedBrandID).toEqual(TestData.itemFilters.brandID);
expect(instance.selectedModelID).toEqual(TestData.itemFilters.modelID);
expect(instance.selectedCategoryID).toEqual(TestData.itemFilters.categoryID);
});
it('calls filterModels if selectedBrandID is not -1', () => {
instance.navParams.param = TestData.apiItem.brandID;
spyOn(instance, 'onFilterModels');
instance.ngOnInit();
expect(instance.onFilterModels).toHaveBeenCalled();
});
it('filters models on filterModels()', () => {
spyOn(instance.modelsActions, 'filterModels');
instance.onFilterModels();
expect(instance.modelsActions.filterModels).toHaveBeenCalled();
});<|fim▁hole|> expect(instance.selectedBrandID).toEqual(-1);
expect(instance.selectedModelID).toEqual(-1);
expect(instance.selectedCategoryID).toEqual(-1);
expect(instance.onApplyFilters).toHaveBeenCalled();
});
it('dismisses modal on dismiss', () => {
spyOn(instance.viewCtrl, 'dismiss');
instance.onDismiss();
expect(instance.viewCtrl.dismiss).toHaveBeenCalled();
});
it('dismisses modal on applyFilters', () => {
instance.selectedBrandID = TestData.itemFilters.brandID;
instance.selectedModelID = TestData.itemFilters.modelID;
instance.selectedCategoryID = TestData.itemFilters.categoryID;
const ids = {
brandID: TestData.itemFilters.brandID,
modelID: TestData.itemFilters.modelID,
categoryID: TestData.itemFilters.categoryID
};
spyOn(instance.viewCtrl, 'dismiss');
spyOn(instance.itemsActions, 'updateFilters');
instance.onApplyFilters();
expect(instance.viewCtrl.dismiss).toHaveBeenCalled();
expect(instance.itemsActions.updateFilters).toHaveBeenCalledWith(ids);
});
});<|fim▁end|>
|
it('resets filters on resetFilters()', () => {
spyOn(instance, 'onApplyFilters');
instance.onResetFilters();
|
<|file_name|>df103_NanoAODHiggsAnalysis.py<|end_file_name|><|fim▁begin|>## \file
## \ingroup tutorial_dataframe
## \notebook -draw
## \brief An example of complex analysis with RDataFrame: reconstructing the Higgs boson.
##
## This tutorial is a simplified yet still complex example of an analysis reconstructing the Higgs boson decaying to two Z
## bosons from events with four leptons. The data and simulated events are taken from CERN OpenData representing a
## subset of the data recorded in 2012 with the CMS detector at the LHC. The tutorial follows the Higgs to four leptons
## analysis published on CERN Open Data portal ([10.7483/OPENDATA.CMS.JKB8.RR42](http://opendata.cern.ch/record/5500)).
## The resulting plots show the invariant mass of the selected four lepton systems in different decay modes (four muons,
## four electrons and two of each kind) and in a combined plot indicating the decay of the Higgs boson with a mass of
## about 125 GeV.
##
## The following steps are performed for each sample with data and simulated events in order to reconstruct the Higgs
## boson from the selected muons and electrons:
## 1. Select interesting events with multiple cuts on event properties, e.g., number of leptons, kinematics of the
## leptons and quality of the tracks.
## 2. Reconstruct two Z bosons of which only one on the mass shell from the selected events and apply additional cuts on
## the reconstructed objects.
## 3. Reconstruct the Higgs boson from the remaining Z boson candidates and calculate its invariant mass.
##
## Another aim of this version of the tutorial is to show a way to blend C++ and Python code. All the functions that
## make computations on data to define new columns or filter existing ones in a precise way, better suited to be written
## in C++, have been moved to a header that is then declared to the ROOT C++ interpreter. The functions that instead
## create nodes of the computational graph (e.g. Filter, Define) remain inside the main Python script.
##
## The tutorial has the fast mode enabled by default, which reads the data from already skimmed
## datasets with a total size of only 51MB. If the fast mode is disabled, the tutorial runs over
## the full dataset with a size of 12GB.
##
## \macro_image
## \macro_code
## \macro_output
##
## \date July 2019
## \author Stefan Wunsch (KIT, CERN), Vincenzo Eduardo Padulano (UniMiB, CERN)
import ROOT
import os
# Enable multi-threading
ROOT.ROOT.EnableImplicitMT()
# Include necessary header
higgs_header_path = os.path.join(os.sep, str(ROOT.gROOT.GetTutorialDir()) + os.sep, "dataframe" + os.sep,
"df103_NanoAODHiggsAnalysis_python.h")
ROOT.gInterpreter.Declare('#include "{}"'.format(higgs_header_path))
# Python functions
def reco_higgs_to_2el2mu(df):
"""Reconstruct Higgs from two electrons and two muons"""
# Filter interesting events
df_base = selection_2el2mu(df)
# Compute masses of Z systems
df_z_mass = df_base.Define("Z_mass", "compute_z_masses_2el2mu(Electron_pt, Electron_eta, Electron_phi,"
" Electron_mass, Muon_pt, Muon_eta, Muon_phi, Muon_mass)")
# Cut on mass of Z candidates
df_z_cut = filter_z_candidates(df_z_mass)
# Reconstruct H mass
df_h_mass = df_z_cut.Define("H_mass", "compute_higgs_mass_2el2mu(Electron_pt, Electron_eta, Electron_phi,"
" Electron_mass, Muon_pt, Muon_eta, Muon_phi, Muon_mass)")
return df_h_mass
def selection_2el2mu(df):
"""Select interesting events with two electrons and two muons"""
df_ge2el2mu = df.Filter("nElectron>=2 && nMuon>=2", "At least two electrons and two muons")
df_eta = df_ge2el2mu.Filter("All(abs(Electron_eta)<2.5) && All(abs(Muon_eta)<2.4)", "Eta cuts")
df_pt = df_eta.Filter("pt_cuts(Muon_pt, Electron_pt)", "Pt cuts")
df_dr = df_pt.Filter("dr_cuts(Muon_eta, Muon_phi, Electron_eta, Electron_phi)", "Dr cuts")
df_iso = df_dr.Filter("All(abs(Electron_pfRelIso03_all)<0.40) && All(abs(Muon_pfRelIso04_all)<0.40)",
"Require good isolation")
df_el_ip3d = df_iso.Define("Electron_ip3d_el", "sqrt(Electron_dxy*Electron_dxy + Electron_dz*Electron_dz)")
df_el_sip3d = df_el_ip3d.Define("Electron_sip3d_el",
"Electron_ip3d_el/sqrt(Electron_dxyErr*Electron_dxyErr + "
"Electron_dzErr*Electron_dzErr)")
df_el_track = df_el_sip3d.Filter("All(Electron_sip3d_el<4) && All(abs(Electron_dxy)<0.5) &&"
" All(abs(Electron_dz)<1.0)",
"Electron track close to primary vertex with small uncertainty")
df_mu_ip3d = df_el_track.Define("Muon_ip3d_mu", "sqrt(Muon_dxy*Muon_dxy + Muon_dz*Muon_dz)")
df_mu_sip3d = df_mu_ip3d.Define("Muon_sip3d_mu",
"Muon_ip3d_mu/sqrt(Muon_dxyErr*Muon_dxyErr + Muon_dzErr*Muon_dzErr)")
df_mu_track = df_mu_sip3d.Filter("All(Muon_sip3d_mu<4) && All(abs(Muon_dxy)<0.5) && All(abs(Muon_dz)<1.0)",
"Muon track close to primary vertex with small uncertainty")
df_2p2n = df_mu_track.Filter("Sum(Electron_charge)==0 && Sum(Muon_charge)==0",
"Two opposite charged electron and muon pairs")
return df_2p2n
def reco_higgs_to_4mu(df):
"""Reconstruct Higgs from four muons"""
# Filter interesting events
df_base = selection_4mu(df)
# Reconstruct Z systems
df_z_idx = df_base.Define("Z_idx", "reco_zz_to_4l(Muon_pt, Muon_eta, Muon_phi, Muon_mass, Muon_charge)")
# Cut on distance between muons building Z systems
df_z_dr = df_z_idx.Filter("filter_z_dr(Z_idx, Muon_eta, Muon_phi)", "Delta R separation of muons building Z system")
# Compute masses of Z systems
df_z_mass = df_z_dr.Define("Z_mass", "compute_z_masses_4l(Z_idx, Muon_pt, Muon_eta, Muon_phi, Muon_mass)")
# Cut on mass of Z candidates
df_z_cut = filter_z_candidates(df_z_mass)
# Reconstruct H mass
df_h_mass = df_z_cut.Define("H_mass", "compute_higgs_mass_4l(Z_idx, Muon_pt, Muon_eta, Muon_phi, Muon_mass)")
return df_h_mass
def selection_4mu(df):
"""Select interesting events with four muons"""
df_ge4m = df.Filter("nMuon>=4", "At least four muons")
df_iso = df_ge4m.Filter("All(abs(Muon_pfRelIso04_all)<0.40)", "Require good isolation")
df_kin = df_iso.Filter("All(Muon_pt>5) && All(abs(Muon_eta)<2.4)", "Good muon kinematics")
df_ip3d = df_kin.Define("Muon_ip3d", "sqrt(Muon_dxy*Muon_dxy + Muon_dz*Muon_dz)")
df_sip3d = df_ip3d.Define("Muon_sip3d", "Muon_ip3d/sqrt(Muon_dxyErr*Muon_dxyErr + Muon_dzErr*Muon_dzErr)")
df_pv = df_sip3d.Filter("All(Muon_sip3d<4) && All(abs(Muon_dxy)<0.5) && All(abs(Muon_dz)<1.0)",
"Track close to primary vertex with small uncertainty")
df_2p2n = df_pv.Filter("nMuon==4 && Sum(Muon_charge==1)==2 && Sum(Muon_charge==-1)==2",
"Two positive and two negative muons")
return df_2p2n
def filter_z_candidates(df):
"""Apply selection on reconstructed Z candidates"""
df_z1_cut = df.Filter("Z_mass[0] > 40 && Z_mass[0] < 120", "Mass of first Z candidate in [40, 120]")
df_z2_cut = df_z1_cut.Filter("Z_mass[1] > 12 && Z_mass[1] < 120", "Mass of second Z candidate in [12, 120]")
return df_z2_cut
def reco_higgs_to_4el(df):
"""Reconstruct Higgs from four electrons"""
# Filter interesting events
df_base = selection_4el(df)
# Reconstruct Z systems
df_z_idx = df_base.Define("Z_idx",
"reco_zz_to_4l(Electron_pt, Electron_eta, Electron_phi, Electron_mass, Electron_charge)")
# Cut on distance between Electrons building Z systems
df_z_dr = df_z_idx.Filter("filter_z_dr(Z_idx, Electron_eta, Electron_phi)",
"Delta R separation of Electrons building Z system")
# Compute masses of Z systems
df_z_mass = df_z_dr.Define("Z_mass",
"compute_z_masses_4l(Z_idx, Electron_pt, Electron_eta, Electron_phi, Electron_mass)")
# Cut on mass of Z candidates
df_z_cut = filter_z_candidates(df_z_mass)
# Reconstruct H mass
df_h_mass = df_z_cut.Define("H_mass",
"compute_higgs_mass_4l(Z_idx, Electron_pt, Electron_eta, Electron_phi, Electron_mass)")
return df_h_mass
def selection_4el(df):
"""Select interesting events with four electrons"""
df_ge4el = df.Filter("nElectron>=4", "At least four electrons")
df_iso = df_ge4el.Filter("All(abs(Electron_pfRelIso03_all)<0.40)", "Require good isolation")
df_kin = df_iso.Filter("All(Electron_pt>7) && All(abs(Electron_eta)<2.5)", "Good Electron kinematics")
df_ip3d = df_kin.Define("Electron_ip3d", "sqrt(Electron_dxy*Electron_dxy + Electron_dz*Electron_dz)")
df_sip3d = df_ip3d.Define("Electron_sip3d",
"Electron_ip3d/sqrt(Electron_dxyErr*Electron_dxyErr + Electron_dzErr*Electron_dzErr)")
df_pv = df_sip3d.Filter("All(Electron_sip3d<4) && All(abs(Electron_dxy)<0.5) && All(abs(Electron_dz)<1.0)",
"Track close to primary vertex with small uncertainty")
df_2p2n = df_pv.Filter("nElectron==4 && Sum(Electron_charge==1)==2 && Sum(Electron_charge==-1)==2",
"Two positive and two negative electrons")
return df_2p2n
def plot(sig, bkg, data, x_label, filename):
"""
Plot invariant mass for signal and background processes from simulated
events overlay the measured data.
"""<|fim▁hole|> ROOT.gStyle.SetTextFont(42)
d = ROOT.TCanvas("d", "", 800, 700)
# Make sure the canvas stays in the list of canvases after the macro execution
ROOT.SetOwnership(d, False)
d.SetLeftMargin(0.15)
# Get signal and background histograms and stack them to show Higgs signal
# on top of the background process
h_bkg = bkg
h_cmb = sig.Clone()
h_cmb.Add(h_bkg)
h_cmb.SetTitle("")
h_cmb.GetXaxis().SetTitle(x_label)
h_cmb.GetXaxis().SetTitleSize(0.04)
h_cmb.GetYaxis().SetTitle("N_{Events}")
h_cmb.GetYaxis().SetTitleSize(0.04)
h_cmb.SetLineColor(ROOT.kRed)
h_cmb.SetLineWidth(2)
h_cmb.SetMaximum(18)
h_bkg.SetLineWidth(2)
h_bkg.SetFillStyle(1001)
h_bkg.SetLineColor(ROOT.kBlack)
h_bkg.SetFillColor(ROOT.kAzure - 9)
# Get histogram of data points
h_data = data
h_data.SetLineWidth(1)
h_data.SetMarkerStyle(20)
h_data.SetMarkerSize(1.0)
h_data.SetMarkerColor(ROOT.kBlack)
h_data.SetLineColor(ROOT.kBlack)
# Draw histograms
h_cmb.DrawCopy("HIST")
h_bkg.DrawCopy("HIST SAME")
h_data.DrawCopy("PE1 SAME")
# Add legend
legend = ROOT.TLegend(0.62, 0.70, 0.82, 0.88)
legend.SetFillColor(0)
legend.SetBorderSize(0)
legend.SetTextSize(0.03)
legend.AddEntry(h_data, "Data", "PE1")
legend.AddEntry(h_bkg, "ZZ", "f")
legend.AddEntry(h_cmb, "m_{H} = 125 GeV", "f")
legend.Draw()
# Add header
cms_label = ROOT.TLatex()
cms_label.SetTextSize(0.04)
cms_label.DrawLatexNDC(0.16, 0.92, "#bf{CMS Open Data}")
header = ROOT.TLatex()
header.SetTextSize(0.03)
header.DrawLatexNDC(0.63, 0.92, "#sqrt{s} = 8 TeV, L_{int} = 11.6 fb^{-1}")
# Save plot
d.SaveAs(filename)
def df103_NanoAODHiggsAnalysis(run_fast = True):
# In fast mode, take samples from */cms_opendata_2012_nanoaod_skimmed/*, which has
# the preselections from the selection_* functions already applied.
path = "root://eospublic.cern.ch//eos/root-eos/cms_opendata_2012_nanoaod/"
if run_fast: path = "root://eospublic.cern.ch//eos/root-eos/cms_opendata_2012_nanoaod_skimmed/"
# Create dataframes for signal, background and data samples
# Signal: Higgs -> 4 leptons
df_sig_4l = ROOT.RDataFrame("Events", path + "SMHiggsToZZTo4L.root")
# Background: ZZ -> 4 leptons
# Note that additional background processes from the original paper
# with minor contribution were left out for this
# tutorial.
df_bkg_4mu = ROOT.RDataFrame("Events", path + "ZZTo4mu.root")
df_bkg_4el = ROOT.RDataFrame("Events", path + "ZZTo4e.root")
df_bkg_2el2mu = ROOT.RDataFrame("Events", path + "ZZTo2e2mu.root")
# CMS data taken in 2012 (11.6 fb^-1 integrated luminosity)
df_data_doublemu = ROOT.RDataFrame("Events", (path + f for f in ["Run2012B_DoubleMuParked.root", "Run2012C_DoubleMuParked.root"]))
df_data_doubleel = ROOT.RDataFrame("Events", (path + f for f in ["Run2012B_DoubleElectron.root", "Run2012C_DoubleElectron.root"]))
# Number of bins for all histograms
nbins = 36
# Weights
luminosity = 11580.0 # Integrated luminosity of the data samples
xsec_ZZTo4mu = 0.077 # ZZ->4mu: Standard Model cross-section
nevt_ZZTo4mu = 1499064.0 # ZZ->4mu: Number of simulated events
xsec_ZZTo4el = 0.077 # ZZ->4el: Standard Model cross-section
nevt_ZZTo4el = 1499093.0 # ZZ->4el: Number of simulated events
xsec_ZZTo2el2mu = 0.18 # ZZ->2el2mu: Standard Model cross-section
nevt_ZZTo2el2mu = 1497445.0 # ZZ->2el2mu: Number of simulated events
xsec_SMHiggsToZZTo4L = 0.0065 # H->4l: Standard Model cross-section
nevt_SMHiggsToZZTo4L = 299973.0 # H->4l: Number of simulated events
scale_ZZTo4l = 1.386 # ZZ->4l: Scale factor for ZZ to four leptons
weight_sig_4mu = luminosity * xsec_SMHiggsToZZTo4L / nevt_SMHiggsToZZTo4L
weight_bkg_4mu = luminosity * xsec_ZZTo4mu * scale_ZZTo4l / nevt_ZZTo4mu
weight_sig_4el = luminosity * xsec_SMHiggsToZZTo4L / nevt_SMHiggsToZZTo4L
weight_bkg_4el = luminosity * xsec_ZZTo4el * scale_ZZTo4l / nevt_ZZTo4el
weight_sig_2el2mu = luminosity * xsec_SMHiggsToZZTo4L / nevt_SMHiggsToZZTo4L
weight_bkg_2el2mu = luminosity * xsec_ZZTo2el2mu * scale_ZZTo4l / nevt_ZZTo2el2mu
# Reconstruct Higgs to 4 muons
df_sig_4mu_reco = reco_higgs_to_4mu(df_sig_4l)
df_h_sig_4mu = df_sig_4mu_reco.Define("weight", "{}".format(weight_sig_4mu))\
.Histo1D(("h_sig_4mu", "", nbins, 70, 180), "H_mass", "weight")
df_bkg_4mu_reco = reco_higgs_to_4mu(df_bkg_4mu)
df_h_bkg_4mu = df_bkg_4mu_reco.Define("weight", "{}".format(weight_bkg_4mu))\
.Histo1D(("h_bkg_4mu", "", nbins, 70, 180), "H_mass", "weight")
df_data_4mu_reco = reco_higgs_to_4mu(df_data_doublemu)
df_h_data_4mu = df_data_4mu_reco.Define("weight", "1.0")\
.Histo1D(("h_data_4mu", "", nbins, 70, 180), "H_mass", "weight")
# Reconstruct Higgs to 4 electrons
df_sig_4el_reco = reco_higgs_to_4el(df_sig_4l)
df_h_sig_4el = df_sig_4el_reco.Define("weight", "{}".format(weight_sig_4el))\
.Histo1D(("h_sig_4el", "", nbins, 70, 180), "H_mass", "weight")
df_bkg_4el_reco = reco_higgs_to_4el(df_bkg_4el)
df_h_bkg_4el = df_bkg_4el_reco.Define("weight", "{}".format(weight_bkg_4el))\
.Histo1D(("h_bkg_4el", "", nbins, 70, 180), "H_mass", "weight")
df_data_4el_reco = reco_higgs_to_4el(df_data_doubleel)
df_h_data_4el = df_data_4el_reco.Define("weight", "1.0")\
.Histo1D(("h_data_4el", "", nbins, 70, 180), "H_mass", "weight")
# Reconstruct Higgs to 2 electrons and 2 muons
df_sig_2el2mu_reco = reco_higgs_to_2el2mu(df_sig_4l)
df_h_sig_2el2mu = df_sig_2el2mu_reco.Define("weight", "{}".format(weight_sig_2el2mu))\
.Histo1D(("h_sig_2el2mu", "", nbins, 70, 180), "H_mass", "weight")
df_bkg_2el2mu_reco = reco_higgs_to_2el2mu(df_bkg_2el2mu)
df_h_bkg_2el2mu = df_bkg_2el2mu_reco.Define("weight", "{}".format(weight_bkg_2el2mu))\
.Histo1D(("h_bkg_2el2mu", "", nbins, 70, 180), "H_mass", "weight")
df_data_2el2mu_reco = reco_higgs_to_2el2mu(df_data_doublemu)
df_h_data_2el2mu = df_data_2el2mu_reco.Define("weight", "1.0")\
.Histo1D(("h_data_2el2mu_doublemu", "", nbins, 70, 180), "H_mass", "weight")
# Trigger event loops and retrieve histograms
signal_4mu = df_h_sig_4mu.GetValue()
background_4mu = df_h_bkg_4mu.GetValue()
data_4mu = df_h_data_4mu.GetValue()
signal_4el = df_h_sig_4el.GetValue()
background_4el = df_h_bkg_4el.GetValue()
data_4el = df_h_data_4el.GetValue()
signal_2el2mu = df_h_sig_2el2mu.GetValue()
background_2el2mu = df_h_bkg_2el2mu.GetValue()
data_2el2mu = df_h_data_2el2mu.GetValue()
# Make plots
plot(signal_4mu, background_4mu, data_4mu, "m_{4#mu} (GeV)", "higgs_4mu.pdf")
plot(signal_4el, background_4el, data_4el, "m_{4e} (GeV)", "higgs_4el.pdf")
plot(signal_2el2mu, background_2el2mu, data_2el2mu, "m_{2e2#mu} (GeV)", "higgs_2el2mu.pdf")
# Combined plots
# Note: this aggregation must happen after the per-channel plots above.
# `Add` modifies the underlying `TH1D` in place, so aggregating first would
# overwrite the 4-muon histogram and its individual plot would show the
# combined distribution instead.
h_sig_4l = signal_4mu
h_sig_4l.Add(signal_4el)
h_sig_4l.Add(signal_2el2mu)
h_bkg_4l = background_4mu
h_bkg_4l.Add(background_4el)
h_bkg_4l.Add(background_2el2mu)
h_data_4l = data_4mu
h_data_4l.Add(data_4el)
h_data_4l.Add(data_2el2mu)
# Plot aggregated histograms
plot(h_sig_4l, h_bkg_4l, h_data_4l, "m_{4l} (GeV)", "higgs_4l.pdf")
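# A minimal sketch of an alternative that avoids mutating the per-channel
# histograms, assuming ROOT's TH1::Clone (not part of the original flow):
# h_sig_4l = signal_4mu.Clone("h_sig_4l")
# h_sig_4l.Add(signal_4el)
# h_sig_4l.Add(signal_2el2mu)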
if __name__ == "__main__":
run_fast = True
df103_NanoAODHiggsAnalysis(run_fast)<|fim▁end|>
|
# Canvas and general style options
ROOT.gStyle.SetOptStat(0)
|
<|file_name|>new.js<|end_file_name|><|fim▁begin|><|fim▁hole|>export { default } from 'ember-flexberry-designer/controllers/fd-interface-edit-form/new';<|fim▁end|>
| |
<|file_name|>MysqlDialect.java<|end_file_name|><|fim▁begin|>package com.xinfan.msgbox.service.dao.dialect;
public class MysqlDialect extends Dialect {
@Override
public boolean supportsLimit() {
return true;
}
@Override
public boolean supportsLimitOffset() {
return true;
}
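// For illustration, with the arguments ("select * from t", 20, "20", 10, "10")
// this method produces: select * from ( select * from t ) _t limit 20,30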
@Override
public String getLimitString(String sql, int offset, String offsetPlaceholder, int limit, String limitPlaceholder) {
sql = sql.trim();
boolean isForUpdate = false;
if (sql.toLowerCase().endsWith(" for update")) {
sql = sql.substring(0, sql.length() - 11);
isForUpdate = true;
}
StringBuffer pagingSelect = new StringBuffer(sql.length() + 100);
pagingSelect.append("select * from ( ");
pagingSelect.append(sql);
int endInt = Integer.parseInt(offsetPlaceholder) + Integer.parseInt(limitPlaceholder);
pagingSelect.append(" ) _t limit " + offset + "," + endInt);
if (isForUpdate) {
pagingSelect.append(" for update");
}
<|fim▁hole|> public String getCountSql(String sql)
{
sql = sql.trim();
if (sql.toLowerCase().endsWith(" for update")) {
sql = sql.substring(0, sql.length() - 11);
}
StringBuffer countSelect = new StringBuffer(sql.length() + 100);
countSelect.append("select count(*) from ( ");
countSelect.append(sql);
countSelect.append(" ) _t ");
return countSelect.toString();
}
}<|fim▁end|>
|
return pagingSelect.toString();
}
|
<|file_name|>issue-2995.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>fn bad (p: *const isize) {
let _q: &isize = p as &isize; //~ ERROR non-scalar cast
}
fn main() { }<|fim▁end|>
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
<|file_name|>generation.py<|end_file_name|><|fim▁begin|># Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""VexFlow labeled data generation.
Wraps the node.js generator, which generates a random measure of music as SVG,
and the ground truth glyphs present in the image as a `Page` message.
Each invocation generates a batch of images. There is a tradeoff between the
startup time of node.js for each invocation, and keeping the output size small
enough to pipe into Python.
The final outputs are positive and negative example patches. Positive examples
are centered on an outputted glyph, and have that glyph's type. Negative
examples are at least a few pixels away from any glyph, and have type NONE.
Since negative examples could be a few pixels away from a glyph, we get negative
examples that overlap with partial glyph(s), but are centered too far away from
a glyph to be considered a positive example. Currently, every single glyph
results in a single positive example, and negative examples are randomly
sampled.
All glyphs are emitted to RecordIO, where they are outputted in a single
collection for training. We currently do not store the entire generated image
anywhere. This could be added later in order to try other classification
approaches.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os.path
import random
import subprocess
import sys
import apache_beam as beam
from apache_beam.metrics import Metrics
import numpy as np
import tensorflow as tf
from google.protobuf import text_format
from moonlight import engine
from moonlight.protobuf import musicscore_pb2
from moonlight.staves import staffline_distance
from moonlight.staves import staffline_extractor
# Every image is expected to contain at least 3 glyphs.
POSITIVE_EXAMPLES_PER_IMAGE = 3
def _normalize_path(filename):
"""Normalizes a relative path to a command to spawn.
Args:
filename: String; relative or absolute path.
Returns:
The normalized path. This is necessary because in our use case,
vexflow_generator_pipeline will live in a different directory from
vexflow_generator, and there are symlinks to both directories in the same
parent directory. Without normalization, `..` would reference the parent of
the actual directory that was symlinked. With normalization, it references
the directory that contains the symlink to the working directory.
"""
if filename.startswith('/'):
return filename
else:
return os.path.normpath(
os.path.join(os.path.dirname(sys.argv[0]), filename))
class PageGenerationDoFn(beam.DoFn):
"""Generates the PNG images and ground truth for each batch.
Takes in a batch number, and outputs a tuple of PNG contents (bytes) and the
labeled staff (Staff message).
"""
def __init__(self, num_pages_per_batch, vexflow_generator_command,
svg_to_png_command):
self.num_pages_per_batch = num_pages_per_batch
self.vexflow_generator_command = vexflow_generator_command
self.svg_to_png_command = svg_to_png_command
def process(self, batch_num):
for page in self.get_pages_for_batch(batch_num, self.num_pages_per_batch):
staff = musicscore_pb2.Staff()
text_format.Parse(page['page'], staff)
# TODO(ringw): Fix the internal proto pickling issue so that we don't
# have to serialize the staff here.
yield self._svg_to_png(page['svg']), staff.SerializeToString()
def get_pages_for_batch(self, batch_num, num_pages_per_batch):
"""Generates the music score pages in a single batch.
The generator takes in a seed for the RNG for each page, and outputs all
pages at once. The seeds for all batches are consecutive for determinism,
starting from 0, but each seed to the Mersenne Twister RNG should result in
completely different output.
Args:
batch_num: The index of the batch to output.
num_pages_per_batch: The number of pages to generate in each batch.
Returns:
A list of dicts holding `svg` (XML text) and `page` (text-format
`tensorflow.moonlight.Staff` proto).
"""
return self.get_pages(
range(batch_num * num_pages_per_batch,
(batch_num + 1) * num_pages_per_batch))
def get_pages(self, seeds):
vexflow_generator_command = list(self.vexflow_generator_command)
# If vexflow_generator_command is relative, it is relative to the pipeline
# binary.
vexflow_generator_command[0] = _normalize_path(vexflow_generator_command[0])
seeds = ','.join(map(str, seeds))
return json.loads(
subprocess.check_output(vexflow_generator_command +
['--random_seeds=' + seeds]))
def _svg_to_png(self, svg):
svg_to_png_command = list(self.svg_to_png_command)
svg_to_png_command[0] = _normalize_path(svg_to_png_command[0])
popen = subprocess.Popen(<|fim▁hole|> svg_to_png_command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = popen.communicate(input=svg)
if popen.returncode != 0:
raise ValueError('convert failed with status %d\nstderr:\n%s' %
(popen.returncode, stderr))
return stdout
class PatchExampleDoFn(beam.DoFn):
"""Extracts labeled patches from generated VexFlow music scores."""
def __init__(self,
negative_example_distance,
patch_width,
negative_to_positive_example_ratio,
noise_fn=lambda x: x):
self.negative_example_distance = negative_example_distance
self.patch_width = patch_width
self.negative_to_positive_example_ratio = negative_to_positive_example_ratio
self.noise_fn = noise_fn
self.patch_counter = Metrics.counter(self.__class__, 'num_patches')
def start_bundle(self):
# TODO(ringw): Expose a cleaner way to set this value.
# The image is too small for the default min staffline distance score.
# pylint: disable=protected-access
staffline_distance._MIN_STAFFLINE_DISTANCE_SCORE = 100
self.omr = engine.OMREngine()
def process(self, item):
png_contents, staff_message = item
staff_message = musicscore_pb2.Staff.FromString(staff_message)
with tf.Session(graph=self.omr.graph) as sess:
# Load the image, then feed it in to apply noise.
# Randomly rotate the image and apply noise, then dump it back out as a
# PNG.
# TODO(ringw): Expose a way to pass in the image contents to the main
# OMR TF graph.
img = tf.to_float(tf.image.decode_png(png_contents))
# Collapse the RGB channels, if any. No-op for a monochrome PNG.
img = tf.reduce_mean(img[:, :, :3], axis=2)[:, :, None]
# Fix the stafflines being #999.
img = tf.clip_by_value(img * 2. - 255., 0., 255.)
img = self.noise_fn(img)
# Get a 2D uint8 image array for OMR.
noisy_image = sess.run(
tf.cast(tf.clip_by_value(img, 0, 255)[:, :, 0], tf.uint8))
# Run OMR staffline extraction and staffline distance estimation. The
# stafflines are used to get patches from the generated image.
stafflines, image_staffline_distance = sess.run(
[
self.omr.glyph_classifier.staffline_extractor.extract_staves(),
self.omr.structure.staff_detector.staffline_distance[0]
],
feed_dict={self.omr.image: noisy_image})
if stafflines.shape[0] != 1:
raise ValueError('Image should have one detected staff, got shape: ' +
str(stafflines.shape))
positive_example_count = 0
negative_example_whitelist = np.ones(
(stafflines.shape[staffline_extractor.Axes.POSITION],
stafflines.shape[staffline_extractor.Axes.X]), np.bool)
# Blacklist xs where the patch would overlap with either end.
negative_example_overlap_from_end = max(self.negative_example_distance,
self.patch_width // 2)
negative_example_whitelist[:, :negative_example_overlap_from_end] = False
negative_example_whitelist[:,
-negative_example_overlap_from_end - 1:] = False
all_positive_examples = []
for glyph in staff_message.glyph:
staffline = staffline_extractor.get_staffline(glyph.y_position,
stafflines[0])
glyph_x = int(
round(glyph.x *
self.omr.glyph_classifier.staffline_extractor.target_height /
(image_staffline_distance * self.omr.glyph_classifier
.staffline_extractor.staffline_distance_multiple)))
example = self._create_example(staffline, glyph_x, glyph.type)
if example:
staffline_index = staffline_extractor.y_position_to_index(
glyph.y_position,
stafflines.shape[staffline_extractor.Axes.POSITION])
# Blacklist the area adjacent to the glyph, even if it is not selected
# as a positive example below.
negative_example_whitelist[staffline_index, glyph_x -
self.negative_example_distance + 1:glyph_x +
self.negative_example_distance] = False
all_positive_examples.append(example)
positive_example_count += 1
for example in random.sample(all_positive_examples,
POSITIVE_EXAMPLES_PER_IMAGE):
yield example
self.patch_counter.inc()
negative_example_staffline, negative_example_x = np.where(
negative_example_whitelist)
negative_example_inds = np.random.choice(
len(negative_example_staffline),
int(positive_example_count * self.negative_to_positive_example_ratio))
negative_example_staffline = negative_example_staffline[
negative_example_inds]
negative_example_x = negative_example_x[negative_example_inds]
for staffline, x in zip(negative_example_staffline, negative_example_x):
example = self._create_example(stafflines[0, staffline], x,
musicscore_pb2.Glyph.NONE)
assert example, 'Negative example xs should always be in range'
yield example
self.patch_counter.inc()
def _create_example(self, staffline, x, label):
start_x = x - self.patch_width // 2
limit_x = x + self.patch_width // 2 + 1
assert limit_x - start_x == self.patch_width
# x is the last axis of staffline
if 0 <= start_x <= limit_x < staffline.shape[-1]:
patch = staffline[:, start_x:limit_x]
example = tf.train.Example()
example.features.feature['patch'].float_list.value.extend(patch.ravel())
example.features.feature['label'].int64_list.value.append(label)
example.features.feature['height'].int64_list.value.append(patch.shape[0])
example.features.feature['width'].int64_list.value.append(patch.shape[1])
return example
else:
return None<|fim▁end|>
| |
<|file_name|>webpack.config.dev.js<|end_file_name|><|fim▁begin|>var webpack = require('webpack');
var dotenv = require('dotenv').load();
module.exports = exports = require('./webpack.config');
exports.plugins = [
new webpack.DefinePlugin({
__API_URL__: JSON.stringify(process.env.LANDLINE_API_URL),
__PROD__: false,
__S3_BUCKET__: JSON.stringify('landline-dev')
}),
new webpack.NoErrorsPlugin()
];
exports.output = Object.create(exports.output);
exports.output.filename = exports.output.filename.replace(/\.js$/, ".dev.js");<|fim▁hole|><|fim▁end|>
|
exports.devtool = 'source-map';
|
<|file_name|>messaging-input-field.component.spec.ts<|end_file_name|><|fim▁begin|>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { MessagingInputFieldComponent } from './messaging-input-field.component';
describe('MessagingInputFieldComponent', () => {
let component: MessagingInputFieldComponent;
let fixture: ComponentFixture<MessagingInputFieldComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ MessagingInputFieldComponent ]
})
.compileComponents();
}));
beforeEach(() => {<|fim▁hole|>
it('should create', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|>
|
fixture = TestBed.createComponent(MessagingInputFieldComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
|
<|file_name|>userAction.js<|end_file_name|><|fim▁begin|>'use strict';
var auth = require('../middleware/auth');
var express = require('express');
var router = express.Router();
var achievements = require('../common/achievements');
var User = require('../models').users;
var Action = require('../models').actions;
var Log = require('../models').logs;
/**
* @api {get} /user/action Get user's action list
* @apiGroup User Action
*
* @apiExample {curl} Example usage:
* # Get API token via /api/user/token
* export API_TOKEN=fc35e6b2f27e0f5ef...
*
* curl -i -X GET -H "Authorization: Bearer $API_TOKEN" \
* http://localhost:3000/api/user/action
*
* @apiSuccessExample {json} Success-Response:
* {
* "pending": {},
* "inProgress": {
* "55b230d69a8c96f177154fa1": {
* "_id": "55b230d69a8c96f177154fa1",
* "name": "Disable standby",
* "description": "Turn off and unplug standby power of TV, stereo, computer, etc.",
* "effort": 2,
* "impact": 2,
* "category": null,
* "startedDate": "2015-08-11T10:31:39.934Z"
* },
* "55b230d69a8c96f177154fa2": {
* "startedDate": "2015-08-11T10:43:33.485Z",
* "impact": 3,
* "effort": 4,
* "description": "Find and seal up leaks",
* "name": "Leaks",
* "_id": "55b230d69a8c96f177154fa2"
* }
* },
* "done": {},
* "declined": {},
* "na": {}
* }
*/
router.get('/', auth.authenticate(), function(req, res) {
res.json(req.user.actions);
Log.create({
userId: req.user._id,
category: 'User Action',
type: 'get',
data: {}
});
});
/**
* @api {get} /user/action/suggested Get list of suggested user actions
* @apiGroup User Action
* @apiDescription Returns top three most recent actions that the user has not tried
*
* @apiExample {curl} Example usage:
* # Get API token via /api/user/token
* export API_TOKEN=fc35e6b2f27e0f5ef...
*
* curl -i -X GET -H "Authorization: Bearer $API_TOKEN" \
* http://localhost:3000/api/user/action/suggested
*
* @apiSuccessExample {json} Success-Response:
* [
* {
* "__v": 0,
* "_id": "555f0163688305b57c7cef6c",
* "description": "Disabling standby can save up to 10% in total electricity costs.",
* "effort": 2,
* "impact": 2,
* "name": "Disable standby on your devices",
* "ratings": []
* },
* {
* ...
* }
* ]
*
* @apiVersion 1.0.0
*/
router.get('/suggested', auth.authenticate(), function(req, res) {
Action.getSuggested(req.user, res.successRes);
Log.create({
userId: req.user._id,
category: 'User Action',
type: 'getSuggested',
data: res.successRes
});
});
<|fim▁hole|> * @apiGroup User Action
* @apiDescription Used to start/stop actions for a user.
*
* @apiParam {String} actionId Action's MongoId
* @apiParam {String} state Can be one of: 'pending', 'inProgress', 'alreadyDoing',
* 'done', 'canceled', 'declined', 'na'.
* @apiParam {Date} postponed Must be provided if state is 'pending'. Specifies
* at which time the user will be reminded of the action again.
*
* @apiExample {curl} Example usage:
* # Get API token via /api/user/token
* export API_TOKEN=fc35e6b2f27e0f5ef...
*
* curl -i -X POST -H "Authorization: Bearer $API_TOKEN" -H "Content-Type: application/json" -d \
* '{
* "state": "inProgress"
* }' \
* http://localhost:3000/api/user/action/55b230d69a8c96f177154fa1
*
* @apiSuccessExample {json} Success-Response:
* {
* "pending": {},
* "inProgress": {
* "55b230d69a8c96f177154fa1": {
* "_id": "55b230d69a8c96f177154fa1",
* "name": "Disable standby",
* "description": "Turn off and unplug standby power of TV, stereo, computer, etc.",
* "effort": 2,
* "impact": 2,
* "category": null,
* "startedDate": "2015-08-11T10:31:39.934Z"
* },
* "55b230d69a8c96f177154fa2": {
* "startedDate": "2015-08-11T10:43:33.485Z",
* "impact": 3,
* "effort": 4,
* "description": "Find and seal up leaks",
* "name": "Leaks",
* "_id": "55b230d69a8c96f177154fa2"
* }
* },
* "done": {},
* "declined": {},
* "na": {}
* }
*/
router.post('/:actionId', auth.authenticate(), function(req, res) {
User.setActionState(req.user, req.params.actionId, req.body.state, req.body.postponed,
function(err, user) {
if (!err) {
achievements.updateAchievement(req.user, 'actionsDone', function(oldVal) {
// make sure we never decrease the action count
return Math.max(oldVal, user.actions ? user.actions.done.length : 0);
});
}
res.successRes(err, user);
});
Log.create({
userId: req.user._id,
category: 'User Action',
type: 'update',
data: req.body
});
});
module.exports = router;<|fim▁end|>
|
/**
* @api {post} /user/action/:actionId Change state for user action
|
<|file_name|>browser_CaptivePortalWatcher_1.js<|end_file_name|><|fim▁begin|>"use strict";
add_task(setupPrefsAndRecentWindowBehavior);
let testcases = [
/**
* A portal is detected when there's no browser window,
* then a browser window is opened, and the portal is logged into
* and redirects to a different page. The portal tab should be added
* and focused when the window is opened, and left open after login
* since it redirected.
*/
function* test_detectedWithNoBrowserWindow_Redirect() {<|fim▁hole|> yield portalDetected();
let win = yield focusWindowAndWaitForPortalUI();
let browser = win.gBrowser.selectedTab.linkedBrowser;
let loadPromise =
BrowserTestUtils.browserLoaded(browser, false, CANONICAL_URL_REDIRECTED);
BrowserTestUtils.loadURI(browser, CANONICAL_URL_REDIRECTED);
yield loadPromise;
yield freePortal(true);
ensurePortalTab(win);
ensureNoPortalNotification(win);
yield closeWindowAndWaitForXulWindowVisible(win);
},
/**
* Test the various expected behaviors of the "Show Login Page" button
* in the captive portal notification. The button should be visible for
* all tabs except the captive portal tab, and when clicked, should
* ensure a captive portal tab is open and select it.
*/
function* test_showLoginPageButton() {
let win = yield openWindowAndWaitForFocus();
yield portalDetected();
let notification = ensurePortalNotification(win);
testShowLoginPageButtonVisibility(notification, "visible");
function testPortalTabSelectedAndButtonNotVisible() {
is(win.gBrowser.selectedTab, tab, "The captive portal tab should be selected.");
testShowLoginPageButtonVisibility(notification, "hidden");
}
let button = notification.querySelector("button.notification-button");
function* clickButtonAndExpectNewPortalTab() {
let p = BrowserTestUtils.waitForNewTab(win.gBrowser, CANONICAL_URL);
button.click();
let tab = yield p;
is(win.gBrowser.selectedTab, tab, "The captive portal tab should be selected.");
return tab;
}
// Simulate clicking the button. The portal tab should be opened and
// selected and the button should hide.
let tab = yield clickButtonAndExpectNewPortalTab();
testPortalTabSelectedAndButtonNotVisible();
// Close the tab. The button should become visible.
yield BrowserTestUtils.removeTab(tab);
ensureNoPortalTab(win);
testShowLoginPageButtonVisibility(notification, "visible");
// When the button is clicked, a new portal tab should be opened and
// selected.
tab = yield clickButtonAndExpectNewPortalTab();
// Open another arbitrary tab. The button should become visible. When it's clicked,
// the portal tab should be selected.
let anotherTab = yield BrowserTestUtils.openNewForegroundTab(win.gBrowser);
testShowLoginPageButtonVisibility(notification, "visible");
button.click();
is(win.gBrowser.selectedTab, tab, "The captive portal tab should be selected.");
// Close the portal tab and select the arbitrary tab. The button should become
// visible and when it's clicked, a new portal tab should be opened.
yield BrowserTestUtils.removeTab(tab);
win.gBrowser.selectedTab = anotherTab;
testShowLoginPageButtonVisibility(notification, "visible");
tab = yield clickButtonAndExpectNewPortalTab();
yield BrowserTestUtils.removeTab(anotherTab);
yield freePortal(true);
ensureNoPortalTab(win);
ensureNoPortalNotification(win);
yield closeWindowAndWaitForXulWindowVisible(win);
},
];
for (let testcase of testcases) {
add_task(testcase);
}<|fim▁end|>
| |
<|file_name|>dashboard.js<|end_file_name|><|fim▁begin|>/**
Template Controllers
@module Templates
*/
/**
The dashboard template
@class [template] views_dashboard
@constructor
*/
Template['views_dashboard'].helpers({
/**
Get all current wallets
@method (wallets)
*/
'wallets': function(disabled){
var wallets = Wallets.find({disabled: disabled}, {sort: {creationBlock: 1}}).fetch();
// sort wallets by balance
wallets.sort(Helpers.sortByBalance);
return wallets;
},
/**
Get all current accounts
@method (accounts)
*/
'accounts': function(){
// balance need to be present, to show only full inserted accounts (not ones added by mist.requestAccount)
var accounts = EthAccounts.find({name: {$exists: true}}, {sort: {name: 1}}).fetch();
accounts.sort(Helpers.sortByBalance);
return accounts;
},
/**
Are there any accounts?
@method (hasAccounts)
*/
'hasAccounts' : function() {
return (EthAccounts.find().count() > 0);
},
/**
Does any account hold at least the minimum balance of 1 ether?
@method (hasMinimumBalance)
*/
'hasMinimumBalance' : function() {
var enoughBalance = false;
_.each(_.pluck(EthAccounts.find({}).fetch(), 'balance'), function(bal){
if(new BigNumber(bal, '10').gt(1000000000000000000)) enoughBalance = true;
});
return enoughBalance;
},
/**
Get all transactions
@method (allTransactions)
*/
'allTransactions': function(){
return Transactions.find({}, {sort: {timestamp: -1}}).count();
},
/**
Returns an array of pending confirmations, from all accounts
@method (pendingConfirmations)
@return {Array}
*/
'pendingConfirmations': function(){
return _.pluck(PendingConfirmations.find({operation: {$exists: true}, confirmedOwners: {$ne: []}}).fetch(), '_id');
}
});
Template['views_dashboard'].events({
/**
Request to create an account in mist
@event click .create.account
*/
'click .create.account': function(e){<|fim▁hole|> mist.requestAccount(function(e, account) {
if(!e) {
account = account.toLowerCase();
EthAccounts.upsert({address: account}, {$set: {
address: account,
new: true
}});
}
});
}
});<|fim▁end|>
|
e.preventDefault();
|
<|file_name|>sk.js<|end_file_name|><|fim▁begin|>tinyMCE.addI18n('sk.simple',{
<|fim▁hole|>bullist_desc:"Zoznam s odr\u00E1\u017Ekami",
numlist_desc:"\u010C\u00EDslovan\u00FD zoznam",
undo_desc:"Sp\u00E4\u0165 (Ctrl+Z)",
redo_desc:"Znovu (Ctrl+Y)",
cleanup_desc:"Vy\u010Disti\u0165 neupraven\u00FD k\u00F3d"
});<|fim▁end|>
|
bold_desc:"Tu\u010Dn\u00FD text (Ctrl+B)",
italic_desc:"\u0160ikm\u00FD text (kurz\u00EDva) (Ctrl+I)",
underline_desc:"Pod\u010Diarknut\u00FD text (Ctrl+U)",
striketrough_desc:"Pre\u0161krtnut\u00FD text",
|
<|file_name|>partner_sequence_unify.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2013 jmesteve All Rights Reserved
# https://github.com/jmesteve
# <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.tools.translate import _<|fim▁hole|>
class res_partner(orm.Model):
_inherit = 'res.partner'
_columns = {
'ref': fields.char('Reference', size=64, select=True, required=True),
}
_sql_constraints = [
('uniq_reference', 'unique(ref)', "The reference must be unique"),
]
_defaults = {
'ref': '/',
}
# new register
def create(self, cr, uid, vals, context=None):
if not 'ref' in vals or vals['ref'] == '/':
vals['ref'] = self.pool.get('ir.sequence').get(cr, uid, 'res.partner')
return super(res_partner, self).create(cr, uid, vals, context)
# edit register
def write(self, cr, uid, ids, vals, context=None):
if not hasattr(ids, '__iter__'):
ids = [ids]
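# Partners whose ref is still the '/' placeholder get a fresh sequence value;
# all others are written directly with the provided vals.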
partners_without_code = self.search(cr, uid, [
('ref', 'in', [False, '/']), ('id', 'in', ids)], context=context)
direct_write_ids = set(ids) - set(partners_without_code)
super(res_partner, self).write(
cr, uid, list(direct_write_ids), vals, context)
for partner_id in partners_without_code:
vals['ref'] = self.pool.get('ir.sequence').get(cr, uid, 'res.partner')
super(res_partner, self).write(cr, uid, partner_id, vals, context)
return True
def copy(self, cr, uid, id, default={}, context=None):
partner = self.read(cr, uid, id, ['ref'], context=context)
if partner['ref']:
default.update({
'ref': partner['ref'] + _('-copy'),
})
return super(res_partner, self).copy(cr, uid, id, default, context)<|fim▁end|>
| |
<|file_name|>fs.rs<|end_file_name|><|fim▁begin|>/*
Rust - Std Misc Filesystem Operation
Licence : GNU GPL v3 or later
Author : Aurélien DESBRIÈRES
Mail : aurelien(at)hackers(dot)camp
Created on : June 2017
Write with Emacs-nox ───────────────┐
Rust ───────────────────────────────┘
*/
// fs.rs
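// Note: this example predates the `?` operator; `try!(expr)` is the older
// spelling of `expr?` and unwraps an `io::Result` or returns the error early.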
use std::fs;
use std::fs::{File, OpenOptions};
use std::io;
use std::io::prelude::*;
use std::os::unix;
use std::path::Path;
// A simple implementation of `% cat path`
fn cat(path: &Path) -> io::Result<String> {
let mut f = try!(File::open(path));
let mut s = String::new();
match f.read_to_string(&mut s) {
Ok(_) => Ok(s),
Err(e) => Err(e),
}
}
// A simple implementation of `% echo s > path`
fn echo(s: &str, path: &Path) -> io::Result<()> {
let mut f = try!(File::create(path));
f.write_all(s.as_bytes())
}
// A simple implementation of `% touch path` (ignores existing files)
fn touch(path: &Path) -> io::Result<()> {
match OpenOptions::new().create(true).write(true).open(path) {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
}
fn main() {
println!("`mkdir a`");
// Create a directory, returns `io::Result<()>`
match fs::create_dir("a") {
Err(why) => println!("! {:?}", why.kind()),
Ok(_) => {},
}
println!("`echo hello > a/b.txt`");
// The previous match can be simplified using the `unwrap_or_else` method
echo("hello", &Path::new("a/b.txt")).unwrap_or_else(|why| {
println!("! {:?}", why.kind());
});
println!("`mkdir -p a/c/d`");
// Recursively create a directory, returns `io::Result<()>`
fs::create_dir_all("a/c/d").unwrap_or_else(|why| {
println!("! {:?}", why.kind());
});
println!("`touch a/c/e.txt`");
touch(&Path::new("a/c/e.txt")).unwrap_or_else(|why| {
println!("! {:?}", why.kind());<|fim▁hole|> });
println!("`ln -s ../b.txt a/c/b.txt`");
// Create a symbolic link, returns `io::Result<()>`
if cfg!(target_family = "unix") {
unix::fs::symlink("../b.txt", "a/c/b.txt").unwrap_or_else(|why| {
println!("! {:?}", why.kind());
});
}
println!("`cat a/c/b.txt`");
match cat(&Path::new("a/c/b.txt")) {
Err(why) => println!("! {:?}", why.kind()),
Ok(s) => println!("> {}", s),
}
println!("`ls a`");
// Read the contents of a directory, returns `io::Result<Vec<Path>>`
match fs::read_dir("a") {
Err(why) => println!("! {:?}", why.kind()),
Ok(paths) => for path in paths {
println!("> {:?}", path.unwrap().path());
},
}
println!("`rm a/c/e.txt`");
// Remove a file, returns `io::Result<()>`
fs::remove_file("a/c/e.txt").unwrap_or_else(|why| {
println!("! {:?}", why.kind());
});
println!("`rmdir a/c/d`");
// Remove an empty directory, returns `io::Result<()>`
fs::remove_dir("a/c/d").unwrap_or_else(|why| {
println!("! {:?}", why.kind());
});
}<|fim▁end|>
| |
<|file_name|>RootClosedMapDataNode.java<|end_file_name|><|fim▁begin|>/*
* Swift Parallel Scripting Language (http://swift-lang.org)
* Code from Java CoG Kit Project (see notice below) with modifications.
*
* Copyright 2005-2014 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//----------------------------------------------------------------------
//This code is developed as part of the Java CoG Kit project
//The terms of the license can be found at http://www.cogkit.org/license
//This message may not be removed or altered.
//----------------------------------------------------------------------
/*
* Created on Mar 28, 2014
*/
package org.griphyn.vdl.mapping.nodes;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import k.thr.LWThread;
import org.griphyn.vdl.karajan.Pair;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.DuplicateMappingChecker;
import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.Mapper;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.RootHandle;
import org.griphyn.vdl.mapping.file.FileGarbageCollector;
import org.griphyn.vdl.type.Field;
import org.griphyn.vdl.type.Types;
public class RootClosedMapDataNode extends AbstractClosedDataNode implements ArrayHandle, RootHandle {
private int line = -1;
private LWThread thread;
private Mapper mapper;
private Map<Comparable<?>, DSHandle> values;
public RootClosedMapDataNode(Field field, Map<?, ?> values, DuplicateMappingChecker dmChecker) {
super(field);
setValues(values);
if (getType().itemType().hasMappedComponents()) {
this.mapper = new InitMapper(dmChecker);
}
}
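// Raw map entries are wrapped as needed: DSHandle values are stored as-is,
// while String/Integer/Double values are boxed into new primitive data nodes.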
private void setValues(Map<?, ?> m) {
values = new HashMap<Comparable<?>, DSHandle>();
for (Map.Entry<?, ? extends Object> e : m.entrySet()) {
Comparable<?> k = (Comparable<?>) e.getKey();
Object n = e.getValue();
if (n instanceof DSHandle) {
values.put(k, (DSHandle) n);
}
else if (n instanceof String) {
values.put(k,
NodeFactory.newNode(Field.Factory.createField(k, Types.STRING), getRoot(), this, n));
}
else if (n instanceof Integer) {
values.put(k,
NodeFactory.newNode(Field.Factory.createField(k, Types.INT), getRoot(), this, n));
}
else if (n instanceof Double) {
values.put(k,
NodeFactory.newNode(Field.Factory.createField(k, Types.FLOAT), getRoot(), this, n));
}
else {
throw new RuntimeException(
"An array variable can only be initialized by a list of DSHandle or primitive values");
}
}
}
@Override
public RootHandle getRoot() {
return this;
}
@Override
public DSHandle getParent() {
return null;
}
@Override
public Path getPathFromRoot() {
return Path.EMPTY_PATH;
}
@Override
public void init(Mapper mapper) {
if (!getType().itemType().hasMappedComponents()) {
return;
}
if (mapper == null) {
initialized();
}
else {
this.getInitMapper().setMapper(mapper);
this.mapper.initialize(this);
}
}
@Override
public void mapperInitialized(Mapper mapper) {
synchronized(this) {
this.mapper = mapper;
}
initialized();
}
protected void initialized() {
if (variableTracer.isEnabled()) {
variableTracer.trace(thread, line, getName() + " INITIALIZED " + mapper);
}
}
public synchronized Mapper getMapper() {
if (mapper instanceof InitMapper) {
return ((InitMapper) mapper).getMapper();
}
else {
return mapper;
}
}
@Override
public int getLine() {
return line;
}
@Override
public void setLine(int line) {
this.line = line;
}
@Override
public void setThread(LWThread thread) {
this.thread = thread;
}
@Override
public LWThread getThread() {
return thread;
}
@Override
public String getName() {
return (String) getField().getId();
}
@Override
protected AbstractDataNode getParentNode() {
return null;
}
@Override
public Mapper getActualMapper() {
return mapper;
}
@Override
public void closeArraySizes() {
// already closed<|fim▁hole|> public Object getValue() {
return values;
}
@Override
public Map<Comparable<?>, DSHandle> getArrayValue() {
return values;
}
@Override
public boolean isArray() {
return true;
}
@Override
public Iterable<List<?>> entryList() {
final Iterator<Map.Entry<Comparable<?>, DSHandle>> i = values.entrySet().iterator();
return new Iterable<List<?>>() {
@Override
public Iterator<List<?>> iterator() {
return new Iterator<List<?>>() {
@Override
public boolean hasNext() {
return i.hasNext();
}
@Override
public List<?> next() {
Map.Entry<Comparable<?>, DSHandle> e = i.next();
return new Pair<Object>(e.getKey(), e.getValue());
}
@Override
public void remove() {
i.remove();
}
};
}
};
}
@Override
protected Object getRawValue() {
return values;
}
@Override
protected void getFringePaths(List<Path> list, Path myPath)
throws HandleOpenException {
for (Map.Entry<Comparable<?>, DSHandle> e : values.entrySet()) {
DSHandle h = e.getValue();
if (h instanceof AbstractDataNode) {
AbstractDataNode ad = (AbstractDataNode) h;
ad.getFringePaths(list, myPath.addLast(e.getKey()));
}
else {
list.addAll(h.getFringePaths());
}
}
}
@Override
public void getLeaves(List<DSHandle> list) throws HandleOpenException {
for (Map.Entry<Comparable<?>, DSHandle> e : values.entrySet()) {
DSHandle h = e.getValue();
if (h instanceof AbstractDataNode) {
AbstractDataNode ad = (AbstractDataNode) h;
ad.getLeaves(list);
}
else {
list.addAll(h.getLeaves());
}
}
}
@Override
public int arraySize() {
return values.size();
}
@Override
protected void clean0() {
FileGarbageCollector.getDefault().clean(this);
super.clean0();
}
@Override
protected void finalize() throws Throwable {
if (!isCleaned()) {
clean();
}
super.finalize();
}
}<|fim▁end|>
|
}
@Override
|
<|file_name|>terminated-multibyte.js<|end_file_name|><|fim▁begin|>const sizeOf = {
object: function () {
return function (object) {
let $start = 0
$start += 1 * object.array.length + 2
return $start
}
} ()
}
const serializer = {
all: {
object: function () {
return function (object, $buffer, $start) {
let $i = []
for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) {
$buffer[$start++] = object.array[$i[0]] & 0xff
}
$buffer[$start++] = 0xd
$buffer[$start++] = 0xa
return { start: $start, serialize: null }
}
} ()
},
inc: {
object: function () {
return function (object, $step = 0, $i = []) {
let $_, $bite
return function $serialize ($buffer, $start, $end) {
for (;;) {
switch ($step) {
case 0:
$i[0] = 0
$step = 1
case 1:
$bite = 0
$_ = object.array[$i[0]]
case 2:
while ($bite != -1) {
if ($start == $end) {
$step = 2
return { start: $start, serialize: $serialize }
}
$buffer[$start++] = $_ >>> $bite * 8 & 0xff
$bite--
}
if (++$i[0] != object.array.length) {
$step = 1
continue
}
$step = 3
case 3:
if ($start == $end) {
$step = 3
return { start: $start, serialize: $serialize }
}
$buffer[$start++] = 0xd
case 4:
if ($start == $end) {
$step = 4
return { start: $start, serialize: $serialize }
}
$buffer[$start++] = 0xa
case 5:
}
break
}
return { start: $start, serialize: null }
}
}
} ()
}
}
const parser = {
all: {
object: function () {
return function ($buffer, $start) {
let $i = []
let object = {
array: []
}
$i[0] = 0
for (;;) {
if (
$buffer[$start] == 0xd &&
$buffer[$start + 1] == 0xa
) {
$start += 2
break
}
object.array[$i[0]] = $buffer[$start++]
$i[0]++
}
return object
}
} ()
},
inc: {
object: function () {
return function (object, $step = 0, $i = []) {
let $length = 0
return function $parse ($buffer, $start, $end) {
for (;;) {
switch ($step) {
case 0:
object = {
array: []
}
case 1:
$i[0] = 0
case 2:
$step = 2
if ($start == $end) {
return { start: $start, object: null, parse: $parse }
}
if ($buffer[$start] != 0xd) {
$step = 4
continue
}
$start++
$step = 3
case 3:
$step = 3
if ($start == $end) {
return { start: $start, object: null, parse: $parse }
}
if ($buffer[$start] != 0xa) {
$step = 4
$parse(Buffer.from([ 0xd ]), 0, 1)
continue
}
$start++
$step = 7
continue
case 4:
case 5:
if ($start == $end) {
$step = 5
return { start: $start, object: null, parse: $parse }
}
object.array[$i[0]] = $buffer[$start++]
case 6:
$i[0]++
$step = 2
continue
case 7:
}
return { start: $start, object: object, parse: null }
break
}
}
}
} ()
}
}
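// Hypothetical round-trip sketch (assumes Node's Buffer; all other names come
// from this module):
// const { sizeOf, serializer, parser } = require('./terminated-multibyte')
// const object = { array: [ 1, 2, 3 ] }
// const buffer = Buffer.alloc(sizeOf.object(object)) // 1 byte per element + 2-byte 0xd 0xa terminator
// serializer.all.object(object, buffer, 0)
// parser.all.object(buffer, 0) // => { array: [ 1, 2, 3 ] }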
module.exports = {
sizeOf: sizeOf,
serializer: {
all: serializer.all,
inc: serializer.inc,
bff: function ($incremental) {
return {
object: function () {
return function (object) {
return function ($buffer, $start, $end) {
let $i = []
if ($end - $start < 2 + object.array.length * 1) {
return $incremental.object(object, 0, $i)($buffer, $start, $end)
}
for ($i[0] = 0; $i[0] < object.array.length; $i[0]++) {
$buffer[$start++] = object.array[$i[0]] & 0xff
}
$buffer[$start++] = 0xd
$buffer[$start++] = 0xa
return { start: $start, serialize: null }
}
}
} ()
}
} (serializer.inc)
},
parser: {
all: parser.all,
inc: parser.inc,
bff: function ($incremental) {
return {
object: function () {
return function () {
return function ($buffer, $start, $end) {
let $i = []
let object = {
array: []
}
$i[0] = 0
for (;;) {
if ($end - $start < 2) {
return $incremental.object(object, 2, $i)($buffer, $start, $end)
}
if (
$buffer[$start] == 0xd &&<|fim▁hole|> $buffer[$start + 1] == 0xa
) {
$start += 2
break
}
if ($end - $start < 1) {
return $incremental.object(object, 4, $i)($buffer, $start, $end)
}
object.array[$i[0]] = $buffer[$start++]
$i[0]++
}
return { start: $start, object: object, parse: null }
}
} ()
}
}
} (parser.inc)
}
}<|fim▁end|>
| |
<|file_name|>namespace_a_e_mmailer.js<|end_file_name|><|fim▁begin|>var namespace_a_e_mmailer =
[
[ "AEMMailer", "d5/d12/class_a_e_mmailer_1_1_a_e_m_mailer.html", "d5/d12/class_a_e_mmailer_1_1_a_e_m_mailer" ],
[ "Error", "dc/d02/class_a_e_mmailer_1_1_error.html", null ],<|fim▁hole|><|fim▁end|>
|
[ "MissingEmailAccountsException", "d9/d2a/class_a_e_mmailer_1_1_missing_email_accounts_exception.html", null ],
[ "NoPasswordException", "d1/d47/class_a_e_mmailer_1_1_no_password_exception.html", null ]
];
|
<|file_name|>ExportXmlParser.cpp<|end_file_name|><|fim▁begin|>//-------------------------------------------------------------------------------------
// ExportXmlParser.cpp
//
// Simple callback non-validating XML parser implementation.
//
// Microsoft XNA Developer Connection.
// Copyright © Microsoft Corporation. All rights reserved.
//-------------------------------------------------------------------------------------
#include "stdafx.h"
#include "ExportXmlParser.h"
namespace ATG
{
//-------------------------------------------------------------------------------------
// Name: XMLParser::XMLParser
//-------------------------------------------------------------------------------------
XMLParser::XMLParser()
{
m_pWritePtr = m_pWriteBuf;
m_pReadPtr = m_pReadBuf;
m_pISAXCallback = NULL;
m_hFile = INVALID_HANDLE_VALUE;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::~XMLParser
//-------------------------------------------------------------------------------------
XMLParser::~XMLParser()
{
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::FillBuffer
// Desc: Reads a block from the current open file
//-------------------------------------------------------------------------------------
VOID XMLParser::FillBuffer()
{
DWORD NChars;
m_pReadPtr = m_pReadBuf;
if( m_hFile == NULL )
{
if( m_uInXMLBufferCharsLeft > XML_READ_BUFFER_SIZE )
NChars = XML_READ_BUFFER_SIZE;
else
NChars = m_uInXMLBufferCharsLeft;
CopyMemory( m_pReadBuf, m_pInXMLBuffer, NChars );
m_uInXMLBufferCharsLeft -= NChars;
m_pInXMLBuffer += NChars;
}
else
{
ReadFile( m_hFile, m_pReadBuf, XML_READ_BUFFER_SIZE, &NChars, NULL );
}
m_dwCharsConsumed += NChars;
__int64 iProgress = ( (__int64)m_dwCharsConsumed * 1000 ) / (__int64)m_dwCharsTotal;
m_pISAXCallback->SetParseProgress( (DWORD)iProgress );
m_pReadBuf[ NChars ] = '\0';
m_pReadBuf[ NChars + 1] = '\0';
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::SkipNextAdvance
// Desc: Puts the last character read back on the input stream
//-------------------------------------------------------------------------------------
VOID XMLParser::SkipNextAdvance()
{
m_bSkipNextAdvance = TRUE;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::ConsumeSpace
// Desc: Skips spaces in the current stream
//-------------------------------------------------------------------------------------
HRESULT XMLParser::ConsumeSpace()
{
HRESULT hr;
// Skip spaces
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
while ( ( m_Ch == ' ' ) || ( m_Ch == '\t' ) ||
( m_Ch == '\n' ) || ( m_Ch == '\r' ) )
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
}
SkipNextAdvance();
return S_OK;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::ConvertEscape
// Desc: Copies and converts an escape sequence into m_pWriteBuf
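// e.g. &lt; -> '<', &#x41; and &#65; -> 'A', &quot; -> '"'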
//-------------------------------------------------------------------------------------
HRESULT XMLParser::ConvertEscape()
{
HRESULT hr;
WCHAR wVal = 0;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
// all escape sequences start with &, so ignore the first character
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if ( m_Ch == '#' ) // character as hex or decimal
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if ( m_Ch == 'x' ) // hex number
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
while ( m_Ch != ';' )
{
wVal *= 16;
if ( ( m_Ch >= '0' ) && ( m_Ch <= '9' ) )
{
wVal += m_Ch - '0';
}
else if ( ( m_Ch >= 'a' ) && ( m_Ch <= 'f' ) )
{
wVal += m_Ch - 'a' + 10;
}
else if ( ( m_Ch >= 'A' ) && ( m_Ch <= 'F' ) )
{
wVal += m_Ch - 'A' + 10;
}
else
{
Error( E_INVALID_XML_SYNTAX, "Expected hex digit as part of &#x escape sequence" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
}
}
else // decimal number
{
while ( m_Ch != ';' )
{
wVal *= 10;
if ( ( m_Ch >= '0' ) && ( m_Ch <= '9' ) )
{
wVal += m_Ch - '0';
}
else
{
Error( E_INVALID_XML_SYNTAX, "Expected decimal digit as part of &# escape sequence" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
}
}
// copy character into the buffer
m_Ch = wVal;
return S_OK;
}
// must be an entity reference
WCHAR *pEntityRefVal = m_pWritePtr;
UINT EntityRefLen;
SkipNextAdvance();
if( FAILED( hr = AdvanceName() ) )
return hr;
EntityRefLen = (UINT)( m_pWritePtr - pEntityRefVal );
m_pWritePtr = pEntityRefVal;
if ( EntityRefLen == 0 )
{
Error( E_INVALID_XML_SYNTAX, "Expecting entity name after &" );
return E_INVALID_XML_SYNTAX;
}
if( !wcsncmp( pEntityRefVal, L"lt", EntityRefLen ) )
wVal = '<';
else if( !wcsncmp( pEntityRefVal, L"gt", EntityRefLen ) )
wVal = '>';
else if( !wcsncmp( pEntityRefVal, L"amp", EntityRefLen ) )
wVal = '&';
else if( !wcsncmp( pEntityRefVal, L"apos", EntityRefLen ) )
wVal = '\'';
else if( !wcsncmp( pEntityRefVal, L"quot", EntityRefLen ) )
wVal = '"';
else
{
Error( E_INVALID_XML_SYNTAX, "Unrecognized entity name after & - (should be lt, gt, amp, apos, or quot)" );
return E_INVALID_XML_SYNTAX; // return false if unrecognized token sequence
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != ';' )
{
Error( E_INVALID_XML_SYNTAX, "Expected terminating ; for entity reference" );
return E_INVALID_XML_SYNTAX; // malformed reference - needs terminating ;
}
m_Ch = wVal;
return S_OK;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::AdvanceAttrVal
// Desc: Copies an attribute value into m_pWriteBuf, skipping surrounding quotes
//-------------------------------------------------------------------------------------
HRESULT XMLParser::AdvanceAttrVal()
{
HRESULT hr;
WCHAR wQuoteChar;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( ( m_Ch != '"' ) && ( m_Ch != '\'' ) )
{
Error( E_INVALID_XML_SYNTAX, "Attribute values must be enclosed in quotes" );
return E_INVALID_XML_SYNTAX;
}
wQuoteChar = m_Ch;
for( ;; )
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
else if( m_Ch == wQuoteChar )
break;
else if( m_Ch == '&' )
{
SkipNextAdvance();
if( FAILED( hr = ConvertEscape() ) )
return hr;
}
else if( m_Ch == '<' )
{
Error( E_INVALID_XML_SYNTAX, "Illegal character '<' in element tag" );
return E_INVALID_XML_SYNTAX;
}
// copy character into the buffer
if( m_pWritePtr - m_pWriteBuf >= XML_WRITE_BUFFER_SIZE )
{
Error( E_INVALID_XML_SYNTAX, "Total element tag size may not be more than %d characters", XML_WRITE_BUFFER_SIZE );
return E_INVALID_XML_SYNTAX;
}
*m_pWritePtr = m_Ch;
m_pWritePtr++;
}
return S_OK;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::AdvanceName
// Desc: Copies a name into the m_pWriteBuf - returns S_OK on success, an error HRESULT on failure
// Ignores leading whitespace. Currently does not support unicode names
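// e.g. accepts "xs:element" or "my-attr.1"; rejects names starting with a digit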
//-------------------------------------------------------------------------------------
HRESULT XMLParser::AdvanceName()
{
HRESULT hr;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( ( ( m_Ch < 'A' ) || ( m_Ch > 'Z' ) ) &&
( ( m_Ch < 'a' ) || ( m_Ch > 'z' ) ) &&
( m_Ch != '_' ) && ( m_Ch != ':' ) )
{
Error( E_INVALID_XML_SYNTAX, "Names must start with an alphabetic character or _ or :" );
return E_INVALID_XML_SYNTAX;
}
while( ( ( m_Ch >= 'A' ) && ( m_Ch <= 'Z' ) ) ||
( ( m_Ch >= 'a' ) && ( m_Ch <= 'z' ) ) ||
( ( m_Ch >= '0' ) && ( m_Ch <= '9' ) ) ||
( m_Ch == '_' ) || ( m_Ch == ':' ) ||
( m_Ch == '-' ) || ( m_Ch == '.' ) )
{
if( m_pWritePtr - m_pWriteBuf >= XML_WRITE_BUFFER_SIZE )
{
Error( E_INVALID_XML_SYNTAX, "Total element tag size may not be more than %d characters", XML_WRITE_BUFFER_SIZE );
return E_INVALID_XML_SYNTAX;
}
*m_pWritePtr = m_Ch;
m_pWritePtr++;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
}
SkipNextAdvance();
return S_OK;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::AdvanceCharacter
// Desc: Copies the character at *m_pReadPtr to m_Ch
// handling difference in UTF16 / UTF8, and big/little endian
// and getting another chunk of the file if needed
// Returns S_OK if there are more characters, E_ABORT for no characters to read
//-------------------------------------------------------------------------------------
HRESULT XMLParser::AdvanceCharacter( BOOL bOkToFail )
{
if( m_bSkipNextAdvance )
{
m_bSkipNextAdvance = FALSE;
return S_OK;
}
// If we hit EOF in the middle of a character,
// it's ok-- we'll just have a corrupt last character
// (the buffer is padded with double NULLs )
if( *m_pReadPtr == '\0' )
{
// Read more from the file
FillBuffer();
// We are at EOF if it is still NULL
if( *m_pReadPtr == '\0' )
{
if( !bOkToFail )
{
Error( E_INVALID_XML_SYNTAX, "Unexpected EOF while parsing XML file" );
return E_INVALID_XML_SYNTAX;
}
else
{
return E_FAIL;
}
}
}
if( m_bUnicode == FALSE )
{
m_Ch = *((CHAR *)m_pReadPtr);
m_pReadPtr++;
}
else // if( m_bUnicode == TRUE )
{
m_Ch = *((WCHAR *)m_pReadPtr);
if( m_bReverseBytes )
{
m_Ch = ( m_Ch << 8 ) + ( m_Ch >> 8 );
}
m_pReadPtr += 2;
}
if( m_Ch == '\n' )
{
m_pISAXCallback->m_LineNum++;
m_pISAXCallback->m_LinePos = 0;
}
else if( m_Ch != '\r' )
m_pISAXCallback->m_LinePos++;
return S_OK;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::AdvanceElement
// Desc: Builds <element> data, calls callback
//-------------------------------------------------------------------------------------
HRESULT XMLParser::AdvanceElement()
{
HRESULT hr;
// write ptr at the beginning of the buffer
m_pWritePtr = m_pWriteBuf;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
// if first character wasn't '<', we wouldn't be here
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch == '!' )
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if ( m_Ch == '-' )
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != '-' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '-' after '<!-'" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceComment() ) )
return hr;
return S_OK;
}
if( m_Ch != '[' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != 'C' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != 'D' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != 'A' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != 'T' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != 'A' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != '[' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = AdvanceCDATA() ) )
return hr;
}
else if( m_Ch == '/' )
{
WCHAR *pEntityRefVal = m_pWritePtr;
if( FAILED( hr = AdvanceName() ) )
return hr;
if( FAILED( m_pISAXCallback->ElementEnd( pEntityRefVal,
(UINT) ( m_pWritePtr - pEntityRefVal ) ) ) )
return E_ABORT;
if( FAILED( hr = ConsumeSpace() ) )
return hr;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != '>' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '>' after name for closing entity reference" );
return E_INVALID_XML_SYNTAX;
}
}
else if( m_Ch == '?' )
{
// just skip any xml header tag since not really important after identifying character set
for( ;; )
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if ( m_Ch == '>' )
return S_OK;
}
}
else
{
XMLAttribute Attributes[ XML_MAX_ATTRIBUTES_PER_ELEMENT ];
UINT NumAttrs;
WCHAR *pEntityRefVal = m_pWritePtr;
UINT EntityRefLen;
NumAttrs = 0;
SkipNextAdvance();
// Entity tag
if( FAILED( hr = AdvanceName() ) )
return hr;
EntityRefLen = (UINT)( m_pWritePtr - pEntityRefVal );
if( FAILED( hr = ConsumeSpace() ) )
return hr;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
// read attributes
while( ( m_Ch != '>' ) && ( m_Ch != '/' ) )
{
SkipNextAdvance();
if ( NumAttrs >= XML_MAX_ATTRIBUTES_PER_ELEMENT )
{
Error( E_INVALID_XML_SYNTAX, "Elements may not have more than %d attributes", XML_MAX_ATTRIBUTES_PER_ELEMENT );
return E_INVALID_XML_SYNTAX;
}
Attributes[ NumAttrs ].strName = m_pWritePtr;
// Attribute name
if( FAILED( hr = AdvanceName() ) )
return hr;
Attributes[ NumAttrs ].NameLen = (UINT)( m_pWritePtr - Attributes[ NumAttrs ].strName );
if( FAILED( hr = ConsumeSpace() ) )
return hr;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != '=' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '=' character after attribute name" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( hr = ConsumeSpace() ) )
return hr;
Attributes[ NumAttrs ].strValue = m_pWritePtr;
if( FAILED( hr = AdvanceAttrVal() ) )
return hr;
Attributes[ NumAttrs ].ValueLen = (UINT)( m_pWritePtr -
Attributes[ NumAttrs ].strValue );
++NumAttrs;
if( FAILED( hr = ConsumeSpace() ) )
return hr;
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
}
if( m_Ch == '/' )
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if( m_Ch != '>' )
{
Error( E_INVALID_XML_SYNTAX, "Expecting '>' after '/' in element tag" );
return E_INVALID_XML_SYNTAX;
}
if( FAILED( m_pISAXCallback->ElementBegin( pEntityRefVal, EntityRefLen,
Attributes, NumAttrs ) ) )
return E_ABORT;
if( FAILED( m_pISAXCallback->ElementEnd( pEntityRefVal, EntityRefLen ) ) )
return E_ABORT;
}
else
{
if( FAILED( m_pISAXCallback->ElementBegin( pEntityRefVal, EntityRefLen,
Attributes, NumAttrs ) ) )
return E_ABORT;
}
}
return S_OK;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::AdvanceCDATA
// Desc: Read a CDATA section
//-------------------------------------------------------------------------------------
HRESULT XMLParser::AdvanceCDATA()
{
HRESULT hr;
WORD wStage = 0;
if( FAILED( m_pISAXCallback->CDATABegin() ) )
return E_ABORT;
for( ;; )
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
*m_pWritePtr = m_Ch;
m_pWritePtr++;
if( ( m_Ch == ']' ) && ( wStage == 0 ) )
wStage = 1;
else if( ( m_Ch == ']' ) && ( wStage == 1 ) )
wStage = 2;
else if( ( m_Ch == '>' ) && ( wStage == 2 ) )
{
m_pWritePtr -= 3;
break;
}
else
wStage = 0;
if( m_pWritePtr - m_pWriteBuf >= XML_WRITE_BUFFER_SIZE )
{
if( FAILED( m_pISAXCallback->CDATAData( m_pWriteBuf, (UINT)( m_pWritePtr - m_pWriteBuf ), TRUE ) ) )
return E_ABORT;
m_pWritePtr = m_pWriteBuf;
}
}
if( FAILED( m_pISAXCallback->CDATAData( m_pWriteBuf, (UINT)( m_pWritePtr - m_pWriteBuf ), FALSE ) ) )
return E_ABORT;
m_pWritePtr = m_pWriteBuf;
if( FAILED( m_pISAXCallback->CDATAEnd() ) )
return E_ABORT;
return S_OK;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::AdvanceComment
// Desc: Skips over a comment
//-------------------------------------------------------------------------------------
HRESULT XMLParser::AdvanceComment()
{
HRESULT hr;
WORD wStage;
wStage = 0;
for( ;; )
{
if( FAILED( hr = AdvanceCharacter() ) )
return hr;
if (( m_Ch == '-' ) && ( wStage == 0 ))
wStage = 1;
else if (( m_Ch == '-' ) && ( wStage == 1 ))
wStage = 2;
else if (( m_Ch == '>' ) && ( wStage == 2 ))
break;
else
wStage = 0;
}
return S_OK;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::RegisterSAXCallbackInterface
// Desc: Registers callback interface
//-------------------------------------------------------------------------------------
VOID XMLParser::RegisterSAXCallbackInterface( ISAXCallback *pISAXCallback )
{
m_pISAXCallback = pISAXCallback;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::GetSAXCallbackInterface
// Desc: Returns current callback interface
//-------------------------------------------------------------------------------------
ISAXCallback* XMLParser::GetSAXCallbackInterface()
{
return m_pISAXCallback;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::MainParseLoop
// Desc: Main Loop to Parse Data - source agnostic
//-------------------------------------------------------------------------------------
HRESULT XMLParser::MainParseLoop()
{
BOOL bWhiteSpaceOnly = TRUE;
HRESULT hr = S_OK;
if( FAILED( m_pISAXCallback->StartDocument() ) )
return E_ABORT;
m_pWritePtr = m_pWriteBuf;
FillBuffer();
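    // Sniff the encoding from the first bytes: a UTF-16 BOM (0xFEFF or
    // 0xFFFE) fixes the byte order explicitly, a bare '<' read as a WCHAR in
    // either byte order implies BOM-less UTF-16, and a single 0x3C byte
    // implies an 8-bit encoding.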
if ( *((WCHAR *) m_pReadBuf ) == 0xFEFF )
{
m_bUnicode = TRUE;
m_bReverseBytes = FALSE;
m_pReadPtr += 2;
}
else if ( *((WCHAR *) m_pReadBuf ) == 0xFFFE )
{
m_bUnicode = TRUE;
m_bReverseBytes = TRUE;
m_pReadPtr += 2;
}
else if ( *((WCHAR *) m_pReadBuf ) == 0x003C )
{
m_bUnicode = TRUE;
m_bReverseBytes = FALSE;
}
else if ( *((WCHAR *) m_pReadBuf ) == 0x3C00 )
{
m_bUnicode = TRUE;
m_bReverseBytes = TRUE;
}
else if ( m_pReadBuf[ 0 ] == 0x3C )
{
m_bUnicode = FALSE;
m_bReverseBytes = FALSE;
}
else
{
Error( E_INVALID_XML_SYNTAX, "Unrecognized encoding (parser does not support UTF-8 language encodings)" );
return E_INVALID_XML_SYNTAX;
}
for( ;; )
{
if( FAILED( AdvanceCharacter( TRUE ) ) )
{
if ( ( (UINT) ( m_pWritePtr - m_pWriteBuf ) != 0 ) && ( !bWhiteSpaceOnly ) )
{
if( FAILED( m_pISAXCallback->ElementContent( m_pWriteBuf, (UINT)( m_pWritePtr - m_pWriteBuf ), FALSE ) ) )
return E_ABORT;
bWhiteSpaceOnly = TRUE;
}
if( FAILED( m_pISAXCallback->EndDocument() ) )
return E_ABORT;
return S_OK;
}
if( m_Ch == '<' )
{
if( ( (UINT) ( m_pWritePtr - m_pWriteBuf ) != 0 ) && ( !bWhiteSpaceOnly ) )
{
if( FAILED( m_pISAXCallback->ElementContent( m_pWriteBuf, (UINT)( m_pWritePtr - m_pWriteBuf ), FALSE ) ) )
return E_ABORT;
bWhiteSpaceOnly = TRUE;
}
SkipNextAdvance();
m_pWritePtr = m_pWriteBuf;
if( FAILED( hr = AdvanceElement() ) )
return hr;
m_pWritePtr = m_pWriteBuf;
}
else
{
if( m_Ch == '&' )
{
SkipNextAdvance();
if( FAILED( hr = ConvertEscape() ) )
return hr;
}
if( bWhiteSpaceOnly && ( m_Ch != ' ' ) && ( m_Ch != '\n' ) && ( m_Ch != '\r' ) &&
( m_Ch != '\t' ) )
{
bWhiteSpaceOnly = FALSE;
}
*m_pWritePtr = m_Ch;
m_pWritePtr++;
if( m_pWritePtr - m_pWriteBuf >= XML_WRITE_BUFFER_SIZE )
{
if( !bWhiteSpaceOnly )
{
if( FAILED( m_pISAXCallback->ElementContent( m_pWriteBuf,
( UINT ) ( m_pWritePtr - m_pWriteBuf ),
TRUE ) ) )
{
return E_ABORT;
}
}
m_pWritePtr = m_pWriteBuf;
bWhiteSpaceOnly = TRUE;
}
}
}
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::ParseXMLFile
// Desc: Builds element data
//-------------------------------------------------------------------------------------
HRESULT XMLParser::ParseXMLFile( CONST CHAR *strFilename )
{
HRESULT hr;
if( m_pISAXCallback == NULL )
return E_NOINTERFACE;
m_pISAXCallback->m_LineNum = 1;
m_pISAXCallback->m_LinePos = 0;
m_pISAXCallback->m_strFilename = strFilename; // save this off only while we parse the file
m_bSkipNextAdvance = FALSE;
m_pReadPtr = m_pReadBuf;
m_pReadBuf[ 0 ] = '\0';
m_pReadBuf[ 1 ] = '\0';
m_pInXMLBuffer = NULL;
m_uInXMLBufferCharsLeft = 0;
m_hFile = CreateFile( strFilename, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_SEQUENTIAL_SCAN, NULL );
if( m_hFile == INVALID_HANDLE_VALUE )
{
Error( E_COULD_NOT_OPEN_FILE, "Error opening file" );
hr = E_COULD_NOT_OPEN_FILE;
}
else
{
LARGE_INTEGER iFileSize;
GetFileSizeEx( m_hFile, &iFileSize );
m_dwCharsTotal = (DWORD)iFileSize.QuadPart;
m_dwCharsConsumed = 0;
hr = MainParseLoop();
}
// Close the file
if( m_hFile != INVALID_HANDLE_VALUE )
CloseHandle( m_hFile );
m_hFile = INVALID_HANDLE_VALUE;
// we no longer own strFilename, so un-set it
m_pISAXCallback->m_strFilename = NULL;
return hr;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::ParseXMLBuffer
// Desc: Builds element data from an in-memory buffer
//-------------------------------------------------------------------------------------
HRESULT XMLParser::ParseXMLBuffer( CONST CHAR *strBuffer, UINT uBufferSize )
{
HRESULT hr;
if( m_pISAXCallback == NULL )
return E_NOINTERFACE;
m_pISAXCallback->m_LineNum = 1;
m_pISAXCallback->m_LinePos = 0;
m_pISAXCallback->m_strFilename = ""; // save this off only while we parse the file
m_bSkipNextAdvance = FALSE;
m_pReadPtr = m_pReadBuf;
m_pReadBuf[ 0 ] = '\0';
m_pReadBuf[ 1 ] = '\0';
m_hFile = NULL;
m_pInXMLBuffer = strBuffer;
m_uInXMLBufferCharsLeft = uBufferSize;
m_dwCharsTotal = m_uInXMLBufferCharsLeft;
m_dwCharsConsumed = 0;
hr = MainParseLoop();
<|fim▁hole|>
return hr;
}
//-------------------------------------------------------------------------------------
// Name: XMLParser::Error
// Desc: Logs an error through the callback interface
//-------------------------------------------------------------------------------------
VOID XMLParser::Error( HRESULT hErr, CONST CHAR* strFormat, ... )
{
CONST INT MAX_OUTPUT_STR = 160;
CHAR strBuffer[ MAX_OUTPUT_STR ];
va_list pArglist;
va_start( pArglist, strFormat );
vsprintf_s( strBuffer, strFormat, pArglist );
m_pISAXCallback->Error( hErr, strBuffer );
va_end( pArglist );
}
} // namespace ATG<|fim▁end|>
|
// we no longer own strFilename, so un-set it
m_pISAXCallback->m_strFilename = NULL;
|
<|file_name|>insert_grp_desg.py<|end_file_name|><|fim▁begin|># script.py
import argparse
import pyexcel as pe
import configparser
import os
import sys
import sqlite3
import my_connection_grp_desg as connection
from insert import sanction_grp as insert_sanc
DB_URL = None
global conn
conn = None
sheet = None
desg_ls=None
unit_ls=None
sect_ls=None
def load_tables():
# conn.row_factory = sqlite3.Row
c = conn.cursor()
c.execute("select dscd from desg")
global desg_ls
desg_ls = [ x[0] for x in c.fetchall()]
c.execute("select code from unit")
global unit_ls
unit_ls = [ x[0] for x in c.fetchall()]
c.execute("select code from section")
global sect_ls
sect_ls = [ x[0] for x in c.fetchall()]
c.execute("select eis from employee")
global eis_ls
eis_ls = [ x[0] for x in c.fetchall()]
eis_list = []
working_unit = []
not_first_row = False
def validate_row(row, ignore_multi_unit):
# hold all error list
err = []
# check wrong codes
if row['DSCD'] not in desg_ls:
err.append(" dscd("+row['DSCD']+")")
if row['SECTION_CD'] not in sect_ls:
err.append(" sect("+row['SECTION_CD']+")")
if row['WORKING UNIT'] not in unit_ls:
err.append(" unit("+row['WORKING UNIT']+")")
if row['ONROLL_UNIT'] not in unit_ls:
err.append(" roll_unit("+row['ONROLL_UNIT']+")")
# check duplicate eis in file
global eis_list
if str(row['EIS']) in eis_list:
err.append(" eis_repeat("+str(row['EIS'])+")")
else:
eis_list.append(str(row['EIS']))
# check duplicate eis in db
try:
if int(row['EIS']) in eis_ls:
err.append(" eis_dup_db("+str(row['EIS'])+")")
except ValueError as e:
err.append(" eis_err("+str(row['EIS'])+")")
# check if multiple working_unit present in file
global working_unit
global not_first_row
if not ignore_multi_unit:
if not_first_row:
if str(row['WORKING UNIT']) not in working_unit:
err.append(" multiple_work_unit("+str(row['WORKING UNIT'])+")")
else:
working_unit.append(str(row['WORKING UNIT']))
not_first_row = True
# return err list
if not err:
return None
else:
return err
def read_file(xls_path, sheet_name, upload, ignore_multi_unit):
# book = pe.get_book(file_name=os.path.normpath(xls_path))
# sheets = book.to_dict()
# for name in sheets.keys():
# print(name)
try:
sheet = pe.get_sheet(file_name=os.path.normpath(xls_path), sheet_name=sheet_name, name_columns_by_row=0)
except ValueError as e:
print("Sheet name not in excel file: {0}".format(e))
sys.exit()
except AttributeError as e:<|fim▁hole|> print("Sheet name not in excel file: {0}".format(e))
sys.exit()
#raise e
except NotImplementedError as e:
print("File not found or File not in proper format: {0}".format(e))
sys.exit()
#raise e
records = sheet.get_records()
error_ls = []
for idx, record in enumerate(records):
err_row = validate_row(record, ignore_multi_unit)
if err_row:
error_ls.append(err_row)
print('ERR @ ROW {} => {}'.format(idx+2, err_row))
if error_ls:
print('correct the above errors and upload')
else:
print('{0} rows will be inserted. add "-u" to upload'.format(len(records)))
if upload:
ls=[]
for idx, r in enumerate(records):
#sno AREA UNIT MINE_TYPE ONROLL_UNIT WORKING UNIT SECTION_TYPE CADRE SECTION SECTION_CD DESIG DSCD EIS NAME GENDER DOB Comments
ls.append(('N','W',None,r['SECTION_CD'],r['WORKING UNIT'],r['ONROLL_UNIT'],r['DSCD'],r['GENDER'],r['DOB'],r['NAME'],r['EIS'],r['Comments']))
c = conn.cursor()
c.executemany('''insert into employee (emp_type,working,o_dcd,sect,ucde,roll_ucde,desg,gend,dob,name,eis,comments)
values(?,?,?,?,?, ?,?,?,?,?, ?,?)''',ls)
conn.commit()
        print('--->{0} records inserted successfully'.format(len(ls)))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# parser.add_argument(
# 'filename', metavar='int', type=int, choices=range(10),
# nargs='+', help='give a file name')
parser.add_argument('filename', help='give file name')
parser.add_argument('table', help='e for inserting employee; s for inserting sanction')
parser.add_argument("-sh",'--sheetname', help='give sheet name; type * to include all sheets')
parser.add_argument("-u", "--upload", action="store_true", help="to update/commit changes into database")
parser.add_argument("-im", "--ignore_multi_unit", action="store_true", help="to upload file with multiple units and suppress its errors")#default=max,
args = parser.parse_args()
print(args)
#read_config()
conn = connection.get_connection()
if args.table == 's':
insert_sanc.load_tables()
insert_sanc.read_file(args.filename, args.sheetname, args.upload)
else:
        print('supplied arguments or their order are wrong')
sys.exit()<|fim▁end|>
| |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
from keras.datasets import mnist, cifar10, cifar100
from sklearn.preprocessing import LabelBinarizer
from nets import LeNet, LeNetVarDropout, VGG, VGGVarDropout
sess = tf.Session()
def main():
dataset = 'cifar10' # mnist, cifar10, cifar100
# Load the data
# It will be downloaded first if necessary
if dataset == 'mnist':
(X_train, y_train), (X_test, y_test) = mnist.load_data()
img_size = 28
num_classes = 10
num_channels = 1
elif dataset == 'cifar10':
(X_train, y_train), (X_test, y_test) = cifar10.load_data()
img_size = 32
num_classes = 10
num_channels = 3
elif dataset == 'cifar100':
(X_train, y_train), (X_test, y_test) = cifar100.load_data()
img_size = 32
num_classes = 100
num_channels = 3
lb = LabelBinarizer()
lb.fit(y_train)
y_train_one_hot = lb.transform(y_train)
y_test_one_hot = lb.transform(y_test)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train = np.reshape(X_train,[-1,img_size,img_size,num_channels])
X_test = np.reshape(X_test,[-1,img_size,img_size,num_channels])<|fim▁hole|> X_test /= 255
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
m = VGGVarDropout(img_size,num_channels,num_classes)
sess.run(tf.global_variables_initializer())
m.fit(X_train,y_train_one_hot,sess)
pred = m.predict(X_test,sess)
y_test = np.squeeze(y_test)
acc = np.mean(np.equal(y_test,pred))
print("\nTest accuracy: %.3f" % acc)
if __name__ == "__main__":
main()<|fim▁end|>
|
X_train /= 255
|
<|file_name|>espeak-demo.js<|end_file_name|><|fim▁begin|>var espeak = require('node-espeak');
espeak.initialize();
<|fim▁hole|>});
espeak.speak("hello world!");<|fim▁end|>
|
espeak.onVoice(function(wav, samples, samplerate) {
|
<|file_name|>MonotoneChainBuilder.java<|end_file_name|><|fim▁begin|>/*
* The JTS Topology Suite is a collection of Java classes that
* implement the fundamental operations required to validate a given
* geo-spatial data set to a known topological specification.
*
* Copyright (C) 2001 Vivid Solutions
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* For more information, contact:
*
* Vivid Solutions
* Suite #1A
* 2328 Government Street
* Victoria BC V8T 5G5
* Canada
*
* (250)385-6040
* www.vividsolutions.com
*/
package com.vividsolutions.jts.index.chain;
import java.util.*;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geomgraph.Quadrant;
/**
* Constructs {@link MonotoneChain}s
* for sequences of {@link Coordinate}s.
*
* @version 1.7
*/
public class MonotoneChainBuilder {
public static int[] toIntArray(List list)
{
int[] array = new int[list.size()];
for (int i = 0; i < array.length; i++) {
array[i] = ((Integer) list.get(i)).intValue();
}
return array;
}
public static List getChains(Coordinate[] pts)
{
return getChains(pts, null);
}
/**
* Return a list of the {@link MonotoneChain}s
* for the given list of coordinates.
*/
public static List getChains(Coordinate[] pts, Object context)
{
List mcList = new ArrayList();
int[] startIndex = getChainStartIndices(pts);
for (int i = 0; i < startIndex.length - 1; i++) {
MonotoneChain mc = new MonotoneChain(pts, startIndex[i], startIndex[i + 1], context);
mcList.add(mc);
}
return mcList;
}
/**
* Return an array containing lists of start/end indexes of the monotone chains
* for the given list of coordinates.
* The last entry in the array points to the end point of the point array,
* for use as a sentinel.
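   * <p>
   * Illustrative example: for a coordinate sequence that is monotone
   * throughout (for instance, strictly increasing in both x and y), the
   * returned array is simply {@code [0, pts.length - 1]}.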
*/
public static int[] getChainStartIndices(Coordinate[] pts)
{
// find the startpoint (and endpoints) of all monotone chains in this edge
int start = 0;
List startIndexList = new ArrayList();
startIndexList.add(new Integer(start));
do {
int last = findChainEnd(pts, start);
startIndexList.add(new Integer(last));
start = last;
} while (start < pts.length - 1);
// copy list to an array of ints, for efficiency
int[] startIndex = toIntArray(startIndexList);
return startIndex;
}
/**
* Finds the index of the last point in a monotone chain
* starting at a given point.
* Any repeated points (0-length segments) will be included
* in the monotone chain returned.
*
* @return the index of the last point in the monotone chain
* starting at <code>start</code>.
*/
private static int findChainEnd(Coordinate[] pts, int start)
<|fim▁hole|> // (since they cannot be used to establish a quadrant)
while (safeStart < pts.length - 1 && pts[safeStart].equals2D(pts[safeStart + 1])) {
safeStart++;
}
// check if there are NO non-zero-length segments
if (safeStart >= pts.length - 1) {
return pts.length - 1;
}
// determine overall quadrant for chain (which is the starting quadrant)
int chainQuad = Quadrant.quadrant(pts[safeStart], pts[safeStart + 1]);
int last = start + 1;
while (last < pts.length) {
// skip zero-length segments, but include them in the chain
if (! pts[last - 1].equals2D(pts[last])) {
// compute quadrant for next possible segment in chain
int quad = Quadrant.quadrant(pts[last - 1], pts[last]);
if (quad != chainQuad) break;
}
last++;
}
return last - 1;
}
public MonotoneChainBuilder() {
}
}<|fim▁end|>
|
{
int safeStart = start;
// skip any zero-length segments at the start of the sequence
|
<|file_name|>fetch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# This is a virtual module that is entirely implemented as an action plugin and runs on the controller
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: fetch
short_description: Fetch files from remote nodes
description:
- This module works like M(copy), but in reverse.
- It is used for fetching files from remote machines and storing them locally in a file tree, organized by hostname.
- Files that already exist at I(dest) will be overwritten if they are different than the I(src).
- This module is also supported for Windows targets.
version_added: '0.2'
options:
src:
description:
- The file on the remote system to fetch.
- This I(must) be a file, not a directory.
- Recursive fetching may be supported in a later release.
required: yes
dest:
description:
- A directory to save the file into.
- For example, if the I(dest) directory is C(/backup) a I(src) file named C(/etc/profile) on host
C(host.example.com), would be saved into C(/backup/host.example.com/etc/profile).
The host name is based on the inventory name.
required: yes
fail_on_missing:
version_added: '1.1'
description:
- When set to C(yes), the task will fail if the remote file cannot be read for any reason.
- Prior to Ansible 2.5, setting this would only fail if the source file was missing.
- The default was changed to C(yes) in Ansible 2.5.
type: bool
default: yes<|fim▁hole|> - Verify that the source and destination checksums match after the files are fetched.
type: bool
default: yes
flat:
version_added: '1.2'
description:
- Allows you to override the default behavior of appending hostname/path/to/file to the destination.
- If C(dest) ends with '/', it will use the basename of the source file, similar to the copy module.
- This can be useful if working with a single host, or if retrieving files that are uniquely named per host.
- If using multiple hosts with the same filename, the file will be overwritten for each host.
type: bool
default: no
notes:
- When running fetch with C(become), the M(slurp) module will also be
used to fetch the contents of the file for determining the remote
checksum. This effectively doubles the transfer size, and
depending on the file size can consume all available memory on the
remote or local hosts causing a C(MemoryError). Due to this it is
advisable to run this module without C(become) whenever possible.
- Prior to Ansible 2.5 this module would not fail if reading the remote
file was impossible unless C(fail_on_missing) was set.
- In Ansible 2.5 or later, playbook authors are encouraged to use
C(fail_when) or C(ignore_errors) to get this ability. They may
also explicitly set C(fail_on_missing) to C(no) to get the
non-failing behaviour.
- This module is also supported for Windows targets.
seealso:
- module: copy
- module: slurp
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = r'''
- name: Store file into /tmp/fetched/host.example.com/tmp/somefile
fetch:
src: /tmp/somefile
dest: /tmp/fetched
- name: Specifying a path directly
fetch:
src: /tmp/somefile
dest: /tmp/prefix-{{ inventory_hostname }}
flat: yes
- name: Specifying a destination path
fetch:
src: /tmp/uniquefile
dest: /tmp/special/
flat: yes
- name: Storing in a path relative to the playbook
fetch:
src: /tmp/uniquefile
dest: special/prefix-{{ inventory_hostname }}
flat: yes
'''<|fim▁end|>
|
validate_checksum:
version_added: '1.4'
description:
|
<|file_name|>find-kam-node.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# find-kam-node.py: python2 example of loading kam, resolving kam node, and
# printing out BEL terms
#
# usage: find-kam-node.py <kam name> <source_bel_term>
from random import choice
from suds import *
from ws import *
import time
def load_kam(client, kam_name):
'''
Loads a KAM by name. This function will sleep until the KAM's
loadStatus is 'COMPLETE'.
'''
def call():
'''
Load the KAM and return result. Exit with error if 'loadStatus'
is FAILED.
'''
kam = client.create('Kam')
kam.name = kam_name
result = client.service.LoadKam(kam)
status = result['loadStatus']
if status == 'FAILED':
print 'FAILED!'
print sys.exc_info()[1]
exit_failure()
return result
# load kam and wait for completion
result = call()
while result['loadStatus'] != 'COMPLETE':
time.sleep(0.5)
result = call()
return result['handle']
if __name__ == '__main__':
from sys import argv, exit, stderr
if len(argv) != 3:
msg = 'usage: find-kam-node.py <kam name> <source_bel_term>\n'
stderr.write(msg)
exit(1)
# unpack command-line arguments; except the first script name argument
(kam_name, source_term) = argv[1:]
client = WS('http://localhost:8080/openbel-ws/belframework.wsdl')
handle = load_kam(client, kam_name)<|fim▁hole|>
# create nodes using BEL term labels from command-line
node = client.create("Node")
node.label = source_term
# resolve node
result = client.service.ResolveNodes(handle, [node], None)
if len(result) == 1 and result[0]:
the_node = result[0]
print "found node, id: %s" % (the_node.id)
terms = client.service.GetSupportingTerms(the_node, None)
for t in terms:
print t
else:
print "edge not found"
exit_success()<|fim▁end|>
|
print "loaded kam '%s', handle '%s'" % (kam_name, handle.handle)
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django.utils.translation import ugettext, ugettext_lazy as _
from ella.positions.models import Position
from ella.utils import timezone
class PositionOptions(admin.ModelAdmin):
def show_title(self, obj):
if not obj.target:
return '-- %s --' % ugettext('empty position')
else:
return u'%s [%s]' % (obj.target.title, ugettext(obj.target_ct.name),)
show_title.short_description = _('Title')
def is_filled(self, obj):
if obj.target:
return True
else:
return False
is_filled.short_description = _('Filled')
is_filled.boolean = True
def is_active(self, obj):
if obj.disabled:
return False
now = timezone.now()
active_from = not obj.active_from or obj.active_from <= now
active_till = not obj.active_till or obj.active_till > now
return active_from and active_till
is_active.short_description = _('Active')
is_active.boolean = True
list_display = ('name', 'category', 'box_type', 'is_active', 'is_filled', 'show_title', 'disabled',)
list_filter = ('category', 'name', 'disabled', 'active_from', 'active_till',)
search_fields = ('box_type', 'text',)<|fim▁hole|>
admin.site.register(Position, PositionOptions)<|fim▁end|>
|
# suggest_fields = {'category': ('tree_path', 'title', 'slug',),}
|
<|file_name|>Context.js<|end_file_name|><|fim▁begin|><|fim▁hole|>class Context {
user: Object;
req: express$Request;
res: express$Response;
depth: number;
constructor(req: express$Request, res: express$Response, depth: number = 0) {
this.req = req;
this.res = res;
this.depth = depth;
// $FlowIgnore
this.user = res.locals.user;
}
stepInto() {
return new Context(this.req, this.res, this.depth + 1);
}
isInternal() {
return this.depth > 0;
}
}
export default Context;<|fim▁end|>
|
// @flow
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! The rule tree.
#[cfg(feature = "servo")]
use heapsize::HeapSizeOf;
use properties::{Importance, PropertyDeclarationBlock};
use shared_lock::{Locked, StylesheetGuards, SharedRwLockReadGuard};
use smallvec::SmallVec;
use std::io::{self, Write};
use std::ptr;
use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
use stylearc::Arc;
use stylesheets::StyleRule;
use thread_state;
/// The rule tree, the structure servo uses to preserve the results of selector
/// matching.
///
/// This is organized as a tree of rules. When a node matches a set of rules,
/// they're inserted in order in the tree, starting with the least specific one.
///
/// When a rule is inserted in the tree, other elements may share the path up to
/// a given rule. If that's the case, we don't duplicate child nodes, but share
/// them.
///
/// When the rule node refcount drops to zero, it doesn't get freed. It gets
/// instead put into a free list, and it is potentially GC'd after a while in a
/// single-threaded fashion.
///
/// That way, a rule node that represents a likely-to-match-again rule (like a
/// :hover rule) can be reused if we haven't GC'd it yet.
///
/// See the discussion at https://github.com/servo/servo/pull/15562 and the IRC
/// logs at http://logs.glob.uno/?c=mozilla%23servo&s=3+Apr+2017&e=3+Apr+2017#c644094
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct RuleTree {
root: StrongRuleNode,
}
/// A style source for the rule node. It can either be a CSS style rule or a
/// declaration block.
///
/// Note that, even though the declaration block from inside the style rule
/// could be enough to implement the rule tree, keeping the whole rule provides
/// more debuggability, and also the ability of show those selectors to
/// devtools.
#[derive(Debug, Clone)]
pub enum StyleSource {
/// A style rule stable pointer.
Style(Arc<Locked<StyleRule>>),
/// A declaration block stable pointer.
Declarations(Arc<Locked<PropertyDeclarationBlock>>),
}
impl StyleSource {
#[inline]
fn ptr_equals(&self, other: &Self) -> bool {
use self::StyleSource::*;
match (self, other) {
(&Style(ref one), &Style(ref other)) => Arc::ptr_eq(one, other),
(&Declarations(ref one), &Declarations(ref other)) => Arc::ptr_eq(one, other),
_ => false,
}
}
fn dump<W: Write>(&self, guard: &SharedRwLockReadGuard, writer: &mut W) {
use self::StyleSource::*;
if let Style(ref rule) = *self {
let rule = rule.read_with(guard);
let _ = write!(writer, "{:?}", rule.selectors);
}
let _ = write!(writer, " -> {:?}", self.read(guard).declarations());
}
/// Read the style source guard, and obtain thus read access to the
/// underlying property declaration block.
#[inline]
pub fn read<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a PropertyDeclarationBlock {
let block = match *self {
StyleSource::Style(ref rule) => &rule.read_with(guard).block,
StyleSource::Declarations(ref block) => block,
};
block.read_with(guard)
}
}
/// This value exists here so a node that pushes itself to the list can know
/// that it is in the free list by looking at its next pointer, and comparing it
/// with null.
///
/// The root node doesn't have a null pointer in the free list, but this value.
const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;
impl RuleTree {
/// Construct a new rule tree.
pub fn new() -> Self {
RuleTree {
root: StrongRuleNode::new(Box::new(RuleNode::root())),
}
}
/// Get the root rule node.
pub fn root(&self) -> StrongRuleNode {
self.root.clone()
}
fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W) {
let _ = writeln!(writer, " + RuleTree");
self.root.get().dump(guards, writer, 0);
}
/// Dump the rule tree to stdout.
pub fn dump_stdout(&self, guards: &StylesheetGuards) {
let mut stdout = io::stdout();
self.dump(guards, &mut stdout);
}
    /// Inserts the given rules, which must be in proper order by specificity, and
/// returns the corresponding rule node representing the last inserted one.
///
/// !important rules are detected and inserted into the appropriate position
/// in the rule tree. This allows selector matching to ignore importance,
/// while still maintaining the appropriate cascade order in the rule tree.
pub fn insert_ordered_rules_with_important<'a, I>(&self,
iter: I,
guards: &StylesheetGuards)
-> StrongRuleNode
where I: Iterator<Item=(StyleSource, CascadeLevel)>,
{
use self::CascadeLevel::*;
let mut current = self.root.clone();
let mut last_level = current.get().level;
let mut found_important = false;
let mut important_style_attr = None;
let mut important_author = SmallVec::<[StyleSource; 4]>::new();
let mut important_user = SmallVec::<[StyleSource; 4]>::new();
let mut important_ua = SmallVec::<[StyleSource; 4]>::new();
let mut transition = None;
for (source, level) in iter {
debug_assert!(last_level <= level, "Not really ordered");
debug_assert!(!level.is_important(), "Important levels handled internally");
let (any_normal, any_important) = {
let pdb = source.read(level.guard(guards));
(pdb.any_normal(), pdb.any_important())
};
if any_important {
found_important = true;
match level {
AuthorNormal => important_author.push(source.clone()),
UANormal => important_ua.push(source.clone()),
UserNormal => important_user.push(source.clone()),
StyleAttributeNormal => {
debug_assert!(important_style_attr.is_none());
important_style_attr = Some(source.clone());
},
_ => {},
};
}
if any_normal {
if matches!(level, Transitions) && found_important {
// There can be at most one transition, and it will come at
// the end of the iterator. Stash it and apply it after
// !important rules.
debug_assert!(transition.is_none());
transition = Some(source);
} else {
current = current.ensure_child(self.root.downgrade(), source, level);
}
}
last_level = level;
}
// Early-return in the common case of no !important declarations.
if !found_important {
return current;
}
//
// Insert important declarations, in order of increasing importance,
// followed by any transition rule.
//
for source in important_author.into_iter() {
current = current.ensure_child(self.root.downgrade(), source, AuthorImportant);
}
if let Some(source) = important_style_attr {
current = current.ensure_child(self.root.downgrade(), source, StyleAttributeImportant);
}
for source in important_user.into_iter() {
current = current.ensure_child(self.root.downgrade(), source, UserImportant);
}
for source in important_ua.into_iter() {
current = current.ensure_child(self.root.downgrade(), source, UAImportant);
}
if let Some(source) = transition {
current = current.ensure_child(self.root.downgrade(), source, Transitions);
}
current
}
    /// Insert the given rules, which must be in proper order by specificity, and
/// return the corresponding rule node representing the last inserted one.
pub fn insert_ordered_rules<'a, I>(&self, iter: I) -> StrongRuleNode
where I: Iterator<Item=(StyleSource, CascadeLevel)>,
{
self.insert_ordered_rules_from(self.root.clone(), iter)
}
fn insert_ordered_rules_from<'a, I>(&self,
from: StrongRuleNode,
iter: I) -> StrongRuleNode
where I: Iterator<Item=(StyleSource, CascadeLevel)>,
{
let mut current = from;
let mut last_level = current.get().level;
for (source, level) in iter {
debug_assert!(last_level <= level, "Not really ordered");
current = current.ensure_child(self.root.downgrade(), source, level);
last_level = level;
}
current
}
/// This can only be called when no other threads is accessing this tree.
pub unsafe fn gc(&self) {
self.root.gc();
}
/// This can only be called when no other threads is accessing this tree.
pub unsafe fn maybe_gc(&self) {
self.root.maybe_gc();
}
    /// Replaces a rule at a given level (if present) with another rule.
///
/// Returns the resulting node that represents the new path, or None if
/// the old path is still valid.
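    ///
    /// Illustrative sketch (the `pdb`, `path`, and `guards` bindings are
    /// assumed to exist in the caller):
    ///
    /// ```ignore
    /// let new_path = tree.update_rule_at_level(
    ///     CascadeLevel::StyleAttributeNormal, Some(&pdb), &path, &guards);
    /// ```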
pub fn update_rule_at_level(&self,
level: CascadeLevel,
pdb: Option<&Arc<Locked<PropertyDeclarationBlock>>>,
path: &StrongRuleNode,
guards: &StylesheetGuards)
-> Option<StrongRuleNode> {
debug_assert!(level.is_unique_per_element());
// TODO(emilio): Being smarter with lifetimes we could avoid a bit of
// the refcount churn.
let mut current = path.clone();
// First walk up until the first less-or-equally specific rule.
let mut children = vec![];
while current.get().level > level {
children.push((current.get().source.clone().unwrap(), current.get().level));
current = current.parent().unwrap().clone();
}
// Then remove the one at the level we want to replace, if any.
//
// NOTE: Here we assume that only one rule can be at the level we're
// replacing.
//
// This is certainly true for HTML style attribute rules, animations and
// transitions, but could not be so for SMIL animations, which we'd need
// to special-case (isn't hard, it's just about removing the `if` and
// special cases, and replacing them for a `while` loop, avoiding the
// optimizations).
if current.get().level == level {
if let Some(pdb) = pdb {
// If the only rule at the level we're replacing is exactly the
// same as `pdb`, we're done, and `path` is still valid.
//
// TODO(emilio): Another potential optimization is the one where
// we can just replace the rule at that level for `pdb`, and
// then we don't need to re-create the children, and `path` is
// also equally valid. This is less likely, and would require an
// in-place mutation of the source, which is, at best, fiddly,
// so let's skip it for now.
let is_here_already = match current.get().source.as_ref() {
Some(&StyleSource::Declarations(ref already_here)) => {
Arc::ptr_eq(pdb, already_here)
},
_ => unreachable!("Replacing non-declarations style?"),
};
if is_here_already {
debug!("Picking the fast path in rule replacement");
return None;
}
}
current = current.parent().unwrap().clone();
}
debug_assert!(current.get().level != level,
"Multiple rules should've been replaced?");
// Insert the rule if it's relevant at this level in the cascade.
//
// These optimizations are likely to be important, because the levels
// where replacements apply (style and animations) tend to trigger
// pretty bad styling cases already.
if let Some(pdb) = pdb {
if level.is_important() {
if pdb.read_with(level.guard(guards)).any_important() {
current = current.ensure_child(self.root.downgrade(),
StyleSource::Declarations(pdb.clone()),
level);
}
} else {
if pdb.read_with(level.guard(guards)).any_normal() {
current = current.ensure_child(self.root.downgrade(),
StyleSource::Declarations(pdb.clone()),
level);
}
}
}
// Now the rule is in the relevant place, push the children as
// necessary.
Some(self.insert_ordered_rules_from(current, children.into_iter().rev()))
}
/// Returns new rule nodes without Transitions level rule.
pub fn remove_transition_rule_if_applicable(&self, path: &StrongRuleNode) -> StrongRuleNode {
// Return a clone if there is no transition level.
if path.cascade_level() != CascadeLevel::Transitions {
return path.clone();
}
path.parent().unwrap().clone()
}
/// Returns new rule node without rules from declarative animations.
pub fn remove_animation_rules(&self, path: &StrongRuleNode) -> StrongRuleNode {
// Return a clone if there are no animation rules.
if !path.has_animation_or_transition_rules() {
return path.clone();
}
let iter = path.self_and_ancestors().take_while(
|node| node.cascade_level() >= CascadeLevel::SMILOverride);
let mut last = path;
let mut children = vec![];
for node in iter {
if node.cascade_level().is_animation() {
children.push((node.get().source.clone().unwrap(), node.cascade_level()));
}
last = node;
}
self.insert_ordered_rules_from(last.parent().unwrap().clone(), children.into_iter().rev())
}
}
/// The number of RuleNodes added to the free list before we will consider
/// doing a GC when calling maybe_gc(). (The value is copied from Gecko,
/// where it likely did not result from a rigorous performance analysis.)
const RULE_TREE_GC_INTERVAL: usize = 300;
/// The cascade level these rules are relevant at, as per [1].
///
/// The order of variants declared here is significant, and must be in
/// _ascending_ order of precedence.
///
/// [1]: https://drafts.csswg.org/css-cascade/#cascade-origin
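///
/// Since the variants are declared in ascending order of precedence, plain
/// comparisons reflect the cascade (illustrative):
///
/// ```ignore
/// assert!(CascadeLevel::UANormal < CascadeLevel::AuthorNormal);
/// assert!(CascadeLevel::StyleAttributeImportant < CascadeLevel::UAImportant);
/// ```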
#[repr(u8)]
#[derive(Eq, PartialEq, Copy, Clone, Debug, PartialOrd)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum CascadeLevel {
/// Normal User-Agent rules.
UANormal = 0,
/// Presentational hints.
PresHints,
/// User normal rules.
UserNormal,
/// Author normal rules.
AuthorNormal,
/// Style attribute normal rules.
StyleAttributeNormal,
/// SVG SMIL animations.
SMILOverride,
/// CSS animations and script-generated animations.
Animations,
/// Author-supplied important rules.
AuthorImportant,
/// Style attribute important rules.
StyleAttributeImportant,
/// User important rules.
UserImportant,
/// User-agent important rules.
UAImportant,
/// Transitions
Transitions,
}
impl CascadeLevel {
/// Select a lock guard for this level
pub fn guard<'a>(&self, guards: &'a StylesheetGuards<'a>) -> &'a SharedRwLockReadGuard<'a> {
match *self {
CascadeLevel::UANormal |
CascadeLevel::UserNormal |
CascadeLevel::UserImportant |
CascadeLevel::UAImportant => guards.ua_or_user,
_ => guards.author,
}
}
/// Returns whether this cascade level is unique per element, in which case
/// we can replace the path in the cascade without fear.
pub fn is_unique_per_element(&self) -> bool {
match *self {
CascadeLevel::Transitions |
CascadeLevel::Animations |
CascadeLevel::SMILOverride |
CascadeLevel::StyleAttributeNormal |
CascadeLevel::StyleAttributeImportant => true,
_ => false,
}
}
/// Returns whether this cascade level represents important rules of some
/// sort.
#[inline]
pub fn is_important(&self) -> bool {
match *self {
CascadeLevel::AuthorImportant |
CascadeLevel::StyleAttributeImportant |
CascadeLevel::UserImportant |
CascadeLevel::UAImportant => true,
_ => false,
}
}
/// Returns the importance relevant for this rule. Pretty similar to
/// `is_important`.
#[inline]
pub fn importance(&self) -> Importance {
if self.is_important() {
Importance::Important
} else {
Importance::Normal
}
}
/// Returns whether this cascade level represents an animation rules.
#[inline]
pub fn is_animation(&self) -> bool {
match *self {
CascadeLevel::SMILOverride |
CascadeLevel::Animations |
CascadeLevel::Transitions => true,
_ => false,
}
}
}
/// A node in the rule tree.
pub struct RuleNode {
/// The root node. Only the root has no root pointer, for obvious reasons.
root: Option<WeakRuleNode>,
/// The parent rule node. Only the root has no parent.
parent: Option<StrongRuleNode>,
/// The actual style source, either coming from a selector in a StyleRule,
/// or a raw property declaration block (like the style attribute).
source: Option<StyleSource>,
/// The cascade level this rule is positioned at.
level: CascadeLevel,
refcount: AtomicUsize,
first_child: AtomicPtr<RuleNode>,
next_sibling: AtomicPtr<RuleNode>,
prev_sibling: AtomicPtr<RuleNode>,
/// The next item in the rule tree free list, that starts on the root node.
next_free: AtomicPtr<RuleNode>,
/// Number of RuleNodes we have added to the free list since the last GC.
/// (We don't update this if we rescue a RuleNode from the free list. It's
/// just used as a heuristic to decide when to run GC.)
///
/// Only used on the root RuleNode. (We could probably re-use one of the
/// sibling pointers to save space.)
free_count: AtomicUsize,
}
unsafe impl Sync for RuleTree {}
unsafe impl Send for RuleTree {}
impl RuleNode {
fn new(root: WeakRuleNode,
parent: StrongRuleNode,
source: StyleSource,
level: CascadeLevel) -> Self {
debug_assert!(root.upgrade().parent().is_none());
RuleNode {
root: Some(root),
parent: Some(parent),
source: Some(source),
level: level,
refcount: AtomicUsize::new(1),
first_child: AtomicPtr::new(ptr::null_mut()),
next_sibling: AtomicPtr::new(ptr::null_mut()),
prev_sibling: AtomicPtr::new(ptr::null_mut()),
next_free: AtomicPtr::new(ptr::null_mut()),
free_count: AtomicUsize::new(0),
}
}
fn root() -> Self {
RuleNode {
root: None,
parent: None,
source: None,
level: CascadeLevel::UANormal,
refcount: AtomicUsize::new(1),
first_child: AtomicPtr::new(ptr::null_mut()),
next_sibling: AtomicPtr::new(ptr::null_mut()),
prev_sibling: AtomicPtr::new(ptr::null_mut()),
next_free: AtomicPtr::new(FREE_LIST_SENTINEL),
free_count: AtomicUsize::new(0),
}
}
fn is_root(&self) -> bool {
self.parent.is_none()
}
/// Remove this rule node from the child list.
///
/// This method doesn't use proper synchronization, and it's expected to be
/// called in a single-threaded fashion, thus the unsafety.
///
/// This is expected to be called before freeing the node from the free
/// list.
unsafe fn remove_from_child_list(&self) {
debug!("Remove from child list: {:?}, parent: {:?}",
self as *const RuleNode, self.parent.as_ref().map(|p| p.ptr()));
// NB: The other siblings we use in this function can also be dead, so
// we can't use `get` here, since it asserts.
let prev_sibling =
self.prev_sibling.swap(ptr::null_mut(), Ordering::Relaxed);
let next_sibling =
self.next_sibling.swap(ptr::null_mut(), Ordering::Relaxed);
// Store the `next` pointer as appropriate, either in the previous
// sibling, or in the parent otherwise.
if prev_sibling == ptr::null_mut() {
let parent = self.parent.as_ref().unwrap();
parent.get().first_child.store(next_sibling, Ordering::Relaxed);
} else {
let previous = &*prev_sibling;
previous.next_sibling.store(next_sibling, Ordering::Relaxed);
}
// Store the previous sibling pointer in the next sibling if present,
// otherwise we're done.
if next_sibling != ptr::null_mut() {
let next = &*next_sibling;
next.prev_sibling.store(prev_sibling, Ordering::Relaxed);
}
}
fn dump<W: Write>(&self, guards: &StylesheetGuards, writer: &mut W, indent: usize) {
const INDENT_INCREMENT: usize = 4;
for _ in 0..indent {
let _ = write!(writer, " ");
}
let _ = writeln!(writer, " - {:?} (ref: {:?}, parent: {:?})",
self as *const _, self.refcount.load(Ordering::Relaxed),
self.parent.as_ref().map(|p| p.ptr()));
for _ in 0..indent {
let _ = write!(writer, " ");
}
match self.source {
Some(ref source) => {
source.dump(self.level.guard(guards), writer);
}
None => {
if indent != 0 {
warn!("How has this happened?");
}
let _ = write!(writer, "(root)");
}
}
let _ = write!(writer, "\n");
for child in self.iter_children() {
child.get().dump(guards, writer, indent + INDENT_INCREMENT);
}
}
fn iter_children(&self) -> RuleChildrenListIter {
// See next_sibling to see why we need Acquire semantics here.
let first_child = self.first_child.load(Ordering::Acquire);
RuleChildrenListIter {
current: if first_child.is_null() {
None
} else {
Some(WeakRuleNode { ptr: first_child })
}
}
}
}
#[derive(Clone)]
struct WeakRuleNode {
ptr: *mut RuleNode,
}
/// A strong reference to a rule node.
#[derive(Debug, PartialEq)]
pub struct StrongRuleNode {
ptr: *mut RuleNode,
}
#[cfg(feature = "servo")]
impl HeapSizeOf for StrongRuleNode {
fn heap_size_of_children(&self) -> usize { 0 }
}
impl StrongRuleNode {
fn new(n: Box<RuleNode>) -> Self {
debug_assert!(n.parent.is_none() == n.source.is_none());
let ptr = Box::into_raw(n);
debug!("Creating rule node: {:p}", ptr);
StrongRuleNode {
ptr: ptr,
}
}
fn downgrade(&self) -> WeakRuleNode {
WeakRuleNode {
ptr: self.ptr,
}
}
fn next_sibling(&self) -> Option<WeakRuleNode> {
// We use acquire semantics here to ensure proper synchronization while
// inserting in the child list.
let ptr = self.get().next_sibling.load(Ordering::Acquire);
if ptr.is_null() {
None
} else {
Some(WeakRuleNode {
ptr: ptr
})
}
}
fn parent(&self) -> Option<&StrongRuleNode> {
self.get().parent.as_ref()
}
fn ensure_child(&self,
root: WeakRuleNode,
source: StyleSource,
level: CascadeLevel) -> StrongRuleNode {
let mut last = None;
// TODO(emilio): We could avoid all the refcount churn here.
for child in self.get().iter_children() {
            if child.get().level == level &&
child.get().source.as_ref().unwrap().ptr_equals(&source) {
return child;
}
last = Some(child);
}
let mut node = Box::new(RuleNode::new(root,
self.clone(),
source.clone(),
level));
let new_ptr: *mut RuleNode = &mut *node;
loop {
let strong;
{
let next_sibling_ptr = match last {
Some(ref l) => &l.get().next_sibling,
None => &self.get().first_child,
};
                // We use `AcqRel` semantics to ensure the initializing writes
// in `node` are visible after the swap succeeds.
let existing =
next_sibling_ptr.compare_and_swap(ptr::null_mut(),
new_ptr,
Ordering::AcqRel);
if existing == ptr::null_mut() {
// Now we know we're in the correct position in the child
// list, we can set the back pointer, knowing that this will
// only be accessed again in a single-threaded manner when
// we're sweeping possibly dead nodes.
if let Some(ref l) = last {
node.prev_sibling.store(l.ptr(), Ordering::Relaxed);
}
return StrongRuleNode::new(node);
}
            // Existing is not null: another thread inserted a child node after we accessed `last`.
strong = WeakRuleNode { ptr: existing }.upgrade();
if strong.get().source.as_ref().unwrap().ptr_equals(&source) {
// That node happens to be for the same style source, use
// that, and let node fall out of scope.
return strong;
}
}
// Try again inserting after the new last child.
last = Some(strong);
}
}
/// Raw pointer to the RuleNode
pub fn ptr(&self) -> *mut RuleNode {
self.ptr
}
fn get(&self) -> &RuleNode {
if cfg!(debug_assertions) {
let node = unsafe { &*self.ptr };
assert!(node.refcount.load(Ordering::Relaxed) > 0);
}
unsafe { &*self.ptr }
}
/// Get the style source corresponding to this rule node. May return `None`
/// if it's the root node, which means that the node hasn't matched any
/// rules.
pub fn style_source(&self) -> Option<&StyleSource> {
self.get().source.as_ref()
}
/// The cascade level for this node
pub fn cascade_level(&self) -> CascadeLevel {
self.get().level
}
/// Get the importance that this rule node represents.
pub fn importance(&self) -> Importance {
self.get().level.importance()
}
/// Get an iterator for this rule node and its ancestors.
pub fn self_and_ancestors(&self) -> SelfAndAncestors {
SelfAndAncestors {
current: Some(self)
}
}
/// Returns whether this node has any child, only intended for testing
/// purposes, and called on a single-threaded fashion only.
pub unsafe fn has_children_for_testing(&self) -> bool {
!self.get().first_child.load(Ordering::Relaxed).is_null()
}
unsafe fn pop_from_free_list(&self) -> Option<WeakRuleNode> {
// NB: This can run from the root node destructor, so we can't use
// `get()`, since it asserts the refcount is bigger than zero.
let me = &*self.ptr;
debug_assert!(me.is_root());
// FIXME(#14213): Apparently the layout data can be gone from script.
//
// That's... suspicious, but it's fine if it happens for the rule tree
// case, so just don't crash in the case we're doing the final GC in
// script.
if !cfg!(feature = "testing") {
debug_assert!(!thread_state::get().is_worker() &&
(thread_state::get().is_layout() ||
thread_state::get().is_script()));
}
let current = me.next_free.load(Ordering::Relaxed);
if current == FREE_LIST_SENTINEL {
return None;
}
debug_assert!(!current.is_null(),
"Multiple threads are operating on the free list at the \
same time?");
debug_assert!(current != self.ptr,
"How did the root end up in the free list?");
let next = (*current).next_free.swap(ptr::null_mut(), Ordering::Relaxed);
debug_assert!(!next.is_null(),
"How did a null pointer end up in the free list?");
me.next_free.store(next, Ordering::Relaxed);
debug!("Popping from free list: cur: {:?}, next: {:?}", current, next);
Some(WeakRuleNode { ptr: current })
}
unsafe fn assert_free_list_has_no_duplicates_or_null(&self) {
assert!(cfg!(debug_assertions), "This is an expensive check!");
use std::collections::HashSet;
let me = &*self.ptr;
assert!(me.is_root());
let mut current = self.ptr;
let mut seen = HashSet::new();
while current != FREE_LIST_SENTINEL {
let next = (*current).next_free.load(Ordering::Relaxed);
assert!(!next.is_null());
assert!(!seen.contains(&next));
seen.insert(next);
current = next;
}
}
unsafe fn gc(&self) {
if cfg!(debug_assertions) {
self.assert_free_list_has_no_duplicates_or_null();
}
// NB: This can run from the root node destructor, so we can't use
// `get()`, since it asserts the refcount is bigger than zero.
let me = &*self.ptr;
debug_assert!(me.is_root(), "Can't call GC on a non-root node!");
while let Some(weak) = self.pop_from_free_list() {
let needs_drop = {
let node = &*weak.ptr();
if node.refcount.load(Ordering::Relaxed) == 0 {
node.remove_from_child_list();
true
} else {
false
}
};
debug!("GC'ing {:?}: {}", weak.ptr(), needs_drop);
if needs_drop {
let _ = Box::from_raw(weak.ptr());
}
}
me.free_count.store(0, Ordering::Relaxed);
debug_assert!(me.next_free.load(Ordering::Relaxed) == FREE_LIST_SENTINEL);
}
unsafe fn maybe_gc(&self) {
debug_assert!(self.get().is_root(), "Can't call GC on a non-root node!");
if self.get().free_count.load(Ordering::Relaxed) > RULE_TREE_GC_INTERVAL {
self.gc();
}
}
/// Implementation of `nsRuleNode::HasAuthorSpecifiedRules` for Servo rule nodes.
///
/// Returns true if any properties specified by `rule_type_mask` was set by an author rule.
#[cfg(feature = "gecko")]
pub fn has_author_specified_rules<E>(&self,
mut element: E,
guards: &StylesheetGuards,
rule_type_mask: u32,
author_colors_allowed: bool)
-> bool
where E: ::dom::TElement
{
use cssparser::RGBA;
use gecko_bindings::structs::{NS_AUTHOR_SPECIFIED_BACKGROUND, NS_AUTHOR_SPECIFIED_BORDER};
use gecko_bindings::structs::{NS_AUTHOR_SPECIFIED_PADDING, NS_AUTHOR_SPECIFIED_TEXT_SHADOW};
use properties::{CSSWideKeyword, LonghandId, LonghandIdSet};
use properties::{PropertyDeclaration, PropertyDeclarationId};
use std::borrow::Cow;
use values::specified::Color;
// Reset properties:
const BACKGROUND_PROPS: &'static [LonghandId] = &[
LonghandId::BackgroundColor,
LonghandId::BackgroundImage,
];
const BORDER_PROPS: &'static [LonghandId] = &[
LonghandId::BorderTopColor,
LonghandId::BorderTopStyle,
LonghandId::BorderTopWidth,
LonghandId::BorderRightColor,
LonghandId::BorderRightStyle,
LonghandId::BorderRightWidth,
LonghandId::BorderBottomColor,
LonghandId::BorderBottomStyle,
LonghandId::BorderBottomWidth,
LonghandId::BorderLeftColor,
LonghandId::BorderLeftStyle,
LonghandId::BorderLeftWidth,
LonghandId::BorderTopLeftRadius,
LonghandId::BorderTopRightRadius,
LonghandId::BorderBottomRightRadius,
LonghandId::BorderBottomLeftRadius,
];
const PADDING_PROPS: &'static [LonghandId] = &[
LonghandId::PaddingTop,
LonghandId::PaddingRight,
LonghandId::PaddingBottom,
LonghandId::PaddingLeft,
];
// Inherited properties:
const TEXT_SHADOW_PROPS: &'static [LonghandId] = &[
LonghandId::TextShadow,
];
fn inherited(id: LonghandId) -> bool {
id == LonghandId::TextShadow
}
// Set of properties that we are currently interested in.
let mut properties = LonghandIdSet::new();
if rule_type_mask & NS_AUTHOR_SPECIFIED_BACKGROUND != 0 {
for id in BACKGROUND_PROPS {
properties.insert(*id);
}
}
if rule_type_mask & NS_AUTHOR_SPECIFIED_BORDER != 0 {
for id in BORDER_PROPS {
properties.insert(*id);
}
}
if rule_type_mask & NS_AUTHOR_SPECIFIED_PADDING != 0 {
for id in PADDING_PROPS {
properties.insert(*id);
}
}
if rule_type_mask & NS_AUTHOR_SPECIFIED_TEXT_SHADOW != 0 {
for id in TEXT_SHADOW_PROPS {
properties.insert(*id);
}
}
// If author colors are not allowed, only claim to have author-specified rules if we're
// looking at a non-color property or if we're looking at the background color and it's
// set to transparent.
const IGNORED_WHEN_COLORS_DISABLED: &'static [LonghandId] = &[
LonghandId::BackgroundImage,
LonghandId::BorderTopColor,
LonghandId::BorderRightColor,
LonghandId::BorderBottomColor,
LonghandId::BorderLeftColor,
LonghandId::TextShadow,
];
if !author_colors_allowed {
for id in IGNORED_WHEN_COLORS_DISABLED {
properties.remove(*id);
}
}
let mut element_rule_node = Cow::Borrowed(self);
loop {
// We need to be careful not to count styles covered up by user-important or
// UA-important declarations. But we do want to catch explicit inherit styling in
// those and check our parent element to see whether we have user styling for
// those properties. Note that we don't care here about inheritance due to lack of
// a specified value, since all the properties we care about are reset properties.
//
// FIXME: The above comment is copied from Gecko, but the last sentence is no longer
// correct since 'text-shadow' support was added. This is a bug in Gecko, replicated
// in Stylo for now: https://bugzilla.mozilla.org/show_bug.cgi?id=1363088
let mut inherited_properties = LonghandIdSet::new();
let mut have_explicit_ua_inherit = false;
for node in element_rule_node.self_and_ancestors() {
let declarations = match node.style_source() {
Some(source) => source.read(node.cascade_level().guard(guards)).declarations(),
None => continue
};
// Iterate over declarations of the longhands we care about.
let node_importance = node.importance();
let longhands = declarations.iter().rev()
.filter_map(|&(ref declaration, importance)| {
if importance != node_importance { return None }
match declaration.id() {
PropertyDeclarationId::Longhand(id) => {
Some((id, declaration))
}
_ => None
}
});
match node.cascade_level() {
// Non-author rules:
CascadeLevel::UANormal |
CascadeLevel::UAImportant |
CascadeLevel::UserNormal |
CascadeLevel::UserImportant => {
for (id, declaration) in longhands {
if properties.contains(id) {
// This property was set by a non-author rule. Stop looking for it in
// this element's rule nodes.
properties.remove(id);
// However, if it is inherited, then it might be inherited from an
// author rule from an ancestor element's rule nodes.
if declaration.get_css_wide_keyword() == Some(CSSWideKeyword::Inherit) ||
(declaration.get_css_wide_keyword() == Some(CSSWideKeyword::Unset) &&
inherited(id))
{
have_explicit_ua_inherit = true;
inherited_properties.insert(id);
}
}
}
}
// Author rules:
CascadeLevel::PresHints |
CascadeLevel::AuthorNormal |
CascadeLevel::StyleAttributeNormal |
CascadeLevel::SMILOverride |
CascadeLevel::Animations |
CascadeLevel::AuthorImportant |
CascadeLevel::StyleAttributeImportant |
CascadeLevel::Transitions => {
for (id, declaration) in longhands {
if properties.contains(id) {
if !author_colors_allowed {
if let PropertyDeclaration::BackgroundColor(ref color) = *declaration {
return color.parsed == Color::RGBA(RGBA::transparent())
}
}
return true
}
}
}
}
}
if !have_explicit_ua_inherit { break }
// Continue to the parent element and search for the inherited properties.
element = match element.parent_element() {
Some(parent) => parent,
None => break
};
let parent_data = element.mutate_data().unwrap();
let parent_rule_node = parent_data.styles().primary.rules.clone();
element_rule_node = Cow::Owned(parent_rule_node);
properties = inherited_properties;
}
false
}
/// Returns true if there is either animation or transition level rule.
pub fn has_animation_or_transition_rules(&self) -> bool {
self.self_and_ancestors()
.take_while(|node| node.cascade_level() >= CascadeLevel::SMILOverride)
.any(|node| node.cascade_level().is_animation())
}
}
/// An iterator over a rule node and its ancestors.
#[derive(Clone)]
pub struct SelfAndAncestors<'a> {
current: Option<&'a StrongRuleNode>,
}
impl<'a> Iterator for SelfAndAncestors<'a> {
type Item = &'a StrongRuleNode;
fn next(&mut self) -> Option<Self::Item> {
self.current.map(|node| {
self.current = node.parent();
node
})
}
}
impl Clone for StrongRuleNode {
fn clone(&self) -> Self {
debug!("{:?}: {:?}+", self.ptr(), self.get().refcount.load(Ordering::Relaxed));
debug_assert!(self.get().refcount.load(Ordering::Relaxed) > 0);
self.get().refcount.fetch_add(1, Ordering::Relaxed);
StrongRuleNode {
ptr: self.ptr,
}
}
}
impl Drop for StrongRuleNode {
fn drop(&mut self) {
let node = unsafe { &*self.ptr };
debug!("{:?}: {:?}-", self.ptr(), node.refcount.load(Ordering::Relaxed));
debug!("Dropping node: {:?}, root: {:?}, parent: {:?}",
self.ptr,
node.root.as_ref().map(|r| r.ptr()),
node.parent.as_ref().map(|p| p.ptr()));
let should_drop = {
debug_assert!(node.refcount.load(Ordering::Relaxed) > 0);
node.refcount.fetch_sub(1, Ordering::Relaxed) == 1
};
if !should_drop {
return<|fim▁hole|> debug_assert_eq!(node.first_child.load(Ordering::Acquire),
ptr::null_mut());
if node.parent.is_none() {
debug!("Dropping root node!");
// NOTE: Calling this is fine, because the rule tree root
// destructor needs to happen from the layout thread, where the
// stylist, and hence, the rule tree, is held.
unsafe { self.gc() };
let _ = unsafe { Box::from_raw(self.ptr()) };
return;
}
let root = unsafe { &*node.root.as_ref().unwrap().ptr() };
let free_list = &root.next_free;
        // Fast path: if we're already in the free list, there's no need to
        // spin. Note that this is just a fast path, so it doesn't need a
        // strong memory ordering.
if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
return;
}
// Ensure we "lock" the free list head swapping it with a null pointer.
//
// Note that we use Acquire/Release semantics for the free list
// synchronization, in order to guarantee that the next_free
// reads/writes we do below are properly visible from multiple threads
// racing.
let mut old_head = free_list.load(Ordering::Relaxed);
loop {
match free_list.compare_exchange_weak(old_head,
ptr::null_mut(),
Ordering::Acquire,
Ordering::Relaxed) {
Ok(..) => {
if old_head != ptr::null_mut() {
break;
}
},
Err(new) => old_head = new,
}
}
        // If another thread raced with us while using the same rule node,
        // just store the old head again; we're done.
//
// Note that we can use relaxed operations for loading since we're
// effectively locking the free list with Acquire/Release semantics, and
// the memory ordering is already guaranteed by that locking/unlocking.
if node.next_free.load(Ordering::Relaxed) != ptr::null_mut() {
free_list.store(old_head, Ordering::Release);
return;
}
// Else store the old head as the next pointer, and store ourselves as
// the new head of the free list.
//
// This can be relaxed since this pointer won't be read until GC.
node.next_free.store(old_head, Ordering::Relaxed);
// This can be release because of the locking of the free list, that
// ensures that all the other nodes racing with this one are using
// `Acquire`.
free_list.store(self.ptr(), Ordering::Release);
}
}
impl<'a> From<&'a StrongRuleNode> for WeakRuleNode {
fn from(node: &'a StrongRuleNode) -> Self {
WeakRuleNode {
ptr: node.ptr(),
}
}
}
impl WeakRuleNode {
fn upgrade(&self) -> StrongRuleNode {
debug!("Upgrading weak node: {:p}", self.ptr());
let node = unsafe { &*self.ptr };
node.refcount.fetch_add(1, Ordering::Relaxed);
StrongRuleNode {
ptr: self.ptr,
}
}
fn ptr(&self) -> *mut RuleNode {
self.ptr
}
}
struct RuleChildrenListIter {
current: Option<WeakRuleNode>,
}
impl Iterator for RuleChildrenListIter {
type Item = StrongRuleNode;
fn next(&mut self) -> Option<Self::Item> {
self.current.take().map(|current| {
let current = current.upgrade();
self.current = current.next_sibling();
current
})
}
}<|fim▁end|>
|
}
|
<|file_name|>syslogparser.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
This is a collection of classes that contain data for files from a
sosreport in the directory:
var/log/*
@author : Shane Bradley
@contact : [email protected]
@version : 2.16
@copyright : GPLv2
"""
class VarLogMessagesMsg:
def __init__(self, orginalMessage, timestamp, hostname, messageSender, pid, message):
self.__orginalMessage = orginalMessage
self.__timestamp = timestamp
self.__hostname = hostname
self.__messageSender = messageSender<|fim▁hole|> self.__pid = pid
self.__message = message
def __str__(self):
#return "%s | %s | %s | %s | %s" %(self.getTimestamp(), self.getHostname(), self.getMessageSender(), self.getPid(), self.getMessage())
return self.getOriginalMessage()
def getOriginalMessage(self):
return self.__orginalMessage
def getTimestamp(self):
return self.__timestamp
def getHostname(self):
return self.__hostname
def getMessageSender(self):
return self.__messageSender
def getPid(self):
return self.__pid
def getMessage(self):
return self.__message<|fim▁end|>
|