prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>XQueryContextItemDecl.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013-2017 Grzegorz Ligas <[email protected]> and other contributors
* (see the CONTRIBUTORS file).<|fim▁hole|> *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This is a generated file. Not intended for manual editing.
package org.intellij.xquery.psi;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;
public interface XQueryContextItemDecl extends XQueryPsiElement {
@Nullable
XQueryItemType getItemType();
@Nullable
XQuerySeparator getSeparator();
@Nullable
XQueryVarDefaultValue getVarDefaultValue();
@Nullable
XQueryVarValue getVarValue();
}<|fim▁end|> | *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at |
<|file_name|>qsystemlibrary.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation ([email protected])
**
** This file is part of the QtCore module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial Usage
** Licensees holding valid Qt Commercial licenses may use this file in
** accordance with the Qt Commercial License Agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Nokia.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
** If you have questions regarding the use of this file, please contact
** Nokia at [email protected].
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qsystemlibrary_p.h"
#include <QtCore/qvarlengtharray.h>
#include <QtCore/qstringlist.h>
#include <QtCore/qfileinfo.h>
/*!
\internal
\class QSystemLibrary
The purpose of this class is to load only libraries that are located in
well-known and trusted locations on the filesystem. It does not suffer from
the security problem that QLibrary has, therefore it will never search in
the current directory.
The search order is the same as the order in DLL Safe search mode Windows,
except that we don't search:
* The current directory
* The 16-bit system directory. (normally c:\windows\system)
* The Windows directory. (normally c:\windows)
This means that the effective search order is:
1. Application path.
2. System libraries path.
3. Trying all paths inside the PATH environment variable.
Note, when onlySystemDirectory is true it will skip 1) and 3).
DLL Safe search mode is documented in the "Dynamic-Link Library Search
Order" document on MSDN.
Since library loading code is sometimes shared between Windows and WinCE,
this class can also be used on WinCE. However, its implementation just
calls the LoadLibrary() function. This is ok since it is documented as not
loading from the current directory on WinCE. This behaviour is documented
in the documentation for LoadLibrary for Windows CE at MSDN.
(http://msdn.microsoft.com/en-us/library/ms886736.aspx)
*/
QT_BEGIN_NAMESPACE
#if defined(Q_OS_WINCE)
HINSTANCE QSystemLibrary::load(const wchar_t *libraryName, bool onlySystemDirectory /* = true */)
{
return ::LoadLibrary(libraryName);
}
#else
#if !defined(QT_BOOTSTRAPPED)
extern QString qAppFileName();
#endif
static QString qSystemDirectory()
{
QVarLengthArray<wchar_t, MAX_PATH> fullPath;
UINT retLen = ::GetSystemDirectory(fullPath.data(), MAX_PATH);
if (retLen > MAX_PATH) {
fullPath.resize(retLen);
retLen = ::GetSystemDirectory(fullPath.data(), retLen);
}
// in some rare cases retLen might be 0
return QString::fromWCharArray(fullPath.constData(), int(retLen));
}
HINSTANCE QSystemLibrary::load(const wchar_t *libraryName, bool onlySystemDirectory /* = true */)
{
QStringList searchOrder;
#if !defined(QT_BOOTSTRAPPED)
if (!onlySystemDirectory)
searchOrder << QFileInfo(qAppFileName()).path();<|fim▁hole|>#endif
searchOrder << qSystemDirectory();
if (!onlySystemDirectory) {
const QString PATH(QLatin1String(qgetenv("PATH").constData()));
searchOrder << PATH.split(QLatin1Char(';'), QString::SkipEmptyParts);
}
QString fileName = QString::fromWCharArray(libraryName);
fileName.append(QLatin1String(".dll"));
// Start looking in the order specified
for (int i = 0; i < searchOrder.count(); ++i) {
QString fullPathAttempt = searchOrder.at(i);
if (!fullPathAttempt.endsWith(QLatin1Char('\\'))) {
fullPathAttempt.append(QLatin1Char('\\'));
}
fullPathAttempt.append(fileName);
HINSTANCE inst = ::LoadLibrary((const wchar_t *)fullPathAttempt.utf16());
if (inst != 0)
return inst;
}
return 0;
}
#endif //Q_OS_WINCE
QT_END_NAMESPACE<|fim▁end|> | |
<|file_name|>css_eb_tabmenu.ts<|end_file_name|><|fim▁begin|>import {CONST as C} from "../css_const";
import {ICSSProperties} from "../css_types";
const TabMenuBase: ICSSProperties = {
display: "flex",
flexDirection: "column",
fontFamily: "HelveticaNeue, Helvetica Neue, HelveticaNeueRoman, HelveticaNeue-Roman, Helvetica Neue Roman, TeXGyreHerosRegular, Helvetica, Tahoma, Geneva, Arial",
border: 0,
outline: "none",
userSelect: "none",
listStyle: "none",
overflow: "hidden",
fontSize: C.FontSize,
color: C.TextColor,
boxSizing: "border-box",
};
const tabsStyle: ICSSProperties = {
alignSelf: "flex-start",<|fim▁hole|>};
const tabBase: ICSSProperties = {
padding: "6px 5px 3px 5px",
margin: "auto 5px 0px 5px",
cursor: "pointer",
};
const selectedTab: ICSSProperties = {
borderBottom: `2px solid ${C.UnderlineColor}`,
};
const tabWithTitleStyle: ICSSProperties = {
display: "flex",
alignItems: "center",
justifyContent: "space-between",
paddingLeft: "10px",
background: C.Darken50,
marginBottom: "10px",
};
const notSelectedTab: ICSSProperties = {
};
export { TabMenuBase, selectedTab, notSelectedTab, tabsStyle, tabBase, tabWithTitleStyle };<|fim▁end|> | display: "flex",
flexDirection: "row",
alignItems: "baseline",
overflow: "hidden", |
<|file_name|>_sha512.py<|end_file_name|><|fim▁begin|>"""
This code was Ported from CPython's sha512module.c
"""
import _struct as struct
SHA_BLOCKSIZE = 128
SHA_DIGESTSIZE = 64
def new_shaobject():
return {
'digest': [0]*8,
'count_lo': 0,
'count_hi': 0,
'data': [0]* SHA_BLOCKSIZE,
'local': 0,
'digestsize': 0
}
ROR64 = lambda x, y: (((x & 0xffffffffffffffff) >> (y & 63)) | (x << (64 - (y & 63)))) & 0xffffffffffffffff
Ch = lambda x, y, z: (z ^ (x & (y ^ z)))
Maj = lambda x, y, z: (((x | y) & z) | (x & y))
S = lambda x, n: ROR64(x, n)
R = lambda x, n: (x & 0xffffffffffffffff) >> n
Sigma0 = lambda x: (S(x, 28) ^ S(x, 34) ^ S(x, 39))
Sigma1 = lambda x: (S(x, 14) ^ S(x, 18) ^ S(x, 41))
Gamma0 = lambda x: (S(x, 1) ^ S(x, 8) ^ R(x, 7))
Gamma1 = lambda x: (S(x, 19) ^ S(x, 61) ^ R(x, 6))
def sha_transform(sha_info):
W = []
d = sha_info['data']
for i in xrange(0,16):
W.append( (d[8*i]<<56) + (d[8*i+1]<<48) + (d[8*i+2]<<40) + (d[8*i+3]<<32) + (d[8*i+4]<<24) + (d[8*i+5]<<16) + (d[8*i+6]<<8) + d[8*i+7])
for i in xrange(16,80):
W.append( (Gamma1(W[i - 2]) + W[i - 7] + Gamma0(W[i - 15]) + W[i - 16]) & 0xffffffffffffffff )
ss = sha_info['digest'][:]
def RND(a,b,c,d,e,f,g,h,i,ki):
t0 = (h + Sigma1(e) + Ch(e, f, g) + ki + W[i]) & 0xffffffffffffffff
t1 = (Sigma0(a) + Maj(a, b, c)) & 0xffffffffffffffff
d = (d + t0) & 0xffffffffffffffff
h = (t0 + t1) & 0xffffffffffffffff
return d & 0xffffffffffffffff, h & 0xffffffffffffffff
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],0,0x428a2f98d728ae22)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],1,0x7137449123ef65cd)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],2,0xb5c0fbcfec4d3b2f)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],3,0xe9b5dba58189dbbc)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],4,0x3956c25bf348b538)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],5,0x59f111f1b605d019)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],6,0x923f82a4af194f9b)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],7,0xab1c5ed5da6d8118)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],8,0xd807aa98a3030242)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],9,0x12835b0145706fbe)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],10,0x243185be4ee4b28c)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],11,0x550c7dc3d5ffb4e2)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],12,0x72be5d74f27b896f)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],13,0x80deb1fe3b1696b1)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],14,0x9bdc06a725c71235)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],15,0xc19bf174cf692694)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],16,0xe49b69c19ef14ad2)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],17,0xefbe4786384f25e3)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],18,0x0fc19dc68b8cd5b5)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],19,0x240ca1cc77ac9c65)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],20,0x2de92c6f592b0275)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],21,0x4a7484aa6ea6e483)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],22,0x5cb0a9dcbd41fbd4)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],23,0x76f988da831153b5)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],24,0x983e5152ee66dfab)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],25,0xa831c66d2db43210)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],26,0xb00327c898fb213f)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],27,0xbf597fc7beef0ee4)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],28,0xc6e00bf33da88fc2)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],29,0xd5a79147930aa725)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],30,0x06ca6351e003826f)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],31,0x142929670a0e6e70)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],32,0x27b70a8546d22ffc)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],33,0x2e1b21385c26c926)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],34,0x4d2c6dfc5ac42aed)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],35,0x53380d139d95b3df)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],36,0x650a73548baf63de)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],37,0x766a0abb3c77b2a8)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],38,0x81c2c92e47edaee6)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],39,0x92722c851482353b)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],40,0xa2bfe8a14cf10364)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],41,0xa81a664bbc423001)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],42,0xc24b8b70d0f89791)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],43,0xc76c51a30654be30)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],44,0xd192e819d6ef5218)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],45,0xd69906245565a910)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],46,0xf40e35855771202a)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],47,0x106aa07032bbd1b8)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],48,0x19a4c116b8d2d0c8)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],49,0x1e376c085141ab53)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],50,0x2748774cdf8eeb99)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],51,0x34b0bcb5e19b48a8)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],52,0x391c0cb3c5c95a63)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],53,0x4ed8aa4ae3418acb)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],54,0x5b9cca4f7763e373)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],55,0x682e6ff3d6b2b8a3)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],56,0x748f82ee5defb2fc)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],57,0x78a5636f43172f60)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],58,0x84c87814a1f0ab72)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],59,0x8cc702081a6439ec)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],60,0x90befffa23631e28)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],61,0xa4506cebde82bde9)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],62,0xbef9a3f7b2c67915)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],63,0xc67178f2e372532b)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],64,0xca273eceea26619c)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],65,0xd186b8c721c0c207)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],66,0xeada7dd6cde0eb1e)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],67,0xf57d4f7fee6ed178)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],68,0x06f067aa72176fba)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],69,0x0a637dc5a2c898a6)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],70,0x113f9804bef90dae)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],71,0x1b710b35131c471b)
ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],72,0x28db77f523047d84)
ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],73,0x32caab7b40c72493)
ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],74,0x3c9ebe0a15c9bebc)
ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],75,0x431d67c49c100d4c)
ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],76,0x4cc5d4becb3e42b6)
ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],77,0x597f299cfc657e2a)
ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],78,0x5fcb6fab3ad6faec)
ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],79,0x6c44198c4a475817)
dig = []
for i, x in enumerate(sha_info['digest']):
dig.append( (x + ss[i]) & 0xffffffffffffffff )
sha_info['digest'] = dig
def sha_init():
sha_info = new_shaobject()
sha_info['digest'] = [ 0x6a09e667f3bcc908, 0xbb67ae8584caa73b, 0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1, 0x510e527fade682d1, 0x9b05688c2b3e6c1f, 0x1f83d9abfb41bd6b, 0x5be0cd19137e2179]
sha_info['count_lo'] = 0
sha_info['count_hi'] = 0
sha_info['local'] = 0
sha_info['digestsize'] = 64
return sha_info
def sha384_init():
sha_info = new_shaobject()
sha_info['digest'] = [ 0xcbbb9d5dc1059ed8, 0x629a292a367cd507, 0x9159015a3070dd17, 0x152fecd8f70e5939, 0x67332667ffc00b31, 0x8eb44a8768581511, 0xdb0c2e0d64f98fa7, 0x47b5481dbefa4fa4]
sha_info['count_lo'] = 0
sha_info['count_hi'] = 0
sha_info['local'] = 0
sha_info['digestsize'] = 48
return sha_info
def getbuf(s):
if isinstance(s, str):
return s
elif isinstance(s, unicode):
return str(s)
else:
return buffer(s)
def sha_update(sha_info, buffer):
count = len(buffer)
buffer_idx = 0
clo = (sha_info['count_lo'] + (count << 3)) & 0xffffffff
if clo < sha_info['count_lo']:
sha_info['count_hi'] += 1
sha_info['count_lo'] = clo
sha_info['count_hi'] += (count >> 29)
if sha_info['local']:
i = SHA_BLOCKSIZE - sha_info['local']
if i > count:
i = count
# copy buffer
for x in enumerate(buffer[buffer_idx:buffer_idx+i]):
sha_info['data'][sha_info['local']+x[0]] = struct.unpack('B', x[1])[0]
count -= i
buffer_idx += i
sha_info['local'] += i
if sha_info['local'] == SHA_BLOCKSIZE:
sha_transform(sha_info)
sha_info['local'] = 0
else:
return
while count >= SHA_BLOCKSIZE:
# copy buffer
sha_info['data'] = [struct.unpack('B',c)[0] for c in buffer[buffer_idx:buffer_idx + SHA_BLOCKSIZE]]
count -= SHA_BLOCKSIZE
buffer_idx += SHA_BLOCKSIZE
sha_transform(sha_info)
# copy buffer
pos = sha_info['local']
sha_info['data'][pos:pos+count] = [struct.unpack('B',c)[0] for c in buffer[buffer_idx:buffer_idx + count]]
sha_info['local'] = count
def sha_final(sha_info):
lo_bit_count = sha_info['count_lo']
hi_bit_count = sha_info['count_hi']
count = (lo_bit_count >> 3) & 0x7f
sha_info['data'][count] = 0x80;
count += 1
if count > SHA_BLOCKSIZE - 16:
# zero the bytes in data after the count
sha_info['data'] = sha_info['data'][:count] + ([0] * (SHA_BLOCKSIZE - count))
sha_transform(sha_info)
# zero bytes in data
sha_info['data'] = [0] * SHA_BLOCKSIZE
else:
sha_info['data'] = sha_info['data'][:count] + ([0] * (SHA_BLOCKSIZE - count))
sha_info['data'][112] = 0;
sha_info['data'][113] = 0;
sha_info['data'][114] = 0;
sha_info['data'][115] = 0;
sha_info['data'][116] = 0;
sha_info['data'][117] = 0;
sha_info['data'][118] = 0;
sha_info['data'][119] = 0;
sha_info['data'][120] = (hi_bit_count >> 24) & 0xff
sha_info['data'][121] = (hi_bit_count >> 16) & 0xff
sha_info['data'][122] = (hi_bit_count >> 8) & 0xff
sha_info['data'][123] = (hi_bit_count >> 0) & 0xff
sha_info['data'][124] = (lo_bit_count >> 24) & 0xff
sha_info['data'][125] = (lo_bit_count >> 16) & 0xff
sha_info['data'][126] = (lo_bit_count >> 8) & 0xff
sha_info['data'][127] = (lo_bit_count >> 0) & 0xff
sha_transform(sha_info)
dig = []
for i in sha_info['digest']:
dig.extend([ ((i>>56) & 0xff), ((i>>48) & 0xff), ((i>>40) & 0xff), ((i>>32) & 0xff), ((i>>24) & 0xff), ((i>>16) & 0xff), ((i>>8) & 0xff), (i & 0xff) ])
return ''.join([chr(i) for i in dig])
class sha512(object):
digest_size = digestsize = SHA_DIGESTSIZE
block_size = SHA_BLOCKSIZE
def __init__(self, s=None):
self._sha = sha_init()
if s:
sha_update(self._sha, getbuf(s))
def update(self, s):
sha_update(self._sha, getbuf(s))
<|fim▁hole|> return sha_final(self._sha.copy())[:self._sha['digestsize']]
def hexdigest(self):
return ''.join([('0%x' % ord(i))[-2:] for i in self.digest()])
def copy(self):
new = sha512.__new__(sha512)
new._sha = self._sha.copy()
return new
class sha384(sha512):
digest_size = digestsize = 48
def __init__(self, s=None):
self._sha = sha384_init()
if s:
sha_update(self._sha, getbuf(s))
def copy(self):
new = sha384.__new__(sha384)
new._sha = self._sha.copy()
return new
def test():
import _sha512
a_str = "just a test string"
assert _sha512.sha512().hexdigest() == sha512().hexdigest()
assert _sha512.sha512(a_str).hexdigest() == sha512(a_str).hexdigest()
assert _sha512.sha512(a_str*7).hexdigest() == sha512(a_str*7).hexdigest()
s = sha512(a_str)
s.update(a_str)
assert _sha512.sha512(a_str+a_str).hexdigest() == s.hexdigest()
if __name__ == "__main__":
test()<|fim▁end|> | def digest(self): |
<|file_name|>issue-51191.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
impl Struct {
fn bar(self: &mut Self) {
//~^ WARN function cannot return without recursing
(&mut self).bar();
//~^ ERROR cannot borrow `self` as mutable, as it is not declared as mutable [E0596]
}
fn imm(self) {
(&mut self).bar();
//~^ ERROR cannot borrow `self` as mutable, as it is not declared as mutable [E0596]
}
fn mtbl(mut self) {
(&mut self).bar();
}
fn immref(&self) {
(&mut self).bar();
//~^ ERROR cannot borrow `self` as mutable, as it is not declared as mutable [E0596]
//~^^ ERROR cannot borrow data in a `&` reference as mutable [E0596]
}
fn mtblref(&mut self) {
(&mut self).bar();
//~^ ERROR cannot borrow `self` as mutable, as it is not declared as mutable [E0596]
}
}
fn main () {}<|fim▁end|> |
#![feature(nll)]
struct Struct; |
<|file_name|>MainPage.tsx<|end_file_name|><|fim▁begin|>import * as React from "react";
import {
BackHandler,
DeviceEventEmitter,
Dimensions,
FlatList,
Platform,
SectionList,
StatusBar,
StyleSheet,
Text,
TouchableOpacity,
View,
Geolocation
} from "react-native";
import {Icon, SocialIcon, Button} from "react-native-elements";
import * as Swiper from "react-native-swiper";
const {width} = Dimensions.get("window");
import * as firebase from "firebase";
import {List, ListItem, SearchBar} from "react-native-elements";
import Swipeout from 'react-native-swipeout';
import _ from "lodash";
function getDistanceFromLatLonInKm(lat1, lon1, lat2, lon2) {
let R = 6371; // Radius of the earth in km
let dLat = deg2rad(lat2 - lat1); // deg2rad below
let dLon = deg2rad(lon2 - lon1);
let a =
Math.sin(dLat / 2) * Math.sin(dLat / 2) +
Math.cos(deg2rad(lat1)) * Math.cos(deg2rad(lat2)) *
Math.sin(dLon / 2) * Math.sin(dLon / 2)
;
let c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
let d = R * c; // Distance in km
// console.log("Just calculated a distance.");
return d;
}
function deg2rad(deg) {
return deg * (Math.PI / 180)
}
interface IProps {
navigation: any;
}
interface IState {
name: string,
lat: number,
lng: number,
}
interface ICrowd {
name: string;
key: string;
desc: string;
dis: string
}
// CRASHLYTICS STUFF
const Fabric = require("react-native-fabric");
const {Crashlytics} = Fabric;
Crashlytics.setUserName("erickson");
Crashlytics.setUserEmail("[email protected]");
Crashlytics.setUserIdentifier("1234");
// Crashlytics.setBool('has_posted', 'Tufts University');
const rootRef = firebase.database().ref();
const itemsRef = rootRef.child("users");
const crowdsRef = rootRef.child("crowds");
var dataSource = [
{data: [], header: "Your Crowds"},
{data: [], header: "Explore Crowds"},
];
class Main extends React.Component<IProps> {
public static navigationOptions = ({navigation}) => {
return {
gesturesEnabled: false,
headerLeft: null,
headerTintColor: "#FFFFFF",
headerRight: <Icon name="add" color="#FFFFFF" size={35}
onPress={() => {
if (navigation.state.params.addNewGroup !== undefined) {
navigation.state.params.addNewGroup();
}
}}/>,
headerStyle: {
backgroundColor: "#003EFF",
marginTop: (Platform.OS === 'ios') ? -20 : 0,
},
title: "Crowds",
};
};
constructor(props: any) {
super(props);
this.state = {};
navigator.geolocation.getCurrentPosition((position) => {
this.setState({
lat: position.coords.latitude,
lng: position.coords.longitude
});
this.getGroupInfo();
}, (error) => console.log(new Date(), error));
}
public componentDidMount() {
this.props.navigation.setParams({addNewGroup: this.addNewGroup.bind(this)});
this.checkIfExist();
}
public checkIfExist = () => {
// alert(this.props.navigation.state.params.UUID);
itemsRef.child(this.props.navigation.state.params.UUID).once("value", (snapshot) => {
if (snapshot.val() !== null) {
console.log(snapshot.val());
alert("Welcome Back");
this.setState({name: snapshot.val().fullName});
} else {
// TODO: Add logic for welcoming new user
alert("Welcome New User!!");
this.props.navigation.navigate("NewUser", {UUID: this.props.navigation.state.params.UUID, returnData: this.returnName.bind(this)});
}
});
};
public returnName = (name) => {
this.setState({name: name});
};
public addNewGroup = () => {
this.props.navigation.navigate("NewGroup", {_id: this.props.navigation.state.params.UUID});
};
// TODO: Add flatlist to display all the available groups
// TODO: Add distance back
public getGroupInfo = () => {
crowdsRef.on("child_added", (snapshot) => {
const returnObj = snapshot.val();
let members = returnObj.members;
let distance = getDistanceFromLatLonInKm(returnObj.lat, returnObj.lng, this.state.lat, this.state.lng);
for (let key in members) {
let id = members[key].userID;
if (id == this.props.navigation.state.params.UUID) {
const newCrowd: ICrowd = {name: returnObj.name, key: snapshot.key, desc: returnObj.desc, dis: distance.toFixed(2).toString() + " kms away"};
dataSource[0].data.push(newCrowd);
this.forceUpdate();
return;
}<|fim▁hole|> this.forceUpdate();
}
// console.log(returnObj);
},
);
};
public deleteGroup = (item) => {
crowdsRef.off();
crowdsRef.child(item.item.key).child("members").once('value', (snapshot) => {
let members = snapshot.val();
for (let key in members) {
let id = members[key].userID;
if (id == this.props.navigation.state.params.UUID) {
crowdsRef.child(item.item.key).child("members").child(key).remove(() => {
dataSource = [
{data: [], header: "Your Crowds"},
{data: [], header: "Explore Crowds"},
];
this.getGroupInfo();
});
break;
}
}
});
};
public renderItem = (item) => {
let swipeBtns = [{
text: 'Delete',
backgroundColor: 'red',
underlayColor: 'rgba(0, 0, 0, 1, 0.6)',
onPress: () => {
this.deleteGroup(item)
}
}];
if (item.section.header !== "Explore Crowds") {
return (
<Swipeout right={swipeBtns}>
<TouchableOpacity onPress={() => {
this.navigateToCrowd(item.item.key, item.item.name)
}
}>
<ListItem
roundAvatar
title={item.item.name}
subtitle={
<View style={styles.subtitleView}>
<Text style={styles.ratingText}>{item.item.dis}</Text>
</View>
}
underlayColor={"#FFFFFF"}
badge={{ value: "0 messages", textStyle: { color: 'orange' }, containerStyle: { marginTop: 10 } }}
containerStyle={{backgroundColor: '#FFFFFF'}} />
</TouchableOpacity>
</Swipeout>
);
} else {
return (
<TouchableOpacity onPress={() => {
if (item.section.header === "Explore Crowds") {
for (let i = 0; i < dataSource[1].data.length; i++) {
if (dataSource[1].data[i].key == item.item.key) {
dataSource[0].data.push(dataSource[1].data[i]);
dataSource[1].data.splice(i, 1);
this.forceUpdate();
let crowdRef = crowdsRef.child(item.item.key).child('members');
crowdRef.push({
userID: this.props.navigation.state.params.UUID
});
}
}
}
this.navigateToCrowd(item.item.key, item.item.name)
}
}>
<ListItem
roundAvatar
title={item.item.name}
subtitle={
<View style={styles.subtitleView}>
<Text style={styles.ratingText}>{item.item.desc}</Text>
</View>
}
underlayColor={"#FFFFFF"}
badge={{ value: item.item.dis, textStyle: { color: 'orange' }, containerStyle: { marginTop: 10 } }}
containerStyle={{backgroundColor: '#FFFFFF'}} />
</TouchableOpacity>)
}
};
public renderHeader = (item) => {
return <Text style={styles.header}>{item.section.header}</Text>;
};
public navigateToCrowd = (crowdKey, crowdName) => {
this.props.navigation.navigate("CrowdChat", {
key: crowdKey, crowdName,
UUID: this.props.navigation.state.params.UUID, fullName: this.state.name
});
};
public editInfo = () => {
this.props.navigation.navigate("EditInfo", {
UUID: this.props.navigation.state.params.UUID
});
};
public render() {
return (
<View style={styles.container}>
<View style={{marginBottom: 10, marginTop: -10}}>
<Button
small
backgroundColor="#33A6FF"
onPress={this.editInfo}
icon={{name: 'envira', type: 'font-awesome'}}
title='Edit My Information'/>
</View>
<SectionList
renderItem={this.renderItem}
renderSectionHeader={this.renderHeader}
sections={dataSource}
keyExtractor={(item) => item.name}
/>
</View>
);
}
}
const styles = StyleSheet.create({
container: {
backgroundColor: "#F3FCFF",
flex: 1,
justifyContent: "center",
paddingTop: 40,
},
group: {
alignSelf: "stretch",
backgroundColor: "#fd9d64",
height: 50,
marginBottom: 5,
},
header: {
fontFamily: "sans-serif-thin",
fontSize: 30,
},
wrapper: {},
text: {
color: "#000000",
fontSize: 30,
fontWeight: "bold",
},
textView: {
marginLeft: 40,
marginRight: 40,
},
subtitleView: {
flexDirection: 'row',
paddingLeft: 10,
paddingTop: 5
},
ratingImage: {
height: 19.21,
width: 100
},
ratingText: {
paddingLeft: 10,
color: 'grey'
}
});
export default Main;<|fim▁end|> | }
if (distance <= 1) {
const newCrowd: ICrowd = {name: returnObj.name, key: snapshot.key, desc: returnObj.desc, dis: distance.toFixed(2).toString() + " kms away"};
dataSource[1].data.push(newCrowd); |
<|file_name|>0007_auto__add_field_voterfile_voter_file_content__chg_field_voterfile_vote.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'VoterFile.voter_file_content'
db.add_column('helios_voterfile', 'voter_file_content', self.gf('django.db.models.fields.TextField')(null=True), keep_default=False)
# Changing field 'VoterFile.voter_file'
db.alter_column('helios_voterfile', 'voter_file', self.gf('django.db.models.fields.files.FileField')(max_length=250, null=True))
    def backwards(self, orm):
        """Partially revert: drop voter_file_content, then refuse to restore
        voter_file's NOT NULL constraint (values cannot be reconstructed)."""
        # Deleting field 'VoterFile.voter_file_content'
        db.delete_column('helios_voterfile', 'voter_file_content')
        # User chose to not deal with backwards NULL issues for 'VoterFile.voter_file'
        raise RuntimeError("Cannot reverse this migration. 'VoterFile.voter_file' and its values cannot be restored.")
models = {
'helios_auth.user': {
'Meta': {'unique_together': "(('user_type', 'user_id'),)", 'object_name': 'User'},
'admin_p': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('helios_auth.jsonfield.JSONField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'token': ('helios_auth.jsonfield.JSONField', [], {'null': 'True'}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user_type': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'helios.auditedballot': {
'Meta': {'object_name': 'AuditedBallot'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helios.Election']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'raw_vote': ('django.db.models.fields.TextField', [], {}),
'vote_hash': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'helios.castvote': {
'Meta': {'object_name': 'CastVote'},
'cast_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invalidated_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'quarantined_p': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'released_from_quarantine_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'verified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'vote': ('helios.datatypes.djangofield.LDObjectField', [], {}),
'vote_hash': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'vote_tinyhash': ('django.db.models.fields.CharField', [], {'max_length': '50', 'unique': 'True', 'null': 'True'}),
'voter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helios.Voter']"})
},
'helios.election': {
'Meta': {'object_name': 'Election'},
'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helios_auth.User']"}),
'archived_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'cast_url': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'complaint_period_ends_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'datatype': ('django.db.models.fields.CharField', [], {'default': "'legacy/Election'", 'max_length': '250'}),
'description': ('django.db.models.fields.TextField', [], {}),
'election_type': ('django.db.models.fields.CharField', [], {'default': "'election'", 'max_length': '250'}),
'eligibility': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'encrypted_tally': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'featured_p': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'frozen_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'openreg': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'private_key': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'private_p': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'public_key': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'questions': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'registration_starts_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'result': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'result_proof': ('helios_auth.jsonfield.JSONField', [], {'null': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'tallies_combined_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'tallying_finished_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'tallying_started_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'tallying_starts_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'use_advanced_audit_features': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'use_voter_aliases': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'voters_hash': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'voting_ended_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'voting_ends_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'voting_extended_until': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'voting_started_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'voting_starts_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'})
},
'helios.electionlog': {
'Meta': {'object_name': 'ElectionLog'},<|fim▁hole|> },
'helios.trustee': {
'Meta': {'object_name': 'Trustee'},
'decryption_factors': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'decryption_proofs': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helios.Election']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'pok': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'public_key': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'public_key_hash': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'secret_key': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'helios.voter': {
'Meta': {'unique_together': "(('election', 'voter_login_id'),)", 'object_name': 'Voter'},
'alias': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'cast_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helios.Election']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helios_auth.User']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'vote': ('helios.datatypes.djangofield.LDObjectField', [], {'null': 'True'}),
'vote_hash': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'voter_email': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'voter_login_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'voter_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'voter_password': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'helios.voterfile': {
'Meta': {'object_name': 'VoterFile'},
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helios.Election']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_voters': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'processing_finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'processing_started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'voter_file': ('django.db.models.fields.files.FileField', [], {'max_length': '250', 'null': 'True'}),
'voter_file_content': ('django.db.models.fields.TextField', [], {'null': 'True'})
}
}
complete_apps = ['helios']<|fim▁end|> | 'at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helios.Election']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log': ('django.db.models.fields.CharField', [], {'max_length': '500'}) |
<|file_name|>adding.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
import os
from PyQt5.Qt import Qt, QVBoxLayout, QFormLayout
from calibre.gui2.preferences import ConfigWidgetBase, test_widget, \
CommaSeparatedList, AbortCommit
from calibre.gui2.preferences.adding_ui import Ui_Form
from calibre.utils.config import prefs
from calibre.gui2.widgets import FilenamePattern
from calibre.gui2.auto_add import AUTO_ADDED
from calibre.gui2 import gprefs, choose_dir, error_dialog, question_dialog
class ConfigWidget(ConfigWidgetBase, Ui_Form):
def genesis(self, gui):
self.gui = gui
r = self.register
r('read_file_metadata', prefs)
r('swap_author_names', prefs)
r('add_formats_to_existing', prefs)
r('check_for_dupes_on_ctl', prefs)
r('preserve_date_on_ctl', gprefs)
r('manual_add_auto_convert', gprefs)
choices = [
(_('Ignore duplicate incoming formats'), 'ignore'),
(_('Overwrite existing duplicate formats'), 'overwrite'),
(_('Create new record for each duplicate format'), 'new record')]
r('automerge', gprefs, choices=choices)
r('new_book_tags', prefs, setting=CommaSeparatedList)
r('mark_new_books', prefs)
r('auto_add_path', gprefs, restart_required=True)
r('auto_add_everything', gprefs, restart_required=True)
r('auto_add_check_for_duplicates', gprefs)
r('auto_add_auto_convert', gprefs)
r('auto_convert_same_fmt', gprefs)
self.filename_pattern = FilenamePattern(self)
self.metadata_box.l = QVBoxLayout(self.metadata_box)
self.metadata_box.layout().insertWidget(0, self.filename_pattern)
self.filename_pattern.changed_signal.connect(self.changed_signal.emit)<|fim▁hole|> self.tag_map_rules = self.add_filter_rules = None
self.tag_map_rules_button.clicked.connect(self.change_tag_map_rules)
self.add_filter_rules_button.clicked.connect(self.change_add_filter_rules)
self.tabWidget.setCurrentIndex(0)
self.actions_tab.layout().setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow)
    def change_tag_map_rules(self):
        """Open the tag-mapper rules dialog seeded with any saved rules.

        The edited rules are kept on the instance and only written to
        gprefs when commit() runs.
        """
        from calibre.gui2.tag_mapper import RulesDialog
        d = RulesDialog(self)
        if gprefs.get('tag_map_on_add_rules'):
            d.rules = gprefs['tag_map_on_add_rules']
        if d.exec_() == d.Accepted:
            self.tag_map_rules = d.rules
            self.changed_signal.emit()
    def change_add_filter_rules(self):
        """Open the add-filter rules dialog seeded with any saved rules.

        Like change_tag_map_rules(), edits are staged on the instance and
        persisted in commit().
        """
        from calibre.gui2.add_filters import RulesDialog
        d = RulesDialog(self)
        if gprefs.get('add_filter_rules'):
            d.rules = gprefs['add_filter_rules']
        if d.exec_() == d.Accepted:
            self.add_filter_rules = d.rules
            self.changed_signal.emit()
    def choose_aa_path(self):
        # Let the user pick the auto-add folder; validation happens in commit().
        path = choose_dir(self, 'auto add path choose',
                _('Choose a folder'))
        if path:
            self.opt_auto_add_path.setText(path)
    def initialize(self):
        """Load current preference values into the widgets.

        Signals on the filename pattern widget are blocked while it is
        populated so that initialization does not fire changed_signal.
        """
        ConfigWidgetBase.initialize(self)
        self.filename_pattern.blockSignals(True)
        self.filename_pattern.initialize()
        self.filename_pattern.blockSignals(False)
        self.init_blocked_auto_formats()
        self.opt_automerge.setEnabled(self.opt_add_formats_to_existing.isChecked())
        # None means "not edited this session"; commit() skips saving them.
        self.tag_map_rules = self.add_filter_rules = None
# Blocked auto formats {{{
    def blocked_auto_formats_changed(self, *args):
        # Emit changed_signal only when the checked set actually differs
        # from what is currently stored in gprefs.
        fmts = self.current_blocked_auto_formats
        old = gprefs['blocked_auto_formats']
        if set(fmts) != set(old):
            self.changed_signal.emit()
    def init_blocked_auto_formats(self, defaults=False):
        """Populate the blocked-formats list with one checkable row per
        auto-addable extension, checked if currently blocked.

        :param defaults: when True, check against the shipped defaults
            instead of the user's saved setting (used by restore_defaults).
        """
        if defaults:
            fmts = gprefs.defaults['blocked_auto_formats']
        else:
            fmts = gprefs['blocked_auto_formats']
        viewer = self.opt_blocked_auto_formats
        # Block signals so repopulating does not fire changed handlers.
        viewer.blockSignals(True)
        exts = set(AUTO_ADDED)
        viewer.clear()
        for ext in sorted(exts):
            viewer.addItem(ext)
            item = viewer.item(viewer.count()-1)
            item.setFlags(Qt.ItemIsEnabled|Qt.ItemIsUserCheckable)
            item.setCheckState(Qt.Checked if
                    ext in fmts else Qt.Unchecked)
        viewer.blockSignals(False)
    @property
    def current_blocked_auto_formats(self):
        # List of extensions whose rows are currently checked in the widget.
        fmts = []
        viewer = self.opt_blocked_auto_formats
        for i in range(viewer.count()):
            if viewer.item(i).checkState() == Qt.Checked:
                fmts.append(unicode(viewer.item(i).text()))
        return fmts
# }}}
    def restore_defaults(self):
        """Reset every widget to its shipped default, including the filename
        pattern, blocked formats, and the staged tag-map/add-filter rules
        (set to empty lists so commit() clears the saved prefs)."""
        ConfigWidgetBase.restore_defaults(self)
        self.filename_pattern.initialize(defaults=True)
        self.init_blocked_auto_formats(defaults=True)
        self.tag_map_rules = []
        self.add_filter_rules = []
    def commit(self):
        """Validate and persist all settings.

        Validates the auto-add folder (must exist, be read/writable, and not
        start with '.' or '_'), confirms the destructive auto-add behaviour
        with the user, then saves the filename pattern, blocked formats and
        any staged tag-map/add-filter rules.

        Raises AbortCommit when the auto-add folder is invalid. Returns True
        if a restart-relevant setting changed (ORed with the base class).
        """
        path = unicode(self.opt_auto_add_path.text()).strip()
        if path != gprefs['auto_add_path']:
            if path:
                path = os.path.abspath(path)
                self.opt_auto_add_path.setText(path)
                if not os.path.isdir(path):
                    error_dialog(self, _('Invalid folder'),
                            _('You must specify an existing folder as your '
                                'auto-add folder. %s does not exist.')%path,
                            show=True)
                    raise AbortCommit('invalid auto-add folder')
                if not os.access(path, os.R_OK|os.W_OK):
                    error_dialog(self, _('Invalid folder'),
                            _('You do not have read/write permissions for '
                                'the folder: %s')%path, show=True)
                    raise AbortCommit('invalid auto-add folder')
                if os.path.basename(path)[0] in '._':
                    error_dialog(self, _('Invalid folder'),
                            _('Cannot use folders whose names start with a '
                                'period or underscore: %s')%os.path.basename(path), show=True)
                    raise AbortCommit('invalid auto-add folder')
                if not question_dialog(self, _('Are you sure?'),
                    _('<b>WARNING:</b> Any files you place in %s will be '
                      'automatically deleted after being added to '
                      'calibre. Are you sure?')%path):
                    return
        pattern = self.filename_pattern.commit()
        prefs['filename_pattern'] = pattern
        fmts = self.current_blocked_auto_formats
        old = gprefs['blocked_auto_formats']
        changed = set(fmts) != set(old)
        if changed:
            gprefs['blocked_auto_formats'] = self.current_blocked_auto_formats
        # None means "untouched this session"; empty list means "clear".
        if self.tag_map_rules is not None:
            if self.tag_map_rules:
                gprefs['tag_map_on_add_rules'] = self.tag_map_rules
            else:
                gprefs.pop('tag_map_on_add_rules', None)
        if self.add_filter_rules is not None:
            if self.add_filter_rules:
                gprefs['add_filter_rules'] = self.add_filter_rules
            else:
                gprefs.pop('add_filter_rules', None)
        ret = ConfigWidgetBase.commit(self)
        return changed or ret
    def refresh_gui(self, gui):
        # Ensure worker process reads updated settings
        gui.spare_pool().shutdown()
        # Update rules used in the auto adder
        gui.auto_adder.read_rules()
if __name__ == '__main__':
from calibre.gui2 import Application
app = Application([])
test_widget('Import/Export', 'Adding')<|fim▁end|> | self.auto_add_browse_button.clicked.connect(self.choose_aa_path)
for signal in ('Activated', 'Changed', 'DoubleClicked', 'Clicked'):
signal = getattr(self.opt_blocked_auto_formats, 'item'+signal)
signal.connect(self.blocked_auto_formats_changed) |
<|file_name|>fake.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2009-2014, Treehouse Networks Ltd. New Zealand
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT<|fim▁hole|> * POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Fake Basic Authentication program for Squid.
*
* This code gets the user details and returns OK.
* It is intended for testing use and as a base for further implementation.
*/
#include "squid.h"
#include "helpers/defines.h"
#include <cstring>
/**
* options:
* -d enable debugging.
* -h interface help.
*/
char *program_name = NULL;
/**
 * Print the command-line usage summary for this helper to stderr.
 */
static void
usage(void)
{
    const char *const help_text =
        "Usage: %s [-d] [-v] [-h]\n"
        " -d enable debugging.\n"
        " -h this message\n\n";
    fprintf(stderr, help_text, program_name);
}
/**
 * Parse command-line options.
 *
 * Recognized flags:
 *   -d  enable debug output (sets the global debug_enabled).
 *   -h  print usage and exit successfully.
 * Any unrecognized option is reported as fatal and the process exits
 * with status 1. (Note: usage() also mentions -v, which is not handled
 * here — presumably leftover; confirm against the other helpers.)
 */
static void
process_options(int argc, char *argv[])
{
    int opt;

    opterr = 0;
    while (-1 != (opt = getopt(argc, argv, "hd"))) {
        switch (opt) {
        case 'd':
            debug_enabled = 1;
            break;
        case 'h':
            usage();
            exit(0);
        default:
            /* getopt() returns '?' for an unknown flag and stores the
             * offending character in optopt; report that instead of the
             * literal '?' so the user can see which option was rejected. */
            fprintf(stderr, "%s: FATAL: unknown option: -%c. Exiting\n", program_name, optopt);
            usage();
            exit(1);
        }
    }
}
int
main(int argc, char *argv[])
{
char buf[HELPER_INPUT_BUFFER];
int buflen = 0;
setbuf(stdout, NULL);
setbuf(stderr, NULL);
program_name = argv[0];
process_options(argc, argv);
debug("%s build " __DATE__ ", " __TIME__ " starting up...\n", program_name);
while (fgets(buf, HELPER_INPUT_BUFFER, stdin) != NULL) {
char *p;
if ((p = strchr(buf, '\n')) != NULL) {
*p = '\0'; /* strip \n */
buflen = p - buf; /* length is known already */
} else
buflen = strlen(buf); /* keep this so we only scan the buffer for \0 once per loop */
debug("Got %d bytes '%s' from Squid\n", buflen, buf);
/* send 'OK' result back to Squid */
SEND_OK("");
}
debug("%s build " __DATE__ ", " __TIME__ " shutting down...\n", program_name);
exit(0);
}<|fim▁end|> | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
<|file_name|>order.js<|end_file_name|><|fim▁begin|>"use strict";
var CustomError = require('custom-error-instance');
var inventory = require('./inventory');
var menu = require('./menu');
var OrderError = CustomError('OrderError');
module.exports = function() {
var done = false;
var factory = {};
var store = {};
/**
* Add an item from the menu to the order.
* @param {string} name The name of the menu item to add.
*/
factory.add = function(name) {
var item = menu.get(name);
if (done) throw new OrderError('Order has been closed.', { code: 'EDONE' });
if (!item) throw new OrderError('Menu item does not exist: ' + name, { code: 'EDNE' });
if (!menu.available(name)) throw new OrderError('Insufficient inventory', { code: 'EINV' });
if (!store.hasOwnProperty(name)) store[name] = 0;
store[name]++;
item.ingredients.forEach(function(ingredient) {
inventory.changeQuantity(ingredient.name, -1 * ingredient.quantity);
});
return factory;
};
factory.checkout = function() {
done = true;
console.log('Order complete. Income: $' + factory.cost());
};
factory.cost = function() {
var total = 0;
Object.keys(store).forEach(function(menuItemName) {
var item = menu.get(menuItemName);
if (item) {
total += item.cost;
} else {
factory.remove(menuItemName);
}
});
return total;
};
factory.remove = function(name) {
var item;
if (done) throw new OrderError('Order has been closed.', { code: 'EDONE' });<|fim▁hole|>
store[name]--;
if (store[name] <= 0) delete store[name];
item = menu.get(name);
item.ingredients.forEach(function(ingredient) {
inventory.changeQuantity(ingredient.name, ingredient.quantity);
});
return factory;
};
return factory;
};<|fim▁end|> | if (!store.hasOwnProperty(name)) return; |
<|file_name|>open_cursor.ts<|end_file_name|><|fim▁begin|>import {IStatement} from "./_statement";
import {verNot, seq, optPrio} from "../combi";
import {Select, SQLTarget, SQLHints} from "../expressions";
import {Version} from "../../../version";
import {IStatementRunnable} from "../statement_runnable";
export class OpenCursor implements IStatement {
<|fim▁hole|> const ret = seq("OPEN CURSOR",
optPrio("WITH HOLD"),
SQLTarget,
"FOR",
Select,
optPrio(SQLHints));
return verNot(Version.Cloud, ret);
}
}<|fim▁end|> | public getMatcher(): IStatementRunnable { |
<|file_name|>Feature.py<|end_file_name|><|fim▁begin|>import json
from feature_ramp import redis
class Feature(object):
"""
A class to control ramping features to a percentage of users
without needing to deploy to change the ramp.
Usage:
Feature("on_off_toggled").activate()
Feature("on_off_toggled").is_active
Feature("on_off_toggled").deactivate()
Feature("all_functionality").set_percentage(5)
Feature("all_functionality").add_to_whitelist(identifier)
Feature("all_functionality").is_visible(identifier)
Feature("all_functionality").remove_from_whitelist(identifier)
Feature("all_functionality").deactivate()
Feature("go_away").reset_settings()
Feature("go_away").delete()
"""
REDIS_NAMESPACE = 'feature'
REDIS_VERSION = 1
REDIS_SET_KEY = 'active_features'
    def __init__(self, feature_name, feature_group_name=None, default_percentage=0):
        """Load this feature's saved settings from Redis.

        feature_name -- key under which the settings are stored.
        feature_group_name -- optional group name; when set, related
            features share the same ramp hashing offset (see _is_ramped).
        default_percentage -- ramp used when Redis has no stored value.
        """
        self.feature_name = feature_name # set here so redis_key() works
        self.feature_group_name = feature_group_name
        key = self._get_redis_key()
        redis_raw = redis.get(key)
        redis_data = self._deserialize(redis_raw)
        self.whitelist = redis_data.get('whitelist', [])
        self.blacklist = redis_data.get('blacklist', [])
        self.percentage = redis_data.get('percentage', default_percentage)
    def is_visible(self, identifier):
        """ Returns True if the feature is visible to the given identifier.

        Precedence: whitelist wins over everything (even a blacklist entry),
        then blacklist, then the ramp percentage.
        """
        if self.is_whitelisted(identifier):
            return True
        if self.is_blacklisted(identifier):
            return False
        return self._is_ramped(identifier)
    @property
    def is_active(self):
        """ True when the ramp percentage is above zero.

        Convenience for single-toggle features; unlike is_visible() it does
        not require an identifier and ignores white/blacklists.
        """
        return self.percentage > 0
    def is_whitelisted(self, identifier):
        """ True if the identifier is present in the whitelist. """
        return identifier in self.whitelist
    def is_blacklisted(self, identifier):
        """ True if the identifier is present in the blacklist. """
        return identifier in self.blacklist
    def _is_ramped(self, identifier):
        """
        Checks whether ``identifier`` falls inside this feature's ramp.

        ``identifier`` can be a user_id, email address, etc. White- and
        blacklists are ignored here — use is_visible() for the full check.

        The same identifier is ramped consistently across requests, but
        different features (or feature groups, when feature_group_name is
        set) ramp different slices of users: the feature/group name is
        hashed into an offset that is combined with the identifier's hash
        modulo 100, and the result is compared against the ramp percentage.

        NOTE(review): this relies on hash() of strings being stable across
        processes, which holds on Python 2 (``basestring`` below implies
        py2); Python 3's PYTHONHASHSEED randomization would break the
        cross-process consistency — confirm before porting.
        """
        consistent_offset = hash(self.feature_name) % 100 if not self.feature_group_name else hash(self.feature_group_name)
        identifier = identifier if isinstance(identifier, basestring) else str(identifier)
        ramp_ranking = (consistent_offset + hash(identifier)) % 100
        return ramp_ranking < self.percentage
    def activate(self):
        """ Ramp feature to 100%. Convenience for single-toggle features. """
        self.set_percentage(100)
    def deactivate(self):
        """ Ramp feature to 0%. Convenience for single-toggle features. """
        self.set_percentage(0)
    def reset_settings(self):
        """ Clear all settings: ramp to 0 and empty both lists, then save.

        Unlike delete(), the (now empty) settings remain stored in Redis.
        """
        self.percentage = 0
        self.whitelist = []
        self.blacklist = []
        self._save()
    def delete(self):
        """ Delete the feature's settings from Redis entirely, including its
        membership in the active-features set. """
        key = self._get_redis_key()
        redis.delete(key)
        redis.srem(Feature._get_redis_set_key(), key)
    def set_percentage(self, percentage):
        """ Ramp the feature to the given percentage and persist it.

        Raises ValueError unless the value is between 0 and 100 inclusive.
        The value is truncated to an int (via int(float(...))) because
        _is_ramped() compares against an integer modulus.
        """
        percentage = int(float(percentage))
        if (percentage < 0 or percentage > 100):
            raise ValueError("Percentage is not a valid integer")
        self.percentage = percentage
        self._save()
    def add_to_whitelist(self, identifier):
        """ Always show the feature to this identifier, regardless of ramp.
        Note: duplicates are not deduplicated. """
        self.whitelist.append(identifier)
        self._save()
    def remove_from_whitelist(self, identifier):
        """ Stop force-showing the feature to this identifier (ramp applies
        again). Raises ValueError if the identifier is not whitelisted. """
        self.whitelist.remove(identifier)
        self._save()
    def add_to_blacklist(self, identifier):
        """ Never show the feature to this identifier (unless whitelisted).
        Note: duplicates are not deduplicated. """
        self.blacklist.append(identifier)
        self._save()
    def remove_from_blacklist(self, identifier):
        """ Stop hiding the feature from this identifier (ramp applies
        again). Raises ValueError if the identifier is not blacklisted. """
        self.blacklist.remove(identifier)
        self._save()
    @classmethod
    def all_features(cls, include_data=False):
        """
        Returns a list of all known feature names (everything ever saved
        and not deleted).

        With include_data=True, instead returns a dict of ramping data per
        feature; whitelist/blacklist keys are only present when non-empty:

            { 'feature_name':
                { 'percentage': 50, 'whitelist': [3], 'blacklist': [4,5] }
            }
        """
        key = cls._get_redis_set_key()
        features = [cls._get_feature_name_from_redis_key(rkey) for rkey in redis.smembers(key)]
        if not include_data:
            return features
        # we intentionally do not use pipelining here, since that would lock Redis and
        # this does not need to be atomic
        features_with_data = dict()
        for feature in features:
            data = cls(feature)
            features_with_data[feature] = {'percentage': data.percentage}
            if data.whitelist:
                features_with_data[feature]['whitelist'] = data.whitelist
            if data.blacklist:
                features_with_data[feature]['blacklist'] = data.blacklist
        return features_with_data
    def _save(self):
        """ Persist the feature settings to Redis as a JSON dict, and record
        the key in the active-features set. """
        key = self._get_redis_key()
        value = json.dumps(self._get_redis_data())
        redis.set(key, value)
        # store feature key in a set so we know what's turned on without
        # needing to search all Redis keys with a * which is slow.
        set_key = Feature._get_redis_set_key()
        redis.sadd(set_key, key)
    def _get_redis_key(self):
        """ Namespaced Redis key for this feature:
        '<namespace>.<version>.<feature_name>'. """
        return '{0}.{1}.{2}'.format(Feature.REDIS_NAMESPACE,
                                    Feature.REDIS_VERSION,
                                    self.feature_name)
@classmethod
def _get_feature_name_from_redis_key(self, key):
""" Returns the feature name given the namespaced key used in Redis. """
return key.split('.')[-1]
<|fim▁hole|> """ Returns the key used in Redis to store a feature's information, with namespace. """
return '{0}.{1}'.format(Feature.REDIS_NAMESPACE,
Feature.REDIS_SET_KEY)
    def _get_redis_data(self):
        """ Dictionary representation of this feature's settings, as stored
        in Redis by _save(). """
        return {
            'whitelist': self.whitelist,
            'blacklist': self.blacklist,
            'percentage': self.percentage
        }
    def _deserialize(self, redis_obj):
        """ Parse the serialized JSON settings blob from Redis back into a
        dict. Returns an empty dict when Redis had no entry (None).
        """
        if redis_obj is None:
            return {}
        return json.loads(redis_obj)
def __str__(self):
""" Pretty print the feature and some stats """
stats = self._get_redis_data()
return "Feature: {0}\nwhitelisted: {1}\nblacklisted: {2}\npercentage: {3}\n".format(self.feature_name, stats['whitelist'], stats['blacklist'], stats['percentage'])<|fim▁end|> | @classmethod
def _get_redis_set_key(cls): |
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>use std::io::Read;
use std;
use serde_json;
/// Top-level bot configuration, deserialized from a JSON config file
/// (see `ConfigData::new`).
#[derive(Serialize, Deserialize)]
pub struct ConfigData{
    /// Login username for the chat connection.
    pub username:String,
    /// Auth token; test fixtures use the "oauth:..." form.
    pub password:String,
    /// Channels to join.
    pub channels:Vec<String>,
    /// Identifiers of users allowed to run admin commands
    /// (numeric id strings in the test fixture).
    pub admins:Vec<String>,
    /// Settings for the nyaa feature.
    pub nyaa:Nyaa,
}
/// Nyaa polling settings.
#[derive(Serialize, Deserialize,Clone)]
pub struct Nyaa{
    /// Poll delay; unit is not specified here — confirm at the use site.
    pub delay:u64,
}
#[derive(Debug)]
pub enum ConfigErr{
Parse,
Open,
Read
}<|fim▁hole|> pub fn new(file: &str)->Result<ConfigData,ConfigErr>{
let s = try!(file_to_string(file));
serde_json::from_str(&s).map_err(|_|ConfigErr::Parse)
}
}
fn file_to_string(file: &str)->Result<String,ConfigErr>{
let mut f = try!(std::fs::File::open(file).map_err(|_|ConfigErr::Open));
let mut s = String::new();
match f.read_to_string(&mut s){
Ok(_)=>Ok(s),
Err(_)=>Err(ConfigErr::Read),
}
}
#[cfg(test)]
mod test {
#[test]
fn new_config_data(){
let mut cd = ConfigData::new("tests/config_test.json").unwrap();
assert_eq!("name",cd.username());
assert_eq!("oauth:1234",cd.password());
assert_eq!("___4Header",cd.channels()[0]);
assert_eq!("PagChomp",cd.channels()[1]);
assert_eq!("Keepo",cd.channels()[2]);
assert_eq!(3,cd.channels().len());
assert_eq!("443297327",cd.admins()[0]);
assert_eq!("443417327",cd.admins()[1]);
assert_eq!(2,cd.admins().len());
assert_eq!(100,cd.nyaa().delay().to_owned());
}
}<|fim▁end|> |
impl ConfigData{ |
<|file_name|>ShortUrlsList.test.tsx<|end_file_name|><|fim▁begin|>import { shallow, ShallowWrapper } from 'enzyme';
import { ReactElement } from 'react';
import { Mock } from 'ts-mockery';
import { useNavigate } from 'react-router-dom';
import shortUrlsListCreator from '../../src/short-urls/ShortUrlsList';
import { ShortUrlsOrderableFields, ShortUrl, ShortUrlsOrder } from '../../src/short-urls/data';
import { MercureBoundProps } from '../../src/mercure/helpers/boundToMercureHub';
import { ShortUrlsList as ShortUrlsListModel } from '../../src/short-urls/reducers/shortUrlsList';
import { OrderingDropdown } from '../../src/utils/OrderingDropdown';
import Paginator from '../../src/short-urls/Paginator';
import { ReachableServer } from '../../src/servers/data';
import { Settings } from '../../src/settings/reducers/settings';
// Stub react-router-dom's hooks so the component under test gets
// deterministic navigation/params, and a location whose query string
// preselects a tag ("test tag") and a search term ("example.com").
jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useNavigate: jest.fn().mockReturnValue(jest.fn()),
  useParams: jest.fn().mockReturnValue({}),
  useLocation: jest.fn().mockReturnValue({ search: '?tags=test%20tag&search=example.com' }),
}));
describe('<ShortUrlsList />', () => {
let wrapper: ShallowWrapper;
const ShortUrlsTable = () => null;
const ShortUrlsFilteringBar = () => null;
const listShortUrlsMock = jest.fn();
const navigate = jest.fn();
const shortUrlsList = Mock.of<ShortUrlsListModel>({
shortUrls: {
data: [
Mock.of<ShortUrl>({
shortCode: 'testShortCode',
shortUrl: 'https://www.example.com/testShortUrl',<|fim▁hole|> longUrl: 'https://www.example.com/testLongUrl',
tags: [ 'test tag' ],
}),
],
},
});
const ShortUrlsList = shortUrlsListCreator(ShortUrlsTable, ShortUrlsFilteringBar);
const createWrapper = (defaultOrdering: ShortUrlsOrder = {}) => shallow(
<ShortUrlsList
{...Mock.of<MercureBoundProps>({ mercureInfo: { loading: true } })}
listShortUrls={listShortUrlsMock}
shortUrlsList={shortUrlsList}
selectedServer={Mock.of<ReachableServer>({ id: '1' })}
settings={Mock.of<Settings>({ shortUrlsList: { defaultOrdering } })}
/>,
).dive(); // Dive is needed as this component is wrapped in a HOC
beforeEach(() => {
(useNavigate as any).mockReturnValue(navigate);
wrapper = createWrapper();
});
afterEach(jest.clearAllMocks);
afterEach(() => wrapper?.unmount());
it('wraps expected components', () => {
expect(wrapper.find(ShortUrlsTable)).toHaveLength(1);
expect(wrapper.find(OrderingDropdown)).toHaveLength(1);
expect(wrapper.find(Paginator)).toHaveLength(1);
expect(wrapper.find(ShortUrlsFilteringBar)).toHaveLength(1);
});
it('passes current query to paginator', () => {
expect(wrapper.find(Paginator).prop('currentQueryString')).toEqual('?tags=test%20tag&search=example.com');
});
it('gets list refreshed every time a tag is clicked', () => {
wrapper.find(ShortUrlsTable).simulate('tagClick', 'foo');
wrapper.find(ShortUrlsTable).simulate('tagClick', 'bar');
wrapper.find(ShortUrlsTable).simulate('tagClick', 'baz');
expect(navigate).toHaveBeenCalledTimes(3);
expect(navigate).toHaveBeenNthCalledWith(1, expect.stringContaining(`tags=${encodeURIComponent('test tag,foo')}`));
expect(navigate).toHaveBeenNthCalledWith(2, expect.stringContaining(`tags=${encodeURIComponent('test tag,bar')}`));
expect(navigate).toHaveBeenNthCalledWith(3, expect.stringContaining(`tags=${encodeURIComponent('test tag,baz')}`));
});
it('invokes order icon rendering', () => {
const renderIcon = (field: ShortUrlsOrderableFields) =>
(wrapper.find(ShortUrlsTable).prop('renderOrderIcon') as (field: ShortUrlsOrderableFields) => ReactElement)(field);
expect(renderIcon('visits').props.currentOrder).toEqual({});
wrapper.find(OrderingDropdown).simulate('change', 'visits');
expect(renderIcon('visits').props.currentOrder).toEqual({ field: 'visits' });
wrapper.find(OrderingDropdown).simulate('change', 'visits', 'ASC');
expect(renderIcon('visits').props.currentOrder).toEqual({ field: 'visits', dir: 'ASC' });
});
it('handles order through table', () => {
const orderByColumn: (field: ShortUrlsOrderableFields) => Function = wrapper.find(ShortUrlsTable).prop('orderByColumn');
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({});
orderByColumn('visits')();
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({ field: 'visits', dir: 'ASC' });
orderByColumn('title')();
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({ field: 'title', dir: 'ASC' });
orderByColumn('shortCode')();
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({ field: 'shortCode', dir: 'ASC' });
});
it('handles order through dropdown', () => {
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({});
wrapper.find(OrderingDropdown).simulate('change', 'visits', 'ASC');
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({ field: 'visits', dir: 'ASC' });
wrapper.find(OrderingDropdown).simulate('change', 'shortCode', 'DESC');
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({ field: 'shortCode', dir: 'DESC' });
wrapper.find(OrderingDropdown).simulate('change', undefined, undefined);
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({});
});
it.each([
[ Mock.of<ShortUrlsOrder>({ field: 'visits', dir: 'ASC' }), 'visits', 'ASC' ],
[ Mock.of<ShortUrlsOrder>({ field: 'title', dir: 'DESC' }), 'title', 'DESC' ],
[ Mock.of<ShortUrlsOrder>(), undefined, undefined ],
])('has expected initial ordering', (initialOrderBy, field, dir) => {
const wrapper = createWrapper(initialOrderBy);
expect(wrapper.find(OrderingDropdown).prop('order')).toEqual({ field, dir });
});
});<|fim▁end|> | |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms
from .models import Question, Answer, Categories, Customuser
from django.contrib import auth
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
class add_Question_Form(forms.ModelForm): # just a regular form
question_text = forms.CharField(label=_("question_text"),
widget=forms.Textarea({'cols': '40', 'rows': '5'}))
class Meta:
model = Question
fields = ['question_text', 'upload', <|fim▁hole|>
def clean_text(self):
if question_text == "":
raise forms.ValidationError(
"Need a question",)
else:
return True
def save(self,commit=True):
question = super(add_Question_Form, self).save(commit=False)
question.question_text = self.cleaned_data["question_text"]
if commit:
question.save()
return question
class add_Answer_Form(forms.ModelForm):
class Meta:
model = Answer
fields = ['answer_text']
def clean_text(self):
return self.cleaned_data.get('answer_text')
class UserCreationForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as before, for verification."))
# User's username field and our own 2 fields pass1 and pass2 are used. Later
# we shall set the User's password by user.set_password.
class Meta:
model = Customuser
fields = ("username","email","first_name","department")
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
self.instance.username = self.cleaned_data.get('username')
# To remove invalid passwords like short words, number only cases
auth.password_validation.validate_password(self.cleaned_data.get('password2'), self.instance)
return password2
def save(self, commit=True):
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password2"])
if commit:
user.save()
return user
class AuthenticationForm(forms.Form):
"""
Base class for authenticating users. Extend this to get a form that accepts
username/password logins.
"""
username = forms.CharField( max_length=254,
widget=forms.TextInput( attrs={'autofocus': ''}),
)
password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
error_messages = {
'invalid_login': _("Please enter a correct username and password. "
"Note that both fields may be case-sensitive."),
'inactive': _("This account is inactive."),
}
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if username and password:
self.user_cache = auth.authenticate(username=username,
password=password)
if self.user_cache is None:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
)
else:
return self.cleaned_data
class UserForm(forms.ModelForm):
class Meta:
model = Customuser
fields = ('categories',)
class CustomuserAdminForm(forms.ModelForm):
class Meta:
model = Customuser
fields = ("username","email","first_name","last_name",
'department','groups','is_active','is_staff','is_superuser')
# fields = ['username','password','verify,'first_name','last_name','email','batch',]
################### Django classes ##########################<|fim▁end|> | 'category1','category2',
'category3','category4'] |
<|file_name|>reeval.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import cPickle
import os
import mxnet as mx
from ..logger import logger
from ..config import config, default, generate_config
from ..dataset import *
def reeval(args):
# load imdb
imdb = eval(args.dataset)(args.image_set, args.root_path, args.dataset_path)
# load detection results
cache_file = os.path.join(imdb.cache_path, imdb.name, 'detections.pkl')
with open(cache_file) as f:
detections = cPickle.load(f)
# eval
imdb.evaluate_detections(detections)
def parse_args():
parser = argparse.ArgumentParser(description='imdb test')
# general
parser.add_argument('--network', help='network name', default=default.network, type=str)
parser.add_argument('--dataset', help='dataset name', default=default.dataset, type=str)
args, rest = parser.parse_known_args()
generate_config(args.network, args.dataset)
parser.add_argument('--image_set', help='image_set name', default=default.image_set, type=str)
parser.add_argument('--root_path', help='output data folder', default=default.root_path, type=str)
parser.add_argument('--dataset_path', help='dataset path', default=default.dataset_path, type=str)
# other
parser.add_argument('--no_shuffle', help='disable random shuffle', action='store_true')
args = parser.parse_args()
return args
def main():
args = parse_args()
logger.info('Called with argument: %s' % args)
reeval(args)
if __name__ == '__main__':
main()<|fim▁end|> | # with the License. You may obtain a copy of the License at
# |
<|file_name|>guess_my_number.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# vim: tw=76
import kxg
import random
import pyglet<|fim▁hole|> """
Keep track of the secret number, the range of numbers that haven't been
eliminated yet, and the winner (if there is one).
"""
def __init__(self):
super().__init__()
self.number = 0
self.lower_bound = 0
self.upper_bound = 0
self.winner = 0
class Referee(kxg.Referee):
"""
Pick the secret number.
"""
def on_start_game(self, num_players):
number = random.randint(LOWER_BOUND + 1, UPPER_BOUND - 1)
self >> PickNumber(number, LOWER_BOUND, UPPER_BOUND)
class PickNumber(kxg.Message):
"""
Pick the secret number and communicate that choice to all the clients.
"""
def __init__(self, number, lower_bound, upper_bound):
self.number = number
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def on_check(self, world):
if world.number:
raise kxg.MessageCheck("number already picked")
def on_execute(self, world):
world.number = self.number
world.lower_bound = self.lower_bound
world.upper_bound = self.upper_bound
class GuessNumber(kxg.Message):
"""
Make a guess on behalf of the given player. If the guess is
right, that player wins the game. If the guess is wrong, the
range of numbers that the secret number could be is narrowed
accordingly.
"""
def __init__(self, player, guess):
self.player = player
self.guess = guess
def on_check(self, world):
pass
def on_execute(self, world):
if self.guess == world.number:
world.winner = self.player
world.end_game()
elif self.guess < world.number:
world.lower_bound = max(self.guess, world.lower_bound)
elif self.guess > world.number:
world.upper_bound = min(self.guess, world.upper_bound)
class Gui:
"""
Manage GUI objects like the window, which exist before and after the game
itself.
"""
def __init__(self):
self.width, self.height = 600, 400
self.window = pyglet.window.Window()
self.window.set_size(self.width, self.height)
self.window.set_visible(True)
self.label = pyglet.text.Label(
"",
color=(255, 255, 255, 255),
font_name='Deja Vu Sans', font_size=32,
x=self.width//2, y=self.height//2,
anchor_x='center', anchor_y='center',
)
def on_refresh_gui(self):
self.window.clear()
self.label.draw()
class GuiActor(kxg.Actor):
"""
Show the players the range of numbers that haven't been eliminated yet,
and allow the player to guess what the number is.
"""
def __init__(self):
super().__init__()
self.guess = None
self.prompt = "{0.lower_bound} < {1} < {0.upper_bound}"
def on_setup_gui(self, gui):
self.gui = gui
self.gui.window.set_handlers(self)
def on_draw(self):
self.gui.on_refresh_gui()
def on_mouse_scroll(self, x, y, dx, dy):
# If the user scrolls the mouse wheel, update the guess accordingly.
if self.guess is None:
if dy < 0:
self.guess = self.world.upper_bound
else:
self.guess = self.world.lower_bound
self.guess = sorted([
self.world.lower_bound,
self.guess + dy,
self.world.upper_bound,
])[1]
self.on_update_prompt()
def on_key_press(self, symbol, modifiers):
# If the user types a number, add that digit to the guess.
try:
digit = int(chr(symbol))
self.guess = 10 * (self.guess or 0) + digit
except ValueError:
pass
# If the user hits backspace, remove the last digit from the guess.
if symbol == pyglet.window.key.BACKSPACE:
if self.guess is not None:
guess_str = str(self.guess)[:-1]
self.guess = int(guess_str) if guess_str else None
# If the user hits enter, guess the current number.
if symbol == pyglet.window.key.ENTER:
if self.guess:
self >> GuessNumber(self.id, self.guess)
self.guess = None
self.on_update_prompt()
@kxg.subscribe_to_message(PickNumber)
@kxg.subscribe_to_message(GuessNumber)
def on_update_prompt(self, message=None):
guess_str = '???' if self.guess is None else str(self.guess)
self.gui.label.text = self.prompt.format(self.world, guess_str)
def on_finish_game(self):
self.gui.window.pop_handlers()
if self.world.winner == self.id:
self.gui.label.text = "You won!"
else:
self.gui.label.text = "You lost!"
class AiActor(kxg.Actor):
"""
Wait a random amount of time, then guess a random number within the
remaining range.
"""
def __init__(self):
super().__init__()
self.reset_timer()
def on_update_game(self, dt):
self.timer -= dt
if self.timer < 0:
lower_bound = self.world.lower_bound + 1
upper_bound = self.world.upper_bound - 1
guess = random.randint(lower_bound, upper_bound)
self >> GuessNumber(self.id, guess)
self.reset_timer()
def reset_timer(self):
self.timer = random.uniform(1, 3)
if __name__ == '__main__':
kxg.quickstart.main(World, Referee, Gui, GuiActor, AiActor)<|fim▁end|> |
LOWER_BOUND, UPPER_BOUND = 0, 5000
class World(kxg.World): |
<|file_name|>support.rs<|end_file_name|><|fim▁begin|>extern crate hwloc;
use hwloc::Topology;
/// Example on how to check for specific topology support of a feature.
fn main() {
let topo = Topology::new();
// Check if Process Binding for CPUs is supported
println!("CPU Binding (current process) supported: {}", topo.support().cpu().set_current_process());<|fim▁hole|> println!("CPU Binding (any thread) supported: {}", topo.support().cpu().set_thread());
// Check if Memory Binding is supported
println!("Memory Binding supported: {}", topo.support().memory().set_current_process());
// Debug Print all the Support Flags
println!("All Flags:\n{:?}", topo.support());
}<|fim▁end|> | println!("CPU Binding (any process) supported: {}", topo.support().cpu().set_process());
// Check if Thread Binding for CPUs is supported
println!("CPU Binding (current thread) supported: {}", topo.support().cpu().set_current_thread()); |
<|file_name|>UserSearchLoader.java<|end_file_name|><|fim▁begin|>/*
* Twidere - Twitter client for Android
*
* Copyright (C) 2012 Mariotaku Lee <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.pahans.kichibichiya.loader;
import java.util.ArrayList;
import java.util.List;<|fim▁hole|>
import com.pahans.kichibichiya.model.ParcelableUser;
import twitter4j.ResponseList;
import twitter4j.Twitter;
import twitter4j.TwitterException;
import twitter4j.User;
import android.content.Context;
public class UserSearchLoader extends ParcelableUsersLoader {
private final String mQuery;
private final int mPage;
private final long mAccountId;
public UserSearchLoader(final Context context, final long account_id, final String query, final int page,
final List<ParcelableUser> users_list) {
super(context, account_id, users_list);
mQuery = query;
mPage = page;
mAccountId = account_id;
}
@Override
public List<ParcelableUser> getUsers() throws TwitterException {
final Twitter twitter = getTwitter();
if (twitter == null) return null;
final ResponseList<User> users = twitter.searchUsers(mQuery, mPage);
final List<ParcelableUser> result = new ArrayList<ParcelableUser>();
final int size = users.size();
for (int i = 0; i < size; i++) {
result.add(new ParcelableUser(users.get(i), mAccountId, (mPage - 1) * 20 + i));
}
return result;
}
}<|fim▁end|> | |
<|file_name|>TaskSetTest.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var chai = require("chai");
var Q = require("q");
var Defer = require("ts-promises/Defer");
var Tasks = require("../task/Tasks");
var TaskSet = require("../task/TaskSet");
var asr = chai.assert;
suite("TaskSet", function TaskSetTest() {
function createTaskRes1(res) {
return Q.resolve(res);
}
function createTaskRes2(res, waitMillis) {
var dfd = Q.defer();
setTimeout(function () { return dfd.resolve(res); }, waitMillis);
return dfd.promise;
}
function createTaskErr1(res) {
return Q.reject(res);
}
function createTaskErr2(res, waitMillis) {
var dfd = Q.defer();
setTimeout(function () { return dfd.reject(res); }, waitMillis);
return dfd.promise;
}
function startTasks(taskSet, namePrefix, resMsgPrefix, count) {
var tasks = [];
for (var i = 0; i < count; i++) {
var rr = Math.random() < 0.5 ? createTaskRes1(resMsgPrefix + i) : createTaskRes2(resMsgPrefix + i, Math.round(Math.random() * 10));
var t = taskSet.startTask(namePrefix + i, rr);
tasks.push(t);
}
return tasks;
}
test("README-example", function README_exampleTest(done) {
var taskSet = new TaskSet(null, function (name) { return console.log("success:", name); }, function (name) { return console.log("failure:", name); });
taskSet.startTask("task-1", Tasks.startTask("a", createTaskRes1("result a")).getPromise());
taskSet.startTask("task-2", createTaskRes1("result b"));
taskSet.startTask("task-3", createTaskRes1("error c"));
Q.all(taskSet.getPromises())
.then(function (results) { console.log("done:", results); done(); }, function (err) { console.error("error:", err); done(); });
});
test("task-set-function-and-promise-task-mix", function taskSetFunctionAndPromiseTaskMixTest(done) {
var taskSet = new TaskSet(null, null, null);
taskSet.startTask("task-1", createTaskRes1("result a"));
taskSet.startTask("task-2", Tasks.startTask("b", createTaskRes1("result b")).getPromise());
taskSet.startTask("task-3", createTaskRes1("result c"));
taskSet.startTask("task-4", createTaskErr1("error d"));
asr.equal(taskSet.getTasks().size, 4);
asr.equal(taskSet.getPromises().length, taskSet.getTasks().size);
Q.all(taskSet.getPromises()).done(function (results) {
done("unexpected success");
}, function (err) {
Q.all(taskSet.getPromises()).done(function (results) {<|fim▁hole|> asr.equal(err, "error d");
done();
}, function (err2) {
done("unexpected 2nd error");
});
});
});
test("task-set-success", function taskSetSuccessTest(done) {
// test success
var taskSet = new TaskSet();
var task1 = taskSet.startTask("task-res-1", createTaskRes1("success-1"));
var task2 = taskSet.startTask("task-res-2", createTaskRes2("success-2", 10));
Defer.when(taskSet.getPromises()).then(function (res) {
asr.deepEqual(res.sort(), ["success-1", "success-2"]);
asr.equal(task1.getResult(), "success-1");
asr.equal(task1.state, "COMPLETED");
asr.equal(task2.getResult(), "success-2");
asr.equal(task2.state, "COMPLETED");
done();
}, function (err) {
done("unexpected error");
});
});
test("task-set-failure", function taskSetFailureTest(done) {
// test success
var taskSet = new TaskSet();
var task1 = taskSet.startTask("task-res-1", createTaskRes1("success-1"));
var task2 = taskSet.startTask("task-res-2", createTaskRes2("success-2", 10));
var task3 = taskSet.startTask("task-err-1", createTaskErr1("error-1"));
var task4 = taskSet.startTask("task-err-2", createTaskErr2("error-2", 10));
Defer.when(taskSet.getPromises()).then(function (res) {
done("unexpected success");
}, function (err) {
asr.isTrue(err == "error-1" || err == "error-2");
done();
});
});
test("task-drop-completed", function taskSetDropCompleted(done) {
var taskSet = new TaskSet();
// test 4 tasks, limit 3, drop 25%
taskSet.maxCompletedTasks = 3;
taskSet.dropCompletedTasksPercentage = 0.25;
startTasks(taskSet, "task-res-", "success-", 4);
Defer.when(taskSet.getPromises()).then(function (res) {
asr.equal(taskSet.getCompletedTasks().length, 2);
taskSet.clearCompletedTasks();
}).then(function () {
// test 6 tasks, limit 5, drop 60%
taskSet.maxCompletedTasks = 5;
taskSet.dropCompletedTasksPercentage = 0.6;
startTasks(taskSet, "task-res-", "success-", 6);
return Defer.when(taskSet.getPromises());
}).then(function (res) {
asr.equal(taskSet.getCompletedTasks().length, 2);
taskSet.clearCompletedTasks();
done();
}, function (err) {
done("unexpected error");
});
});
});<|fim▁end|> | var allResults = taskSet.getCompletedTasks().map(function (t) { return t.task.getResult(); });
asr.deepEqual(allResults.sort(), ["result a", "result b", "result c"]); |
<|file_name|>icon.demo.tsx<|end_file_name|><|fim▁begin|>import * as React from "react";
import styled from "styled-components";
import { Themer } from "@patternplate/component-utility";
import { Icon, symbols } from "./icon";
function DemoIcon(props) {
return (
<StyledDemoIcon title={props.title}>
<Icon symbol={props.symbol} />
</StyledDemoIcon>
);
}
export default function IconDemo() {
return (
<Themer spacing={true}>
<StyledIconDemo><|fim▁hole|> {symbols.map(symbol => (
<DemoIcon key={symbol} symbol={symbol} title={symbol} />
))}
</StyledIconDemo>
</Themer>
);
}
const TITLE = props => props.title;
const StyledDemoIcon = styled.div`
display: flex;
align-items: center;
justify-content: center;
position: relative;
margin: 10px;
color: ${props => props.theme.colors.color};
&::after {
content: '${TITLE}';
display: block;
font-family: sans-serif;
margin-left: 10px;
}
`;
const StyledIconDemo = styled.div`
display: flex;
flex-wrap: wrap;
color: ${props => props.theme.colors.color};
`;<|fim▁end|> | |
<|file_name|>0010_ignoredevent.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-11 11:22
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('events', '0009_event_title'),
]
<|fim▁hole|> operations = [
migrations.CreateModel(
name='IgnoredEvent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ignored', models.BooleanField(default=True)),
('since', models.DateTimeField(auto_now=True)),
('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='events.Event')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]<|fim▁end|> | |
<|file_name|>Camera.java<|end_file_name|><|fim▁begin|>package org.onebeartoe.electronics.photorama;
import java.io.File;
/**
* A call to the setCameraOutputPath() method is needed after the object is
* instantiated.
*
* @author Roberto Marquez
*/
public abstract class Camera
{
protected PhotoramaModes mode;
protected String outputPath;
protected TimeLapseConfiguration configuration;
protected boolean timeLapseOn;
public PhotoramaModes getMode()
{
return mode;
}
public String getOutputPath()
{
<|fim▁hole|> public abstract long getTimelapse();
public FrequencyUnits getTimelapseUnit()
{
return configuration.unit;
}
public void setMode(PhotoramaModes mode)
{
this.mode = mode;
stopTimelapse();
}
/**
* Make sure the path has a path separator character at the end.
* @param path
*/
public void setOutputPath(String path) throws Exception
{
File outdir = new File(path);
if( ! outdir.exists() )
{
// the output directory does not exist,
// try creating it
boolean dirCreated = outdir.mkdirs();
if( !dirCreated )
{
String message = "could not set output directory: " + path;
throw new Exception(message);
}
}
outputPath = path;
}
public void setTimelapse(long delay, FrequencyUnits unit)
{
configuration.delay = delay;
configuration.unit = unit;
if(timeLapseOn)
{
startTimelapse();
}
}
public abstract void startTimelapse();
public abstract void stopTimelapse();
public abstract void takeSnapshot();
}<|fim▁end|> | return outputPath;
}
|
<|file_name|>result_test.go<|end_file_name|><|fim▁begin|>package sqlmock
import (
"fmt"
"testing"
)
// used for examples
var mock = &sqlmock{}
func ExampleNewErrorResult() {
db, mock, _ := New()
result := NewErrorResult(fmt.Errorf("some error"))
mock.ExpectExec("^INSERT (.+)").WillReturnResult(result)
res, _ := db.Exec("INSERT something")
_, err := res.LastInsertId()
fmt.Println(err)
// Output: some error
}
func ExampleNewResult() {
var lastInsertID, affected int64
result := NewResult(lastInsertID, affected)
mock.ExpectExec("^INSERT (.+)").WillReturnResult(result)
fmt.Println(mock.ExpectationsWereMet())
// Output: there is a remaining expectation which was not matched: ExpectedExec => expecting Exec which:
// - matches sql: '^INSERT (.+)'
// - is without arguments
// - should return Result having:
// LastInsertId: 0
// RowsAffected: 0
}
func TestShouldReturnValidSqlDriverResult(t *testing.T) {
result := NewResult(1, 2)
id, err := result.LastInsertId()
if 1 != id {
t.Errorf("Expected last insert id to be 1, but got: %d", id)
}
if err != nil {
t.Errorf("expected no error, but got: %s", err)
}
affected, err := result.RowsAffected()
if 2 != affected {
t.Errorf("Expected affected rows to be 2, but got: %d", affected)
}
if err != nil {
t.Errorf("expected no error, but got: %s", err)
}
}
<|fim▁hole|> t.Error("expected error, but got none")
}
_, err = result.RowsAffected()
if err == nil {
t.Error("expected error, but got none")
}
}<|fim▁end|> | func TestShouldReturnErroeSqlDriverResult(t *testing.T) {
result := NewErrorResult(fmt.Errorf("some error"))
_, err := result.LastInsertId()
if err == nil { |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Timesheet on Issues',
'version': '1.0',
'category': 'Project Management',
'description': """
This module adds the Timesheet support for the Issues/Bugs Management in Project.
=================================================================================
Worklogs can be maintained to signify number of hours spent by users to handle an issue.
""",
'website': 'https://www.odoo.com/page/project-management',
'depends': [
'project_issue',
'hr_timesheet_sheet',
],
'data': [
'project_issue_sheet_view.xml',
'security/ir.model.access.csv',
'security/portal_security.xml',<|fim▁hole|> ],
'demo': [],
'installable': True,
'auto_install': False,
}<|fim▁end|> | |
<|file_name|>multi_map_lock_codec.py<|end_file_name|><|fim▁begin|>from hazelcast.serialization.bits import *
from hazelcast.protocol.builtin import FixSizedTypesCodec
from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer
from hazelcast.protocol.builtin import StringCodec
from hazelcast.protocol.builtin import DataCodec
# hex: 0x021000
_REQUEST_MESSAGE_TYPE = 135168
# hex: 0x021001
_RESPONSE_MESSAGE_TYPE = 135169
_REQUEST_THREAD_ID_OFFSET = REQUEST_HEADER_SIZE
_REQUEST_TTL_OFFSET = _REQUEST_THREAD_ID_OFFSET + LONG_SIZE_IN_BYTES
_REQUEST_REFERENCE_ID_OFFSET = _REQUEST_TTL_OFFSET + LONG_SIZE_IN_BYTES
_REQUEST_INITIAL_FRAME_SIZE = _REQUEST_REFERENCE_ID_OFFSET + LONG_SIZE_IN_BYTES
<|fim▁hole|>def encode_request(name, key, thread_id, ttl, reference_id):
buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE)
FixSizedTypesCodec.encode_long(buf, _REQUEST_THREAD_ID_OFFSET, thread_id)
FixSizedTypesCodec.encode_long(buf, _REQUEST_TTL_OFFSET, ttl)
FixSizedTypesCodec.encode_long(buf, _REQUEST_REFERENCE_ID_OFFSET, reference_id)
StringCodec.encode(buf, name)
DataCodec.encode(buf, key, True)
return OutboundMessage(buf, True)<|fim▁end|> | |
<|file_name|>bitcoin_eo.ts<|end_file_name|><|fim▁begin|><TS language="eo" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Dekstre-klaku por redakti adreson aŭ etikedon</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Krei novan adreson</translation>
</message>
<message>
<source>&New</source>
<translation>&Nova</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopii elektitan adreson al la tondejo</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopii</translation>
</message>
<message>
<source>C&lose</source>
<translation>&Fermi</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Forigi la elektitan adreson el la listo</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Eksporti la datumojn el la aktuala langeto al dosiero</translation>
</message>
<message>
<source>&Export</source>
<translation>&Eksporti</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Forigi</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Elekti la adreson por sendi monerojn</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Elekti la adreson ricevi monerojn kun</translation>
</message>
<message>
<source>C&hoose</source>
<translation>&Elekti</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Sendaj adresoj</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Ricevaj adresoj</translation>
</message>
<message>
<source>These are your Globaltoken addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Jen viaj Bitmon-adresoj por sendi pagojn. Zorge kontrolu la sumon kaj la alsendan adreson antaŭ ol sendi.</translation>
</message>
<message>
<source>These are your Globaltoken addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Jen viaj bitmonaj adresoj por ricevi pagojn. Estas konsilinde uzi apartan ricevan adreson por ĉiu transakcio.</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Kopii Adreson</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Kopii &Etikedon</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Redakti</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Eksporti Adresliston</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Perkome disigita dosiero (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
        <translation>Eksportado malsukcesis</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Okazis eraron dum konservo de adreslisto al %1. Bonvolu provi denove.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Dialogo pri pasfrazo</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Enigu pasfrazon</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nova pasfrazo</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Ripetu la novan pasfrazon</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Enigu novan pasfrazon por la monujo.<br/>Bonvolu uzi pasfrazon kun <b>almenaŭ 10 hazardaj signoj</b>, aŭ <b>almenaŭ ok aŭ pli vortoj</b>.</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Ĉifri la monujon</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ĉi tiu operacio bezonas vian monujan pasfrazon, por malŝlosi la monujon.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Malŝlosi la monujon</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ĉi tiu operacio bezonas vian monujan pasfrazon, por malĉifri la monujon.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Malĉifri la monujon</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Ŝanĝi la pasfrazon</translation>
</message>
<message>
<source>Enter the old passphrase and new passphrase to the wallet.</source>
<translation>Tajpu la malnovan pasvorton kaj la novan pasvorton por la monujo.</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR GLOBALTOKENS</b>!</source>
        <translation>Atentu! Se vi ĉifras vian monujon kaj perdas la pasfrazon, vi <b>PERDOS LA TUTON DE VIA BITMONO</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Ĉu vi certas, ke vi volas ĉifri la monujon?</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>La monujo estas ĉifrita</translation>
</message>
<message>
<source>%1 will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your globaltokens from being stolen by malware infecting your computer.</source>
<translation>%1 nun fermiĝos por fini la ĉifradon. Memoru, ke eĉ ĉifrado ne protektas kontraŭ ĉiu atako, ekz. se viruso infektus vian komputilon.</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>GRAVE: antaŭaj sekur-kopioj de via monujo-dosiero estas forigindaj kiam vi havas nove kreitan ĉifritan monujo-dosieron. Pro sekureco, antaŭaj kopioj de la neĉifrita dosiero ne plu funkcios tuj kiam vi ekuzos la novan ĉifritan dosieron.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Ĉifrado de la monujo fiaskis</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Subskribi &mesaĝon...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Sinkronigante kun reto...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Superrigardo</translation>
</message>
<message>
<source>Node</source>
<translation>Nodo</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Vidigi ĝeneralan superrigardon de la monujo</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transakcioj</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Esplori historion de transakcioj</translation>
</message>
<message>
<source>E&xit</source>
<translation>&Eliri</translation>
</message>
<message>
<source>Quit application</source>
<translation>Eliri la aplikaĵon</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Pri &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Vidigi informojn pri Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Agordoj...</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>Ĉifri &Monujon...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Krei sekurkopion de la monujo...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>Ŝanĝi &Pasfrazon...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Sendaj adresoj...</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Ricevaj adresoj...</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Malfermi &URI-on...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Reindeksado de blokoj sur disko...</translation>
</message>
<message>
<source>Send coins to a Globaltoken address</source>
<translation>Sendi monon al Bitmon-adreso</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Krei alilokan sekurkopion de monujo</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Ŝanĝi la pasfrazon por ĉifri la monujon</translation>
</message>
<message>
<source>&Debug window</source>
<translation>Sen&cimiga fenestro</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Malfermi konzolon de sencimigo kaj diagnozo</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Kontroli mesaĝon...</translation>
</message>
<message>
<source>Globaltoken</source>
        <translation>Globaltoken</translation>
</message>
<message>
<source>Wallet</source>
<translation>Monujo</translation>
</message>
<message>
<source>&Send</source>
<translation>&Sendi</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Ricevi</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Montri / Kaŝi</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Montri aŭ kaŝi la ĉefan fenestron</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Ĉifri la privatajn ŝlosilojn de via monujo</translation>
</message>
<message>
<source>Sign messages with your Globaltoken addresses to prove you own them</source>
<translation>Subskribi mesaĝojn per via Bitmon-adresoj por pravigi, ke vi estas la posedanto</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Globaltoken addresses</source>
<translation>Kontroli mesaĝojn por kontroli ĉu ili estas subskribitaj per specifaj Bitmon-adresoj</translation>
</message>
<message>
<source>&File</source>
<translation>&Dosiero</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Agordoj</translation>
</message>
<message>
<source>&Help</source>
<translation>&Helpo</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Langeto-breto</translation>
</message>
<message>
<source>Request payments (generates QR codes and globaltoken: URIs)</source>
<translation>Peti pagon (kreas QR-kodojn kaj URI-ojn kun prefikso globaltoken:)</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Vidigi la liston de uzitaj sendaj adresoj kaj etikedoj</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Vidigi la liston de uzitaj ricevaj adresoj kaj etikedoj</translation>
</message>
<message>
<source>Open a globaltoken: URI or payment request</source>
<translation>Malfermi globaltoken:-URI-on aŭ pagpeton</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>&Komandliniaj agordaĵoj</translation>
</message>
<message>
<source>%1 behind</source>
<translation>mankas %1</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Lasta ricevita bloko kreiĝis antaŭ %1.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Transakcioj por tio ankoraŭ ne videblas.</translation>
</message>
<message>
<source>Error</source>
<translation>Eraro</translation>
</message>
<message>
<source>Warning</source>
<translation>Averto</translation>
</message>
<message>
<source>Information</source>
<translation>Informoj</translation>
</message>
<message>
<source>Up to date</source>
<translation>Ĝisdata</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Ĝisdatigante...</translation>
</message>
<message>
<source>Date: %1
</source>
<translation>Dato: %1
</translation>
</message>
<message>
<source>Amount: %1
</source>
<translation>Sumo: %1
</translation>
</message>
<message>
<source>Type: %1
</source>
<translation>Tipo: %1
</translation>
</message>
<message>
<source>Label: %1
</source>
<translation>Etikedo: %1
</translation>
</message>
<message>
<source>Address: %1
</source>
<translation>Adreso: %1
</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Sendita transakcio</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Envenanta transakcio</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Monujo estas <b>ĉifrita</b> kaj aktuale <b>malŝlosita</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Monujo estas <b>ĉifrita</b> kaj aktuale <b>ŝlosita</b></translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Quantity:</source>
<translation>Kvanto:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bajtoj:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Sumo:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Krompago:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Polvo:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Post krompago:</translation>
</message>
<message>
<source>Change:</source>
<translation>Restmono:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>(mal)elekti ĉion</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Arboreĝimo</translation>
</message>
<message>
<source>List mode</source>
<translation>Listreĝimo</translation>
</message>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>Received with label</source>
<translation>Ricevita kun etikedo</translation>
</message>
<message>
<source>Received with address</source>
<translation>Ricevita kun adreso</translation>
</message>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Konfirmoj</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Konfirmita</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Redakti Adreson</translation>
</message>
<message>
<source>&Label</source>
<translation>&Etikedo</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>La etikedo ligita al tiu ĉi adreslistero</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>La adreso ligita al tiu ĉi adreslistero. Eblas modifi tion nur por sendaj adresoj.</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adreso</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Kreiĝos nova dosierujo por la datumoj.</translation>
</message>
<message>
<source>name</source>
<translation>nomo</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Tiu dosierujo jam ekzistas. Aldonu %1 si vi volas krei novan dosierujon ĉi tie.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Vojo jam ekzistas, kaj ne estas dosierujo.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Ne eblas krei dosierujon por datumoj ĉi tie.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>version</source>
<translation>versio</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Komandliniaj agordaĵoj</translation>
</message>
<message>
<source>Usage:</source>
<translation>Uzado:</translation>
</message>
<message>
<source>command-line options</source>
<translation>komandliniaj agordaĵoj</translation>
</message>
<message>
<source>UI Options:</source>
<translation>Uzantinterfaco ebloj:</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Bonvenon</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Uzi la defaŭltan dosierujon por datumoj</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Uzi alian dosierujon por datumoj:</translation>
</message>
<message>
<source>Bitcoin</source>
<translation>Bitmono</translation>
</message>
<message>
<source>Globaltoken</source>
<translation>Globaltoken</translation>
</message>
<message>
<source>Error</source>
<translation>Eraro</translation>
</message>
<message numerus="yes">
<source>%n GB of free space available</source>
        <translation><numerusform>%n gigabajto de libera loko disponebla</numerusform><numerusform>%n gigabajtoj de libera loko disponeblaj</numerusform></translation>
</message>
</context>
<context>
<name>ModalOverlay</name>
<message>
<source>Form</source>
<translation>Formularo</translation>
</message>
<message>
<source>Last block time</source>
<translation>Horo de la lasta bloko</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>Malfermi URI-on</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Malfermi pagpeton el URI aŭ dosiero</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Elektu la dosieron de la pagpeto</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Agordaĵoj</translation>
</message>
<message>
<source>&Main</source>
<translation>Ĉ&efa</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Dosiergrando de &datumbasa kaŝmemoro</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Reagordi ĉion al defaŭlataj valoroj.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&Rekomenci agordadon</translation>
</message>
<message>
<source>&Network</source>
<translation>&Reto</translation>
</message>
<message>
<source>W&allet</source>
<translation>Monujo</translation>
</message>
<message>
<source>Expert</source>
<translation>Fakulo</translation>
</message>
<message>
<source>Automatically open the Globaltoken client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Aŭtomate malfermi la kursilan pordon por Bitmono. Tio funkcias nur se via kursilo havas la UPnP-funkcion, kaj se tiu ĉi estas ŝaltita.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Mapigi pordon per &UPnP</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>Prokurila &IP:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Pordo:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>la pordo de la prokurilo (ekz. 9050)</translation>
</message>
<message>
<source>IPv4</source>
<translation>IPv4</translation>
</message>
<message>
<source>IPv6</source>
<translation>IPv6</translation>
</message>
<message>
<source>&Window</source>
<translation>&Fenestro</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Montri nur sistempletan piktogramon post minimumigo de la fenestro.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimumigi al la sistempleto anstataŭ al la taskopleto</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimumigi je fermo</translation>
</message>
<message>
<source>&Display</source>
<translation>&Aspekto</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>&Lingvo de la fasado:</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Unuo por vidigi sumojn:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Elekti la defaŭltan manieron por montri bitmonajn sumojn en la interfaco, kaj kiam vi sendos bitmonon.</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Ĉu montri detalan adres-regilon, aŭ ne.</translation>
</message>
<message>
<source>&OK</source>
<translation>&Bone</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Nuligi</translation>
</message>
<message>
<source>default</source>
<translation>defaŭlta</translation>
</message>
<message>
<source>none</source>
<translation>neniu</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Konfirmi reŝargo de agordoj</translation>
</message>
<message>
<source>Error</source>
<translation>Eraro</translation>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>La prokurila adreso estas malvalida.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formularo</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Globaltoken network after a connection is established, but this process has not completed yet.</source>
        <translation>Eblas, ke la informoj videblaj ĉi tie estas eksdataj. Via monujo aŭtomate sinkroniĝas kun la Globaltoken-reto kiam ili konektiĝas, sed tiu procezo ankoraŭ ne finfariĝis.</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>via aktuala elspezebla saldo</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>la sumo de transakcioj ankoraŭ ne konfirmitaj, kiuj ankoraŭ ne elspezeblas</translation>
</message>
<message>
<source>Immature:</source>
<translation>Nematura:</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
<translation>Minita saldo, kiu ankoraŭ ne maturiĝis</translation>
</message>
<message>
<source>Balances</source>
<translation>Saldoj</translation>
</message>
<message>
<source>Total:</source>
<translation>Totalo:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>via aktuala totala saldo</translation>
</message>
<message>
<source>Spendable:</source>
<translation>Elspezebla:</translation>
</message>
<message>
<source>Recent transactions</source>
<translation>Lastaj transakcioj</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>User Agent</source>
<translation>Uzanto Agento</translation>
</message>
<message>
<source>Sent</source>
<translation>Sendita</translation>
</message>
<message>
<source>Received</source>
<translation>Ricevita</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 m</translation>
</message>
<message>
<source>None</source>
<translation>Neniu</translation>
</message>
<message>
<source>N/A</source>
<translation>neaplikebla</translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 kaj %2</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
<message>
<source>unknown</source>
<translation>nekonata</translation>
</message>
</context>
<context>
<name>QObject::QObject</name>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>&Konservi Bildon...</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>N/A</source>
<translation>neaplikebla</translation>
</message>
<message>
<source>Client version</source>
<translation>Versio de kliento</translation>
</message>
<message>
<source>&Information</source>
<translation>&Informoj</translation>
</message>
<message>
<source>Debug window</source>
<translation>Sencimiga fenestro</translation>
</message>
<message>
<source>General</source>
<translation>Ĝenerala</translation>
</message>
<message>
<source>Startup time</source>
<translation>Horo de lanĉo</translation>
</message>
<message>
<source>Network</source>
<translation>Reto</translation>
</message>
<message>
<source>Name</source>
<translation>Nomo</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Nombro de konektoj</translation>
</message>
<message>
<source>Block chain</source>
<translation>Blokĉeno</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Aktuala nombro de blokoj</translation>
</message>
<message>
<source>Received</source>
<translation>Ricevita</translation>
</message>
<message>
<source>Sent</source>
<translation>Sendita</translation>
</message>
<message>
<source>&Peers</source>
<translation>&Samuloj</translation>
</message>
<message>
<source>Banned peers</source>
<translation>Malpermesita samuloj.</translation>
</message>
<message>
<source>Version</source>
<translation>Versio</translation>
</message>
<message>
<source>User Agent</source>
<translation>Uzanto Agento</translation>
</message>
<message>
<source>Services</source>
<translation>Servoj</translation>
</message>
<message>
<source>Last block time</source>
<translation>Horo de la lasta bloko</translation>
</message>
<message>
<source>&Open</source>
<translation>&Malfermi</translation>
</message>
<message>
<source>&Console</source>
<translation>&Konzolo</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>&Reta Trafiko</translation>
</message>
<message>
<source>Totals</source>
<translation>Totaloj</translation>
</message>
<message>
<source>In:</source>
<translation>En:</translation>
</message>
<message>
<source>Out:</source>
<translation>El:</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Sencimiga protokoldosiero</translation>
</message>
<message>
<source>Clear console</source>
<translation>Malplenigi konzolon</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Kvanto:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etikedo:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Mesaĝo:</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Malplenigi ĉiujn kampojn de la formularo.</translation>
</message>
<message>
<source>Clear</source>
<translation>Forigi</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Peti pagon</translation>
</message>
<message>
<source>Show</source>
<translation>Vidigi</translation>
</message>
<message>
<source>Remove</source>
<translation>Forigi</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopii etikedon</translation>
</message>
<message>
<source>Copy message</source>
<translation>Kopiu mesaĝon</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopii sumon</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>QR-kodo</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Kopii &URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Kopii &Adreson</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>&Konservi Bildon...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Peti pagon al %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Paginformoj</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Message</source>
<translation>Mesaĝo</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>La rezultanta URI estas tro longa. Provu malplilongigi la tekston de la etikedo / mesaĝo.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Eraro de kodigo de URI en la QR-kodon.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Message</source>
<translation>Mesaĝo</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(neniu mesaĝo)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Sendi Bitmonon</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>Monregaj Opcioj</translation>
</message>
<message>
<source>Inputs...</source>
<translation>Enigoj...</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Nesufiĉa mono!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Kvanto:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bajtoj:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Sumo:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Krompago:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Post krompago:</translation>
</message>
<message>
<source>Change:</source>
<translation>Restmono:</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Krompago:</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Sendi samtempe al pluraj ricevantoj</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Aldoni &Ricevonton</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Malplenigi ĉiujn kampojn de la formularo.</translation>
</message>
<message>
<source>Dust:</source>
<translation>Polvo:</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Forigi Ĉion</translation>
</message>
<message>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Konfirmi la sendon</translation>
</message>
<message>
<source>S&end</source>
<translation>Ŝendi</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopii kvanton</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopii sumon</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopii krompagon</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopii post krompago</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopii bajtojn</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Kopii polvon</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopii restmonon</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 al %2</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Ĉu vi certas, ke vi volas sendi?</translation>
</message>
<message>
<source>added as transaction fee</source>
<translation>aldonita kiel krompago</translation>
</message>
<message>
<source>Total Amount %1</source>
<translation>Totala sumo %1</translation>
</message>
<message>
<source>or</source>
<translation>aŭ</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Konfirmi sendon de bitmono</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>La pagenda sumo devas esti pli ol 0.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>La sumo estas pli granda ol via saldo.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>La sumo kun la %1 krompago estas pli granda ol via saldo.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>Kreo de transakcio fiaskis!</translation>
</message>
<message>
<source>Warning: Invalid Globaltoken address</source>
<translation>Averto: Nevalida Globaltoken-adreso</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>&Sumo:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>&Ricevonto:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etikedo:</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Elektu la jam uzitan adreson</translation>
</message>
<message>
<source>This is a normal payment.</source>
<translation>Tio estas normala pago.</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Alglui adreson de tondejo</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Forigu ĉi tiun enskribon</translation>
</message>
<message>
<source>Message:</source>
<translation>Mesaĝo:</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Tajpu etikedon por tiu ĉi adreso por aldoni ĝin al la listo de uzitaj adresoj</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Pagi Al:</translation>
</message>
<message>
<source>Memo:</source>
<translation>Memorando:</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Tajpu etikedon por tiu ĉi adreso kaj aldonu ĝin al via adresaro</translation>
</message>
</context>
<context>
<name>SendConfirmationDialog</name>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Ne sistemfermu ĝis ĉi tiu fenestro malaperas.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Subskriboj - Subskribi / Kontroli mesaĝon</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>&Subskribi Mesaĝon</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Elektu la jam uzitan adreson</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Alglui adreson de tondejo</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Tajpu la mesaĝon, kiun vi volas sendi, cîi tie</translation>
</message>
<message>
<source>Signature</source>
<translation>Subskribo</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopii la aktualan subskribon al la tondejo</translation>
</message>
<message>
<source>Sign the message to prove you own this Globaltoken address</source>
<translation>Subskribi la mesaĝon por pravigi, ke vi estas la posedanto de tiu Bitmon-adreso</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Subskribi &Mesaĝon</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Reagordigi ĉiujn prisubskribajn kampojn</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Forigi Ĉion</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&Kontroli Mesaĝon</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified Globaltoken address</source>
<translation>Kontroli la mesaĝon por pravigi, ke ĝi ja estas subskribita per la specifa Bitmon-adreso</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>Kontroli &Mesaĝon</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Reagordigi ĉiujn prikontrolajn kampojn</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Klaku "Subskribi Mesaĝon" por krei subskribon</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>La adreso, kiun vi enmetis, estas nevalida.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Bonvolu kontroli la adreson kaj reprovi.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>La adreso, kiun vi enmetis, referencas neniun ŝlosilon.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Malŝloso de monujo estas nuligita.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>La privata ŝlosilo por la enigita adreso ne disponeblas.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>Subskribo de mesaĝo fiaskis.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Mesaĝo estas subskribita.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>Ne eblis malĉifri la subskribon.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Bonvolu kontroli la subskribon kaj reprovu.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>La subskribo ne kongruis kun la mesaĝ-kompilaĵo.</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Kontrolo de mesaĝo malsukcesis.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Mesaĝo sukcese kontrolita.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Malferma ĝis %1</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/senkonekte</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/nekonfirmite</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 konfirmoj</translation>
</message>
<message>
<source>Status</source>
<translation>Stato</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, ankoraŭ ne elsendita sukcese</translation>
</message>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Source</source>
<translation>Fonto</translation>
</message>
<message>
<source>Generated</source>
<translation>Kreita</translation>
</message>
<message>
<source>From</source>
<translation>De</translation>
</message>
<message>
<source>unknown</source>
<translation>nekonata</translation>
</message>
<message>
<source>To</source>
<translation>Al</translation>
</message>
<message>
<source>own address</source>
<translation>propra adreso</translation>
</message>
<message>
<source>label</source>
<translation>etikedo</translation>
</message>
<message>
<source>Credit</source>
<translation>Kredito</translation>
</message>
<message>
<source>not accepted</source>
<translation>ne akceptita</translation>
</message>
<message>
<source>Debit</source>
<translation>Debeto</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Krompago</translation>
</message>
<message>
<source>Net amount</source>
<translation>Neta sumo</translation>
</message>
<message>
<source>Message</source>
<translation>Mesaĝo</translation>
</message>
<message>
<source>Comment</source>
<translation>Komento</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>Transakcia ID</translation>
</message>
<message>
<source>Merchant</source>
<translation>Vendisto</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Kreitaj moneroj devas esti maturaj je %1 blokoj antaŭ ol eblas elspezi ilin. Kiam vi generis tiun ĉi blokon, ĝi estis elsendita al la reto por aldono al la blokĉeno. Se tiu aldono malsukcesas, ĝia stato ŝanĝiĝos al "neakceptita" kaj ne eblos elspezi ĝin. Tio estas malofta, sed povas okazi se alia bloko estas kreita je preskaŭ la sama momento kiel la via.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Sencimigaj informoj</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transakcio</translation>
</message>
<message>
<source>Inputs</source>
<translation>Enigoj</translation>
</message>
<message>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<source>true</source>
<translation>vera</translation>
</message>
<message>
<source>false</source>
<translation>malvera</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Tiu ĉi panelo montras detalan priskribon de la transakcio</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Open until %1</source>
<translation>Malferma ĝis %1</translation>
</message>
<message>
<source>Offline</source>
<translation>Senkonekte</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Nekonfirmita</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Konfirmita (%1 konfirmoj)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Tiun ĉi blokon ne ricevis ajna alia nodo, kaj ĝi verŝajne ne akceptiĝos!</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>Kreita sed ne akceptita</translation>
</message>
<message>
<source>Received with</source>
<translation>Ricevita kun</translation>
</message>
<message>
<source>Received from</source>
<translation>Ricevita de</translation>
</message>
<message>
<source>Sent to</source>
<translation>Sendita al</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Pago al vi mem</translation>
</message>
<message>
<source>Mined</source>
<translation>Minita</translation>
</message>
<message>
<source>(n/a)</source>
<translation>neaplikebla</translation>
</message>
<message>
<source>(no label)</source>
<translation>(neniu etikedo)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transakcia stato. Ŝvebi super tiu ĉi kampo por montri la nombron de konfirmoj.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Dato kaj horo kiam la transakcio alvenis.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Tipo de transakcio.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Sumo elprenita de aŭ aldonita al la saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Ĉiuj</translation>
</message>
<message>
<source>Today</source>
<translation>Hodiaŭ</translation>
</message>
<message>
<source>This week</source>
<translation>Ĉi-semajne</translation>
</message>
<message>
<source>This month</source>
<translation>Ĉi-monate</translation>
</message>
<message>
<source>Last month</source>
<translation>Pasintmonate</translation>
</message>
<message>
<source>This year</source>
<translation>Ĉi-jare</translation>
</message>
<message>
<source>Range...</source>
<translation>Intervalo...</translation>
</message>
<message>
<source>Received with</source>
<translation>Ricevita kun</translation>
</message>
<message>
<source>Sent to</source>
<translation>Sendita al</translation>
</message>
<message>
<source>To yourself</source>
<translation>Al vi mem</translation>
</message>
<message>
<source>Mined</source>
<translation>Minita</translation>
</message>
<message>
<source>Other</source>
<translation>Aliaj</translation>
</message>
<message>
<source>Min amount</source>
<translation>Minimuma sumo</translation>
</message>
<message>
<source>Copy address</source>
<translation>Kopii adreson</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopii etikedon</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopii sumon</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopii transakcian ID-on</translation>
</message>
<message>
<source>Edit label</source>
<translation>Redakti etikedon</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Montri detalojn de transakcio</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Perkome disigita dosiero (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Konfirmita</translation>
</message>
<message>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>ekspotado malsukcesinta</translation>
</message>
<message>
<source>Range:</source>
<translation>Intervalo:</translation>
</message>
<message>
<source>to</source>
<translation>al</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Sendi Bitmonon</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Eksporti</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Eksporti la datumojn el la aktuala langeto al dosiero</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Krei sekurkopion de monujo</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Monuj-datumoj (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Malsukcesis sekurkopio</translation>
</message>
<message>
<source>Backup Successful</source>
<translation>Sukcesis krei sekurkopion</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Options:</source>
<translation>Agordoj:</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Specifi dosieron por datumoj</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Konekti al nodo por ricevi adresojn de samtavolanoj, kaj malkonekti</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Specifi vian propran publikan adreson</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Akcepti komandojn JSON-RPC kaj el komandlinio</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Ruli fone kiel demono kaj akcepti komandojn</translation>
</message>
<message>
<source>Globaltoken Core</source>
<translation>Kerno de Globaltoken</translation>
</message>
<message>
<source>Bitcoin Core</source>
<translation>Kerno de Bitmono</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Bindi al donita adreso kaj ĉiam aŭskulti per ĝi. Uzu la formaton [gastigo]:pordo por IPv6</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Plenumi komandon kiam monuja transakcio ŝanĝiĝas (%s en cmd anstataŭiĝas per TxID)</translation><|fim▁hole|> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Tiu ĉi estas antaŭeldona testa versio - uzu laŭ via propra risko - ne uzu por minado aŭ por aplikaĵoj por vendistoj</translation>
</message>
<message>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation>Averto: La reto ne tute konsentas! Kelkaj minantoj ŝajne spertas problemojn aktuale.</translation>
</message>
<message>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Averto: ŝajne ni ne tute konsentas kun niaj samtavolanoj! Eble vi devas ĝisdatigi vian klienton, aŭ eble aliaj nodoj faru same.</translation>
</message>
<message>
<source><category> can be:</source>
<translation><category> povas esti:</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Akcepti konektojn el ekstere (defaŭlte: 1 se ne estas -proxy nek -connect)</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Blok-kreaj agordaĵoj:</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>Difektita blokdatumbazo trovita</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Ĉu vi volas rekonstrui la blokdatumbazon nun?</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Eraro dum pravalorizado de blokdatumbazo</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Eraro dum pravalorizado de monuj-datumbaza ĉirkaŭaĵo %s!</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Eraro dum ŝargado de blokdatumbazo</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Eraro dum malfermado de blokdatumbazo</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Eraro: restas malmulte da diskospaco!</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Ne sukcesis aŭskulti ajnan pordon. Uzu -listen=0 se tion vi volas.</translation>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>Geneza bloko aŭ netrovita aŭ neĝusta. Ĉu eble la datadir de la reto malĝustas?</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>Nesufiĉa nombro de dosierpriskribiloj disponeblas.</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Specifi monujan dosieron (ene de dosierujo por datumoj)</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Kontrolado de blokoj...</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Monujaj opcioj:</translation>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>Plenumi komandon kiam rilata alerto riceviĝas, aŭ kiam ni vidas tre longan forkon (%s en cms anstataŭiĝas per mesaĝo)</translation>
</message>
<message>
<source>Information</source>
<translation>Informoj</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Sendi spurajn/sencimigajn informojn al la konzolo anstataŭ al dosiero debug.log</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Malpligrandigi la sencimigan protokol-dosieron kiam kliento lanĉiĝas (defaŭlte: 1 kiam mankas -debug)</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>Subskriba transakcio fiaskis</translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>ĝi estas eksperimenta programo</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Transakcia sumo tro malgranda</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Transakcio estas tro granda</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Salutnomo por konektoj JSON-RPC</translation>
</message>
<message>
<source>Warning</source>
<translation>Averto</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation>Vi devas rekontrui la datumbazon kun -reindex por ŝanĝi -txindex</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Pasvorto por konektoj JSON-RPC</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Plenumi komandon kiam plej bona bloko ŝanĝiĝas (%s en cmd anstataŭiĝas per bloka haketaĵo)</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permesi DNS-elserĉojn por -addnote, -seednote kaj -connect</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Nekonata reto specifita en -onlynet: '%s'</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Nesufiĉa mono</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Ŝarĝante blok-indekson...</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Ŝargado de monujo...</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>Ne eblas malpromocii monujon</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Reskanado...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Ŝargado finiĝis</translation>
</message>
<message>
<source>Error</source>
<translation>Eraro</translation>
</message>
</context>
</TS><|fim▁end|> | </message>
<message> |
<|file_name|>JoinGameResponse.java<|end_file_name|><|fim▁begin|>package com.glazebrook.tictactoe.responses;
import com.fasterxml.jackson.annotation.JsonProperty;
import javax.validation.constraints.NotNull;
import java.util.UUID;
public class JoinGameResponse {
@JsonProperty
@NotNull
private UUID gameId;
@JsonProperty
@NotNull
private UUID playerId;
@JsonProperty
@NotNull
private UUID token;
public JoinGameResponse() {
}
public JoinGameResponse(UUID gameId, UUID playerId, UUID token) {
this.gameId = gameId;
this.playerId = playerId;
this.token = token;
}
public UUID getGameId() {
return gameId;
}
public void setGameId(UUID gameId) {
this.gameId = gameId;
}
public UUID getToken() {
return token;<|fim▁hole|>
public void setToken(UUID token) {
this.token = token;
}
public UUID getPlayerId() {
return playerId;
}
public void setPlayerId(UUID playerId) {
this.playerId = playerId;
}
}<|fim▁end|> | } |
<|file_name|>ClientManager.java<|end_file_name|><|fim▁begin|>package de.superioz.moo.network.client;
import de.superioz.moo.network.server.NetworkServer;
import lombok.Getter;
import de.superioz.moo.api.collection.UnmodifiableList;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* The hub for storing the client connections
*
* @see MooClient
*/
public final class ClientManager {
/**
* It is only necessary to only have one ClientHub instance so this is the static access for this
* class after it has been initialised by the network server
*/
@Getter
private static ClientManager instance;
/**
* The connected {@link MooClient}'s by the type of them
*/
private ConcurrentMap<ClientType, Map<InetSocketAddress, MooClient>> clientsByType = new ConcurrentHashMap<>();
/**
* The ram usage of every daemon (as socketaddress) as percent
*/
@Getter
private Map<InetSocketAddress, Integer> daemonRamUsage = new HashMap<>();
/**
* The netty server the clients are connected to
*/
private NetworkServer netServer;
public ClientManager(NetworkServer netServer) {
instance = this;
this.netServer = netServer;
for(ClientType clientType : ClientType.values()) {
clientsByType.put(clientType, new HashMap<>());
}
}
/**
* Updates the ramUsage of a daemon
*
* @param address The address of the daemon/server
* @param ramUsage The ramUsage in per cent
*/
public void updateRamUsage(InetSocketAddress address, int ramUsage) {
Map<InetSocketAddress, MooClient> daemonClients = clientsByType.get(ClientType.DAEMON);
if(!daemonClients.containsKey(address)) return;
daemonRamUsage.put(address, ramUsage);
}
/**
* Gets the best available daemon where the ram usage is the lowest
*
* @return The client
*/
public MooClient getBestDaemon() {
Map<InetSocketAddress, MooClient> daemonClients = clientsByType.get(ClientType.DAEMON);
if(daemonClients.isEmpty()) return null;
int lowesRamUsage = -1;
MooClient lowestRamUsageClient = null;
for(InetSocketAddress address : daemonClients.keySet()) {
if(!daemonRamUsage.containsKey(address)) continue;
MooClient client = daemonClients.get(address);
int ramUsage = daemonRamUsage.get(address);
if((lowesRamUsage == -1 || lowesRamUsage > ramUsage)
&& !((lowesRamUsage = ramUsage) >= (Integer) netServer.getConfig().get("slots-ram-usage"))) {
lowestRamUsageClient = client;
}
}
return lowestRamUsageClient;
}
/**
* Adds a client to the hub
*
* @param cl The client
* @return The size of the map
*/
public int add(MooClient cl) {
Map<InetSocketAddress, MooClient> map = clientsByType.get(cl.getType());
map.put(cl.getAddress(), cl);
if(cl.getType() == ClientType.DAEMON) {
daemonRamUsage.put(cl.getAddress(), 0);
}
return map.size();
}
/**
* Removes a client from the hub
*
* @param address The address (the key)
* @return This
*/
public ClientManager remove(InetSocketAddress address) {
for(Map<InetSocketAddress, MooClient> m : clientsByType.values()) {
m.entrySet().removeIf(entry -> entry.getKey().equals(address));
}
return this;
}
public ClientManager remove(MooClient cl) {
return remove(cl.getAddress());
}
/**
* Gets a client from address
*
* @param address The address
* @return The client
*/
public MooClient get(InetSocketAddress address) {
MooClient client = null;
for(Map.Entry<ClientType, Map<InetSocketAddress, MooClient>> entry : clientsByType.entrySet()) {
if(entry.getValue().containsKey(address)) {
client = entry.getValue().get(address);
}
}
return client;
}
public boolean contains(InetSocketAddress address) {
return get(address) != null;
}
/**
* Get clients (from type)
*
* @param type The type
* @return The list of clients (unmodifiable)
*/
public UnmodifiableList<MooClient> getClients(ClientType type) {
Map<InetSocketAddress, MooClient> map = clientsByType.get(type);
return new UnmodifiableList<>(map.values());
}
/**
* Get all clients inside one list
*
* @return The list of clients
*/
public List<MooClient> getAll() {
List<MooClient> clients = new ArrayList<>();
for(ClientType clientType : ClientType.values()) {
clients.addAll(getClients(clientType));
}
return clients;
}
public UnmodifiableList<MooClient> getMinecraftClients() {
List<MooClient> clients = new ArrayList<>();
clients.addAll(getClients(ClientType.PROXY));
clients.addAll(getClients(ClientType.SERVER));
return new UnmodifiableList<>(clients);
}<|fim▁hole|> return getClients(ClientType.SERVER);
}
public UnmodifiableList<MooClient> getProxyClients() {
return getClients(ClientType.PROXY);
}
public UnmodifiableList<MooClient> getCustomClients() {
return getClients(ClientType.CUSTOM);
}
public UnmodifiableList<MooClient> getDaemonClients() {
return getClients(ClientType.DAEMON);
}
}<|fim▁end|> |
public UnmodifiableList<MooClient> getServerClients() { |
<|file_name|>eventtarget.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::utils::WrapperCache;
pub struct EventTarget {
wrapper: WrapperCache
}
pub impl EventTarget {
fn new() -> ~EventTarget {<|fim▁hole|> ~EventTarget {
wrapper: WrapperCache::new()
}
}
}<|fim▁end|> | |
<|file_name|>eip-1344.spec.ts<|end_file_name|><|fim▁begin|>import tape from 'tape'
import { BN } from 'ethereumjs-util'
import Common, { Chain, Hardfork } from '@ethereumjs/common'
import VM from '../../../src'
import { ERROR } from '../../../src/exceptions'
const testCases = [<|fim▁hole|>]
// CHAINID PUSH8 0x00 MSTORE8 PUSH8 0x01 PUSH8 0x00 RETURN
const code = ['46', '60', '00', '53', '60', '01', '60', '00', 'f3']
tape('Istanbul: EIP-1344', async (t) => {
t.test('CHAINID', async (st) => {
const runCodeArgs = {
code: Buffer.from(code.join(''), 'hex'),
gasLimit: new BN(0xffff),
}
for (const testCase of testCases) {
const { chain, hardfork } = testCase
const common = new Common({ chain, hardfork })
const vm = new VM({ common })
try {
const res = await vm.runCode(runCodeArgs)
if (testCase.err) {
st.equal(res.exceptionError?.error, testCase.err)
} else {
st.assert(res.exceptionError === undefined)
st.assert(testCase.chainId.eq(new BN(res.returnValue)))
}
} catch (e: any) {
st.fail(e.message)
}
}
st.end()
})
})<|fim▁end|> | { chain: Chain.Mainnet, hardfork: Hardfork.Istanbul, chainId: new BN(1) },
{ chain: Chain.Mainnet, hardfork: Hardfork.Constantinople, err: ERROR.INVALID_OPCODE },
{ chain: Chain.Ropsten, hardfork: Hardfork.Istanbul, chainId: new BN(3) }, |
<|file_name|>edit_case.js<|end_file_name|><|fim▁begin|>/**
* Created by Paul on 24/01/2015.
*/
var install_url = $("#install_url").val();
$("#updateCase").click(function () {
var case_id = $('#case_id').val();
var case_priority = $('#case_priority').val();
var case_status = $('#case_status').val();
var case_type = $('#case_type').val();
var assignedTo = $('#assignedTo').val();
var case_subject = $('#case_subject').val();
var case_description = $('#case_description').val();
var case_resolution = $('#case_resolution').val();
$.ajax({
url: install_url + '/web/front.php/tickets/update_case_ajax',
type: 'POST',
data: "id=" + case_id + "&case_priority=" + case_priority +
"&case_status=" + case_status + "&case_type=" + case_type + "&assignedTo=" + assignedTo + "&case_subject=" + case_subject
+ "&case_description=" + case_description + "&case_resolution=" + case_resolution,
dataType: 'json',
success: function (data) {
if (data.error == "no") {
toastr.options = {<|fim▁hole|>
} else {
toastr.options = {
"closeButton": true,
"showDuration": 3
};
toastr['error']('MyCRM', "Error while updating case");
}
},
error: function (data) {
toastr.options = {
"closeButton": true,
"showDuration": 3
};
toastr['error']('MyCRM', "Error while updating case : " + data);
}
});
});<|fim▁end|> | "closeButton": true,
"showDuration": 3
};
toastr['success']('MyCRM', "Case updated"); |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""litchi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views<|fim▁hole|> 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^captcha/', include('captcha.urls')),
url(r'^session/', include('apps.session.urls', namespace='session')),
]<|fim▁end|> | 1. Add an import: from my_app import views |
<|file_name|>orca_projection.py<|end_file_name|><|fim▁begin|>"""
Tri-Polar Grid Projected Plotting
=================================
This example demonstrates cell plots of data on the semi-structured ORCA2 model
grid.
First, the data is projected into the PlateCarree coordinate reference system.
Second four pcolormesh plots are created from this projected dataset,
using different projections for the output image.
"""
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import iris
import iris.analysis.cartography
import iris.plot as iplt
import iris.quickplot as qplt
def main():
# Load data
filepath = iris.sample_data_path("orca2_votemper.nc")
cube = iris.load_cube(filepath)
<|fim▁hole|> projections["NorthPolarStereo"] = ccrs.NorthPolarStereo()
projections["Orthographic"] = ccrs.Orthographic(
central_longitude=-90, central_latitude=45
)
pcarree = projections["PlateCarree"]
# Transform cube to target projection
new_cube, extent = iris.analysis.cartography.project(
cube, pcarree, nx=400, ny=200
)
# Plot data in each projection
for name in sorted(projections):
fig = plt.figure()
fig.suptitle("ORCA2 Data Projected to {}".format(name))
# Set up axes and title
ax = plt.subplot(projection=projections[name])
# Set limits
ax.set_global()
# plot with Iris quickplot pcolormesh
qplt.pcolormesh(new_cube)
# Draw coastlines
ax.coastlines()
iplt.show()
if __name__ == "__main__":
main()<|fim▁end|> | # Choose plot projections
projections = {}
projections["Mollweide"] = ccrs.Mollweide()
projections["PlateCarree"] = ccrs.PlateCarree() |
<|file_name|>ExclusiveOrOperator.java<|end_file_name|><|fim▁begin|>package bits;
/**
* Created by krzysztofkaczor on 3/10/15.
*/
public class ExclusiveOrOperator implements BinaryOperator
{
@Override
public BitArray combine(BitArray operand1, BitArray operand2) {
if(operand1.size() != operand2.size()) {
throw new IllegalArgumentException("ExclusiveOrOperator operands must have same size");
}
int size = operand1.size();
BitArray result = new BitArray(size);
for (int i = 0;i < size;i++) {<|fim▁hole|> }
return result;
}
}<|fim▁end|> | boolean a = operand1.get(i);
boolean b = operand2.get(i);
result.set(i, a != b ); |
<|file_name|>gmultiprocessing.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Add compatibility for gevent and multiprocessing.
Source based on project GIPC 0.6.0
https://bitbucket.org/jgehrcke/gipc/
"""
import os, sys, signal, multiprocessing, multiprocessing.process, multiprocessing.reduction
gevent=None
geventEvent=None
def _tryGevent():
global gevent, geventEvent
if gevent and geventEvent: return False
try:
import gevent
from gevent import event as geventEvent
return True
except ImportError:
raise ValueError('gevent not found')
def Process(target, args=(), kwargs={}, name=None): # daemon=None
# check if gevent availible
try: _tryGevent()
except ValueError:
print 'Gevent not founded, switching to native'
return multiprocessing.Process(target=target, args=args, kwargs=kwargs, name=name)
if int(gevent.__version__[0])<1:
raise NotImplementedError('Gmultiprocessing supports only gevent>=1.0, your version %s'%gevent.__version__)
if not isinstance(args, tuple):
raise TypeError('<args> must be a tuple')
if not isinstance(kwargs, dict):
raise TypeError('<kwargs> must be a dict')
p = _GProcess(
target=_child,
name=name,
kwargs={"target": target, "args": args, "kwargs": kwargs}
)
# if daemon is not None: p.daemon = daemon
return p
def _child(target, args, kwargs):
"""Wrapper function that runs in child process. Resets gevent/libev state
and executes user-given function.
"""
_tryGevent()
_reset_signal_handlers()
gevent.reinit()
hub = gevent.get_hub()
del hub.threadpool
hub._threadpool = None
hub.destroy(destroy_loop=True)
h = gevent.get_hub(default=True)
assert h.loop.default, 'Could not create libev default event loop.'
target(*args, **kwargs)
class _GProcess(multiprocessing.Process):
"""
Compatible with the ``multiprocessing.Process`` API.
"""
try:
from multiprocessing.forking import Popen as mp_Popen
except ImportError:
# multiprocessing's internal structure has changed from 3.3 to 3.4.
from multiprocessing.popen_fork import Popen as mp_Popen
# Monkey-patch and forget about the name.
mp_Popen.poll = lambda *a, **b: None
del mp_Popen
def start(self):
_tryGevent()
# Start grabbing SIGCHLD within libev event loop.
gevent.get_hub().loop.install_sigchld()
# Run new process (based on `fork()` on POSIX-compliant systems).
super(_GProcess, self).start()
# The occurrence of SIGCHLD is recorded asynchronously in libev.
# This guarantees proper behavior even if the child watcher is
# started after the child exits. Start child watcher now.
self._sigchld_watcher = gevent.get_hub().loop.child(self.pid)
self._returnevent = gevent.event.Event()
self._sigchld_watcher.start(self._on_sigchld, self._sigchld_watcher)
def _on_sigchld(self, watcher):
"""Callback of libev child watcher. Called when libev event loop
catches corresponding SIGCHLD signal.
"""
watcher.stop()
# Status evaluation copied from `multiprocessing.forking` in Py2.7.
if os.WIFSIGNALED(watcher.rstatus):
self._popen.returncode = -os.WTERMSIG(watcher.rstatus)
else:
assert os.WIFEXITED(watcher.rstatus)
self._popen.returncode = os.WEXITSTATUS(watcher.rstatus)
self._returnevent.set()
def is_alive(self):
assert self._popen is not None, "Process not yet started."
if self._popen.returncode is None:
return True
return False
@property
def exitcode(self):
if self._popen is None:
return None
return self._popen.returncode
def __repr__(self):
exitcodedict = multiprocessing.process._exitcode_to_name<|fim▁hole|> if self._parent_pid != os.getpid(): status = 'unknown'
elif self.exitcode is not None: status = self.exitcode
if status == 0: status = 'stopped'
elif isinstance(status, int):
status = 'stopped[%s]' % exitcodedict.get(status, status)
return '<%s(%s, %s%s)>' % (type(self).__name__, self._name, status, self.daemon and ' daemon' or '')
def join(self, timeout=None):
"""
Wait cooperatively until child process terminates or timeout occurs.
:arg timeout: ``None`` (default) or a a time in seconds. The method
simply returns upon timeout expiration. The state of the process
has to be identified via ``is_alive()``.
"""
assert self._parent_pid == os.getpid(), "I'm not parent of this child."
assert self._popen is not None, 'Can only join a started process.'
# Resemble multiprocessing's join() method while replacing
# `self._popen.wait(timeout)` with
# `self._returnevent.wait(timeout)`
self._returnevent.wait(timeout)
if self._popen.returncode is not None:
if hasattr(multiprocessing.process, '_children'): # This is for Python 3.4.
kids = multiprocessing.process._children
else: # For Python 2.6, 2.7, 3.3.
kids = multiprocessing.process._current_process._children
kids.discard(self)
# Inspect signal module for signals whose action is to be restored to the default action right after fork.
_signals_to_reset = [getattr(signal, s) for s in
set([s for s in dir(signal) if s.startswith("SIG")]) -
# Exclude constants that are not signals such as SIG_DFL and SIG_BLOCK.
set([s for s in dir(signal) if s.startswith("SIG_")]) -
# Leave handlers for SIG(STOP/KILL/PIPE) untouched.
set(['SIGSTOP', 'SIGKILL', 'SIGPIPE'])]
def _reset_signal_handlers():
for s in _signals_to_reset:
if s < signal.NSIG:
signal.signal(s, signal.SIG_DFL)
PY3 = sys.version_info[0] == 3
if PY3:
def _reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def __exec(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
__exec("""def _reraise(tp, value, tb=None): raise tp, value, tb""")<|fim▁end|> | status = 'started' |
<|file_name|>CircularDependenciesDiscovery.java<|end_file_name|><|fim▁begin|>package org.adligo.tests4j_4jacoco.plugin.discovery;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.adligo.tests4j.models.shared.association.ClassAssociationsLocal;
import org.adligo.tests4j.models.shared.association.ClassAssociationsLocalMutant;
import org.adligo.tests4j.models.shared.association.I_ClassAssociationsCache;
import org.adligo.tests4j.models.shared.association.I_ClassAssociationsLocal;
import org.adligo.tests4j.models.shared.association.I_ClassParentsLocal;
import org.adligo.tests4j.run.helpers.I_ClassFilter;
import org.adligo.tests4j.shared.asserts.reference.ClassAliasLocal;
import org.adligo.tests4j.shared.asserts.reference.I_ClassAliasLocal;
import org.adligo.tests4j.shared.output.I_Tests4J_Log;
<|fim▁hole|> * the class loader, discovers
* references.
*
* references and dependencies are very similar concepts in this package as follows;
* references illuminate that one class references another.
* dependencies illuminate that one class depends on another,
* and indicate
* The I_CachedClassBytesClassLoader is shared memory between threads.
* Also this model keeps a cache of the references for classes
* it reads, so it doesn't need to re-ASM byte code read them.
*
*
* @author scott
*
*/
public class CircularDependenciesDiscovery implements I_ClassDependenciesDiscovery {
private I_Tests4J_Log log;
private I_ClassFilter classFilter;
private I_ClassAssociationsCache cache;
/**
* this contains the initial references
*/
private Map<I_ClassAliasLocal, I_ClassAssociationsLocal> refMap = new HashMap<I_ClassAliasLocal,I_ClassAssociationsLocal>();
private Set<I_ClassParentsLocal> initalRefsToIdentify = new HashSet<I_ClassParentsLocal>();
private Set<I_ClassParentsLocal> fullRefsFound = new HashSet<I_ClassParentsLocal>();
private I_ClassDependenciesDiscovery fullDependenciesDiscovery;
public CircularDependenciesDiscovery() {}
/**
* @diagram_sync with DiscoveryOverview.seq on 8/17/2014
*/
public I_ClassAssociationsLocal findOrLoad(Class<?> c) throws IOException, ClassNotFoundException {
if (log.isLogEnabled(CircularDependenciesDiscovery.class)) {
log.log(".discoverAndLoad " + c.getName());
}
String className = c.getName();
refMap.clear();
initalRefsToIdentify.clear();
fullRefsFound.clear();
I_ClassAssociationsLocal crefs = cache.getDependencies(className);
if (crefs != null) {
return crefs;
}
if (classFilter.isFiltered(c)) {
I_ClassAssociationsLocal toRet = new ClassAssociationsLocal(fullDependenciesDiscovery.findOrLoad(c));
cache.putDependenciesIfAbsent(toRet);
return toRet;
}
I_ClassAssociationsLocal preCircleRefs = fullDependenciesDiscovery.findOrLoad(c);
refMap.put(new ClassAliasLocal(preCircleRefs), preCircleRefs);
Set<I_ClassParentsLocal> refs = preCircleRefs.getDependenciesLocal();
for (I_ClassParentsLocal ref: refs) {
I_ClassAssociationsLocal preCircleDelegate = fullDependenciesDiscovery.findOrLoad(ref.getTarget());
refMap.put(new ClassAliasLocal(preCircleDelegate), preCircleDelegate);
}
ClassAssociationsLocal toRet = calcCircles(preCircleRefs);
cache.putDependenciesIfAbsent(toRet);;
return toRet;
}
/**
* @diagram_sync with DiscoveryOverview.seq on 8/17/2014
* @param preCircleRefs
* @return
*/
private ClassAssociationsLocal calcCircles(I_ClassAssociationsLocal preCircleRefs) {
ClassAssociationsLocalMutant crlm = new ClassAssociationsLocalMutant(preCircleRefs);
Collection<I_ClassAssociationsLocal> entries = refMap.values();
Set<I_ClassAssociationsLocal> copy =
new HashSet<I_ClassAssociationsLocal>(entries);
copy.remove(new ClassAliasLocal(crlm.getTarget()));
for (I_ClassAssociationsLocal cr: copy) {
Set<I_ClassParentsLocal> refs = cr.getDependenciesLocal();
if (refs != null) {
if (refs.contains(crlm)) {
crlm.addCircularReferences(cr);
}
}
}
return new ClassAssociationsLocal(crlm);
}
public I_Tests4J_Log getLog() {
return log;
}
public I_ClassFilter getClassFilter() {
return classFilter;
}
public I_ClassAssociationsCache getCache() {
return cache;
}
public I_ClassDependenciesDiscovery getFullDependenciesDiscovery() {
return fullDependenciesDiscovery;
}
public void setLog(I_Tests4J_Log log) {
this.log = log;
}
public void setClassFilter(I_ClassFilter classFilter) {
this.classFilter = classFilter;
}
public void setCache(I_ClassAssociationsCache cache) {
this.cache = cache;
}
public void setFullDependenciesDiscovery(
I_ClassDependenciesDiscovery classDependenciesDiscovery) {
this.fullDependenciesDiscovery = classDependenciesDiscovery;
}
}<|fim▁end|> | /**
* a model like (non thread safe) class that loads classes into |
<|file_name|>change-header-image.client.controller.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('articles').controller('ChangeHeaderImageController', ['$scope', '$timeout', '$stateParams', '$window', 'Authentication', 'FileUploader', 'Articles',
function ($scope, $timeout, $stateParams, $window, Authentication, FileUploader, Articles) {
$scope.user = Authentication.user;
$scope.article = Articles.get({
articleId: $stateParams.articleId
});
$scope.imageURL = $scope.article.headerMedia || null;
// Create file uploader instance
$scope.uploader = new FileUploader({
url: 'api/articles/' + $stateParams.articleId + '/headerimage',
alias: 'newHeaderImage'
});
// Set file uploader image filter
$scope.uploader.filters.push({
name: 'imageFilter',
fn: function (item, options) {
var type = '|' + item.type.slice(item.type.lastIndexOf('/') + 1) + '|';
return '|jpg|png|jpeg|bmp|gif|'.indexOf(type) !== -1;
}
});
// Called after the user selected a new picture file
$scope.uploader.onAfterAddingFile = function (fileItem) {
if ($window.FileReader) {
var fileReader = new FileReader();
fileReader.readAsDataURL(fileItem._file);
fileReader.onload = function (fileReaderEvent) {
$timeout(function () {
$scope.imageURL = fileReaderEvent.target.result;
}, 0);
};
}
};
// Called after the article has been assigned a new header image
$scope.uploader.onSuccessItem = function (fileItem, response, status, headers) {
// Show success message
$scope.success = true;
// Populate user object
$scope.user = Authentication.user = response;
// Clear upload buttons
$scope.cancelUpload();
};
// Called after the user has failed to upload a new picture
$scope.uploader.onErrorItem = function (fileItem, response, status, headers) {
// Clear upload buttons
$scope.cancelUpload();
// Show error message
$scope.error = response.message;
};
// Change article header image
$scope.uploadHeaderImage = function () {
console.log($scope);<|fim▁hole|> // Clear messages
$scope.success = $scope.error = null;
// Start upload
$scope.uploader.uploadAll();
};
// Cancel the upload process
$scope.cancelUpload = function () {
$scope.uploader.clearQueue();
//$scope.imageURL = $scope.article.profileImageURL;
};
}
]);<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//-
// Copyright (c) 2016, 2017, Jason Lingle
//
// This file is part of Ensync.
//
// Ensync is free software: you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the Free Software
// Foundation, either version 3 of the License, or (at your option) any later<|fim▁hole|>// version.
//
// Ensync is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
// FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
// details.
//
// You should have received a copy of the GNU General Public License along with
// Ensync. If not, see <http://www.gnu.org/licenses/>.
#[macro_use]
mod context;
pub use self::context::{Context, UnqueuedTasks};
pub mod compute;
pub mod mutate;
pub mod tree_walk;<|fim▁end|> | |
<|file_name|>polyfills.js<|end_file_name|><|fim▁begin|>// fetch() polyfill for making API calls.
import 'whatwg-fetch';
// Object.assign() is commonly used with React.<|fim▁hole|>Object.assign = objectAssign;<|fim▁end|> | // It will use the native implementation if it's present and isn't buggy.
import objectAssign from 'object-assign'; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup, Extension
from distutils.sysconfig import get_python_lib
import os, os.path
import sys
if 'sdist' in sys.argv and sys.platform != "win32":
assert os.system("git show-ref -s HEAD > .gitrev") == 0
if sys.platform == "darwin":
# Don't create resource files on OS X tar.
os.environ['COPY_EXTENDED_ATTRIBUTES_DISABLE'] = 'true'
os.environ['COPYFILE_DISABLE'] = 'true'
setup_args = {}
def add_command_class(name, cls):
cmdclasses = setup_args.get('cmdclass', {})
cmdclasses[name] = cls
setup_args['cmdclass'] = cmdclasses
if sys.version_info[0] >= 3:
import lib2to3.refactor
from distutils.command.build_py \
import build_py_2to3 as build_py
# need to convert sources to Py3 on installation
fixers = [ fix for fix in lib2to3.refactor.get_fixers_from_package("lib2to3.fixes")
if fix.split('fix_')[-1] not in ('next',)
]
build_py.fixer_names = fixers
add_command_class("build_py", build_py)
pxd_include_dirs = [
directory for directory, dirs, files in os.walk('Cython/Includes')
if '__init__.pyx' in files or '__init__.pxd' in files
or directory == 'Cython/Includes' or directory == 'Cython/Includes/Deprecated']
pxd_include_patterns = [
p+'/*.pxd' for p in pxd_include_dirs ] + [
p+'/*.pyx' for p in pxd_include_dirs ]
if sys.version_info < (2,4):
install_base_dir = get_python_lib(prefix='')
import glob
patterns = pxd_include_patterns + [
'Cython/Plex/*.pxd',
'Cython/Compiler/*.pxd',
'Cython/Runtime/*.pyx'
]
setup_args['data_files'] = [
(os.path.dirname(os.path.join(install_base_dir, pattern)),
[ f for f in glob.glob(pattern) ])
for pattern in patterns
]
else:
setup_args['package_data'] = {
'Cython.Plex' : ['*.pxd'],
'Cython.Compiler' : ['*.pxd'],
'Cython.Runtime' : ['*.pyx', '*.pxd'],
'Cython' : [ p[7:] for p in pxd_include_patterns ],
}
# This dict is used for passing extra arguments that are setuptools
# specific to setup
setuptools_extra_args = {}
# tells whether to include cygdb (the script and the Cython.Debugger package
include_debugger = sys.version_info[:2] > (2, 5)
if 'setuptools' in sys.modules:
setuptools_extra_args['zip_safe'] = False
setuptools_extra_args['entry_points'] = {
'console_scripts': [
'cython = Cython.Compiler.Main:setuptools_main',
]
}
scripts = []
else:
if os.name == "posix":
scripts = ["bin/cython"]
if include_debugger:
scripts.append('bin/cygdb')
else:
scripts = ["cython.py"]
if include_debugger:
scripts.append('cygdb.py')
def compile_cython_modules(profile=False, compile_more=False, cython_with_refnanny=False):
source_root = os.path.abspath(os.path.dirname(__file__))
compiled_modules = ["Cython.Plex.Scanners",
"Cython.Plex.Actions",
"Cython.Compiler.Lexicon",
"Cython.Compiler.Scanning",
"Cython.Compiler.Parsing",
"Cython.Compiler.Visitor",
"Cython.Compiler.Code",
"Cython.Runtime.refnanny",]
if compile_more:
compiled_modules.extend([
"Cython.Compiler.ParseTreeTransforms",
"Cython.Compiler.Nodes",
"Cython.Compiler.ExprNodes",
"Cython.Compiler.ModuleNode",
"Cython.Compiler.Optimize",
])
defines = []
if cython_with_refnanny:
defines.append(('CYTHON_REFNANNY', '1'))
extensions = []
if sys.version_info[0] >= 3:
from Cython.Distutils import build_ext as build_ext_orig
for module in compiled_modules:
source_file = os.path.join(source_root, *module.split('.'))
if os.path.exists(source_file + ".py"):
pyx_source_file = source_file + ".py"
else:
pyx_source_file = source_file + ".pyx"
dep_files = []
if os.path.exists(source_file + '.pxd'):
dep_files.append(source_file + '.pxd')
if '.refnanny' in module:
defines_for_module = []
else:
defines_for_module = defines
extensions.append(
Extension(module, sources = [pyx_source_file],
define_macros = defines_for_module,
depends = dep_files)
)
class build_ext(build_ext_orig):
# we must keep the original modules alive to make sure
# their code keeps working when we remove them from
# sys.modules
dead_modules = []
def build_extensions(self):
# add path where 2to3 installed the transformed sources
# and make sure Python (re-)imports them from there
already_imported = [ module for module in sys.modules
if module == 'Cython' or module.startswith('Cython.') ]
keep_alive = self.dead_modules.append
for module in already_imported:
keep_alive(sys.modules[module])
del sys.modules[module]
sys.path.insert(0, os.path.join(source_root, self.build_lib))
if profile:
from Cython.Compiler.Options import directive_defaults
directive_defaults['profile'] = True
print("Enabled profiling for the Cython binary modules")
build_ext_orig.build_extensions(self)
setup_args['ext_modules'] = extensions
add_command_class("build_ext", build_ext)
else: # Python 2.x
from distutils.command.build_ext import build_ext as build_ext_orig
try:<|fim▁hole|> class build_ext(build_ext_orig):
def build_extension(self, ext, *args, **kargs):
try:
build_ext_orig.build_extension(self, ext, *args, **kargs)
except StandardError:
print("Compilation of '%s' failed" % ext.sources[0])
from Cython.Compiler.Main import compile
from Cython import Utils
if profile:
from Cython.Compiler.Options import directive_defaults
directive_defaults['profile'] = True
print("Enabled profiling for the Cython binary modules")
source_root = os.path.dirname(__file__)
for module in compiled_modules:
source_file = os.path.join(source_root, *module.split('.'))
if os.path.exists(source_file + ".py"):
pyx_source_file = source_file + ".py"
else:
pyx_source_file = source_file + ".pyx"
c_source_file = source_file + ".c"
source_is_newer = False
if not os.path.exists(c_source_file):
source_is_newer = True
else:
c_last_modified = Utils.modification_time(c_source_file)
if Utils.file_newer_than(pyx_source_file, c_last_modified):
source_is_newer = True
else:
pxd_source_file = source_file + ".pxd"
if os.path.exists(pxd_source_file) and Utils.file_newer_than(pxd_source_file, c_last_modified):
source_is_newer = True
if source_is_newer:
print("Compiling module %s ..." % module)
result = compile(pyx_source_file)
c_source_file = result.c_file
if c_source_file:
# Py2 distutils can't handle unicode file paths
if isinstance(c_source_file, unicode):
filename_encoding = sys.getfilesystemencoding()
if filename_encoding is None:
filename_encoding = sys.getdefaultencoding()
c_source_file = c_source_file.encode(filename_encoding)
if '.refnanny' in module:
defines_for_module = []
else:
defines_for_module = defines
extensions.append(
Extension(module, sources = [c_source_file],
define_macros = defines_for_module)
)
else:
print("Compilation failed")
if extensions:
setup_args['ext_modules'] = extensions
add_command_class("build_ext", build_ext)
except Exception:
print('''
ERROR: %s
Extension module compilation failed, looks like Cython cannot run
properly on this system. To work around this, pass the option
"--no-cython-compile". This will install a pure Python version of
Cython without compiling its own sources.
''' % sys.exc_info()[1])
raise
cython_profile = '--cython-profile' in sys.argv
if cython_profile:
sys.argv.remove('--cython-profile')
try:
sys.argv.remove("--cython-compile-all")
cython_compile_more = True
except ValueError:
cython_compile_more = False
try:
sys.argv.remove("--cython-with-refnanny")
cython_with_refnanny = True
except ValueError:
cython_with_refnanny = False
try:
sys.argv.remove("--no-cython-compile")
except ValueError:
compile_cython_modules(cython_profile, cython_compile_more, cython_with_refnanny)
setup_args.update(setuptools_extra_args)
from Cython import __version__ as version
packages = [
'Cython',
'Cython.Build',
'Cython.Compiler',
'Cython.Runtime',
'Cython.Distutils',
'Cython.Plex',
'Cython.Tests',
'Cython.Build.Tests',
'Cython.Compiler.Tests',
]
if include_debugger:
packages.append('Cython.Debugger')
packages.append('Cython.Debugger.Tests')
# it's enough to do this for Py2.5+:
setup_args['package_data']['Cython.Debugger.Tests'] = ['codefile', 'cfuncs.c']
setup(
name = 'Cython',
version = version,
url = 'http://www.cython.org',
author = 'Greg Ewing, Robert Bradshaw, Stefan Behnel, Dag Seljebotn, et al.',
author_email = '[email protected]',
description = "The Cython compiler for writing C extensions for the Python language.",
long_description = """\
The Cython language makes writing C extensions for the Python language as
easy as Python itself. Cython is a source code translator based on the
well-known Pyrex_, but supports more cutting edge functionality and
optimizations.
The Cython language is very close to the Python language (and most Python
code is also valid Cython code), but Cython additionally supports calling C
functions and declaring C types on variables and class attributes. This
allows the compiler to generate very efficient C code from Cython code.
This makes Cython the ideal language for writing glue code for external C
libraries, and for fast C modules that speed up the execution of Python
code.
.. _Pyrex: http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/
""",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Programming Language :: C",
"Programming Language :: Cython",
"Topic :: Software Development :: Code Generators",
"Topic :: Software Development :: Compilers",
"Topic :: Software Development :: Libraries :: Python Modules"
],
scripts = scripts,
packages=packages,
# pyximport
py_modules = ["pyximport/__init__",
"pyximport/pyximport",
"pyximport/pyxbuild",
"cython"],
**setup_args
)<|fim▁end|> | |
<|file_name|>vmscaleset.go<|end_file_name|><|fim▁begin|>/*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package azuretasks
import (
"context"
"encoding/base64"
"fmt"
"strings"
"github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2020-06-01/compute"
"github.com/Azure/go-autorest/autorest/to"
"k8s.io/klog/v2"
"k8s.io/kops/upup/pkg/fi"
"k8s.io/kops/upup/pkg/fi/cloudup/azure"
)
// SubnetID contains the resource ID/names required to construct a subnet ID.
type SubnetID struct {
SubscriptionID string
ResourceGroupName string
VirtualNetworkName string
SubnetName string
}
// String returns the subnet ID in the path format.
func (s *SubnetID) String() string {
	const format = "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualNetworks/%s/subnets/%s"
	return fmt.Sprintf(format, s.SubscriptionID, s.ResourceGroupName, s.VirtualNetworkName, s.SubnetName)
}
// ParseSubnetID parses a given subnet ID string and returns a SubnetID.
func ParseSubnetID(s string) (*SubnetID, error) {
	parts := strings.Split(s, "/")
	if len(parts) != 11 {
		return nil, fmt.Errorf("malformed format of subnet ID: %s, %d", s, len(parts))
	}
	id := &SubnetID{
		SubscriptionID:     parts[2],
		ResourceGroupName:  parts[4],
		VirtualNetworkName: parts[8],
		SubnetName:         parts[10],
	}
	return id, nil
}
// loadBalancerID contains the resource ID/names required to construct a loadbalancer ID.
type loadBalancerID struct {
SubscriptionID string
ResourceGroupName string
LoadBalancerName string
}
// String returns the loadbalancer ID in the path format.
func (lb *loadBalancerID) String() string {
	const format = "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/loadbalancers/%s/backendAddressPools/LoadBalancerBackEnd"
	return fmt.Sprintf(format, lb.SubscriptionID, lb.ResourceGroupName, lb.LoadBalancerName)
}
// parseLoadBalancerID parses a given loadbalancer ID string and returns a loadBalancerID.
func parseLoadBalancerID(lb string) (*loadBalancerID, error) {
	parts := strings.Split(lb, "/")
	if len(parts) != 11 {
		return nil, fmt.Errorf("malformed format of loadbalancer ID: %s, %d", lb, len(parts))
	}
	id := &loadBalancerID{
		SubscriptionID:    parts[2],
		ResourceGroupName: parts[4],
		LoadBalancerName:  parts[8],
	}
	return id, nil
}
// VMScaleSet is an Azure VM Scale Set.
// +kops:fitask
type VMScaleSet struct {
	Name      *string
	Lifecycle fi.Lifecycle

	// ResourceGroup is the resource group the scale set lives in.
	ResourceGroup *ResourceGroup
	// VirtualNetwork and Subnet describe where the VMs' NICs are attached.
	VirtualNetwork *VirtualNetwork
	Subnet         *Subnet
	// StorageProfile describes the image and disks of the VMs.
	StorageProfile *VMScaleSetStorageProfile
	// RequirePublicIP is set to true when VMs require public IPs.
	RequirePublicIP *bool
	// LoadBalancer is the Load Balancer object the VMs will use.
	LoadBalancer *LoadBalancer
	// SKUName specifies the SKU of the VM Scale Set.
	SKUName *string
	// Capacity specifies the number of virtual machines in the VM Scale Set.
	Capacity *int64
	// ComputerNamePrefix is the prefix of each VM name of the form <prefix><base-36-instance-id>.
	// See https://docs.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-instance-ids.
	ComputerNamePrefix *string
	// AdminUser specifies the name of the administrative account.
	AdminUser    *string
	SSHPublicKey *string
	// CustomData is the user data configuration
	CustomData fi.Resource
	Tags       map[string]*string
	Zones      []string
	// PrincipalID is populated from the scale set's system-assigned managed
	// identity after creation (see RenderAzure/Find).
	PrincipalID *string
}
// VMScaleSetStorageProfile wraps *compute.VirtualMachineScaleSetStorageProfile
// and implements fi.HasDependencies.
//
// If we don't implement the interface and directly use
// compute.VirtualMachineScaleSetStorageProfile in VMScaleSet, the
// topological sort on VMScaleSet will fail as StorageProfile doesn't
// implement a proper interface.
type VMScaleSetStorageProfile struct {
*compute.VirtualMachineScaleSetStorageProfile
}
var _ fi.HasDependencies = &VMScaleSetStorageProfile{}
// GetDependencies returns a slice of tasks on which the tasks depends on.
// The storage profile introduces no task dependencies of its own; this
// method only exists so the topological sort accepts the wrapper type.
func (p *VMScaleSetStorageProfile) GetDependencies(tasks map[string]fi.Task) []fi.Task {
	return nil
}
var (
_ fi.Task = &VMScaleSet{}
_ fi.CompareWithID = &VMScaleSet{}
)
// CompareWithID returns the Name of the VM Scale Set
// (implements fi.CompareWithID; the name acts as the resource identifier).
func (s *VMScaleSet) CompareWithID() *string {
	return s.Name
}
// Find discovers the VMScaleSet in the cloud provider.
// It returns nil (with no error) when no scale set with the task's name
// exists in the resource group.
func (s *VMScaleSet) Find(c *fi.Context) (*VMScaleSet, error) {
	cloud := c.Cloud.(azure.AzureCloud)
	l, err := cloud.VMScaleSet().List(context.TODO(), *s.ResourceGroup.Name)
	if err != nil {
		return nil, err
	}
	var found *compute.VirtualMachineScaleSet
	for _, v := range l {
		if *v.Name == *s.Name {
			found = &v
			break
		}
	}
	if found == nil {
		return nil, nil
	}

	profile := found.VirtualMachineProfile

	// Exactly one network config holding one IP config is expected, as that
	// is the shape RenderAzure creates.
	nwConfigs := *profile.NetworkProfile.NetworkInterfaceConfigurations
	if len(nwConfigs) != 1 {
		return nil, fmt.Errorf("unexpected number of network configs found for VM ScaleSet %s: %d", *s.Name, len(nwConfigs))
	}
	nwConfig := nwConfigs[0]
	ipConfigs := *nwConfig.VirtualMachineScaleSetNetworkConfigurationProperties.IPConfigurations
	if len(ipConfigs) != 1 {
		return nil, fmt.Errorf("unexpected number of IP configs found for VM ScaleSet %s: %d", *s.Name, len(ipConfigs))
	}
	ipConfig := ipConfigs[0]

	subnetID, err := ParseSubnetID(*ipConfig.Subnet.ID)
	if err != nil {
		return nil, fmt.Errorf("failed to parse subnet ID %s", *ipConfig.Subnet.ID)
	}

	// Pick out the API load balancer (if any) among the backend pools.
	var loadBalancerID *loadBalancerID
	if ipConfig.LoadBalancerBackendAddressPools != nil {
		for _, i := range *ipConfig.LoadBalancerBackendAddressPools {
			if !strings.Contains(*i.ID, "api") {
				continue
			}
			loadBalancerID, err = parseLoadBalancerID(*i.ID)
			if err != nil {
				// Report the ID that actually failed to parse (the pool ID,
				// not the subnet ID).
				return nil, fmt.Errorf("failed to parse loadbalancer ID %s", *i.ID)
			}
		}
	}

	osProfile := profile.OsProfile
	sshKeys := *osProfile.LinuxConfiguration.SSH.PublicKeys
	if len(sshKeys) != 1 {
		return nil, fmt.Errorf("unexpected number of SSH keys found for VM ScaleSet %s: %d", *s.Name, len(sshKeys))
	}

	// TODO(kenji): Do not check custom data as Azure doesn't
	// populate (https://github.com/Azure/azure-cli/issues/5866).
	// Find a way to work around this.
	vmss := &VMScaleSet{
		Name:      s.Name,
		Lifecycle: s.Lifecycle,
		ResourceGroup: &ResourceGroup{
			Name: s.ResourceGroup.Name,
		},
		VirtualNetwork: &VirtualNetwork{
			Name: to.StringPtr(subnetID.VirtualNetworkName),
		},
		Subnet: &Subnet{
			Name: to.StringPtr(subnetID.SubnetName),
		},
		StorageProfile: &VMScaleSetStorageProfile{
			VirtualMachineScaleSetStorageProfile: profile.StorageProfile,
		},
		RequirePublicIP:    to.BoolPtr(ipConfig.PublicIPAddressConfiguration != nil),
		SKUName:            found.Sku.Name,
		Capacity:           found.Sku.Capacity,
		ComputerNamePrefix: osProfile.ComputerNamePrefix,
		AdminUser:          osProfile.AdminUsername,
		SSHPublicKey:       sshKeys[0].KeyData,
		Tags:               found.Tags,
		PrincipalID:        found.Identity.PrincipalID,
	}
	if loadBalancerID != nil {
		vmss.LoadBalancer = &LoadBalancer{
			Name: to.StringPtr(loadBalancerID.LoadBalancerName),
		}
	}
	if found.Zones != nil {
		vmss.Zones = *found.Zones
	}
	return vmss, nil
}
// Run implements fi.Task.Run.
// It stamps the cluster-wide tags onto this task's tag map before
// delegating to the default delta-run machinery.
func (s *VMScaleSet) Run(c *fi.Context) error {
	c.Cloud.(azure.AzureCloud).AddClusterTags(s.Tags)
	return fi.DefaultDeltaRunMethod(s, c)
}
// CheckChanges returns an error if a change is not allowed.
func (s *VMScaleSet) CheckChanges(a, e, changes *VMScaleSet) error {
	creating := a == nil
	if creating {
		// A new resource must carry the required fields.
		if e.Name == nil {
			return fi.RequiredField("Name")
		}
		return nil
	}
	// An existing resource must not change its immutable fields.
	if changes.Name != nil {
		return fi.CannotChangeField("Name")
	}
	return nil
}
// RenderAzure creates or updates a VM Scale Set.
func (s *VMScaleSet) RenderAzure(t *azure.AzureAPITarget, a, e, changes *VMScaleSet) error {
if a == nil {
klog.Infof("Creating a new VM Scale Set with name: %s", fi.StringValue(e.Name))
} else {
klog.Infof("Updating a VM Scale Set with name: %s", fi.StringValue(e.Name))
}
name := *e.Name
var customData *string
if e.CustomData != nil {
d, err := fi.ResourceAsBytes(e.CustomData)
if err != nil {
return fmt.Errorf("error rendering CustomData: %s", err)
}
customData = to.StringPtr(base64.StdEncoding.EncodeToString(d))
}
osProfile := &compute.VirtualMachineScaleSetOSProfile{
ComputerNamePrefix: e.ComputerNamePrefix,
AdminUsername: e.AdminUser,
CustomData: customData,
LinuxConfiguration: &compute.LinuxConfiguration{
SSH: &compute.SSHConfiguration{
PublicKeys: &[]compute.SSHPublicKey{
{
Path: to.StringPtr(fmt.Sprintf("/home/%s/.ssh/authorized_keys", *e.AdminUser)),
KeyData: to.StringPtr(*e.SSHPublicKey),
},
},
},
DisablePasswordAuthentication: to.BoolPtr(true),
},<|fim▁hole|>
subnetID := SubnetID{
SubscriptionID: t.Cloud.SubscriptionID(),
ResourceGroupName: *e.ResourceGroup.Name,
VirtualNetworkName: *e.VirtualNetwork.Name,
SubnetName: *e.Subnet.Name,
}
ipConfigProperties := &compute.VirtualMachineScaleSetIPConfigurationProperties{
Subnet: &compute.APIEntityReference{
ID: to.StringPtr(subnetID.String()),
},
Primary: to.BoolPtr(true),
PrivateIPAddressVersion: compute.IPv4,
}
if *e.RequirePublicIP {
ipConfigProperties.PublicIPAddressConfiguration = &compute.VirtualMachineScaleSetPublicIPAddressConfiguration{
Name: to.StringPtr(name + "-publicipconfig"),
VirtualMachineScaleSetPublicIPAddressConfigurationProperties: &compute.VirtualMachineScaleSetPublicIPAddressConfigurationProperties{
PublicIPAddressVersion: compute.IPv4,
},
}
}
if e.LoadBalancer != nil {
loadBalancerID := loadBalancerID{
SubscriptionID: t.Cloud.SubscriptionID(),
ResourceGroupName: *e.ResourceGroup.Name,
LoadBalancerName: *e.LoadBalancer.Name,
}
ipConfigProperties.LoadBalancerBackendAddressPools = &[]compute.SubResource{
{
ID: to.StringPtr(loadBalancerID.String()),
},
}
}
networkConfig := compute.VirtualMachineScaleSetNetworkConfiguration{
Name: to.StringPtr(name + "-netconfig"),
VirtualMachineScaleSetNetworkConfigurationProperties: &compute.VirtualMachineScaleSetNetworkConfigurationProperties{
Primary: to.BoolPtr(true),
EnableIPForwarding: to.BoolPtr(true),
IPConfigurations: &[]compute.VirtualMachineScaleSetIPConfiguration{
{
Name: to.StringPtr(name + "-ipconfig"),
VirtualMachineScaleSetIPConfigurationProperties: ipConfigProperties,
},
},
},
}
vmss := compute.VirtualMachineScaleSet{
Location: to.StringPtr(t.Cloud.Region()),
Sku: &compute.Sku{
Name: e.SKUName,
Capacity: e.Capacity,
},
VirtualMachineScaleSetProperties: &compute.VirtualMachineScaleSetProperties{
UpgradePolicy: &compute.UpgradePolicy{
Mode: compute.UpgradeModeManual,
},
VirtualMachineProfile: &compute.VirtualMachineScaleSetVMProfile{
OsProfile: osProfile,
StorageProfile: e.StorageProfile.VirtualMachineScaleSetStorageProfile,
NetworkProfile: &compute.VirtualMachineScaleSetNetworkProfile{
NetworkInterfaceConfigurations: &[]compute.VirtualMachineScaleSetNetworkConfiguration{
networkConfig,
},
},
},
},
// Assign a system-assigned managed identity so that
// Azure creates an identity for VMs and provision
// its credentials on the VMs.
Identity: &compute.VirtualMachineScaleSetIdentity{
Type: compute.ResourceIdentityTypeSystemAssigned,
},
Tags: e.Tags,
Zones: &e.Zones,
}
result, err := t.Cloud.VMScaleSet().CreateOrUpdate(
context.TODO(),
*e.ResourceGroup.Name,
name,
vmss)
if err != nil {
return err
}
e.PrincipalID = result.Identity.PrincipalID
return nil
}<|fim▁end|> | } |
<|file_name|>hummus-recipe-tests.ts<|end_file_name|><|fim▁begin|>import Recipe = require('hummus-recipe');
import fs = require('fs');
// $ExpectType Recipe
const newDoc = new Recipe('new', 'test.pdf', {
version: 1.6,
author: 'John Doe',
title: 'Hummus Recipe',
subject: 'A brand new PDF',
});
// $ExpectType Recipe
newDoc
.createPage(595, 842)
.text('Memento Mori', 100, 100)
.endPage()
.endPDF();
<|fim▁hole|> .text('Memento Mori', 100, 100)
.endPage()
.endPDF();
const inBuffer: Buffer = fs.readFileSync('test.pdf');
const bufferDoc = new Recipe(inBuffer);
bufferDoc
.createPage(595, 842)
.text('Memento Mori', 100, 100)
.endPage()
.endPDF(
(outBuffer: Buffer) =>
// $ExpectType Buffer
outBuffer,
);
// $ExpectType Metadata
bufferDoc.metadata;
// $ExpectType PageInfo
bufferDoc.metadata[1];<|fim▁end|> | // $ExpectError
newDoc.createPage('A5') |
<|file_name|>run_presubmit_checks.py<|end_file_name|><|fim▁begin|># Copyright 2019 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS-IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script runs the following tests in all cases.
- Javascript and Python Linting
- Backend Python tests
Only when frontend files are changed will it run Frontend Karma unit tests.
"""
from __future__ import annotations
import argparse
import subprocess
from . import common
from . import run_backend_tests
from . import run_frontend_tests
from .linters import pre_commit_linter
_PARSER = argparse.ArgumentParser(
description="""
Run this script from the oppia root folder prior to opening a PR:
python -m scripts.run_presubmit_checks
Set the origin branch to compare against by adding
--branch=your_branch or -b=your_branch
By default, if the current branch tip exists on remote origin,
the current branch is compared against its tip on GitHub.
Otherwise it's compared against 'develop'.
This script runs the following tests in all cases.
- Javascript and Python Linting
- Backend Python tests
Only when frontend files are changed will it run Frontend Karma unit tests.
If any of these tests result in errors, this script will terminate.
Note: The test scripts are arranged in increasing order of time taken. This
enables a broken build to be detected as quickly as possible.
""")
_PARSER.add_argument(
'--branch', '-b',
help='optional; if specified, the origin branch to compare against.')
def main(args=None):
    """Run the presubmit checks.

    Lints the files changed since the last commit, runs the frontend unit
    tests when frontend files changed, and always runs the backend tests.

    Args:
        args: list(str)|None. Command-line arguments to parse; defaults to
            sys.argv when None.
    """
    parsed_args = _PARSER.parse_args(args=args)

    # Run Javascript and Python linters.
    print('Linting files since the last commit')
    pre_commit_linter.main(args=[])
    print('Linting passed.')
    print('')

    # check_output returns bytes with a trailing newline; decode and strip
    # so the branch name can be embedded in later git invocations.
    current_branch = subprocess.check_output(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode('utf-8').strip()

    # Check whether the current branch exists on remote origin. A shell
    # pipeline ("| wc -l") cannot be expressed as extra argv entries to
    # check_output, so inspect the output in Python instead: git ls-remote
    # prints one line per matching ref, or nothing when there is no match.
    remote_heads = subprocess.check_output(
        ['git', 'ls-remote', '--heads', 'origin', current_branch]
    ).decode('utf-8').strip()

    # Set the origin branch to develop if it's not specified.
    if parsed_args.branch:
        branch = parsed_args.branch
    elif remote_heads:
        branch = 'origin/%s' % current_branch
    else:
        branch = 'develop'

    print('Comparing the current branch with %s' % branch)

    all_changed_files = subprocess.check_output(
        ['git', 'diff', '--cached', '--name-only', '--diff-filter=ACM', branch]
    ).decode('utf-8')

    if common.FRONTEND_DIR in all_changed_files:
        # Run frontend unit tests.
        print('Running frontend unit tests')
        run_frontend_tests.main(args=['--run_minified_tests'])
        print('Frontend tests passed.')
    else:
        # If files in common.FRONTEND_DIR were not changed, skip the tests.
        common.print_each_string_after_two_new_lines([
            'No frontend files were changed.',
            'Skipped frontend tests'])

    # Run backend tests.
    print('Running backend tests')
    run_backend_tests.main(args=[])
    print('Backend tests passed.')
if __name__ == '__main__':
main()<|fim▁end|> | print('Running frontend unit tests') |
<|file_name|>temperaturas.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Crea un programa que analice el fichero y muestre:
- Los años y sus temperaturas (máxima, mínima y media), ordenados por año
- Los años y su tempertura media, ordenados por temperatura en orden descendente
- Crea un fichero html:
Encabezado: Temperaturas de Zaragoza
Fuente: (la url, como un enlace)
Tabla con las temperaturas (media, máxima y mínima)
Los encabezados de la tabla serán claros.
'''
def obtener_listado(f):
listado = []<|fim▁hole|> return listado
def listado_anio(f):
    # Print every record's first four fields (year, max, min, mean --
    # presumably; TODO confirm against the data file), sorted by year,
    # which is the first column of each record.
    # NOTE(review): Python 2 print statements; depends on obtener_listado(f).
    listado = obtener_listado(f)
    listado.sort()
    for x in listado:
        print x[0:4]
def listado_temp(f):
    # Print records sorted by the second column (a temperature) in
    # descending order, skipping rows that contain '-' placeholders in
    # columns 1-3 (missing data).
    # NOTE(review): itemgetter is imported elsewhere in this module --
    # confirm it is in scope when this function runs.
    listado = obtener_listado(f)
    listado.sort(key=itemgetter(1), reverse=True)
    for x in listado:
        if not '-' in x[1:4]:
            print x[0:4]
def crear_html(f):
import sys
from operator import itemgetter
try:
# Instrucción con riesgo
f = open('temperaturas_zaragoza.txt')
except IOError:
print 'Error, el fichero temperaturas_zaragoza no existe'
sys.exit()
opcion = int(raw_input('''¿Qué quieres hacer?:
1 - Listado ordenado por año (1)
2 - Listado ordenado por temperatura media (2)
3 - Crear archivo html (3)
>> '''))
if opcion == 1:
listado_anio(f)
if opcion == 2:
listado_temp(f)
if opcion == 3:
crear_html(f)<|fim▁end|> | for n,linea in enumerate(f):
if n != 0:
registro = linea.split()
listado.append(registro) |
<|file_name|>InsertOrUpdateGeneratorOracleTest.java<|end_file_name|><|fim▁begin|>package liquibase.sqlgenerator.core;
import liquibase.change.ColumnConfig;
import liquibase.database.ObjectQuotingStrategy;
import liquibase.database.core.OracleDatabase;
import liquibase.sql.Sql;
import liquibase.statement.DatabaseFunction;
import liquibase.statement.SequenceCurrentValueFunction;
import liquibase.statement.SequenceNextValueFunction;
import liquibase.statement.core.InsertOrUpdateStatement;
import liquibase.statement.core.InsertStatement;
import liquibase.statement.core.UpdateStatement;
import org.junit.Test;
import static org.junit.Assert.*;
public class InsertOrUpdateGeneratorOracleTest {
@Test
public void ContainsInsertStatement() {
OracleDatabase database = new OracleDatabase();
InsertOrUpdateGeneratorOracle generator = new InsertOrUpdateGeneratorOracle();
InsertOrUpdateStatement statement = new InsertOrUpdateStatement("mycatalog", "myschema","mytable","pk_col1");
statement.addColumnValue("pk_col1","value1");
statement.addColumnValue("col2","value2");
statement.addColumnValue("pk_col1","value1");
statement.addColumnValue("col2","value2");
Sql[] sql = generator.generateSql( statement, database, null);
String theSql = sql[0].toSql();
assertTrue(theSql.contains("INSERT INTO mycatalog.mytable (pk_col1, col2) VALUES ('value1', 'value2');"));
assertTrue(theSql.contains("UPDATE mycatalog.mytable"));
String[] sqlLines = theSql.split("\n");
int lineToCheck = 0;
assertEquals("DECLARE", sqlLines[lineToCheck].trim());
lineToCheck++;
assertEquals("v_reccount NUMBER := 0;", sqlLines[lineToCheck].trim());
lineToCheck++;
assertEquals("BEGIN", sqlLines[lineToCheck].trim());
lineToCheck++;
assertEquals("SELECT COUNT(*) INTO v_reccount FROM mycatalog.mytable WHERE pk_col1 = 'value1';", sqlLines[lineToCheck].trim());
lineToCheck++;
assertEquals("IF v_reccount = 0 THEN", sqlLines[lineToCheck].trim());
lineToCheck++;
assertEquals("INSERT INTO mycatalog.mytable (pk_col1, col2) VALUES ('value1', 'value2');", sqlLines[lineToCheck]);
lineToCheck++;
assertEquals("ELSIF v_reccount = 1 THEN", sqlLines[lineToCheck].trim());
lineToCheck++;
assertEquals("UPDATE mycatalog.mytable SET col2 = 'value2' WHERE pk_col1 = 'value1';", sqlLines[lineToCheck].trim());
lineToCheck++;
assertEquals("END IF;", sqlLines[lineToCheck].trim());
lineToCheck++;
assertEquals("END;", sqlLines[lineToCheck].trim());
/*
DECLARE<|fim▁hole|> -- Check if product with this name already exists
SELECT COUNT (*)
INTO v_prodcount
FROM books WHERE isbn = 12345678;
-- Product does not exist
IF v_prodcount = 0 THEN
-- Insert row into PRODUCT based on arguments passed
INSERT INTO books
VALUES
( 12345678,
98765432,
'Working with Liquibase');
-- Product with this name already exists
ELSIF v_prodcount = 1 THEN
-- Update the existing product with values
-- passed as arguments
UPDATE books
SET author_id = 98765432,
title = 'Working with liquibase'
WHERE isbn = 12345678;
END IF;
END;*/
}
@Test
public void testOnlyUpdateFlag() {
OracleDatabase database = new OracleDatabase();
InsertOrUpdateGeneratorOracle generator = new InsertOrUpdateGeneratorOracle();
InsertOrUpdateStatement statement = new InsertOrUpdateStatement("mycatalog", "myschema", "mytable", "pk_col1", true);
statement.addColumnValue("pk_col1", "value1");
statement.addColumnValue("col2", "value2");
Sql[] sql = generator.generateSql(statement, database, null);
String theSql = sql[0].toSql();
assertFalse("should not have had insert statement", theSql.contains("INSERT INTO mycatalog.mytable (pk_col1, col2) VALUES ('value1', 'value2');"));
assertTrue("missing update statement", theSql.contains("UPDATE mycatalog.mytable"));
String[] sqlLines = theSql.split("\n");
int lineToCheck = 0;
assertEquals("UPDATE mycatalog.mytable SET col2 = 'value2' WHERE pk_col1 = 'value1'", sqlLines[lineToCheck].trim());
assertEquals("Wrong number of lines", 1, sqlLines.length);
}
private String prepareInsertStatement(DatabaseFunction databaseSchemaBasedFunction) {
OracleDatabase database = new OracleDatabase();
database.setObjectQuotingStrategy(ObjectQuotingStrategy.LEGACY);
InsertGenerator generator = new InsertGenerator();
InsertStatement statement = new InsertStatement("mycatalog", "myschema", "mytable");
ColumnConfig columnConfig = new ColumnConfig();
if (databaseSchemaBasedFunction instanceof SequenceNextValueFunction) {
columnConfig.setValueSequenceNext((SequenceNextValueFunction) databaseSchemaBasedFunction);
} else if (databaseSchemaBasedFunction instanceof SequenceCurrentValueFunction) {
columnConfig.setValueSequenceCurrent((SequenceCurrentValueFunction) databaseSchemaBasedFunction);
}
columnConfig.setName("col3");
statement.addColumn(columnConfig);
Sql[] sql = generator.generateSql(statement, database, null);
return sql[0].toSql();
}
private String prepareUpdateStatement(SequenceNextValueFunction sequenceNextValueFunction) {
OracleDatabase database = new OracleDatabase();
database.setObjectQuotingStrategy(ObjectQuotingStrategy.LEGACY);
UpdateGenerator generator = new UpdateGenerator();
UpdateStatement statement = new UpdateStatement("mycatalog", "myschema", "mytable");
statement.addNewColumnValue("col3", sequenceNextValueFunction);
Sql[] sql = generator.generateSql(statement, database, null);
return sql[0].toSql();
}
@Test
public void testInsertSequenceValWithSchema() {
SequenceNextValueFunction sequenceNext = new SequenceNextValueFunction("myschema", "my_seq");
assertEquals(
"INSERT INTO mycatalog.mytable (col3) VALUES (myschema.my_seq.nextval)",
prepareInsertStatement(sequenceNext));
}
@Test
public void testInsertSequenceValWithSchemaInWholeStatement() {
SequenceNextValueFunction sequenceNext = new SequenceNextValueFunction("myschema", "my_seq");
assertEquals(
"INSERT INTO mycatalog.mytable (col3) VALUES (myschema.my_seq.nextval)",
prepareInsertStatement(sequenceNext));
}
@Test
public void testUpdateSequenceValWithSchema() {
SequenceNextValueFunction sequenceNext = new SequenceNextValueFunction("myschema", "my_seq");
assertEquals(
"UPDATE mycatalog.mytable SET col3 = myschema.my_seq.nextval",
prepareUpdateStatement(sequenceNext));
}
@Test
public void testUpdateSequenceValWithSchemaInWholeStatement() {
SequenceNextValueFunction sequenceNext = new SequenceNextValueFunction("myschema", "my_seq");
assertEquals(
"UPDATE mycatalog.mytable SET col3 = myschema.my_seq.nextval",
prepareUpdateStatement(sequenceNext));
}
}<|fim▁end|> | v_prodcount NUMBER := 0;
BEGIN |
<|file_name|>git_actions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from peyotl.utility import get_logger
import re
from peyotl.git_storage import GitActionBase
# extract an amendment id from a git repo path (as returned by git-tree)
_LOG = get_logger(__name__)
class MergeException(Exception):
pass
def get_filepath_for_id(repo_dir, amendment_id):
    # Map an amendment id to its JSON document path inside the repo.
    # The function-local import presumably avoids a circular import at
    # module load time -- TODO confirm.
    from peyotl.amendments import AMENDMENT_ID_PATTERN
    # Programmer error if the id does not match the expected pattern.
    assert bool(AMENDMENT_ID_PATTERN.match(amendment_id))
    return '{r}/amendments/{s}.json'.format(r=repo_dir, s=amendment_id)
def amendment_id_from_repo_path(path):
    """Return the amendment id encoded in a repo-relative path.

    `path` is a path as returned by git-tree, e.g. 'amendments/<id>.json'.
    Returns everything after the leading 'amendments/' prefix, or None when
    the path does not live under the amendments directory.
    """
    doc_parent_dir = 'amendments/'
    if path.startswith(doc_parent_dir):
        # Slice off the prefix rather than str.split on it: split breaks on
        # *every* occurrence, so a path containing 'amendments/' twice would
        # yield a truncated id. Slicing also removes the need for the old
        # bare `except:` which silently swallowed all errors.
        return path[len(doc_parent_dir):]
    return None
class TaxonomicAmendmentsGitAction(GitActionBase):
    def __init__(self,
                 repo,
                 remote=None,
                 git_ssh=None,
                 pkey=None,
                 cache=None,  # pylint: disable=W0613
                 path_for_doc_fn=None,
                 max_file_size=None):
        """GitActionBase subclass to interact with a Git repository

        Example:
            gd = TaxonomicAmendmentsGitAction(repo="/home/user/git/foo")

        Note that this requires write access to the
        git repository directory, so it can create a
        lockfile in the .git directory.

        :param repo: filesystem path of the git repository.
        :param remote: optional name/URL of the remote to push to.
        :param git_ssh: optional path to the git SSH wrapper script.
        :param pkey: optional private key used for SSH access.
        :param cache: unused; accepted for interface compatibility.
        :param path_for_doc_fn: optional override for mapping a doc to a path.
        :param max_file_size: optional cap (bytes) on stored document size.
        """
        GitActionBase.__init__(self,
                               'amendment',
                               repo,
                               remote,
                               git_ssh,
                               pkey,
                               cache,
                               path_for_doc_fn,
                               max_file_size,
                               path_for_doc_id_fn=get_filepath_for_id)
# rename some generic members in the base class, for clarity and backward compatibility
@property
def path_for_amendment(self):
return self.path_for_doc
<|fim▁hole|> def return_amendment(self):
return self.return_document
def get_changed_docs(self,
ancestral_commit_sha,
doc_ids_to_check=None):
return self._get_changed_docs(ancestral_commit_sha,
doc_id_from_repo_path=amendment_id_from_repo_path,
doc_ids_to_check=doc_ids_to_check)
def find_WIP_branches(self, amendment_id):
pat = re.compile(r'.*_amendment_{i}_[0-9]+'.format(i=amendment_id))
return self._find_WIP_branches(amendment_id, branch_pattern=pat)
def create_or_checkout_branch(self,
gh_user,
amendment_id,
parent_sha,
force_branch_name=False):
return self._create_or_checkout_branch(gh_user,
amendment_id,
parent_sha,
branch_name_template="{ghu}_amendment_{rid}",
force_branch_name=force_branch_name)
    def remove_amendment(self, first_arg, sec_arg, third_arg, fourth_arg=None, commit_msg=None):
        """Remove an amendment
        Given a amendment_id, branch and optionally an
        author, remove an amendment on the given branch
        and attribute the commit to author.
        Returns the SHA of the commit on branch.

        Two calling conventions are supported (distinguished by whether
        fourth_arg is provided):
          - (amendment_id, branch_name, author): the GitHub user and parent
            SHA are derived from the branch name and current master.
          - (gh_user, amendment_id, parent_sha, author): all parts explicit.
        """
        if fourth_arg is None:
            amendment_id, branch_name, author = first_arg, sec_arg, third_arg
            # Branch names follow the "<gh_user>_amendment_<id>" convention.
            gh_user = branch_name.split('_amendment_')[0]
            parent_sha = self.get_master_sha()
        else:
            gh_user, amendment_id, parent_sha, author = first_arg, sec_arg, third_arg, fourth_arg
        if commit_msg is None:
            commit_msg = "Delete Amendment '%s' via OpenTree API" % amendment_id
        return self._remove_document(gh_user, amendment_id, parent_sha, author, commit_msg)
    def write_amendment(self, amendment_id, file_content, branch, author):
        """Given an amendment_id, temporary filename of content, branch and auth_info

        Deprecated but needed until we merge api local-dep to master...
        The GitHub user is recovered from the branch name, which follows the
        "<gh_user>_amendment_<id>" convention.
        """
        gh_user = branch.split('_amendment_')[0]
        msg = "Update Amendment '%s' via OpenTree API" % amendment_id
        return self.write_document(gh_user,
                                   amendment_id,
                                   file_content,
                                   branch, author,
                                   commit_msg=msg)
def write_amendment_from_tmpfile(self, amendment_id, tmpfi, parent_sha, auth_info, commit_msg=''):
"""Given an amendment_id, temporary filename of content, branch and auth_info
"""
return self.write_doc_from_tmpfile(amendment_id,
tmpfi,
parent_sha,
auth_info,
commit_msg,
doctype_display_name="amendment")<|fim▁end|> | @property |
<|file_name|>LiveTv.js<|end_file_name|><|fim▁begin|>import * as React from 'react';
import createSvgIcon from './utils/createSvgIcon';
<|fim▁hole|><|fim▁end|> | export default createSvgIcon(
<path d="M21 6h-7.59l3.29-3.29L16 2l-4 4-4-4-.71.71L10.59 6H3c-1.1 0-2 .89-2 2v12c0 1.1.9 2 2 2h18c1.1 0 2-.9 2-2V8c0-1.11-.9-2-2-2zm0 14H3V8h18v12zM9 10v8l7-4z" />
, 'LiveTv'); |
<|file_name|>datalog.py<|end_file_name|><|fim▁begin|>"""
This file shows how to use pyDatalog using facts stored in datalog.
It has 3 parts:
1. create facts for 2 employees in the datalog engine
2. define business rules
3. Query the datalog engine
"""
from pyDatalog import pyDatalog
""" 1. create facts for 3 employees in the datalog engine """
pyDatalog.create_atoms('salary', 'manager')
# John is the manager of Mary, who is the manager of Sam
+ (salary['John'] == 6800)
+ (manager['Mary'] == 'John')
+ (salary['Mary'] == 6300)
+ (manager['Sam'] == 'Mary')
+ (salary['Sam'] == 5900)
""" 2. define business rules """
pyDatalog.create_atoms('salary_class', 'indirect_manager', 'report_count', 'budget', 'lowest',
'X', 'Y', 'Z', 'N')
# the salary class of employee X is computed as a function of his/her salary
salary_class[X] = salary[X]//1000
# all the indirect managers of employee X are derived from his manager, recursively
indirect_manager(X,Y) <= (manager[X] == Y) & (Y != None)
indirect_manager(X,Y) <= (manager[X] == Z) & indirect_manager(Z,Y) & (Y != None)
# count the number of reports of X
(report_count[X] == len_(Y)) <= indirect_manager(Y,X)
""" 3. Query the datalog engine """
# what is the salary class of John ?
print(salary_class['John'] == Y) # Y is 6
# who has a salary of 6300 ?
print(salary[X] == 6300) # X is Mary
# who are the indirect managers of Mary ?
print(indirect_manager('Mary', X)) # X is John
# Who are the employees of John with a salary below 6000 ?
print((salary[X] < 6000) & indirect_manager(X, 'John')) # X is Sam
# who is his own indirect manager ?
print(indirect_manager('X', X)) # prints []
# who has 2 reports ?
print(report_count[X] == 2) # X is John
# what is the total salary of the employees of John ?
(budget[X] == sum_(N, for_each=Y)) <= (indirect_manager(Y, X)) & (salary[Y]==N)
print(budget['John']==N) # N is 12200
# who has the lowest salary ?
(lowest[1] == min_(X, order_by=N)) <= (salary[X]==N)
print(lowest[1]==X) # X is Sam
# start the datalog console, for interactive querying
from pyDatalog.examples import console<|fim▁hole|><|fim▁end|> | console = console.datalogConsole(locals=locals())
console.interact('Type exit() when done.') |
<|file_name|>hosts.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | ../../../../../share/pyshared/twisted/names/hosts.py |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.versioning.shell import main
if __name__ == '__main__':
    # Delegate to sqlalchemy-migrate's versioning shell; 'debug' is a
    # string-valued flag per the migrate API, and the migration repository
    # is the current directory.
    main(debug='False', repository='.')
<|file_name|>lexical-scope-in-parameterless-closure.rs<|end_file_name|><|fim▁begin|>// min-lldb-version: 310
// compile-flags:-C debuginfo=1
// gdb-command:run
// lldb-command:run
<|fim▁hole|> let _ = (1_usize..3).map(|_| 5);
}<|fim▁end|> | // Nothing to do here really, just make sure it compiles. See issue #8513.
fn main() {
let _ = ||(); |
<|file_name|>registry_access.py<|end_file_name|><|fim▁begin|># Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import re<|fim▁hole|>import functools
import binascii
import calendar
import datetime
import hashlib
import base64
import os
try:
from urllib import quote as quoteURL
except ImportError:
from urllib.parse import quote as quoteURL #pylint: disable=no-name-in-module,import-error
# requests, apache2
import requests
# PyJWT, MIT, Jason Web Tokens, pip install PyJWT
import jwt
# cryptography, Apache License, Python Cryptography library,
#import cryptography
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
# settings, , load and save settings, internal
from yotta.lib import settings
# access_common, , things shared between different component access modules, internal
from yotta.lib import access_common
# Ordered JSON, , read & write json, internal
from yotta.lib import ordered_json
# export key, , export pycrypto keys, internal
from yotta.lib import exportkey
# globalconf, share global arguments between modules, internal
from yotta.lib import globalconf
Registry_Base_URL = 'https://registry.yottabuild.org'
Website_Base_URL = 'https://yotta.mbed.com'
_OpenSSH_Keyfile_Strip = re.compile(b"^(ssh-[a-z0-9]*\s+)|(\s+.+\@.+)|\n", re.MULTILINE)
logger = logging.getLogger('access')
# suppress logging from the requests library
logging.getLogger("requests").setLevel(logging.WARNING)
class AuthError(RuntimeError):
    """Raised when registry authentication fails or is rejected."""
    pass
# Internal functions
def generate_jwt_token(private_key, registry=None):
    """Create a short-lived RS256 JWT authenticating this client to a registry.

    The token's subject ("prn") is the MD5 fingerprint of the public half of
    `private_key`, its audience is the registry URL, and it expires two
    minutes from now.

    :param private_key: cryptography RSA private key object.
    :param registry: registry URL used as the token audience; defaults to
        Registry_Base_URL.
    :return: the encoded JWT as an ASCII string.
    """
    registry = registry or Registry_Base_URL
    # Expiry as a POSIX timestamp, two minutes in the future (UTC).
    expires = calendar.timegm((datetime.datetime.utcnow() + datetime.timedelta(minutes=2)).timetuple())
    prn = _fingerprint(private_key.public_key())
    logger.debug('fingerprint: %s' % prn)
    token_fields = {
        "iss": 'yotta',
        "aud": registry,
        "prn": prn,
        "exp": expires
    }
    logger.debug('token fields: %s' % token_fields)
    # PyJWT signs with a PEM-encoded key, so serialize the key first.
    private_key_pem = private_key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.NoEncryption()
    )
    token = jwt.encode(token_fields, private_key_pem.decode('ascii'), 'RS256').decode('ascii')
    logger.debug('encoded token: %s' % token)
    return token
def _pubkeyWireFormat(pubkey):
pubk_numbers = pubkey.public_numbers()
logger.debug('openssh format publickey:\n%s' % exportkey.openSSH(pubk_numbers))
return quoteURL(_OpenSSH_Keyfile_Strip.sub(b'', exportkey.openSSH(pubk_numbers)))
def _fingerprint(pubkey):
stripped = _OpenSSH_Keyfile_Strip.sub(b'', exportkey.openSSH(pubkey.public_numbers()))
decoded = base64.b64decode(stripped)
khash = hashlib.md5(decoded).hexdigest()
return ':'.join([khash[i:i+2] for i in range(0, len(khash), 2)])
def _retryConnectionErrors(fn):
@functools.wraps(fn)
def wrapped(*args, **kwargs):
attempts_remaining = 5
delay = 0.1
while True:
attempts_remaining -= 1
try:
return fn(*args, **kwargs)
except requests.exceptions.ConnectionError as e:
errmessage = e.message
import socket
# try to format re-packaged get-address-info exceptions
# into a nice message (this will be the normal exception
# you see if you aren't connected to the internet)
try:
errmessage = str(e.message[1])
except Exception as e:
pass
if attempts_remaining:
logger.warning('connection error: %s, retrying...', errmessage)
else:
logger.error('connection error: %s', errmessage)
raise
except requests.exceptions.Timeout as e:
if attempts_remaining:
logger.warning('request timed out: %s, retrying...', e.message)
else:
logger.error('request timed out: %s', e.message)
raise
import time
time.sleep(delay)
delay = delay * 1.6 + 0.1
return wrapped
def _returnRequestError(fn):
''' Decorator that captures requests.exceptions.RequestException errors
and returns them as an error message. If no error occurs the reture
value of the wrapped function is returned (normally None). '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except requests.exceptions.RequestException as e:
return "server returned status %s: %s" % (e.response.status_code, e.message)
return wrapped
def _handleAuth(fn):
''' Decorator to re-try API calls after asking the user for authentication. '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
# auth, , authenticate users, internal
from yotta.lib import auth
# if yotta is being run noninteractively, then we never retry, but we
# do call auth.authorizeUser, so that a login URL can be displayed:
interactive = globalconf.get('interactive')
try:
return fn(*args, **kwargs)
except requests.exceptions.HTTPError as e:
if e.response.status_code == requests.codes.unauthorized: #pylint: disable=no-member
logger.debug('%s unauthorised', fn)
# any provider is sufficient for registry auth
auth.authorizeUser(provider=None, interactive=interactive)
if interactive:
logger.debug('retrying after authentication...')
return fn(*args, **kwargs)
raise
return wrapped
def _friendlyAuthError(fn):
''' Decorator to print a friendly you-are-not-authorised message. Use
**outside** the _handleAuth decorator to only print the message after
the user has been given a chance to login. '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except requests.exceptions.HTTPError as e:
if e.response.status_code == requests.codes.unauthorized: #pylint: disable=no-member
logger.error('insufficient permission')
elif e.response.status_code == requests.codes.bad and 'jwt has expired' in e.response.text.lower(): #pylint: disable=no-member
logger.error('server returned status %s: %s', e.response.status_code, e.response.text)
logger.error('Check that your system clock is set accurately!')
else:
logger.error('server returned status %s: %s', e.response.status_code, e.response.text)
raise
return wrapped
def _raiseUnavailableFor401(message):
''' Returns a decorator to swallow a requests exception for modules that
are not accessible without logging in, and turn it into an Unavailable
exception.
'''
def __raiseUnavailableFor401(fn):
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except requests.exceptions.HTTPError as e:
if e.response.status_code == requests.codes.unauthorized:
raise access_common.Unavailable(message)
else:
raise
return wrapped
return __raiseUnavailableFor401
def _swallowRequestExceptions(fail_return=None):
def __swallowRequestExceptions(fn):
''' Decorator to swallow known exceptions: use with _friendlyAuthError,
returns non-None if an exception occurred
'''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except requests.exceptions.HTTPError as e:
return fail_return
return wrapped
return __swallowRequestExceptions
def _getPrivateRegistryKey():
if 'YOTTA_PRIVATE_REGISTRY_API_KEY' in os.environ:
return os.environ['YOTTA_PRIVATE_REGISTRY_API_KEY']
return None
@_retryConnectionErrors
def _listVersions(namespace, name):
sources = _getSources()
registry_urls = [s['url'] for s in sources if 'type' in s and s['type'] == 'registry']
# look in the public registry last
registry_urls.append(Registry_Base_URL)
versions = []
for registry in registry_urls:
# list versions of the package:
url = '%s/%s/%s/versions' % (
registry,
namespace,
name
)
request_headers = _headersForRegistry(registry)
logger.debug("GET %s, %s", url, request_headers)
response = requests.get(url, headers=request_headers)
if response.status_code == 404:
continue
# raise any other HTTP errors
response.raise_for_status()
for x in ordered_json.loads(response.text):
rtv = RegistryThingVersion(x, namespace, name, registry=registry)
if not rtv in versions:
versions.append(rtv)
if not len(versions):
raise access_common.Unavailable(
('%s does not exist in the %s registry. '+
'Check that the name is correct, and that it has been published.') % (name, namespace)
)
return versions
def _tarballURL(namespace, name, version, registry=None):
registry = registry or Registry_Base_URL
return '%s/%s/%s/versions/%s/tarball' % (
registry, namespace, name, version
)
@_retryConnectionErrors
@_raiseUnavailableFor401("dependency is not available without logging in")
@_friendlyAuthError
@_handleAuth
def _getTarball(url, directory, sha256):
logger.debug('registry: get: %s' % url)
if not sha256:
logger.warn('tarball %s has no hash to check' % url)
try:
access_common.unpackFromCache(sha256, directory)
except KeyError as e:
# figure out which registry we're fetching this tarball from (if any)
# and add appropriate headers
registry = Registry_Base_URL
for source in _getSources():
if ('type' in source and source['type'] == 'registry' and
'url' in source and url.startswith(source['url'])):
registry = source['url']
break
request_headers = _headersForRegistry(registry)
logger.debug('GET %s, %s', url, request_headers)
response = requests.get(url, headers=request_headers, allow_redirects=True, stream=True)
response.raise_for_status()
access_common.unpackTarballStream(
stream = response,
into_directory = directory,
hash = {'sha256':sha256},
cache_key = sha256,
origin_info = {'url':url}
)
def _getSources():
sources = settings.get('sources')
if sources is None:
sources = []
return sources
def _isPublicRegistry(registry):
return (registry is None) or (registry == Registry_Base_URL)
def friendlyRegistryName(registry, short=False):
if registry.startswith(Registry_Base_URL):
if short:
return 'public registry'
else:
return 'the public module registry'
else:
return registry
def _getPrivateKey(registry):
if _isPublicRegistry(registry):
return settings.getProperty('keys', 'private')
else:
for s in _getSources():
if _sourceMatches(s, registry):
if 'keys' in s and s['keys'] and 'private' in s['keys']:
return s['keys']['private']
return None
def _sourceMatches(source, registry):
return ('type' in source and source['type'] == 'registry' and
'url' in source and source['url'] == registry)
def _generateAndSaveKeys(registry=None):
registry = registry or Registry_Base_URL
k = rsa.generate_private_key(
public_exponent=65537, key_size=2048, backend=default_backend()
)
privatekey_pem = k.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.NoEncryption()
)
pubkey_pem = k.public_key().public_bytes(
serialization.Encoding.PEM,
serialization.PublicFormat.SubjectPublicKeyInfo
)
if _isPublicRegistry(registry):
settings.setProperty('keys', 'private', privatekey_pem.decode('ascii'))
settings.setProperty('keys', 'public', pubkey_pem.decode('ascii'))
else:
sources = _getSources()
keys = None
for s in sources:
if _sourceMatches(s, registry):
if not 'keys' in s:
s['keys'] = dict()
keys = s['keys']
break
if keys is None:
keys = dict()
sources.append({
'type':'registry',
'url':registry,
'keys':keys
})
keys['private'] = privatekey_pem.decode('ascii')
keys['public'] = pubkey_pem.decode('ascii')
settings.set('sources', sources)
return pubkey_pem, privatekey_pem
def _getPrivateKeyObject(registry=None):
registry = registry or Registry_Base_URL
privatekey_pem = _getPrivateKey(registry)
if not privatekey_pem:
pubkey_pem, privatekey_pem = _generateAndSaveKeys(registry)
else:
# settings are unicode, we should be able to safely decode to ascii for
# the key though, as it will either be hex or PEM encoded:
privatekey_pem = privatekey_pem.encode('ascii')
# if the key doesn't look like PEM, it might be hex-encided-DER (which we
# used historically), so try loading that:
if b'-----BEGIN PRIVATE KEY-----' in privatekey_pem:
return serialization.load_pem_private_key(
privatekey_pem, None, default_backend()
)
else:
privatekey_der = binascii.unhexlify(privatekey_pem)
return serialization.load_der_private_key(
privatekey_der, None, default_backend()
)
def _getYottaVersion():
import yotta
return yotta.__version__
def _getYottaClientUUID():
import uuid
current_uuid = settings.get('uuid')
if current_uuid is None:
current_uuid = u'%s' % uuid.uuid4()
settings.set('uuid', current_uuid)
return current_uuid
def _headersForRegistry(registry):
registry = registry or Registry_Base_URL
auth_token = generate_jwt_token(_getPrivateKeyObject(registry), registry)
mbed_user_id = os.environ.get('MBED_USER_ID', None)
r = {
'Authorization': 'Bearer %s' % auth_token,
'X-Yotta-Client-Version': _getYottaVersion(),
'X-Yotta-Client-ID': _getYottaClientUUID()
}
if mbed_user_id is not None:
r['X-Yotta-MBED-User-ID'] = mbed_user_id
if registry == Registry_Base_URL:
return r
for s in _getSources():
if _sourceMatches(s, registry):
if 'apikey' in s:
r['X-Api-Key'] = s['apikey']
break
return r
# API
class RegistryThingVersion(access_common.RemoteVersion):
def __init__(self, data, namespace, name, registry=None):
logger.debug('RegistryThingVersion %s/%s data: %s' % (namespace, name, data))
version = data['version']
self.namespace = namespace
self.name = name
self.version = version
if 'hash' in data and 'sha256' in data['hash']:
self.sha256 = data['hash']['sha256']
else:
self.sha256 = None
url = _tarballURL(self.namespace, self.name, version, registry)
super(RegistryThingVersion, self).__init__(
version, url, name=name, friendly_source=friendlyRegistryName(registry)
)
def unpackInto(self, directory):
assert(self.url)
_getTarball(self.url, directory, self.sha256)
class RegistryThing(access_common.RemoteComponent):
def __init__(self, name, version_spec, namespace):
self.name = name
self.spec = version_spec
self.namespace = namespace
@classmethod
def createFromSource(cls, vs, name, registry):
''' returns a registry component for anything that's a valid package
name (this does not guarantee that the component actually exists in
the registry: use availableVersions() for that).
'''
# we deliberately allow only lowercase, hyphen, and (unfortunately)
# numbers in package names, to reduce the possibility of confusingly
# similar names: if the name doesn't match this then escalate to make
# the user fix it. Targets also allow +
if registry == 'targets':
name_match = re.match('^[a-z]+[a-z0-9+-]*$', name)
if not name_match:
raise access_common.AccessException(
'Target name "%s" is not valid (must contain only lowercase letters, hyphen, plus, and numbers)' % name
)
else:
name_match = re.match('^[a-z]+[a-z0-9-]*$', name)
if not name_match:
raise access_common.AccessException(
'Module name "%s" is not valid (must contain only lowercase letters, hyphen, and numbers)' % name
)
assert(vs.semantic_spec)
return RegistryThing(name, vs.semantic_spec, registry)
def versionSpec(self):
return self.spec
def availableVersions(self):
''' return a list of Version objects, each able to retrieve a tarball '''
return _listVersions(self.namespace, self.name)
def tipVersion(self):
raise NotImplementedError()
@classmethod
def remoteType(cls):
return 'registry'
@_swallowRequestExceptions(fail_return="request exception occurred")
@_retryConnectionErrors
@_friendlyAuthError
@_handleAuth
def publish(namespace, name, version, description_file, tar_file, readme_file,
readme_file_ext, registry=None):
''' Publish a tarblob to the registry, if the request fails, an exception
is raised, which either triggers re-authentication, or is turned into a
return value by the decorators. (If successful, the decorated function
returns None)
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/versions/%s' % (
registry,
namespace,
name,
version
)
if readme_file_ext == '.md':
readme_section_name = 'readme.md'
elif readme_file_ext == '':
readme_section_name = 'readme'
else:
raise ValueError('unsupported readme type: "%s"' % readme_file_ext)
# description file is in place as text (so read it), tar file is a file
body = OrderedDict([('metadata', (None, description_file.read(),'application/json')),
('tarball',('tarball', tar_file)),
(readme_section_name, (readme_section_name, readme_file))])
headers = _headersForRegistry(registry)
response = requests.put(url, headers=headers, files=body)
response.raise_for_status()
return None
@_swallowRequestExceptions(fail_return="request exception occurred")
@_retryConnectionErrors
@_friendlyAuthError
@_handleAuth
def unpublish(namespace, name, version, registry=None):
''' Try to unpublish a recently published version. Return any errors that
occur.
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/versions/%s' % (
registry,
namespace,
name,
version
)
headers = _headersForRegistry(registry)
response = requests.delete(url, headers=headers)
response.raise_for_status()
return None
@_swallowRequestExceptions(fail_return=None)
@_retryConnectionErrors
@_friendlyAuthError
@_handleAuth
def listOwners(namespace, name, registry=None):
''' List the owners of a module or target (owners are the people with
permission to publish versions and add/remove the owners).
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/owners' % (
registry,
namespace,
name
)
request_headers = _headersForRegistry(registry)
response = requests.get(url, headers=request_headers)
if response.status_code == 404:
logger.error('no such %s, "%s"' % (namespace[:-1], name))
return None
# raise exceptions for other errors - the auth decorators handle these and
# re-try if appropriate
response.raise_for_status()
return ordered_json.loads(response.text)
@_swallowRequestExceptions(fail_return=None)
@_retryConnectionErrors
@_friendlyAuthError
@_handleAuth
def addOwner(namespace, name, owner, registry=None):
''' Add an owner for a module or target (owners are the people with
permission to publish versions and add/remove the owners).
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/owners/%s' % (
registry,
namespace,
name,
owner
)
request_headers = _headersForRegistry(registry)
response = requests.put(url, headers=request_headers)
if response.status_code == 404:
logger.error('no such %s, "%s"' % (namespace[:-1], name))
return
# raise exceptions for other errors - the auth decorators handle these and
# re-try if appropriate
response.raise_for_status()
return True
@_swallowRequestExceptions(fail_return=None)
@_retryConnectionErrors
@_friendlyAuthError
@_handleAuth
def removeOwner(namespace, name, owner, registry=None):
''' Remove an owner for a module or target (owners are the people with
permission to publish versions and add/remove the owners).
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/owners/%s' % (
registry,
namespace,
name,
owner
)
request_headers = _headersForRegistry(registry)
response = requests.delete(url, headers=request_headers)
if response.status_code == 404:
logger.error('no such %s, "%s"' % (namespace[:-1], name))
return
# raise exceptions for other errors - the auth decorators handle these and
# re-try if appropriate
response.raise_for_status()
return True
@_friendlyAuthError
@_retryConnectionErrors
def whoami(registry=None):
registry = registry or Registry_Base_URL
url = '%s/users/me' % (
registry
)
request_headers = _headersForRegistry(registry)
logger.debug('test login...')
response = requests.get(url, headers=request_headers)
if response.status_code == 401:
# not logged in
return None
else:
response.raise_for_status()
return ', '.join(ordered_json.loads(response.text).get('primary_emails', {}).values())
@_retryConnectionErrors
def search(query='', keywords=[], registry=None):
''' generator of objects returned by the search endpoint (both modules and
targets).
Query is a full-text search (description, name, keywords), keywords
search only the module/target description keywords lists.
If both parameters are specified the search is the intersection of the
two queries.
'''
registry = registry or Registry_Base_URL
url = '%s/search' % registry
headers = _headersForRegistry(registry)
params = {
'skip': 0,
'limit': 50
}
if len(query):
params['query'] = query
if len(keywords):
params['keywords[]'] = keywords
while True:
response = requests.get(url, headers=headers, params=params)
response.raise_for_status()
objects = ordered_json.loads(response.text)
if len(objects):
for o in objects:
yield o
params['skip'] += params['limit']
else:
break
def deauthorize(registry=None):
registry = registry or Registry_Base_URL
if _isPublicRegistry(registry):
if settings.get('keys'):
settings.set('keys', dict())
else:
sources = [s for s in _getSources() if not _sourceMatches(s, registry)]
settings.set('sources', sources)
def setAPIKey(registry, api_key):
''' Set the api key for accessing a registry. This is only necessary for
development/test registries.
'''
if (registry is None) or (registry == Registry_Base_URL):
return
sources = _getSources()
source = None
for s in sources:
if _sourceMatches(s, registry):
source = s
if source is None:
source = {
'type':'registry',
'url':registry,
}
sources.append(source)
source['apikey'] = api_key
settings.set('sources', sources)
def getPublicKey(registry=None):
''' Return the user's public key (generating and saving a new key pair if necessary) '''
registry = registry or Registry_Base_URL
pubkey_pem = None
if _isPublicRegistry(registry):
pubkey_pem = settings.getProperty('keys', 'public')
else:
for s in _getSources():
if _sourceMatches(s, registry):
if 'keys' in s and s['keys'] and 'public' in s['keys']:
pubkey_pem = s['keys']['public']
break
if not pubkey_pem:
pubkey_pem, privatekey_pem = _generateAndSaveKeys()
else:
# settings are unicode, we should be able to safely decode to ascii for
# the key though, as it will either be hex or PEM encoded:
pubkey_pem = pubkey_pem.encode('ascii')
# if the key doesn't look like PEM, it might be hex-encided-DER (which we
# used historically), so try loading that:
if b'-----BEGIN PUBLIC KEY-----' in pubkey_pem:
pubkey = serialization.load_pem_public_key(pubkey_pem, default_backend())
else:
pubkey_der = binascii.unhexlify(pubkey_pem)
pubkey = serialization.load_der_public_key(pubkey_der, default_backend())
return _pubkeyWireFormat(pubkey)
@_retryConnectionErrors
def getAuthData(registry=None):
''' Poll the registry to get the result of a completed authentication
(which, depending on the authentication the user chose or was directed
to, will include a github or other access token)
'''
registry = registry or Registry_Base_URL
url = '%s/tokens' % (
registry
)
request_headers = _headersForRegistry(registry)
logger.debug('poll for tokens... %s', request_headers)
try:
response = requests.get(url, headers=request_headers)
except requests.RequestException as e:
logger.debug(str(e))
return None
if response.status_code == requests.codes.unauthorized: #pylint: disable=no-member
logger.debug('Unauthorised')
return None
elif response.status_code == requests.codes.not_found: #pylint: disable=no-member
logger.debug('Not Found')
return None
body = response.text
logger.debug('auth data response: %s' % body);
r = {}
parsed_response = ordered_json.loads(body)
if 'error' in parsed_response:
raise AuthError(parsed_response['error'])
for token in parsed_response:
if 'provider' in token and token['provider'] and 'accessToken' in token:
r[token['provider']] = token['accessToken']
break
logger.debug('parsed auth tokens %s' % r);
return r
def getLoginURL(provider=None, registry=None):
registry = registry or Registry_Base_URL
if provider:
query = ('?provider=%s' % provider)
else:
query = ''
if not _isPublicRegistry(registry):
if not len(query):
query = '?'
query += '&private=1'
return Website_Base_URL + '/' + query + '#login/' + getPublicKey(registry)<|fim▁end|> | import logging
from collections import OrderedDict |
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Higher-level Rust constructs for http_parser
use std::vec::raw::from_buf_raw;
use std::libc::{c_int, c_void, c_char, size_t};
use std::ptr::{null, to_unsafe_ptr};
use std::str;
use http_parser;
use http_parser::{http_parser_settings, HTTP_REQUEST};
use http_parser::{http_parser_init, http_parser_execute};
use http_parser::{enum_http_errno, http_errno_name, http_errno_description};
// pub type HttpCallback = || -> bool;
// pub type HttpDataCallback = |data: ~[u8]| -> bool;
pub struct ParserCallbacks<'self> {
on_message_begin: &'self fn () -> bool,
on_url: &'self fn (data: ~[u8]) -> bool,
on_status_complete: &'self fn () -> bool,
on_header_field: &'self fn (data: ~[u8]) -> bool,
on_header_value: &'self fn (data: ~[u8]) -> bool,
on_headers_complete: &'self fn () -> bool,
on_body: &'self fn (data: ~[u8]) -> bool,
on_message_complete: &'self fn () -> bool
}
pub struct Parser {
http_parser: http_parser::http_parser,
settings: http_parser_settings
}
pub fn Parser() -> Parser {
#[fixed_stack_segment];
let http_parser = http_parser::struct_http_parser {
_type_flags: 0,
state: 0,
header_state: 0,
index: 0,
nread: 0,
content_length: 0,
http_major: 0,
http_minor: 0,
status_code: 0,
method: 0,
http_errno_upgrade: 0,
data: null()
};
unsafe {
http_parser_init(&http_parser, HTTP_REQUEST);
}
<|fim▁hole|> on_status_complete: on_status_complete as *u8,
on_header_field: on_header_field as *u8,
on_header_value: on_header_value as *u8,
on_headers_complete: on_headers_complete as *u8,
on_body: on_body as *u8,
on_message_complete: on_message_complete as *u8
};
Parser {
http_parser: http_parser,
settings: settings
}
}
impl Parser {
pub fn execute(&mut self, data: &[u8], callbacks: &ParserCallbacks) -> uint {
#[fixed_stack_segment];
unsafe {
self.http_parser.data = to_unsafe_ptr(callbacks) as *c_void;
do data.as_imm_buf |buf, _| {
http_parser_execute(&self.http_parser,
&self.settings,
buf as *c_char,
data.len() as size_t) as uint
}
}
}
pub fn status_code(&self) -> uint {
self.http_parser.status_code as uint
}
pub fn method(&self) -> uint {
self.http_parser.method as uint
}
pub fn error(&self) -> (~str, ~str) {
#[fixed_stack_segment];
let err = (self.http_parser.http_errno_upgrade & 0x7f) as enum_http_errno;
unsafe {
(str::raw::from_c_str(http_errno_name(err)),
str::raw::from_c_str(http_errno_description(err)))
}
}
}
fn callbacks(http_parser: *http_parser::http_parser) -> *ParserCallbacks {
unsafe {
assert!((*http_parser).data.is_not_null());
return (*http_parser).data as *ParserCallbacks;
}
}
extern fn on_message_begin(http_parser: *http_parser::http_parser) -> c_int {
unsafe {
(!((*callbacks(http_parser)).on_message_begin)()) as c_int
}
}
extern fn on_url(http_parser: *http_parser::http_parser, at: *u8, length: size_t) -> c_int {
unsafe {
(!(((*callbacks(http_parser)).on_url)(from_buf_raw(at, length as uint)))) as c_int
}
}
extern fn on_status_complete(http_parser: *http_parser::http_parser) -> c_int {
unsafe {
(!((*callbacks(http_parser)).on_status_complete)()) as c_int
}
}
extern fn on_header_field(http_parser: *http_parser::http_parser, at: *u8, length: size_t) ->
c_int {
unsafe {
(!((*callbacks(http_parser)).on_header_field)(from_buf_raw(at, length as uint))) as c_int
}
}
extern fn on_header_value(http_parser: *http_parser::http_parser, at: *u8, length: size_t) ->
c_int {
unsafe {
(!((*callbacks(http_parser)).on_header_value)(from_buf_raw(at, length as uint))) as c_int
}
}
extern fn on_headers_complete(http_parser: *http_parser::http_parser) -> c_int {
unsafe {
(!((*callbacks(http_parser)).on_headers_complete)()) as c_int
}
}
extern fn on_body(http_parser: *http_parser::http_parser, at: *u8, length: size_t) -> c_int {
unsafe {
(!((*callbacks(http_parser)).on_body)(from_buf_raw(at, length as uint))) as c_int
}
}
extern fn on_message_complete(http_parser: *http_parser::http_parser) -> c_int {
unsafe {
(!((*callbacks(http_parser)).on_message_complete)()) as c_int
}
}<|fim▁end|> | let settings = http_parser::struct_http_parser_settings {
on_message_begin: on_message_begin as *u8,
on_url: on_url as *u8, |
<|file_name|>gps.js<|end_file_name|><|fim▁begin|>var GPSClient = {};
GPSClient.send = function(exeurl, latitude,longitude,actionname,username) {
var client = Titanium.Network.createHTTPClient({timeout : 100000});
var paramater = '&intaliouser=' + username + '¶meter=latitude:' + longitude + ',';
//var paramater = '&username=' + username + '¶meter=latitude:' + longitude + ',';
var paramater1 = 'longitude:' + latitude + ',';
var paramater2 = 'timestamp:timestamp,';
var paramater3 = 'actionname:' + actionname + ',';
var paramater4 = 'name:' + username + ',';
var url = exeurl + paramater + paramater1 + paramater2 + paramater3 + paramater4;
client.open(GET_REC, url);
client.onload = function() {
try {
var resData = eval("("+this.responseText+")");
if (resData[0].error == 'Yes') {
var dialog = Titanium.UI.createAlertDialog({});
dialog.title = Titanium.Locale.getString("gps_yes_title");
dialog.message = resData[0].contents;
dialog.show();
return;
} else {
return;
}
} catch (e) {
Titanium.API.error(e);
var dialog = Titanium.UI.createAlertDialog({});
dialog.title = Titanium.Locale.getString("gps_catch_title");
dialog.message = Titanium.Locale.getString("gps_catch_message");
dialog.show();
return;
}<|fim▁hole|> client.onerror = function() {
if (client.status == 401) {
var dialog = Titanium.UI.createAlertDialog({});
dialog.title = Titanium.Locale.getString("gps_connect_title");
dialog.message = Titanium.Locale.getString("gps_connect_message");
dialog.show();
return;
}
var dialog = Titanium.UI.createAlertDialog({});
dialog.title = Titanium.Locale.getString("gps_network_title");
dialog.message = Titanium.Locale.getString("gps_network_message");
dialog.show();
return;
};
client.send();
};<|fim▁end|> | }; |
<|file_name|>0006_auto_20160907_2016.py<|end_file_name|><|fim▁begin|># Generated by Django 1.10.1 on 2016-09-07 20:16
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("institutions", "0005_auto_20160907_1814")]
<|fim▁hole|> options={
"ordering": ["priority", "institution"],
"verbose_name": "Email",
"verbose_name_plural": "Emails",
},
),
migrations.AlterModelOptions(
name="tag",
options={
"ordering": ["name"],
"verbose_name": "Tag",
"verbose_name_plural": "Tags",
},
),
migrations.AlterUniqueTogether(
name="email", unique_together={("institution", "email")}
),
]<|fim▁end|> | operations = [
migrations.AlterModelOptions(
name="email", |
<|file_name|>yii.validation.test.js<|end_file_name|><|fim▁begin|>var assert = require('chai').assert;
assert.isDeferred = function (object) {<|fim▁hole|> return String(object.resolve) === String($.Deferred().resolve);
};
var sinon;
var withData = require('leche').withData;
var StringUtils = {
repeatString: function (value, times) {
return (new Array(times + 1)).join(value);
}
};
var jsdom = require('mocha-jsdom');
var punycode = require('../../../vendor/bower-asset/punycode/punycode');
var fs = require('fs');
var vm = require('vm');
var yii;
describe('yii.validation', function () {
var VALIDATOR_SUCCESS_MESSAGE = 'should leave messages as is';
var VALIDATOR_ERROR_MESSAGE = 'should add appropriate errors(s) to messages';
function getValidatorMessage(expectedResult) {
var isTrueBoolean = typeof expectedResult === 'boolean' && expectedResult === true;
var isEmptyArray = Array.isArray(expectedResult) && expectedResult.length === 0;
return isTrueBoolean || isEmptyArray ? VALIDATOR_SUCCESS_MESSAGE : VALIDATOR_ERROR_MESSAGE;
}
var $;
var code;
var script;
function FileReader() {
this.readAsDataURL = function() {
};
}
function Image() {
}
function registerTestableCode(customSandbox) {
if (customSandbox === undefined) {
customSandbox = {
File: {},
FileReader: FileReader,
Image: Image,
punycode: punycode
};
}
var path = 'framework/assets/yii.validation.js';
if (code === undefined) {
code = fs.readFileSync(path);
}
if (script === undefined) {
script = new vm.Script(code);
}
var defaultSandbox = {yii: {}, jQuery: $};
var sandbox = $.extend({}, defaultSandbox, customSandbox);
var context = new vm.createContext(sandbox);
script.runInContext(context);
yii = sandbox.yii;
}
jsdom({
src: fs.readFileSync('vendor/bower-asset/jquery/dist/jquery.js', 'utf-8')
});
before(function () {
$ = window.$;
registerTestableCode();
sinon = require('sinon');
});
it('should exist', function () {
assert.isObject(yii.validation);
});
describe('isEmpty method', function () {
withData({
'undefined': [undefined, true],
'null': [null, true],
'empty array': [[], true],
'empty string': ['', true],
'string containing whitespace': [' ', false],
'empty object': [{}, false],
'non-zero integer': [1, false],
'non-empty string': ['a', false],
'non-empty array': [[1], false]
}, function (value, expectedValue) {
var message = expectedValue ? 'should return "true"' : 'should return "false"';
it(message, function () {
assert.strictEqual(yii.validation.isEmpty(value), expectedValue);
});
});
});
describe('addMessage method', function () {
withData({
'empty messages': [[], 'Message', 1, ['Message']],
'non-empty messages': [['Message 1'], 'Message 2', 1, ['Message 1', 'Message 2']],
'message as template': [[], 'Message with value {value}', 1, ['Message with value 1']]
}, function (messages, message, value, expectedMessages) {
it('should extend messages and replace value in template', function () {
yii.validation.addMessage(messages, message, value);
assert.deepEqual(messages, expectedMessages);
});
});
});
// Covers yii.validation.required. Per the fixtures: in non-strict mode a
// trimmed-empty string fails, while in strict mode only `undefined` fails;
// with requiredValue set, comparison is loose ('1' == 1) unless strict.
describe('required validator', function () {
    withData({
        'empty string': ['', {}, false],
        'empty string, strict mode': ['', {strict: true}, true],
        'string containing whitespace': [' ', {}, false],
        'string containing whitespace, strict mode': [' ', {strict: true}, true],
        'non-empty string': ['a', {}, true],
        'undefined': [undefined, {}, false],
        'undefined, strict mode': [undefined, {strict: true}, false],
        // requiredValue
        'integer and required value set to different integer': [1, {requiredValue: 2}, false],
        'string and required value set to integer with the same value': ['1', {requiredValue: 1}, true],
        'string and required value set to integer with the same value, strict mode': [
            '1',
            {requiredValue: 1, strict: true},
            false
        ],
        'integer and required value set to same integer, strict mode': [
            1,
            {requiredValue: 1, strict: true},
            true
        ]
    }, function (value, options, expectValid) {
        it(getValidatorMessage(expectValid), function () {
            options.message = 'This field is required.';
            var messages = [];
            // A valid value must leave `messages` untouched; an invalid one
            // must append exactly the configured message.
            var expectedMessages = expectValid ? [] : ['This field is required.'];
            yii.validation.required(value, messages, options);
            assert.deepEqual(messages, expectedMessages);
        });
    });
});
// Covers yii.validation.boolean. The value must equal trueValue or
// falseValue (defaults '1'/'0'); strict mode compares types too, so the
// integer fixtures fail against the default string-valued options.
describe('boolean validator', function () {
    var defaultOptions = {
        message: 'The value must have a boolean type.',
        trueValue: '1',
        falseValue: '0'
    };
    withData({
        'empty string': ['', {}, false],
        'empty string, skip on empty': ['', {skipOnEmpty: true}, true],
        'non-empty string, does not equal neither trueValue no falseValue': ['a', {}, false],
        'integer, value equals falseValue': [0, {}, true],
        'integer, value equals trueValue': [1, {}, true],
        'string equals falseValue': ['0', {}, true],
        'string equals trueValue': ['1', {}, true],
        'integer, value equals falseValue, strict mode': [0, {strict: true}, false],
        'integer, value equals trueValue, strict mode': [1, {strict: true}, false],
        // trueValue, falseValue
        'string equals custom trueValue, custom trueValue is set': ['yes', {trueValue: 'yes'}, true],
        'string does not equal neither trueValue no falseValue, custom trueValue is set': [
            'no',
            {trueValue: 'yes'},
            false
        ],
        'string equals custom falseValue, custom falseValue is set': ['no', {falseValue: 'no'}, true],
        'string does not equal neither trueValue no falseValue, custom falseValue is set': [
            'yes',
            {falseValue: 'no'},
            false
        ],
        'string equals custom trueValue, custom trueValue and falseValue are set': [
            'yes',
            {trueValue: 'yes', falseValue: 'no'},
            true
        ],
        'string equals custom falseValue, custom trueValue and falseValue are set': [
            'no',
            {trueValue: 'yes', falseValue: 'no'},
            true
        ],
        'string does not equal neither custom trueValue no falseValue, custom trueValue and falseValue are set': [
            'a',
            {trueValue: 'yes', falseValue: 'no'},
            false
        ]
    }, function (value, customOptions, expectValid) {
        it(getValidatorMessage(expectValid), function () {
            // Case-specific options override the defaults declared above.
            var options = $.extend({}, defaultOptions, customOptions);
            var messages = [];
            var expectedMessages = expectValid ? [] : ['The value must have a boolean type.'];
            yii.validation.boolean(value, messages, options);
            assert.deepEqual(messages, expectedMessages);
        });
    });
});
// Covers yii.validation.string: type check (non-strings are rejected),
// then length constraints min / max / is, each with its own message.
// Note: per the 'is' fixtures, the exact-length check takes precedence
// over min when both are configured.
describe('string validator', function () {
    var defaultOptions = {
        message: 'Invalid type.',
        tooShort: 'Too short.',
        tooLong: 'Too long.',
        notEqual: 'Not equal.'
    };
    withData({
        'empty string': ['', {}, []],
        'empty string, skip on empty': ['', {skipOnEmpty: true}, []],
        'non-empty string': ['a', {}, []],
        'integer': [1, {}, ['Invalid type.']],
        // min
        'string less than min': ['Word', {min: 5}, ['Too short.']],
        'string more than min': ['Some string', {min: 5}, []],
        'string equals min': ['Equal', {min: 5}, []],
        // max
        'string less than max': ['Word', {max: 5}, []],
        'string more than max': ['Some string', {max: 5}, ['Too long.']],
        'string equals max': ['Equal', {max: 5}, []],
        // is
        'string equals exact length': ['Equal', {is: 5}, []],
        'string does not equal exact length': ['Does not equal', {is: 5}, ['Not equal.']],
        'string does not equal exact length and less than min': ['Word', {is: 5, min: 5}, ['Not equal.']],
        // min and max
        'string less than min, both min and max are set': ['Word', {min: 5, max: 10}, ['Too short.']],
        'string in between of min and max, both min and max are set': ['Between', {min: 5, max: 10}, []],
        'string more than max, both min and max are set': ['Some string', {min: 5, max: 10}, ['Too long.']]
    }, function (value, customOptions, expectedMessages) {
        it(getValidatorMessage(expectedMessages), function () {
            var options = $.extend({}, defaultOptions, customOptions);
            var messages = [];
            yii.validation.string(value, messages, options);
            assert.deepEqual(messages, expectedMessages);
        });
    });
});
// Covers yii.validation.file. jQuery's $.fn.init is stubbed so that
// $(attribute.input, attribute.$form).get(0).files returns the fixture
// array prepared by each case; sinon spies verify the validator really
// read the file list from the input element.
describe('file validator', function () {
    var defaultOptions = {
        message: 'Unable to upload a file.',
        uploadRequired: 'Upload is required.',
        tooMany: 'Too many files.',
        wrongExtension: 'File {file} has wrong extension.',
        wrongMimeType: 'File {file} has wrong mime type.',
        tooSmall: 'File {file} is too small.',
        tooBig: 'File {file} is too big.'
    };
    var attribute = {
        input: '#input-id',
        $form: 'jQuery form object'
    };
    var files;
    // Indirection around the `files` fixture so a spy can confirm the
    // validator pulled the list from the (stubbed) input element.
    var filesService = {
        getFiles: function () {
            return files;
        }
    };
    var $input = {
        get: function (value) {
            return value === 0 ? {files: filesService.getFiles()} : undefined;
        }
    };
    var jQueryInitStub;
    var inputGetSpy;
    var filesServiceSpy;
    beforeEach(function () {
        // Make $(attribute.input, attribute.$form) resolve to the stub above.
        jQueryInitStub = sinon.stub($.fn, 'init');
        jQueryInitStub.withArgs(attribute.input, attribute.$form).returns($input);
        inputGetSpy = sinon.spy($input, 'get');
        filesServiceSpy = sinon.spy(filesService, 'getFiles');
    });
    afterEach(function () {
        jQueryInitStub.restore();
        inputGetSpy.restore();
        filesServiceSpy.restore();
    });
    describe('with File API is not available', function () {
        beforeEach(function () {
            registerTestableCode({File: undefined});
        });
        afterEach(function () {
            registerTestableCode();
        });
        // Without the File API the validator must skip silently and never
        // touch the DOM input at all.
        it(VALIDATOR_SUCCESS_MESSAGE, function () {
            var messages = [];
            yii.validation.file(attribute, messages, defaultOptions);
            assert.deepEqual(messages, []);
            assert.isFalse(jQueryInitStub.called);
            assert.isFalse(inputGetSpy.called);
            assert.isFalse(filesServiceSpy.called);
        });
    });
    describe('with File API is available', function () {
        withData({
            'files are not available': [undefined, {}, ['Unable to upload a file.']],
            'no files': [[], {}, ['Upload is required.']],
            'no files, skip on empty': [[], {skipOnEmpty: true}, []],
            // maxFiles
            // Fixed: the single file is wrapped in an array like every other
            // case; previously a bare object was passed as the file list.
            'number of files less than maximum': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024}
                ],
                {maxFiles: 2},
                []
            ],
            'number of files equals maximum': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 150 * 1024}
                ],
                {maxFiles: 2},
                []
            ],
            'number of files more than maximum': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 150 * 1024},
                    {name: 'file.bmp', type: 'image/bmp', size: 200 * 1024}
                ],
                {maxFiles: 2},
                ['Too many files.']
            ],
            // extensions
            'files in extensions list': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 150 * 1024}
                ],
                {extensions: ['jpg', 'png']},
                []
            ],
            'file not in extensions list': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.bmp', type: 'image/bmp', size: 150 * 1024}
                ],
                {extensions: ['jpg', 'png']},
                ['File file.bmp has wrong extension.']
            ],
            // mimeTypes
            'mime type in mime types list': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 150 * 1024}
                ],
                {mimeTypes: ['image/jpeg', 'image/png']},
                []
            ],
            'mime type not in mime types list': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.bmp', type: 'image/bmp', size: 150 * 1024}
                ],
                {mimeTypes: ['image/jpeg', 'image/png']},
                ['File file.bmp has wrong mime type.']
            ],
            // maxSize
            'size less than maximum size': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 150 * 1024}
                ],
                {maxSize: 200 * 1024},
                []
            ],
            'size equals maximum size': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 100 * 1024}
                ],
                {maxSize: 100 * 1024},
                []
            ],
            'size more than maximum size': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 150 * 1024}
                ],
                {maxSize: 50 * 1024},
                ['File file.jpg is too big.', 'File file.png is too big.']
            ],
            // minSize
            'size less than minimum size': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 150 * 1024}
                ],
                {minSize: 120 * 1024},
                ['File file.jpg is too small.']
            ],
            // Fixed: this case previously configured maxSize (and a bmp mime
            // type on a .jpg fixture), so the minimum-size boundary it is
            // named after was never exercised.
            'size equals minimum size': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 100 * 1024}
                ],
                {minSize: 100 * 1024},
                []
            ],
            'size more than minimum size': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.bmp', type: 'image/bmp', size: 150 * 1024}
                ],
                {minSize: 80 * 1024},
                []
            ],
            'one file is less than minimum size, one file is more than maximum size': [
                [
                    {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024},
                    {name: 'file.png', type: 'image/png', size: 250 * 1024}
                ],
                {minSize: 150 * 1024, maxSize: 200 * 1024},
                ['File file.jpg is too small.', 'File file.png is too big.']
            ]
        }, function (uploadedFiles, customOptions, expectedMessages) {
            it(getValidatorMessage(expectedMessages), function () {
                files = uploadedFiles;
                var options = $.extend({}, defaultOptions, customOptions);
                var messages = [];
                yii.validation.file(attribute, messages, options);
                assert.deepEqual(messages, expectedMessages);
                // The validator must look up the input exactly once and read
                // its files list exactly once.
                assert.isTrue(jQueryInitStub.calledOnce);
                assert.deepEqual(jQueryInitStub.getCall(0).args, [attribute.input, attribute.$form]);
                assert.isTrue(inputGetSpy.calledOnce);
                assert.deepEqual(inputGetSpy.getCall(0).args, [0]);
                assert.isTrue(filesServiceSpy.calledOnce);
            });
        });
    });
});
// Covers yii.validation.image. yii.validation.validateImage is stubbed,
// so only the synchronous checks delegated to the file validator (here:
// extension) can produce messages; the test then verifies that
// validateImage was queued once per file with a FileReader, an Image and
// a jQuery Deferred, and that one Deferred per file was collected.
describe('image validator', function () {
    var attribute = {
        input: '#input-id',
        $form: 'jQuery form object'
    };
    var files;
    // Indirection around the `files` fixture so a spy can confirm the
    // validator pulled the list from the (stubbed) input element.
    var filesService = {
        getFiles: function () {
            return files;
        }
    };
    var $input = {
        get: function (value) {
            return value === 0 ? {files: filesService.getFiles()} : undefined;
        }
    };
    var deferred;
    var jQueryInitStub;
    var inputGetSpy;
    var filesServiceSpy;
    var validateImageStub;
    var deferredStub;
    beforeEach(function () {
        // Make $(attribute.input, attribute.$form) resolve to the stub above.
        jQueryInitStub = sinon.stub($.fn, 'init');
        jQueryInitStub.withArgs(attribute.input, attribute.$form).returns($input);
        inputGetSpy = sinon.spy($input, 'get');
        filesServiceSpy = sinon.spy(filesService, 'getFiles');
        validateImageStub = sinon.stub(yii.validation, 'validateImage');
        deferred = $.Deferred();
        deferredStub = sinon.stub(deferred, 'resolve');
    });
    afterEach(function () {
        jQueryInitStub.restore();
        inputGetSpy.restore();
        filesServiceSpy.restore();
        validateImageStub.restore();
        deferredStub.restore();
    });
    describe('with FileReader API is not available', function () {
        beforeEach(function () {
            registerTestableCode({FileReader: undefined});
        });
        afterEach(function () {
            registerTestableCode();
        });
        // Without FileReader the validator must skip silently: no messages,
        // no per-image validation, no deferreds queued.
        it(VALIDATOR_SUCCESS_MESSAGE, function () {
            files = [
                {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024, width: 100, height: 100},
                {name: 'file.png', type: 'image/png', size: 150 * 1024, width: 250, height: 250}
            ];
            var messages = [];
            var deferredList = [];
            yii.validation.image(attribute, messages, {}, deferredList);
            assert.deepEqual(messages, []);
            assert.isFalse(validateImageStub.called);
            assert.isFalse(deferredStub.called);
            assert.deepEqual(deferredList, []);
        });
    });
    describe('with FileReader API is available', function () {
        it(VALIDATOR_ERROR_MESSAGE, function () {
            files = [
                {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024, width: 100, height: 100},
                {name: 'file.bmp', type: 'image/bmp', size: 150 * 1024, width: 250, height: 250}
            ];
            var options = {
                extensions: ['jpg', 'png'],
                wrongExtension: 'File {file} has wrong extension.',
                minWidth: 200,
                underWidth: 'File {file} has small width.'
            };
            var messages = [];
            var deferredList = [];
            yii.validation.image(attribute, messages, options, deferredList);
            // Only the synchronous extension check reports here; the width
            // check lives in the stubbed validateImage.
            assert.deepEqual(messages, ['File file.bmp has wrong extension.']);
            assert.equal(validateImageStub.callCount, files.length);
            for (var i = 0; i < validateImageStub.callCount; i++) {
                // Expected signature: (file, messages, options, deferred,
                // fileReader, image).
                assert.equal(validateImageStub.getCall(i).args.length, 6);
                assert.deepEqual(validateImageStub.getCall(i).args[0], files[i]);
                assert.deepEqual(validateImageStub.getCall(i).args[1], ['File file.bmp has wrong extension.']);
                assert.deepEqual(validateImageStub.getCall(i).args[2], options);
                assert.isDeferred(validateImageStub.getCall(i).args[3]);
                assert.instanceOf(validateImageStub.getCall(i).args[4], FileReader);
                assert.instanceOf(validateImageStub.getCall(i).args[5], Image);
            }
            assert.equal(deferredList.length, files.length);
            for (i = 0; i < deferredList.length; i++) {
                assert.isDeferred(deferredList[i]);
            }
        });
    });
});
// Covers yii.validation.validateImage. FileReader.readAsDataURL is stubbed
// to synchronously drive the reader/image onerror/onload callbacks, so the
// three paths (read error, broken image, loaded image with dimension
// checks) run without real file I/O. Every path must resolve the deferred
// exactly once.
describe('validateImage method', function () {
    var file = {name: 'file.jpg', type: 'image/jpeg', size: 100 * 1024};
    var image = new Image();
    var deferred;
    var fileReader = new FileReader();
    var deferredStub;
    var fileReaderStub;
    beforeEach(function () {
        deferred = $.Deferred();
        deferredStub = sinon.stub(deferred, 'resolve');
    });
    afterEach(function () {
        deferredStub.restore();
        fileReaderStub.restore();
    });
    // Shared post-condition: the data was read once and the deferred
    // resolved once, whatever the outcome.
    function verifyStubs() {
        assert.isTrue(fileReaderStub.calledOnce);
        assert.isTrue(deferredStub.calledOnce);
    }
    describe('with error while reading data', function () {
        beforeEach(function () {
            // Simulate FileReader failing to read the file at all.
            fileReaderStub = sinon.stub(fileReader, 'readAsDataURL', function () {
                this.onerror();
            });
        });
        // A read error produces no validation message (per the assertion
        // below) but still resolves the deferred.
        it(VALIDATOR_SUCCESS_MESSAGE, function () {
            var messages = [];
            yii.validation.validateImage(file, messages, {}, deferred, fileReader, image);
            assert.deepEqual(messages, []);
            verifyStubs();
        });
    });
    describe('with error while reading image', function () {
        beforeEach(function () {
            // Data read succeeds, but decoding the image fails.
            fileReaderStub = sinon.stub(fileReader, 'readAsDataURL', function () {
                this.onload = function () {
                    image.onerror();
                };
                this.onload();
            });
        });
        it(VALIDATOR_ERROR_MESSAGE, function () {
            var messages = [];
            var options = {notImage: 'File {file} is not an image.'};
            yii.validation.validateImage(file, messages, options, deferred, fileReader, image);
            assert.deepEqual(messages, ['File file.jpg is not an image.']);
            verifyStubs();
        });
    });
    describe('with successfully read image', function () {
        var defaultOptions = {
            underWidth: 'File {file} has small width.',
            overWidth: 'File {file} has big width.',
            underHeight: 'File {file} has small height.',
            overHeight: 'File {file} has big height.'
        };
        beforeEach(function () {
            // Both read and decode succeed; dimension checks then apply.
            fileReaderStub = sinon.stub(fileReader, 'readAsDataURL', function () {
                this.onload = function () {
                    image.onload();
                };
                this.onload();
            });
        });
        withData({
            // minWidth
            'width less than minimum width': [
                {width: 100, height: 100},
                {minWidth: 200},
                ['File file.jpg has small width.']
            ],
            'width equals minimum width': [{width: 100, height: 100}, {minWidth: 100}, []],
            'width more than minimum width': [{width: 200, height: 200}, {minWidth: 100}, []],
            // maxWidth
            'width less than maximum width': [{width: 100, height: 100}, {maxWidth: 200}, []],
            'width equals maximum width': [{width: 100, height: 100}, {maxWidth: 100}, []],
            'width more than maximum width': [
                {width: 200, height: 200},
                {maxWidth: 100},
                ['File file.jpg has big width.']
            ],
            // minHeight
            'height less than minimum height': [
                {width: 100, height: 100},
                {minHeight: 200},
                ['File file.jpg has small height.']
            ],
            'height equals minimum height': [{width: 100, height: 100}, {minHeight: 100}, []],
            'height more than minimum height': [{width: 200, height: 200}, {minHeight: 100}, []],
            // maxHeight
            'height less than maximum height': [{width: 100, height: 100}, {maxHeight: 200}, []],
            'height equals maximum height': [{width: 100, height: 100}, {maxHeight: 100}, []],
            'height more than maximum height': [
                {width: 200, height: 200},
                {maxHeight: 100},
                ['File file.jpg has big height.']
            ],
            // minWidth and minHeight
            'width less than minimum width and height less than minimum height': [
                {width: 100, height: 100},
                {minWidth: 200, minHeight: 200},
                ['File file.jpg has small width.', 'File file.jpg has small height.']
            ]
        }, function (imageSize, customOptions, expectedMessages) {
            it(getValidatorMessage(expectedMessages), function () {
                image.width = imageSize.width;
                image.height = imageSize.height;
                var options = $.extend({}, defaultOptions, customOptions);
                var messages = [];
                yii.validation.validateImage(file, messages, options, deferred, fileReader, image);
                assert.deepEqual(messages, expectedMessages);
                verifyStubs();
            });
        });
    });
});
// Covers yii.validation.number. The matching pattern is supplied through
// options (integer vs. general number regex); min/max bounds are checked
// on top of the pattern. Non-string values skip the pattern check
// entirely, per the 'Not a string' fixtures below.
describe('number validator', function () {
    var integerPattern = /^\s*[+-]?\d+\s*$/;
    var numberPattern = /^\s*[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?\s*$/;
    var defaultOptions = {
        message: 'Not a number.',
        tooSmall: 'Number is too small.',
        tooBig: 'Number is too big.'
    };
    describe('with integer pattern', function () {
        withData({
            'empty string': ['', false],
            'non-empty string': ['a', false],
            'zero': ['0', true],
            'positive integer, no sign': ['2', true],
            'positive integer with sign': ['+2', true],
            'negative integer': ['-2', true],
            'decimal fraction with dot': ['2.5', false],
            'decimal fraction with comma': ['2,5', false]
        }, function (value, expectValid) {
            it(getValidatorMessage(expectValid), function () {
                var options = $.extend({}, defaultOptions, {pattern: integerPattern});
                var messages = [];
                var expectedMessages = expectValid ? [] : ['Not a number.'];
                yii.validation.number(value, messages, options);
                assert.deepEqual(messages, expectedMessages);
            });
        });
    });
    describe('with number pattern', function () {
        withData({
            'empty string': ['', false],
            'non-empty string': ['a', false],
            'zero': ['0', true],
            'positive integer, no sign': ['2', true],
            'positive integer with sign': ['+2', true],
            'negative integer': ['-2', true],
            'decimal fraction with dot, no sign': ['2.5', true],
            'positive decimal fraction with dot and sign': ['+2.5', true],
            'negative decimal fraction with dot': ['-2.5', true],
            'decimal fraction with comma': ['2,5', false],
            'floating number with exponential part': ['-1.23e-10', true]
        }, function (value, expectValid) {
            it(getValidatorMessage(expectValid), function () {
                var options = $.extend({}, defaultOptions, {pattern: numberPattern});
                var messages = [];
                var expectedMessages = expectValid ? [] : ['Not a number.'];
                yii.validation.number(value, messages, options);
                assert.deepEqual(messages, expectedMessages);
            });
        });
    });
    describe('with different options, integer pattern', function () {
        withData({
            'empty string, skip on empty': ['', {skipOnEmpty: true}, []],
            // Not a string
            'undefined': [undefined, {}, []],
            'integer, fits pattern': [2, {}, []],
            'integer, does not fit pattern': [2.5, {}, []],
            // min
            'less than minimum': ['1', {min: 2}, ['Number is too small.']],
            'equals minimum': ['2', {min: 2}, []],
            'more than minimum': ['3', {min: 2}, []],
            'wrong integer and less than min': ['1.5', {min: 2}, ['Not a number.']],
            // max
            'less than maximum': ['1', {max: 2}, []],
            'equals maximum': ['2', {max: 2}, []],
            'more than maximum': ['3', {max: 2}, ['Number is too big.']]
        }, function (value, customOptions, expectedMessages) {
            it(getValidatorMessage(expectedMessages), function () {
                customOptions.pattern = integerPattern;
                var options = $.extend({}, defaultOptions, customOptions);
                var messages = [];
                yii.validation.number(value, messages, options);
                assert.deepEqual(messages, expectedMessages);
            });
        });
    });
});
// Covers yii.validation.range: membership of a value (or, with
// allowArray, every element of an array) in options.range, optionally
// inverted via options.not.
describe('range validator', function () {
    withData({
        'empty string, skip on empty': ['', {skipOnEmpty: true}, []],
        'array and arrays are not allowed': [['a', 'b'], {}, ['Invalid value.']],
        'string in array': ['a', {range: ['a', 'b', 'c']}, []],
        'string not in array': ['d', {range: ['a', 'b', 'c']}, ['Invalid value.']],
        'array in array': [['a', 'b'], {range: ['a', 'b', 'c'], allowArray: true}, []],
        'array not in array': [['a', 'd'], {range: ['a', 'b', 'c'], allowArray: true}, ['Invalid value.']],
        'string in array and inverted logic': ['a', {range: ['a', 'b', 'c'], not: true}, ['Invalid value.']],
        'string not in array and inverted logic': ['d', {range: ['a', 'b', 'c'], not: true}, []],
        'array in array and inverted logic': [
            ['a', 'b'],
            {range: ['a', 'b', 'c'], allowArray: true, not: true},
            ['Invalid value.']
        ],
        'array not in array and inverted logic': [
            ['a', 'd'],
            {range: ['a', 'b', 'c'], allowArray: true, not: true},
            []
        ]
    }, function (value, options, expectedMessages) {
        it(getValidatorMessage(expectedMessages), function () {
            options.message = 'Invalid value.';
            var messages = [];
            yii.validation.range(value, messages, options);
            assert.deepEqual(messages, expectedMessages);
        });
    });
});
describe('regular expression validator', function () {
var integerPattern = /^\s*[+-]?\d+\s*$/;
describe('with integer pattern', function () {
withData({
'empty string, skip on empty': ['', {skipOnEmpty: true}, []],
'regular integer': ['2', {}, []],
'non-integer': ['2.5', {}, ['Invalid value.']],
'regular integer, inverted logic': ['2', {not: true}, ['Invalid value.']],
'integer pattern, non-integer, inverted logic': ['2.5', {pattern: integerPattern, not: true}, []]
}, function (value, options, expectedMessages) {
it(getValidatorMessage(expectedMessages), function () {
options.message = 'Invalid value.';
options.pattern = integerPattern;
var messages = [];
yii.validation.regularExpression(value, messages, options);
assert.deepEqual(messages, expectedMessages);
});
});
});
});
// Covers yii.validation.email. `pattern` matches a bare address;
// `fullPattern` matches "Display Name <address>". The same fixture set is
// run four times to cover the option matrix: default, allowName,
// enableIDN, and allowName+enableIDN.
describe('email validator', function () {
    var pattern = "^[a-zA-Z0-9!#$%&'*+\\/=?^_`{|}~-]+(?:\\.[a-zA-Z0-9!#$%&'*+\\/=?^_`{|}~-]+)*@(?:[a-zA-Z0-9]" +
        "(?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$";
    pattern = new RegExp(pattern);
    var fullPattern = "^[^@]*<[a-zA-Z0-9!#$%&'*+\\/=?^_`{|}~-]+(?:\\.[a-zA-Z0-9!#$%&'*+\\/=?^_`{|}~-]+)*@" +
        "(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?>$";
    fullPattern = new RegExp(fullPattern);
    var defaultOptions = {
        pattern: pattern,
        fullPattern: fullPattern,
        message: 'Invalid value.'
    };
    describe('with empty string, skip on empty', function () {
        it(VALIDATOR_SUCCESS_MESSAGE, function () {
            var messages = [];
            var options = $.extend({}, defaultOptions, {skipOnEmpty: true});
            yii.validation.email('', messages, options);
            assert.deepEqual(messages, []);
        });
    });
    // Default options: bare addresses only — names and IDN domains fail.
    describe('with basic configuration', function () {
        withData({
            'letters only': ['[email protected]', true],
            'numbers in local-part': ['[email protected]', true],
            'uppercase and lowercase letters, dot and numbers in local-part': ['[email protected]', true],
            'user mailbox': ['user+mailbox/[email protected]', true],
            'special symbols in local-part': ['!#$%&\'*+-/=?^_`.{|}[email protected]', true],
            'domain only': ['rmcreative.ru', false],
            'double dot': ['[email protected]', false],
            'unicode in domain': ['example@äüößìà.de', false],
            'unicode (russian characters) in domain': ['sam@рмкреатиф.ru', false],
            'ASCII in domain': ['[email protected]', true],
            'angle brackets, name': ['Carsten Brandt <[email protected]>', false],
            'angle brackets, quoted name': ['"Carsten Brandt" <[email protected]>', false],
            'angle brackets, no name': ['<[email protected]>', false],
            'angle brackets, name, dot in local-part': ['John Smith <[email protected]>', false],
            'angle brackets, name, domain only': ['John Smith <example.com>', false],
            'no angle brackets, name': ['Information [email protected]', false],
            'no angle brackets, name, unicode in domain': ['Information info@örtliches.de', false],
            'angle brackets, long quoted name': [
                '"' + StringUtils.repeatString('a', 300) + '" <[email protected]>',
                false
            ],
            'angle brackets, name, local part more than 64 characters': [
                'Short Name <' + StringUtils.repeatString('a', 65) + '@example.com>',
                false
            ],
            'angle brackets, name, domain more than 254 characters': [
                'Short Name <' + StringUtils.repeatString('a', 255) + '.com>',
                false
            ],
            'angle brackets, name, unicode in domain': ['Information <info@örtliches.de>', false],
            'angle brackets, name, unicode, local-part length is close to 64 characters': [
                // 21 * 3 = 63
                'Короткое имя <' + StringUtils.repeatString('бла', 21) + '@пример.com>',
                false
            ],
            'angle brackets, name, unicode, domain length is close to 254 characters': [
                // 83 * 3 + 4 = 253
                'Короткое имя <тест@' + StringUtils.repeatString('бла', 83) + '.com>',
                false
            ]
        }, function (value, expectValid) {
            it(getValidatorMessage(expectValid), function () {
                var messages = [];
                var expectedMessages = expectValid ? [] : ['Invalid value.'];
                yii.validation.email(value, messages, defaultOptions);
                assert.deepEqual(messages, expectedMessages);
            });
        });
    });
    // allowName: "Name <address>" forms become valid; IDN still fails.
    describe('with allowed name', function () {
        withData({
            'letters only': ['[email protected]', true],
            'numbers in local-part': ['[email protected]', true],
            'uppercase and lowercase letters, dot and numbers in local-part': ['[email protected]', true],
            'user mailbox': ['user+mailbox/[email protected]', true],
            'special symbols in local-part': ['!#$%&\'*+-/=?^_`.{|}[email protected]', true],
            'domain only': ['rmcreative.ru', false],
            'unicode in domain': ['example@äüößìà.de', false],
            'unicode (russian characters) in domain': ['sam@рмкреатиф.ru', false],
            'ASCII in domain': ['[email protected]', true],
            'angle brackets, name': ['Carsten Brandt <[email protected]>', true],
            'angle brackets, quoted name': ['"Carsten Brandt" <[email protected]>', true],
            'angle brackets, no name': ['<[email protected]>', true],
            'angle brackets, name, dot in local-part': ['John Smith <[email protected]>', true],
            'angle brackets, name, domain only': ['John Smith <example.com>', false],
            'no angle brackets, name': ['Information [email protected]', false],
            'no angle brackets, name, unicode in domain': ['Information info@örtliches.de', false],
            'angle brackets, long quoted name': [
                '"' + StringUtils.repeatString('a', 300) + '" <[email protected]>',
                true
            ],
            'angle brackets, name, local part more than 64 characters': [
                'Short Name <' + StringUtils.repeatString('a', 65) + '@example.com>',
                false
            ],
            'angle brackets, name, domain more than 254 characters': [
                'Short Name <' + StringUtils.repeatString('a', 255) + '.com>',
                false
            ],
            'angle brackets, name, unicode in domain': ['Information <info@örtliches.de>', false],
            'angle brackets, name, unicode, local-part length is close to 64 characters': [
                // 21 * 3 = 63
                'Короткое имя <' + StringUtils.repeatString('бла', 21) + '@пример.com>',
                false
            ],
            'angle brackets, name, unicode, domain length is close to 254 characters': [
                // 83 * 3 + 4 = 253
                'Короткое имя <тест@' + StringUtils.repeatString('бла', 83) + '.com>',
                false
            ]
        }, function (value, expectValid) {
            it(getValidatorMessage(expectValid), function () {
                var options = $.extend({}, defaultOptions, {allowName: true});
                var messages = [];
                var expectedMessages = expectValid ? [] : ['Invalid value.'];
                yii.validation.email(value, messages, options);
                assert.deepEqual(messages, expectedMessages);
            });
        });
    });
    // enableIDN: unicode domains become valid for bare addresses; named
    // forms still fail without allowName.
    describe('with enabled IDN', function () {
        withData({
            'letters only': ['[email protected]', true],
            'numbers in local-part': ['[email protected]', true],
            'uppercase and lowercase letters, dot and numbers in local-part': ['[email protected]', true],
            'user mailbox': ['user+mailbox/[email protected]', true],
            'special symbols in local-part': ['!#$%&\'*+-/=?^_`.{|}[email protected]', true],
            'domain only': ['rmcreative.ru', false],
            'unicode in domain': ['example@äüößìà.de', true],
            'unicode (russian characters) in domain': ['sam@рмкреатиф.ru', true],
            'ASCII in domain': ['[email protected]', true],
            'angle brackets, name': ['Carsten Brandt <[email protected]>', false],
            'angle brackets, quoted name': ['"Carsten Brandt" <[email protected]>', false],
            'angle brackets, no name': ['<[email protected]>', false],
            'angle brackets, name, dot in local-part': ['John Smith <[email protected]>', false],
            'angle brackets, name, domain only': ['John Smith <example.com>', false],
            'no angle brackets, name': ['Information [email protected]', false],
            'no angle brackets, name, unicode in domain': ['Information info@örtliches.de', false],
            'angle brackets, long quoted name': [
                '"' + StringUtils.repeatString('a', 300) + '" <[email protected]>',
                false
            ],
            'angle brackets, name, local part more than 64 characters': [
                'Short Name <' + StringUtils.repeatString('a', 65) + '@example.com>',
                false
            ],
            'angle brackets, name, domain more than 254 characters': [
                'Short Name <' + StringUtils.repeatString('a', 255) + '.com>',
                false
            ],
            'angle brackets, name, unicode in domain': ['Information <info@örtliches.de>', false],
            'angle brackets, name, unicode, local-part length is close to 64 characters': [
                // 21 * 3 = 63
                'Короткое имя <' + StringUtils.repeatString('бла', 21) + '@пример.com>',
                false
            ],
            'angle brackets, name, unicode, domain length is close to 254 characters': [
                // 83 * 3 + 4 = 253
                'Короткое имя <тест@' + StringUtils.repeatString('бла', 83) + '.com>',
                false
            ]
        }, function (value, expectValid) {
            it(getValidatorMessage(expectValid), function () {
                var options = $.extend({}, defaultOptions, {enableIDN: true});
                var messages = [];
                var expectedMessages = expectValid ? [] : ['Invalid value.'];
                yii.validation.email(value, messages, options);
                assert.deepEqual(messages, expectedMessages);
            });
        });
    });
    // allowName + enableIDN: named forms with unicode domains become valid
    // as well; length limits still apply.
    describe('with allowed name and enabled IDN', function () {
        withData({
            'letters only': ['[email protected]', true],
            'numbers in local-part': ['[email protected]', true],
            'uppercase and lowercase letters, dot and numbers in local-part': ['[email protected]', true],
            'user mailbox': ['user+mailbox/[email protected]', true],
            'special symbols in local-part': ['!#$%&\'*+-/=?^_`.{|}[email protected]', true],
            'domain only': ['rmcreative.ru', false],
            'unicode in domain': ['example@äüößìà.de', true],
            'unicode (russian characters) in domain': ['sam@рмкреатиф.ru', true],
            'ASCII in domain': ['[email protected]', true],
            'angle brackets, name': ['Carsten Brandt <[email protected]>', true],
            'angle brackets, quoted name': ['"Carsten Brandt" <[email protected]>', true],
            'angle brackets, no name': ['<[email protected]>', true],
            'angle brackets, name, dot in local-part': ['John Smith <[email protected]>', true],
            'angle brackets, name, domain only': ['John Smith <example.com>', false],
            'no angle brackets, name': ['Information [email protected]', false],
            'no angle brackets, name, unicode in domain': ['Information info@örtliches.de', false],
            'angle brackets, long quoted name': [
                '"' + StringUtils.repeatString('a', 300) + '" <[email protected]>',
                true
            ],
            'angle brackets, name, local part more than 64 characters': [
                'Short Name <' + StringUtils.repeatString('a', 65) + '@example.com>',
                false
            ],
            'angle brackets, name, domain more than 254 characters': [
                'Short Name <' + StringUtils.repeatString('a', 255) + '.com>',
                false
            ],
            'angle brackets, name, unicode in domain': ['Information <info@örtliches.de>', true],
            'angle brackets, name, unicode, local-part length is close to 64 characters': [
                // 21 * 3 = 63
                'Короткое имя <' + StringUtils.repeatString('бла', 21) + '@пример.com>',
                false
            ],
            'angle brackets, name, unicode, domain length is close to 254 characters': [
                // 83 * 3 + 4 = 253
                'Короткое имя <тест@' + StringUtils.repeatString('бла', 83) + '.com>',
                false
            ]
        }, function (value, expectValid) {
            it(getValidatorMessage(expectValid), function () {
                var options = $.extend({}, defaultOptions, {allowName: true, enableIDN: true});
                var messages = [];
                var expectedMessages = expectValid ? [] : ['Invalid value.'];
                yii.validation.email(value, messages, options);
                assert.deepEqual(messages, expectedMessages);
            });
        });
    });
});
describe('url validator', function () {
function getPattern(validSchemes) {
if (validSchemes === undefined) {
validSchemes = ['http', 'https'];
}
var pattern = '^{schemes}://(([A-Z0-9][A-Z0-9_-]*)(\\.[A-Z0-9][A-Z0-9_-]*)+)(?::\\d{1,5})?(?:$|[?\\/#])';
pattern = pattern.replace('{schemes}', '(' + validSchemes.join('|') + ')');
return new RegExp(pattern, 'i');
}
var defaultOptions = {
pattern: getPattern(),
message: 'Invalid value.'
};
describe('with empty string, skip on empty', function () {
it(VALIDATOR_SUCCESS_MESSAGE, function () {
var messages = [];
var options = $.extend({}, defaultOptions, {skipOnEmpty: true});
yii.validation.url('', messages, options);
assert.deepEqual(messages, []);
});
});
describe('with basic configuration', function () {
withData({
'domain only': ['google.de', false],
'http': ['http://google.de', true],
'https': ['https://google.de', true],
'scheme with typo': ['htp://yiiframework.com', false],
'https, action with get parameters': [
'https://www.google.de/search?q=yii+framework&ie=utf-8&oe=utf-8&rls=org.mozilla:de:official' +
'&client=firefox-a&gws_rd=cr',
true
],
'scheme not in valid schemes': ['ftp://ftp.ruhr-uni-bochum.de/', false],
'invalid domain': ['http://invalid,domain', false],
'not allowed symbol (comma) after domain': ['http://example.com,', false],
'not allowed symbol (star) after domain': ['http://example.com*12', false],
'symbols after slash': ['http://example.com/*12', true],
'get parameter without value': ['http://example.com/?test', true],
'anchor': ['http://example.com/#test', true],
'port, anchor': ['http://example.com:80/#test', true],
'port (length equals limit), anchor': ['http://example.com:65535/#test', true],
'port, get parameter without value': ['http://example.com:81/?good', true],
'get parameter without value and slash': ['http://example.com?test', true],
'anchor without slash': ['http://example.com#test', true],
'port and anchor without slash': ['http://example.com:81#test', true],
'port and get parameter without value and slash': ['http://example.com:81?good', true],
'not allowed symbol after domain followed by get parameter without value': [
'http://example.com,?test',
false
],
'skipped port and get parameter without value': ['http://example.com:?test', false],
'skipped port and action': ['http://example.com:test', false],
'port (length more than limit) and action': ['http://example.com:123456/test', false],
'unicode, special symbols': ['http://äüö?=!"§$%&/()=}][{³²€.edu', false]
}, function (value, expectValid) {
it(getValidatorMessage(expectValid), function () {
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
yii.validation.url(value, messages, defaultOptions);
assert.deepEqual(messages, expectedMessages);
});
});
});
describe('with default scheme', function () {
withData({
'no scheme': ['yiiframework.com', true],
'http': ['http://yiiframework.com', true]
}, function (value, expectValid) {
it(getValidatorMessage(expectValid), function () {
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
var options = $.extend({}, defaultOptions, {defaultScheme: 'https'});
yii.validation.url(value, messages, options);
assert.deepEqual(messages, expectedMessages);
});
});
});
describe('without scheme', function () {
it(VALIDATOR_SUCCESS_MESSAGE, function () {
var messages = [];
var options = $.extend({}, defaultOptions, {
pattern: /(([A-Z0-9][A-Z0-9_-]*)(\.[A-Z0-9][A-Z0-9_-]*)+)/i
});
yii.validation.url('yiiframework.com', messages, options);
assert.deepEqual(messages, []);
});
});
describe('with default scheme and custom schemes', function () {
withData({
'ftp': ['ftp://ftp.ruhr-uni-bochum.de/', true],
'no scheme': ['google.de', true],
'http': ['http://google.de', true],
'https': ['https://google.de', true],
'scheme with typo': ['htp://yiiframework.com', false],
'relative url': ['//yiiframework.com', false]
}, function (value, expectValid) {
it(getValidatorMessage(expectValid), function () {
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
var options = $.extend({}, defaultOptions, {
pattern: getPattern(['http', 'https', 'ftp', 'ftps']),
defaultScheme: 'http'
});
yii.validation.url(value, messages, options);
assert.deepEqual(messages, expectedMessages);
});
});
});
describe('with enabled IDN', function () {
withData({
'unicode in domain': ['http://äüößìà.de', true],
// converted via http://mct.verisign-grs.com/convertServlet
'ASCII in domain': ['http://xn--zcack7ayc9a.de', true]
}, function (value, expectValid) {
it(getValidatorMessage(expectValid), function () {
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
var options = $.extend({}, defaultOptions, {enableIDN: true});
yii.validation.url(value, messages, options);
assert.deepEqual(messages, expectedMessages);
});
});
});
});
describe('trim filter', function () {
var attribute = {input: '#input-id'};
var getInputVal;
var $input = {
val: function () {
return getInputVal();
},
is: function () {
return false;
}
};
var $form = {
find: function () {
return $input;
}
};
var formSpy;
var inputSpy;
beforeEach(function () {
formSpy = sinon.spy($form, 'find');
inputSpy = sinon.spy($input, 'val');
});
afterEach(function () {
formSpy.restore();
inputSpy.restore();
});
describe('with empty string, skip on empty', function () {
it('should leave value and element value as is and return not changed value', function () {
getInputVal = function () {
return '';
};
assert.strictEqual(yii.validation.trim($form, attribute, {skipOnEmpty: true}), '');
assert.isTrue(formSpy.calledOnce);
assert.equal(formSpy.getCall(0).args[0], attribute.input);
assert.isTrue(inputSpy.calledOnce);
assert.strictEqual(inputSpy.getCall(0).args[0], undefined);
});
});
withData({
'nothing to trim': ['value', 'value'],
'spaces at the beginning and end': [' value ', 'value'],
'newlines at the beginning and end': ['\nvalue\n', 'value'],
'spaces and newlines at the beginning and end': ['\n value \n', 'value']
}, function (value, expectedValue) {
it('should return trimmed value and set it as value of element', function () {
getInputVal = function (val) {
return val === undefined ? value : undefined;
};
assert.equal(yii.validation.trim($form, attribute, {}), expectedValue);
assert.isTrue(formSpy.calledOnce);
assert.equal(formSpy.getCall(0).args[0], attribute.input);
assert.equal(inputSpy.callCount, 2);
assert.strictEqual(inputSpy.getCall(0).args[0], undefined);
assert.equal(inputSpy.getCall(1).args[0], expectedValue);
});
});
});
describe('trim filter on checkbox', function () {
var attribute = {input: '#input-id'};
var getInputVal;
var $checkbox = {
is: function (selector) {
if (selector === ':checked') {
return true;
}
if (selector === ':checkbox, :radio') {
return true;
}
}
};
var $form = {
find: function () {
return $checkbox;
}
};
it('should be left as is', function () {
assert.strictEqual(yii.validation.trim($form, attribute, {}, true), true);
});
});
describe('captcha validator', function () {
// Converted using yii\captcha\CaptchaAction generateValidationHash() method
var hashes = {'Code': 1497, 'code': 1529};
var caseInSensitiveData = {
'valid code in lowercase': ['code', true],
'valid code in uppercase': ['CODE', true],
'valid code as is': ['Code', true],
'invalid code': ['invalid code', false]
};
var caseSensitiveData = {
'valid code in lowercase': ['code', false],
'valid code in uppercase': ['CODE', false],
'valid code as is': ['Code', true],
'invalid code': ['invalid code', false]
};
var defaultOptions = {
message: 'Invalid value.',
hashKey: 'hashKey'
};
var hashesData = [hashes['Code'], hashes['code']];
var jQueryDataStub;
beforeEach(function () {
jQueryDataStub = sinon.stub($.prototype, 'data', function () {
return hashesData;
});
});
afterEach(function () {
jQueryDataStub.restore();
});
function verifyJQueryDataStub() {
assert.isTrue(jQueryDataStub.calledOnce);
assert.equal(jQueryDataStub.getCall(0).args[0], defaultOptions.hashKey);
}
describe('with empty string, skip on empty', function () {
it(VALIDATOR_SUCCESS_MESSAGE, function () {
var messages = [];
var options = $.extend({}, defaultOptions, {skipOnEmpty: true});
yii.validation.captcha('', messages, options);
assert.deepEqual(messages, []);
assert.isFalse(jQueryDataStub.called);
});
});
describe('with ajax, case insensitive', function () {
withData(caseInSensitiveData, function (value, expectValid) {
it(getValidatorMessage(expectValid), function () {
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
yii.validation.captcha(value, messages, defaultOptions);
assert.deepEqual(messages, expectedMessages);
verifyJQueryDataStub();
});
});
});
describe('with ajax, case sensitive', function () {
withData(caseSensitiveData, function (value, expectValid) {
it(getValidatorMessage(expectValid), function () {
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
var options = $.extend({}, defaultOptions, {caseSensitive: true});
yii.validation.captcha(value, messages, options);
assert.deepEqual(messages, expectedMessages);
verifyJQueryDataStub();
});
});
});
describe('with hash, case insensitive', function () {
withData(caseInSensitiveData, function (value, expectValid) {
it(getValidatorMessage(expectValid), function () {
hashesData = undefined;
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
var options = $.extend({}, defaultOptions, {hash: hashes['code']});
yii.validation.captcha(value, messages, options);
assert.deepEqual(messages, expectedMessages);
verifyJQueryDataStub();
});
});
});
describe('with hash, case sensitive', function () {
withData(caseSensitiveData, function (value, expectValid) {
it(getValidatorMessage(expectValid), function () {
hashesData = undefined;
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
var options = $.extend({}, defaultOptions, {hash: hashes['Code'], caseSensitive: true});
yii.validation.captcha(value, messages, options);
assert.deepEqual(messages, expectedMessages);
verifyJQueryDataStub();
});
});
});
});
describe('compare validator', function () {
var $input = {
val: function () {
return 'b';
}
};
var jQueryInitStub;
var inputSpy;
beforeEach(function () {
jQueryInitStub = sinon.stub($.fn, 'init', function () {
return $input;
});
inputSpy = sinon.spy($input, 'val');
});
afterEach(function () {
jQueryInitStub.restore();
inputSpy.restore();
});
withData({
'empty string, skip on empty': ['', {skipOnEmpty: true}, true],
// ==
'"==" operator, 2 identical integers': [2, {operator: '==', compareValue: 2}, true],
'"==" operator, 2 different integers': [2, {operator: '==', compareValue: 3}, false],
'"==" operator, 2 identical decimal fractions': [2.5, {operator: '==', compareValue: 2.5}, true],
'"==" operator, integer and string with the same values': [2, {operator: '==', compareValue: '2'}, true],
'"==" operator, integer and string with the different values': [
2,
{operator: '==', compareValue: '3'},
false
],
'"==" operator, 2 identical strings': ['b', {operator: '==', compareValue: 'b'}, true],
// ===
'"===" operator, 2 identical integers': [2, {operator: '===', compareValue: 2}, true],
'"===" operator, 2 different integers': [2, {operator: '===', compareValue: 3}, false],
'"===" operator, 2 identical decimal fractions': [2.5, {operator: '===', compareValue: 2.5}, true],
'"===" operator, integer and string with the same value': [2, {operator: '===', compareValue: '2'}, false],
'"===" operator, integer and string with the different values': [
2,
{operator: '===', compareValue: '3'},
false
],
'"===" operator, 2 identical strings': ['b', {operator: '===', compareValue: 'b'}, true],
// !=
'"!=" operator, 2 identical integers': [2, {operator: '!=', compareValue: 2}, false],
'"!=" operator, 2 different integers': [2, {operator: '!=', compareValue: 3}, true],
'"!=" operator, 2 identical decimal fractions': [2.5, {operator: '!=', compareValue: 2.5}, false],
'"!=" operator, integer and string with the same value': [2, {operator: '!=', compareValue: '2'}, false],
'"!=" operator, integer and string with the different values': [
2,
{operator: '!=', compareValue: '3'},
true
],
'"!=" operator, 2 identical strings': ['b', {operator: '!=', compareValue: 'b'}, false],
// !==
'"!==" operator, 2 identical integers': [2, {operator: '!==', compareValue: 2}, false],
'"!==" operator, 2 different integers': [2, {operator: '!==', compareValue: 3}, true],
'"!==" operator, 2 identical decimal fractions': [2.5, {operator: '!==', compareValue: 2.5}, false],
'"!==" operator, integer and string with the same value': [2, {operator: '!==', compareValue: '2'}, true],
'"!==" operator, integer and string with the different values': [
2,
{operator: '!==', compareValue: '3'},
true
],
'"!==" operator, 2 identical strings': ['b', {operator: '!==', compareValue: 'b'}, false],
// >
'">" operator, 2 identical integers': [2, {operator: '>', compareValue: 2}, false],
'">" operator, 2 integers, 2nd is greater': [2, {operator: '>', compareValue: 3}, false],
'">" operator, 2 integers, 2nd is lower': [2, {operator: '>', compareValue: 1}, true],
'">" operator, 2 identical strings': ['b', {operator: '>', compareValue: 'b'}, false],
'">" operator, 2 strings, 2nd is greater': ['a', {operator: '>', compareValue: 'b'}, false],
'">" operator, 2 strings, 2nd is lower': ['b', {operator: '>', compareValue: 'a'}, true],
// >=
'">=" operator, 2 identical integers': [2, {operator: '>=', compareValue: 2}, true],
'">=" operator, 2 integers, 2nd is greater': [2, {operator: '>=', compareValue: 3}, false],
'">=" operator, 2 integers, 2nd is lower': [2, {operator: '>=', compareValue: 1}, true],
'">=" operator, 2 identical strings': ['b', {operator: '>=', compareValue: 'b'}, true],
'">=" operator, 2 strings, 2nd is greater': ['a', {operator: '>=', compareValue: 'b'}, false],
'">=" operator, 2 strings, 2nd is lower': ['b', {operator: '>=', compareValue: 'a'}, true],
// <
'"<" operator, 2 identical integers': [2, {operator: '<', compareValue: 2}, false],
'"<" operator, 2 integers, 2nd is greater': [2, {operator: '<', compareValue: 3}, true],
'"<" operator, 2 integers, 2nd is lower': [2, {operator: '<', compareValue: 1}, false],
'"<" operator, 2 identical strings': ['b', {operator: '<', compareValue: 'b'}, false],
'"<" operator, 2 strings, 2nd is greater': ['a', {operator: '<', compareValue: 'b'}, true],
'"<" operator, 2 strings, 2nd is lower': ['b', {operator: '<', compareValue: 'a'}, false],
'"<" operator, strings "10" and "2"': ['10', {operator: '<', compareValue: '2'}, true],
// <=
'"<=" operator, 2 identical integers': [2, {operator: '<=', compareValue: 2}, true],
'"<=" operator, 2 integers, 2nd is greater': [2, {operator: '<=', compareValue: 3}, true],
'"<=" operator, 2 integers, 2nd is lower': [2, {operator: '<=', compareValue: 1}, false],
'"<=" operator, 2 identical strings': ['b', {operator: '<=', compareValue: 'b'}, true],
'"<=" operator, 2 strings, 2nd is greater': ['a', {operator: '<=', compareValue: 'b'}, true],
'"<=" operator, 2 strings, 2nd is lower': ['b', {operator: '<=', compareValue: 'a'}, false],
// type
'number type, "<" operator, strings "10" and "2"': [
'10',
{operator: '<', compareValue: '2', type: 'number'},
false
],
'number type, ">=" operator, 2nd is lower': [
10,
{operator: '>=', compareValue: 2, type: 'number'},
true
],
'number type, "<=" operator, 2nd is lower': [
10,
{operator: '<=', compareValue: 2, type: 'number'},
false
],
'number type, ">" operator, 2nd is lower': [
10,
{operator: '>', compareValue: 2, type: 'number'},
true
],
'number type, ">" operator, compare value undefined': [
undefined,
{operator: '>', compareValue: 2, type: 'number'},
false
],
'number type, "<" operator, compare value undefined': [
undefined,
{operator: '<', compareValue: 2, type: 'number'},
true
],
'number type, ">=" operator, compare value undefined': [
undefined,
{operator: '>=', compareValue: 2, type: 'number'},
false
],
'number type, "<=" operator, compare value undefined': [
undefined,
{operator: '<=', compareValue: 2, type: 'number'},
true
],
// default compare value
'default compare value, "===" operator, against undefined': [undefined, {operator: '==='}, true]
}, function (value, options, expectValid) {
it(getValidatorMessage(expectValid), function () {
options.message = 'Invalid value.';
var messages = [];
var expectedMessages = expectValid ? [] : ['Invalid value.'];
yii.validation.compare(value, messages, options);
assert.deepEqual(messages, expectedMessages);
assert.isFalse(jQueryInitStub.called);
assert.isFalse(inputSpy.called);
})
});
describe('with compareAttribute, "==" operator and 2 identical strings', function () {
it(VALIDATOR_SUCCESS_MESSAGE, function () {
var $form = {
find: function(){
return $input;
}
};
var messages = [];
yii.validation.compare('b', messages, {operator: '==', compareAttribute: 'input-id'}, $form);
assert.deepEqual(messages, []);
assert.isTrue(jQueryInitStub.calledOnce);
assert.equal(jQueryInitStub.getCall(0).args[0], '#input-id');
assert.isTrue(inputSpy.calledOnce);
assert.strictEqual(inputSpy.getCall(0).args[0], undefined);
});
});
});
describe('ip validator', function () {
var ipParsePattern = '^(\\!?)(.+?)(\/(\\d+))?$';
var ipv4Pattern = '^(?:(?:2(?:[0-4][0-9]|5[0-5])|[0-1]?[0-9]?[0-9])\\.){3}(?:(?:2([0-4][0-9]|5[0-5])|[0-1]?' +
'[0-9]?[0-9]))$';
var ipv6Pattern = '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:)' +
'{1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}' +
'(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}' +
'(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|' +
'fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}' +
'[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|' +
'(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$';
var defaultOptions = {
messages: {
message: 'Invalid value.',
noSubnet: 'No subnet.',
hasSubnet: 'Has subnet.',
ipv4NotAllowed: 'IPv4 is not allowed.',
ipv6NotAllowed: 'IPv6 is not allowed.'
},
'ipParsePattern': ipParsePattern,
'ipv4Pattern': ipv4Pattern,
'ipv6Pattern': ipv6Pattern,
ipv4: true,
ipv6: true
};
withData({
'empty string, skip on empty': ['', {skipOnEmpty: true}, []],
'not IP': ['not IP', {}, ['Invalid value.']],
'not IP, IPv4 is disabled': ['not:IP', {ipv4: false}, ['Invalid value.']],
'not IP, IPv6 is disabled': ['not IP', {ipv6: false}, ['Invalid value.']],
// subnet, IPv4
'IPv4, subnet option is not defined': ['192.168.10.0', {}, []],
'IPv4, subnet option is set to "false"': ['192.168.10.0', {subnet: false}, []],
'IPv4, subnet option is set to "true"': ['192.168.10.0', {subnet: true}, ['No subnet.']],
'IPv4 with CIDR subnet, subnet option is not defined': ['192.168.10.0/24', {}, []],
'IPv4 with CIDR subnet, subnet option is set to "false"': [
'192.168.10.0/24',
{subnet: false},
['Has subnet.']
],
'IPv4 with CIDR subnet, subnet option is set to "true"': ['192.168.10.0/24', {subnet: true}, []],
// subnet, IPv6
'IPv6, subnet option is not defined': ['2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d', {}, []],
'IPv6, subnet option is set to "false"': ['2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d', {subnet: false}, []],
'IPv6, subnet option is set to "true"': [
'2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d',
{subnet: true},
['No subnet.']
],
'IPv6 with CIDR subnet, subnet option is not defined': [
'2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d/24',
{},
[]
],
'IPv6 with CIDR subnet, subnet option is set to "false"': [
'2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d/24',
{subnet: false},
['Has subnet.']
],
'IPv6 with CIDR subnet, subnet option is set to "true"': [
'2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d/24',
{subnet: true},
[]
],
// negation, IPv4
'IPv4, negation option is not defined': ['192.168.10.0', {}, []],
'IPv4, negation option is set to "false"': ['192.168.10.0', {negation: false}, []],
'IPv4, negation option is set to "true"': ['192.168.10.0', {negation: true}, []],
'IPv4 with negation, negation option is not defined': ['!192.168.10.0', {}, []],
'IPv4 with negation, negation option is set to "false"': [
'!192.168.10.0',
{negation: false},
['Invalid value.']
],
'IPv4 with negation, negation option is set to "true"': ['!192.168.10.0', {negation: true}, []],
// negation, IPv6
'IPv6, negation option is not defined': ['2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d', {}, []],
'IPv6, negation option is set to "false"': [
'2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d',
{negation: false},
[]
],
'IPv6, negation option is set to "true"': ['2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d', {negation: true}, []],
'IPv6 with negation, negation option is not defined': ['!2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d', {}, []],
'IPv6 with negation, negation option is set to "false"': [
'!2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d',
{negation: false},
['Invalid value.']
],
'IPv6 with negation, negation option is set to "true"': [
'!2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d',
{negation: true},
[]
],
// ipv4, ipv6
'IPv4, IPv4 option is set to "false"': ['192.168.10.0', {ipv4: false}, ['IPv4 is not allowed.']],
'IPv6, IPv6 option is set to "false"': [
'2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d',
{ipv6: false},
['IPv6 is not allowed.']
],
'IPv6, short variation (4 groups)': ['2001:db8::ae21:ad12', {}, []],
'IPv6, short variation (2 groups)': ['::ae21:ad12', {}, []],
'IPv4, IPv4 and IPv6 options are set to "false"': [
'192.168.10.0',
{ipv4: false, ipv6: false},
['IPv4 is not allowed.']
],
'IPv6, IPv4 and IPv6 options are set to "false"': [
'2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d',
{ipv4: false, ipv6: false},
['IPv6 is not allowed.']
],
'invalid IPv4': ['192,168.10.0', {}, ['Invalid value.']],
'invalid IPv6': ['2001,0db8:11a3:09d7:1f34:8a2e:07a0:765d', {}, ['Invalid value.']],
'invalid IPv4, IPv4 option is set to "false"': [
'192,168.10.0',
{ipv4: false},
['Invalid value.', 'IPv4 is not allowed.']
],
'invalid IPv6, IPv6 option is set to "false"': [
'2001,0db8:11a3:09d7:1f34:8a2e:07a0:765d',
{ipv6: false},
['Invalid value.', 'IPv6 is not allowed.']
]
}, function (value, customOptions, expectedMessages) {
it(getValidatorMessage(expectedMessages), function () {
var messages = [];
var options = $.extend({}, defaultOptions, customOptions);
yii.validation.ip(value, messages, options);
assert.deepEqual(messages, expectedMessages);
})
});
});
});<|fim▁end|> | if (typeof object.resolve !== 'function') {
return false;
}
|
<|file_name|>MiaoZuanScripts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#encoding=utf-8
import urllib, urllib2
import cookielib
import re
import time
from random import random
from json import dumps as json_dumps, loads as json_loads
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import os
project_root_path = os.path.abspath(os.path.join(os.path.dirname(__file__)))
sys.path.append(project_root_path)
from logger.logger import logFactory
logger = logFactory.getLogger(__name__)
class MiaoZuan(object):
"""docstring for MiaoZuan"""
def __init__(self, account_file):
super(MiaoZuan, self).__init__()
self.headers = headers = {
'User-Agent':'IOS_8.1_IPHONE5C',
'm-lng':'113.331639',
'm-ct':'2',
'm-lat':'23.158624',
'm-cw':'320',
'm-iv':'3.0.1',
'm-ch':'568',
'm-cv':'6.5.2',
'm-lt':'1',
'm-nw':'WIFI',
#'Content-Type':'application/json;charset=utf-8'
}
self.accountList = self.get_account_List(account_file)
def get_account_List(self, account_file):
accountList = []
try:
with open(account_file, 'r') as f:
lines = f.readlines()
for line in lines:
user, userName, passWord, imei = line.strip('\n').split(',')
accountList.append([user, userName, passWord, imei])
except Exception as e:
logger.exception(e)
finally:
return accountList
def login(self, userName, passWord, imei):
postdata = urllib.urlencode({
'UserName':userName,
'Password':passWord,
'Imei':imei
})
req = urllib2.Request(
url='http://service.inkey.com/api/Auth/Login',
data=postdata,
headers=self.headers
)
cookie_support = urllib2.HTTPCookieProcessor(cookielib.CookieJar())
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
try:
content = urllib2.urlopen(req).read()
resp_dict = json_loads(content)
return resp_dict
except Exception as e:
logger.exception(e)
return {"IsSuccess": False, "Desc": ""}
def pull_SilverAdvert_List(self, categoryId):
postdata = urllib.urlencode({
'CategoryIds':categoryId
})
req = urllib2.Request(
url='http://service.inkey.com/api/SilverAdvert/Pull',
data = postdata,
headers = self.headers
)
try:
content = urllib2.urlopen(req).read()
silverAdvert_pat = re.compile(r'"Id":(.*?),')
silverAdvert_list = re.findall(silverAdvert_pat, content)
logger.debug("categoryId = %s, pull_SilverAdvert_List = %s", categoryId, silverAdvert_list)
except Exception as e:
logger.exception(e)
silverAdvert_list = []
return silverAdvert_list
def viewOne_SilverAdvert_by_advertsID(self, advertsID):
postdata = urllib.urlencode({
'IsGame':"false",
"Id":advertsID
})
req = urllib2.Request(
url='http://service.inkey.com/api/SilverAdvert/GeneratedIntegral',
data = postdata,
headers = self.headers
)
try:
content = urllib2.urlopen(req).read()
logger.debug("view advert id = %s, Response from the server: %s", advertsID, content)
resp_dict = json_loads(content)
return resp_dict
except Exception as e:
logger.exception(e)
return {"IsSuccess": False}
def viewAll_SilverAdverts_by_categoryId(self, categoryId):
silverAdsList = self.pull_SilverAdvert_List(categoryId)
silverAdsList_Count = len(silverAdsList)
total_data_by_categoryId = 0
result_Code = 0
result_Code_31303_count = 0
selectNum = 0
if silverAdsList_Count > 0:
while True:
advertsID = silverAdsList[selectNum]
resp_dict = self.viewOne_SilverAdvert_by_advertsID(advertsID)
selectNum += 1
if selectNum >= silverAdsList_Count:
selectNum -= silverAdsList_Count
if resp_dict["IsSuccess"]:
total_data_by_categoryId += resp_dict["Data"]
logger.debug("get %s more points", resp_dict["Data"])
<|fim▁hole|> elif resp_dict["Code"] == 31307 or result_Code_31303_count > silverAdsList_Count:
logger.debug("Response from the server: %s", resp_dict["Desc"])
break
time.sleep(12+3*random())
logger.info("categoryId = %s, total_data_by_categoryId = %s" % (categoryId, total_data_by_categoryId))
return [result_Code, total_data_by_categoryId]
def get_all_silvers(self):
total_data = 0
result_Code = 0
categoryIds = [-1, 1, -2, 2, -3, 3, -4, 4, 5, 6, 10]
categoryIds_Count = len(categoryIds)
i = 0
List_Count_equals_0 = 0 #如果获取12次广告,广告数都为零,则切换至下一个帐号
while result_Code != '31307' and List_Count_equals_0 < 12:
categoryId = categoryIds[i]
[result_Code, data_by_categoryId] = self.viewAll_SilverAdverts_by_categoryId(categoryId)
total_data += data_by_categoryId
if result_Code == 0:
List_Count_equals_0 += 1
i += 1
if i >= categoryIds_Count:
i -= categoryIds_Count
return total_data
def start(self):
for account in self.accountList:
user, userName, passWord, imei = account
logger.info("User Iteration Started: %s", user)
login_result_dict = self.login(userName, passWord, imei)
if login_result_dict["IsSuccess"]:
try:
total_data_by_all_categoryIds = self.get_all_silvers()
logger.debug("total_data_by_all_categoryIds: %s" % total_data_by_all_categoryIds)
except Exception as e:
logger.exception(e)
finally:
logger.info("User Iteration Ended: %s", user)
else:
logger.warning("Login failed, login user: %s, error description: %s", user, login_result_dict["Desc"])
logger.info("---------------------------------------------------\n")
def run_forever(self):
while True:
self.start()
time.sleep(4*3600)
if __name__ == '__main__':
account_file = os.path.join(project_root_path, 'Config', 'Accounts.dat')
mz = MiaoZuan(account_file)
mz.run_forever()<|fim▁end|> | elif resp_dict["Code"] == 31303:
logger.debug("view advert id = %s, Response from the server: %s", advertsID, resp_dict["Desc"])
result_Code_31303_count += 1
continue
|
<|file_name|>ResourceLoader.js<|end_file_name|><|fim▁begin|>function ResourceLoader(baseurl) {
this.BASEURL = baseurl;
}
ResourceLoader.prototype.loadResource = function(resource, callback) {
var self = this;
evaluateScripts([resource], function(success) {
if(success) {
var resource = Template.call(self);
callback.call(self, resource);<|fim▁hole|> } else {
var title = "Resource Loader Error",
description = `Error loading resource '${resource}'. \n\n Try again later.`,
alert = createAlert(title, description);
navigationDocument.presentModal(alert);
}
});
}<|fim▁end|> | |
<|file_name|>get.gen.spec.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var falcor = require("./../../../lib/");
var Model = falcor.Model;
var expect = require('chai').expect;
describe('getVersionSync', function() {
it('should get a version', function() {
var model = new Model({cache: {hello: 'world'}});
model._root.unsafeMode = true;
var version = model.getVersion('hello');
expect(version >= 0).to.be.ok;
});
it('should get a version on the root model', function() {
var model = new Model({cache: {hello: 'world'}, unsafeMode: true});
var version = model.getVersion();
expect(version >= 0).to.be.ok;
});
it('should get -1 if no path exists.', function() {
var model = new Model({cache: {hello: 'world'}});
model._root.unsafeMode = true;
var version = model.getVersion('world');
expect(version === -1).to.be.ok;
});
});<|fim▁end|> | |
<|file_name|>client.py<|end_file_name|><|fim▁begin|>import urllib2
import appuifw, e32
from key_codes import *
class Drinker(object):
def __init__(self):
self.id = 0<|fim▁hole|> self.drinks = 0
def get_drinker_list():
data = urllib2.urlopen("http://192.168.11.5:8080/drinkcounter/get_datas/").read().split("\n")
drinkers = []
for data_row in data:
if data_row == '': continue
fields = data_row.split('|')
drinker = Drinker()
drinker.id = int(fields[0])
drinker.name = fields[1]
drinker.drinks = int(fields[2])
drinker.prom = float(fields[3])
drinker.idle = fields[4]
drinkers.append(drinker)
return drinkers
def get_listbox_items(drinkers):
items = []
for drinker in drinkers:
items.append(unicode('%s, %d drinks, %s' % (drinker.name, drinker.drinks, drinker.idle)))
return items
appuifw.app.title = u"Alkoholilaskuri"
app_lock = e32.Ao_lock()
#Define the exit function
def quit():
app_lock.signal()
appuifw.app.exit_key_handler = quit
drinkers = get_drinker_list()
items = get_listbox_items(drinkers)
#Define a function that is called when an item is selected
def handle_selection():
selected_drinker = drinkers[lb.current()]
urllib2.urlopen("http://192.168.11.5:8080/drinkcounter/add_drink/%d/" % (selected_drinker.id))
appuifw.note(u"A drink has been added to " + drinkers[lb.current()].name, 'info')
new_drinkers = get_drinker_list()
items = get_listbox_items(new_drinkers)
lb.set_list(items, lb.current())
#Create an instance of Listbox and set it as the application's body
lb = appuifw.Listbox(items, handle_selection)
appuifw.app.body = lb
app_lock.wait()<|fim▁end|> | self.name = ""
self.prom = 0.0
self.idle = "" |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'toolsforbiology.views.home', name='home'),<|fim▁hole|> url(r'^admin/', include(admin.site.urls)),
)<|fim▁end|> | # url(r'^blog/', include('blog.urls')),
|
<|file_name|>dlg_subida.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
---------------------------------------------------------------------------------------------------
dlg_subida
mantém as informações sobre a dialog de subida
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
revision 0.2 2015/nov mlabru
pep8 style conventions
revision 0.1 2014/nov mlabru
initial release (Linux/Python)
---------------------------------------------------------------------------------------------------
"""
__version__ = "$revision: 0.2$"
__author__ = "mlabru, sophosoft"
__date__ = "2015/12"
# < imports >--------------------------------------------------------------------------------------
# python library
import json
import os
# PyQt library
from PyQt4 import QtCore
from PyQt4 import QtGui
# view
import view.piloto.dlg_subida_ui as dlg
# < class CDlgSubida >-----------------------------------------------------------------------------
class CDlgSubida(QtGui.QDialog, dlg.Ui_CDlgSubida):
"""
mantém as informações sobre a dialog de subida
"""
# ---------------------------------------------------------------------------------------------
def __init__(self, fsck_http, fdct_config, f_strip_cur, fdct_sub, f_parent=None):
"""
@param fsck_http: socket de comunicação com o servidor
@param fdct_config: dicionário de configuração
@param f_strip_cur: strip selecionada
@param fdct_sub: dicionário de subidas
@param f_parent: janela pai
"""
# init super class
super(CDlgSubida, self).__init__(f_parent)
# socket de comunicação
self.__sck_http = fsck_http
assert self.__sck_http
# dicionário de configuração
self.__dct_config = fdct_config<|fim▁hole|> self.__dct_sub = fdct_sub
assert self.__dct_sub is not None
# monta a dialog
self.setupUi(self)
# configura título da dialog
self.setWindowTitle(u"Procedimento de Subida")
# configurações de conexões slot/signal
self.__config_connects()
# configurações de títulos e mensagens da janela de edição
self.__config_texts()
# restaura as configurações da janela de edição
self.__restore_settings()
# dicionário de subidas vazio ?
if not self.__dct_sub:
# carrega o dicionário
self.__load_sub()
# inicia valores
self.cbx_sub.addItems(sorted(self.__dct_sub.values()))
# configura botões
self.bbx_subida.button(QtGui.QDialogButtonBox.Cancel).setText("&Cancela")
self.bbx_subida.button(QtGui.QDialogButtonBox.Ok).setFocus()
# inicia os parâmetros da subida
self.__update_command()
# ---------------------------------------------------------------------------------------------
def __config_connects(self):
"""
configura as conexões slot/signal
"""
# conecta spinBox
self.cbx_sub.currentIndexChanged.connect(self.__on_cbx_currentIndexChanged)
# ---------------------------------------------------------------------------------------------
def __config_texts(self):
"""
DOCUMENT ME!
"""
# configura títulos e mensagens
self.__txt_settings = "CDlgSubida"
# ---------------------------------------------------------------------------------------------
def get_data(self):
"""
DOCUMENT ME!
"""
# return command line
return self.lbl_comando.text()
# ---------------------------------------------------------------------------------------------
def __load_sub(self):
"""
carrega o dicionário de subidas
"""
# check for requirements
assert self.__sck_http is not None
assert self.__dct_config is not None
assert self.__dct_sub is not None
# monta o request das subidas
ls_req = "data/sub.json"
# get server address
l_srv = self.__dct_config.get("srv.addr", None)
if l_srv is not None:
# obtém os dados de subidas do servidor
l_dict = self.__sck_http.get_data(l_srv, ls_req)
if l_dict is not None:
# coloca a subidas no dicionário
self.__dct_sub.update(json.loads(l_dict))
# senão, não achou no servidor...
else:
# logger
l_log = logging.getLogger("CDlgSubida::__load_sub")
l_log.setLevel(logging.ERROR)
l_log.error(u"<E01: tabela de subidas não existe no servidor.")
# senão, não achou endereço do servidor
else:
# logger
l_log = logging.getLogger("CDlgSubida::__load_sub")
l_log.setLevel(logging.WARNING)
l_log.warning(u"<E02: srv.addr não existe na configuração.")
# ---------------------------------------------------------------------------------------------
def __restore_settings(self):
"""
restaura as configurações salvas para esta janela
"""
# obtém os settings
l_set = QtCore.QSettings("sophosoft", "piloto")
assert l_set
# restaura geometria da janela
self.restoreGeometry(l_set.value("%s/Geometry" % (self.__txt_settings)).toByteArray())
# ---------------------------------------------------------------------------------------------
def __update_command(self):
"""
DOCUMENT ME!
"""
# para todas as subidas...
for l_key, l_sub in self.__dct_sub.iteritems():
# é a subida selecionada ?
if self.cbx_sub.currentText() == l_sub:
break
# inicia o comando
ls_cmd = "SUB {}".format(l_key)
# coloca o comando no label
self.lbl_comando.setText(ls_cmd)
# =============================================================================================
# edição de campos
# =============================================================================================
# ---------------------------------------------------------------------------------------------
@QtCore.pyqtSignature("int")
def __on_cbx_currentIndexChanged(self, f_val):
"""
DOCUMENT ME!
"""
# atualiza comando
self.__update_command()
# < the end >--------------------------------------------------------------------------------------<|fim▁end|> | assert self.__dct_config is not None
# dicionário de subidas |
<|file_name|>remote_access_api.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.1.2-pre.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from openapi_client.api_client import ApiClient
from openapi_client.api.remote_access_api_endpoints.get_computer import GetComputer
from openapi_client.api.remote_access_api_endpoints.get_jenkins import GetJenkins
from openapi_client.api.remote_access_api_endpoints.get_job import GetJob
from openapi_client.api.remote_access_api_endpoints.get_job_config import GetJobConfig
from openapi_client.api.remote_access_api_endpoints.get_job_last_build import GetJobLastBuild
from openapi_client.api.remote_access_api_endpoints.get_job_progressive_text import GetJobProgressiveText
from openapi_client.api.remote_access_api_endpoints.get_queue import GetQueue
from openapi_client.api.remote_access_api_endpoints.get_queue_item import GetQueueItem
from openapi_client.api.remote_access_api_endpoints.get_view import GetView
from openapi_client.api.remote_access_api_endpoints.get_view_config import GetViewConfig
from openapi_client.api.remote_access_api_endpoints.head_jenkins import HeadJenkins
from openapi_client.api.remote_access_api_endpoints.post_create_item import PostCreateItem
from openapi_client.api.remote_access_api_endpoints.post_create_view import PostCreateView
from openapi_client.api.remote_access_api_endpoints.post_job_build import PostJobBuild
from openapi_client.api.remote_access_api_endpoints.post_job_config import PostJobConfig
from openapi_client.api.remote_access_api_endpoints.post_job_delete import PostJobDelete
from openapi_client.api.remote_access_api_endpoints.post_job_disable import PostJobDisable
from openapi_client.api.remote_access_api_endpoints.post_job_enable import PostJobEnable
from openapi_client.api.remote_access_api_endpoints.post_job_last_build_stop import PostJobLastBuildStop
from openapi_client.api.remote_access_api_endpoints.post_view_config import PostViewConfig
class RemoteAccessApi(
GetComputer,
GetJenkins,
GetJob,
GetJobConfig,
GetJobLastBuild,
GetJobProgressiveText,
GetQueue,
GetQueueItem,
GetView,
GetViewConfig,
HeadJenkins,
PostCreateItem,
PostCreateView,
PostJobBuild,
PostJobConfig,
PostJobDelete,
PostJobDisable,
PostJobEnable,
PostJobLastBuildStop,
PostViewConfig,
ApiClient,
):
"""NOTE: This class is auto generated by OpenAPI Generator<|fim▁hole|>
Do not edit the class manually.
"""
pass<|fim▁end|> | Ref: https://openapi-generator.tech |
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>import datetime
day = datetime.datetime.now().weekday()
def get_sunday():
return "Today it's Sunday"
def get_monday():
return "Today it's Monday"
def get_tuesday():
return "Today it's Tuesday"
def get_wednesday():
return "Today it's Wednesday"
def get_thursday():
return "Today it's Thursday"
def get_friday():
return "Today it's Friday"
def get_saturday():
return "Today it's Saturday"
def get_default():
return "Looking forward to the Weekend"<|fim▁hole|>
switcher = {
0:get_sunday,
1:get_monday,
2:get_tuesday,
3:get_wednesday,
4:get_thursday,
5:get_friday,
6:get_default
}
dayName = switcher.get(day,get_default)()
print(dayName)<|fim▁end|> | |
<|file_name|>sunf95.py<|end_file_name|><|fim▁begin|>"""SCons.Tool.sunf95
Tool-specific initialization for sunf95, the Sun Studio F95 compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/sunf95.py 4720 2010/03/24 03:14:11 jars"
import SCons.Util
from FortranCommon import add_all_to_env
compilers = ['sunf95', 'f95']
def generate(env):
"""Add Builders and construction variables for sunf95 to an
Environment."""
add_all_to_env(env)
fcomp = env.Detect(compilers) or 'f95'
env['FORTRAN'] = fcomp
env['F95'] = fcomp
env['SHFORTRAN'] = '$FORTRAN'
env['SHF95'] = '$F95'
env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC')<|fim▁hole|> env['SHF95FLAGS'] = SCons.Util.CLVar('$F95FLAGS -KPIC')
def exists(env):
return env.Detect(compilers)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|> | |
<|file_name|>snippet_parser.rs<|end_file_name|><|fim▁begin|>use std::error::Error;
use structs::*;
use filesystem::read_file_to_json;
use std::fs::read_dir;
use filesystem::read_file_to_string;
use std::fs::DirEntry;
pub fn get_all_snippets() -> Result<Vec<Snippet>, Box<Error>> {
let mut all_snippets = Vec::new();
let snippets_path = "./snippets/";
let snippets_dirs = read_dir(snippets_path).unwrap();
for snippet_folder in snippets_dirs {
let uw = snippet_folder?;
if uw.file_type().expect("failed to get folder type").is_dir() {
let snippet = parse_snippet(&uw);
all_snippets.push(snippet);
}
}
Ok(all_snippets)
}
fn parse_snippet(snippet_folder: &DirEntry) -> Snippet {<|fim▁hole|> let folder_relative_path = uw.path().display().to_string();
let folder_name = uw.file_name()
.to_str()
.expect("failed to get snippet folder name")
.to_string();
let info_path = format!("{}/info.json", folder_relative_path);
let content_path = format!("{}/content.md", folder_relative_path);
let info = read_file_to_json(&info_path);
let content = read_file_to_string(&content_path);
let ssnippet = Snippet {
title: info["title"]
.as_str()
.expect("failed to parse title")
.to_string(),
crates: info["crates"]
.as_array()
.expect("failed to parse crates")
.into_iter()
.map(|x| x.as_str().expect("failed to parse crates").to_string())
.collect(),
tags: info["tags"]
.as_array()
.expect("failed to parse tags")
.into_iter()
.map(|x| x.as_str().expect("failed to parse tags").to_string())
.collect(),
content: content,
link: folder_name,
};
println!("parsed: {}", folder_relative_path);
ssnippet
}<|fim▁end|> | let uw = snippet_folder; |
<|file_name|>groupe.js<|end_file_name|><|fim▁begin|>System.register(["./departementsiglename"], function (_export) {
var DepartementSigleNameItem, _createClass, _get, _inherits, _classCallCheck, Groupe;
return {
setters: [function (_departementsiglename) {
DepartementSigleNameItem = _departementsiglename.DepartementSigleNameItem;
}],
execute: function () {
"use strict";
_createClass = (function () { function defineProperties(target, props) { for (var key in props) { var prop = props[key]; prop.configurable = true; if (prop.value) prop.writable = true; } Object.defineProperties(target, props); } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
_get = function get(object, property, receiver) { var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { return get(parent, property, receiver); } } else if ("value" in desc && desc.writable) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } };
_inherits = function (subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; };
_classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
//
Groupe = _export("Groupe", (function (_DepartementSigleNameItem) {
function Groupe(oMap) {
_classCallCheck(this, Groupe);
_get(Object.getPrototypeOf(Groupe.prototype), "constructor", this).call(this, oMap);
}
_inherits(Groupe, _DepartementSigleNameItem);
_createClass(Groupe, {
type: {
get: function () {
return "groupe";
}
},
collection_name: {
get: function () {
return "groupes";
}<|fim▁hole|>
return Groupe;
})(DepartementSigleNameItem));
}
};
});
// groupe.js
//
// class Groupe
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbImRhdGEvZG9tYWluL2dyb3VwZS5qcyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO1FBR0ksd0JBQXdCLGtEQUlmLE1BQU07Ozs7QUFKZixvQ0FBd0IseUJBQXhCLHdCQUF3Qjs7Ozs7Ozs7Ozs7Ozs7QUFJZixrQkFBTTtBQUNKLHlCQURGLE1BQU0sQ0FDSCxJQUFJLEVBQUU7MENBRFQsTUFBTTs7QUFFWCwrQ0FGSyxNQUFNLDZDQUVMLElBQUksRUFBRTtpQkFDZjs7MEJBSFEsTUFBTTs7NkJBQU4sTUFBTTtBQUlYLHdCQUFJOzZCQUFBLFlBQUc7QUFDUCxtQ0FBTyxRQUFRLENBQUM7eUJBQ25COztBQUNHLG1DQUFlOzZCQUFBLFlBQUc7QUFDbEIsbUNBQU8sU0FBUyxDQUFDO3lCQUNwQjs7Ozt1QkFUUSxNQUFNO2VBQVMsd0JBQXdCIiwiZmlsZSI6ImRhdGEvZG9tYWluL2dyb3VwZS5qcyIsInNvdXJjZVJvb3QiOiIvc3JjLyJ9<|fim▁end|> | }
}); |
<|file_name|>test_vec_env.py<|end_file_name|><|fim▁begin|>from slm_lab.env.vec_env import make_gym_venv
import numpy as np
import pytest
@pytest.mark.parametrize('name,state_shape,reward_scale', [
('PongNoFrameskip-v4', (1, 84, 84), 'sign'),
('LunarLander-v2', (8,), None),
('CartPole-v0', (4,), None),
])
@pytest.mark.parametrize('num_envs', (1, 4))
def test_make_gym_venv_nostack(name, num_envs, state_shape, reward_scale):
seed = 0
frame_op = None
frame_op_len = None
venv = make_gym_venv(name, num_envs, seed, frame_op=frame_op, frame_op_len=frame_op_len, reward_scale=reward_scale)
venv.reset()
for i in range(5):
state, reward, done, info = venv.step([venv.action_space.sample()] * num_envs)
assert isinstance(state, np.ndarray)
assert state.shape == (num_envs,) + state_shape
assert isinstance(reward, np.ndarray)
assert reward.shape == (num_envs,)
assert isinstance(done, np.ndarray)
assert done.shape == (num_envs,)
assert len(info) == num_envs
venv.close()
@pytest.mark.parametrize('name,state_shape, reward_scale', [
('PongNoFrameskip-v4', (1, 84, 84), 'sign'),
('LunarLander-v2', (8,), None),
('CartPole-v0', (4,), None),
])
@pytest.mark.parametrize('num_envs', (1, 4))
def test_make_gym_concat(name, num_envs, state_shape, reward_scale):
seed = 0
frame_op = 'concat' # used for image, or for concat vector
frame_op_len = 4
venv = make_gym_venv(name, num_envs, seed, frame_op=frame_op, frame_op_len=frame_op_len, reward_scale=reward_scale)
venv.reset()
for i in range(5):
state, reward, done, info = venv.step([venv.action_space.sample()] * num_envs)
assert isinstance(state, np.ndarray)
stack_shape = (num_envs, frame_op_len * state_shape[0],) + state_shape[1:]
assert state.shape == stack_shape
assert isinstance(reward, np.ndarray)
assert reward.shape == (num_envs,)
assert isinstance(done, np.ndarray)
assert done.shape == (num_envs,)
assert len(info) == num_envs
venv.close()
@pytest.mark.skip(reason='Not implemented yet')
@pytest.mark.parametrize('name,state_shape,reward_scale', [
('LunarLander-v2', (8,), None),
('CartPole-v0', (4,), None),
])
@pytest.mark.parametrize('num_envs', (1, 4))
def test_make_gym_stack(name, num_envs, state_shape, reward_scale):
seed = 0
frame_op = 'stack' # used for rnn
frame_op_len = 4
venv = make_gym_venv(name, num_envs, seed, frame_op=frame_op, frame_op_len=frame_op_len, reward_scale=reward_scale)
venv.reset()
for i in range(5):
state, reward, done, info = venv.step([venv.action_space.sample()] * num_envs)
assert isinstance(state, np.ndarray)
stack_shape = (num_envs, frame_op_len,) + state_shape
assert state.shape == stack_shape
assert isinstance(reward, np.ndarray)<|fim▁hole|> assert reward.shape == (num_envs,)
assert isinstance(done, np.ndarray)
assert done.shape == (num_envs,)
assert len(info) == num_envs
venv.close()
@pytest.mark.parametrize('name,state_shape,image_downsize', [
('PongNoFrameskip-v4', (1, 84, 84), (84, 84)),
('PongNoFrameskip-v4', (1, 64, 64), (64, 64)),
])
@pytest.mark.parametrize('num_envs', (1, 4))
def test_make_gym_venv_downsize(name, num_envs, state_shape, image_downsize):
seed = 0
frame_op = None
frame_op_len = None
venv = make_gym_venv(name, num_envs, seed, frame_op=frame_op, frame_op_len=frame_op_len, image_downsize=image_downsize)
venv.reset()
for i in range(5):
state, reward, done, info = venv.step([venv.action_space.sample()] * num_envs)
assert isinstance(state, np.ndarray)
assert state.shape == (num_envs,) + state_shape
assert isinstance(reward, np.ndarray)
assert reward.shape == (num_envs,)
assert isinstance(done, np.ndarray)
assert done.shape == (num_envs,)
assert len(info) == num_envs
venv.close()<|fim▁end|> | |
<|file_name|>recognize-page-type.js<|end_file_name|><|fim▁begin|>/**
* Created by leow on 2/12/17.
*/
"use strict";
// Stdlib<|fim▁hole|>const NON_FATAL_INCONSISTENT = "Inconsistent state - retry!"
const FATAL_CONTENT = "Page does not exist!"
const FATAL_UNKNOWN = "Unknown Error!"
// Libs
const cheerio = require('cheerio')
const tableParser = require('cheerio-tableparser')
function recognize_page_type(page_parsed) {
// console.error("TYPE: " + typeof(page_parsed))
// DEBUG:
// console.error("TITLE IS " + pageParsed('head title').text())
// If title is "PublicViewStatus"; is OK; otherwise ERROR out!!
if (page_parsed('head title').text() == "PublicViewStatus") {
// Aduan Information
// id="dlAduan"
// DEBUG:
// console.error("ADUAN: " + pageParsed('#Table9'))
/*
console.error("====== ADUAN_DATA: =====\n" + util.inspect(
pageParsed('#Table9').parsetable(false, false, true).reduce(
(p, n) => n.map((item, i) => [...(p[i] || []), n[i]]), []
)))
*/
/* NO NEED TRANSPOSE!! :P
const aduanData = pageParsed('#Table9').parsetable(false, false, true).reduce(
(p, n) => n.map((item, i) => [...(p[i] || []), n[i]]), []
)
*/
const aduan_data = page_parsed('#Table9').parsetable(false, false, true)
// DEBUG:
/*
console.error("SIZE: " + aduan_data.length)
aduanData[0].every((element, index, array) => {
console.error("EL: " + util.inspect(element) + " ID: " + index )
})
*/
// Choose the column number; then we can get out the key/value
// aduanData[0] for the label
// aduanData[1] for the value
// DEBUG:
/*
aduan_data[1].forEach((element, index) => {
console.error('a[' + index + '] = ' + element)
})
*/
// console.error("ADUANID: " + aduan_data[1][0])
// Tindakan Table
// id="dsTindakan"
// DEBUG:
// console.error("TINDAKAN: " + pageParsed('#dsTindakan'))
// References:
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array
// Transpose assumes matrix (same size both end); not suitable
// const transpose = a => a.map((_, c) => a.map(r => r[c]))
// Last solution by @tatomyr works!!
const tindakan_data = page_parsed('#dsTindakan').parsetable(false, false, true).reduce(
(p, n) => n.map((item, i) => [...(p[i] || []), n[i]]), []
)
// DEBUG:
/*
console.error("TINDAKAN_DATA:" + util.inspect(tindakan_data))
console.error("TINDAKAN_LENGTH: " + tindakan_data.length)
*/
if (tindakan_data.length == 1) {
return {
"page_type": "empty",
"aduan_id": aduan_data[1][0]
}
} else {
return {
"page_type": "good",
"aduan_id": aduan_data[1][0]
}
}
} else {
return {
"page_type": "error"
}
}
// Should not get here .. is bad!
return {
"page_type": "unknown"
}
}
function extract_table(loaded_raw_content) {
if (loaded_raw_content === null || loaded_raw_content === undefined) {
return {
"error": FATAL_CONTENT
}
}
const page_parsed = cheerio.load(loaded_raw_content)
// Setup cheerio-tableparser
tableParser(page_parsed)
// Extract out page type and other goodies?
const res = recognize_page_type(page_parsed)
if (res.page_type == "error") {
// Assumes Error Page; but it is loaded correctly ..
return {
"error": null,
"type": "error"
}
} else if (res.page_type == "unknown") {
return {
"error": FATAL_UNKNOWN,
"type": "error"
}
}
// Type: "good" or "empty"
return {
"error": null,
"type": res.page_type,
"aduan_id": res.aduan_id
}
}
module.exports = {
execute: extract_table
}<|fim▁end|> | const util = require("util")
// Constants |
<|file_name|>grouping.js<|end_file_name|><|fim▁begin|>import Collection from './collection';
import extend from '../utils/extend';
export default function Grouping(key, elements) {
this.key = key;
this.elements = elements;
Collection.call(this, elements);
}
extend(Grouping, Collection, {<|fim▁hole|> * Gets the number of elements in the Grouping.
* @returns {Number}
*/
count: function () {
return this.elements.length;
},
/**
* Creates an array from the Grouping.
* @returns {Array}
*/
toArray: function () {
return this.elements;
},
toString: function () {
return '[Grouping]';
}
});<|fim▁end|> | /** |
<|file_name|>open_directory.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from getpass import getuser
import ctypes
from ctypes.util import find_library
from ctypes import c_void_p, c_uint32, POINTER, c_bool, byref
from .core_foundation import CoreFoundation, unicode_to_cfstring, cfstring_to_unicode
from .._types import str_cls, type_name
od_path = find_library('OpenDirectory')
OpenDirectory = ctypes.CDLL(od_path, use_errno=True)
ODAttributeType = CoreFoundation.CFStringRef
ODMatchType = c_uint32
ODRecordType = CoreFoundation.CFStringRef
ODSessionRef = c_void_p
ODNodeRef = c_void_p
ODQueryRef = c_void_p
ODRecordRef = c_void_p
OpenDirectory.ODSessionCreate.argtypes = [
CoreFoundation.CFAllocatorRef,
CoreFoundation.CFDictionaryRef,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODSessionCreate.restype = ODSessionRef
OpenDirectory.ODNodeCreateWithName.argtypes = [
CoreFoundation.CFAllocatorRef,
ODSessionRef,
CoreFoundation.CFStringRef,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODNodeCreateWithName.restype = ODNodeRef
OpenDirectory.ODQueryCreateWithNode.argtypes = [
CoreFoundation.CFAllocatorRef,
ODNodeRef,
CoreFoundation.CFTypeRef,
ODAttributeType,
ODMatchType,
CoreFoundation.CFTypeRef,
CoreFoundation.CFTypeRef,
CoreFoundation.CFIndex,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODQueryCreateWithNode.restype = ODQueryRef
OpenDirectory.ODQueryCopyResults.argtypes = [
ODQueryRef,
c_bool,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODQueryCopyResults.restype = CoreFoundation.CFArrayRef
OpenDirectory.ODRecordCopyValues.argtypes = [
ODRecordRef,
ODAttributeType,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODRecordCopyValues.restype = CoreFoundation.CFArrayRef
kODMatchEqualTo = ODMatchType(0x2001)
kODRecordTypeUsers = ODRecordType.in_dll(OpenDirectory, 'kODRecordTypeUsers')
kODAttributeTypeRecordName = ODAttributeType.in_dll(OpenDirectory, 'kODAttributeTypeRecordName')
kODAttributeTypeUserShell = ODAttributeType.in_dll(OpenDirectory, 'kODAttributeTypeUserShell')
_login_shells = {}
def get_user_login_shell(username=None):
"""
Uses OS X's OpenDirectory.framework to get the user's login shell
:param username:
A unicode string of the user to get the shell for - None for the
current user
:return:
A unicode string of the user's login shell
"""
if username is None:
username = getuser()
if not isinstance(username, str_cls):
username = username.decode('utf-8')
if not isinstance(username, str_cls):
raise TypeError('username must be a unicode string, not %s' % type_name(username))
if username not in _login_shells:
error_ref = CoreFoundation.CFErrorRef()
session = OpenDirectory.ODSessionCreate(
CoreFoundation.kCFAllocatorDefault,
None,
byref(error_ref)
)
if bool(error_ref):
raise OSError('Error!')
node = OpenDirectory.ODNodeCreateWithName(
CoreFoundation.kCFAllocatorDefault,
session,
unicode_to_cfstring("/Local/Default"),
byref(error_ref)
)
if bool(error_ref):
raise OSError('Error!')
query = OpenDirectory.ODQueryCreateWithNode(
CoreFoundation.kCFAllocatorDefault,
node,
kODRecordTypeUsers,
kODAttributeTypeRecordName,
kODMatchEqualTo,
unicode_to_cfstring(username),
kODAttributeTypeUserShell,
1,
byref(error_ref)
)
if bool(error_ref):
raise OSError('Error!')
results = OpenDirectory.ODQueryCopyResults(
query,
False,
byref(error_ref)
)
if bool(error_ref):
raise OSError('Error!')
login_shell = None
num_results = CoreFoundation.CFArrayGetCount(results)
if num_results == 1:
od_record = CoreFoundation.CFArrayGetValueAtIndex(results, 0)
attributes = OpenDirectory.ODRecordCopyValues(od_record, kODAttributeTypeUserShell, byref(error_ref))
if bool(error_ref):
raise OSError('Error!')
num_attributes = CoreFoundation.CFArrayGetCount(results)
if num_attributes == 1:<|fim▁hole|> string_ref = CoreFoundation.CFArrayGetValueAtIndex(attributes, 0)
login_shell = cfstring_to_unicode(string_ref)
_login_shells[username] = login_shell
return _login_shells.get(username)<|fim▁end|> | |
<|file_name|>ServerlessApplicationRepositoryClient.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/core/utils/Outcome.h>
#include <aws/core/auth/AWSAuthSigner.h>
#include <aws/core/client/CoreErrors.h>
#include <aws/core/client/RetryStrategy.h>
#include <aws/core/http/HttpClient.h>
#include <aws/core/http/HttpResponse.h>
#include <aws/core/http/HttpClientFactory.h>
#include <aws/core/auth/AWSCredentialsProviderChain.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
#include <aws/core/utils/threading/Executor.h>
#include <aws/core/utils/DNS.h>
#include <aws/core/utils/logging/LogMacros.h>
#include <aws/serverlessrepo/ServerlessApplicationRepositoryClient.h>
#include <aws/serverlessrepo/ServerlessApplicationRepositoryEndpoint.h>
#include <aws/serverlessrepo/ServerlessApplicationRepositoryErrorMarshaller.h>
#include <aws/serverlessrepo/model/CreateApplicationRequest.h>
#include <aws/serverlessrepo/model/CreateApplicationVersionRequest.h>
#include <aws/serverlessrepo/model/CreateCloudFormationChangeSetRequest.h>
#include <aws/serverlessrepo/model/CreateCloudFormationTemplateRequest.h>
#include <aws/serverlessrepo/model/DeleteApplicationRequest.h>
#include <aws/serverlessrepo/model/GetApplicationRequest.h>
#include <aws/serverlessrepo/model/GetApplicationPolicyRequest.h>
#include <aws/serverlessrepo/model/GetCloudFormationTemplateRequest.h>
#include <aws/serverlessrepo/model/ListApplicationDependenciesRequest.h>
#include <aws/serverlessrepo/model/ListApplicationVersionsRequest.h>
#include <aws/serverlessrepo/model/ListApplicationsRequest.h>
#include <aws/serverlessrepo/model/PutApplicationPolicyRequest.h>
#include <aws/serverlessrepo/model/UnshareApplicationRequest.h>
#include <aws/serverlessrepo/model/UpdateApplicationRequest.h>
using namespace Aws;
using namespace Aws::Auth;
using namespace Aws::Client;
using namespace Aws::ServerlessApplicationRepository;
using namespace Aws::ServerlessApplicationRepository::Model;
using namespace Aws::Http;
using namespace Aws::Utils::Json;
static const char* SERVICE_NAME = "serverlessrepo";
static const char* ALLOCATION_TAG = "ServerlessApplicationRepositoryClient";
ServerlessApplicationRepositoryClient::ServerlessApplicationRepositoryClient(const Client::ClientConfiguration& clientConfiguration) :
BASECLASS(clientConfiguration,
Aws::MakeShared<AWSAuthV4Signer>(ALLOCATION_TAG, Aws::MakeShared<DefaultAWSCredentialsProviderChain>(ALLOCATION_TAG),
SERVICE_NAME, clientConfiguration.region),
Aws::MakeShared<ServerlessApplicationRepositoryErrorMarshaller>(ALLOCATION_TAG)),
m_executor(clientConfiguration.executor)
{
init(clientConfiguration);
}
ServerlessApplicationRepositoryClient::ServerlessApplicationRepositoryClient(const AWSCredentials& credentials, const Client::ClientConfiguration& clientConfiguration) :
BASECLASS(clientConfiguration,
Aws::MakeShared<AWSAuthV4Signer>(ALLOCATION_TAG, Aws::MakeShared<SimpleAWSCredentialsProvider>(ALLOCATION_TAG, credentials),
SERVICE_NAME, clientConfiguration.region),
Aws::MakeShared<ServerlessApplicationRepositoryErrorMarshaller>(ALLOCATION_TAG)),
m_executor(clientConfiguration.executor)
{
init(clientConfiguration);
}
ServerlessApplicationRepositoryClient::ServerlessApplicationRepositoryClient(const std::shared_ptr<AWSCredentialsProvider>& credentialsProvider,
const Client::ClientConfiguration& clientConfiguration) :
BASECLASS(clientConfiguration,
Aws::MakeShared<AWSAuthV4Signer>(ALLOCATION_TAG, credentialsProvider,
SERVICE_NAME, clientConfiguration.region),
Aws::MakeShared<ServerlessApplicationRepositoryErrorMarshaller>(ALLOCATION_TAG)),
m_executor(clientConfiguration.executor)
{
init(clientConfiguration);
}
ServerlessApplicationRepositoryClient::~ServerlessApplicationRepositoryClient()
{
}
void ServerlessApplicationRepositoryClient::init(const ClientConfiguration& config)
{
m_configScheme = SchemeMapper::ToString(config.scheme);
if (config.endpointOverride.empty())
{
m_uri = m_configScheme + "://" + ServerlessApplicationRepositoryEndpoint::ForRegion(config.region, config.useDualStack);
}
else
{
OverrideEndpoint(config.endpointOverride);
}
}
void ServerlessApplicationRepositoryClient::OverrideEndpoint(const Aws::String& endpoint)
{
if (endpoint.compare(0, 7, "http://") == 0 || endpoint.compare(0, 8, "https://") == 0)
{
m_uri = endpoint;
}
else
{
m_uri = m_configScheme + "://" + endpoint;
}
}
CreateApplicationOutcome ServerlessApplicationRepositoryClient::CreateApplication(const CreateApplicationRequest& request) const
{
Aws::Http::URI uri = m_uri;
Aws::StringStream ss;
ss << "/applications";
uri.SetPath(uri.GetPath() + ss.str());
JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_POST, Aws::Auth::SIGV4_SIGNER);
if(outcome.IsSuccess())
{
return CreateApplicationOutcome(CreateApplicationResult(outcome.GetResult()));
}
else
{
return CreateApplicationOutcome(outcome.GetError());
}
}
CreateApplicationOutcomeCallable ServerlessApplicationRepositoryClient::CreateApplicationCallable(const CreateApplicationRequest& request) const
{
auto task = Aws::MakeShared< std::packaged_task< CreateApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->CreateApplication(request); } );
auto packagedFunction = [task]() { (*task)(); };
m_executor->Submit(packagedFunction);
return task->get_future();
}
void ServerlessApplicationRepositoryClient::CreateApplicationAsync(const CreateApplicationRequest& request, const CreateApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
m_executor->Submit( [this, request, handler, context](){ this->CreateApplicationAsyncHelper( request, handler, context ); } );
}
void ServerlessApplicationRepositoryClient::CreateApplicationAsyncHelper(const CreateApplicationRequest& request, const CreateApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
handler(this, request, CreateApplication(request), context);
}
// Synchronously creates a new version of an existing application.
// Required fields are validated client-side before any HTTP traffic.
CreateApplicationVersionOutcome ServerlessApplicationRepositoryClient::CreateApplicationVersion(const CreateApplicationVersionRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("CreateApplicationVersion", "Required field: ApplicationId, is not set");
    return CreateApplicationVersionOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  if (!request.SemanticVersionHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("CreateApplicationVersion", "Required field: SemanticVersion, is not set");
    return CreateApplicationVersionOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [SemanticVersion]", false));
  }
  // REST path: PUT /applications/{applicationId}/versions/{semanticVersion}
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/versions/";
  ss << request.GetSemanticVersion();
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_PUT, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return CreateApplicationVersionOutcome(CreateApplicationVersionResult(outcome.GetResult()));
  }
  else
  {
    return CreateApplicationVersionOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
CreateApplicationVersionOutcomeCallable ServerlessApplicationRepositoryClient::CreateApplicationVersionCallable(const CreateApplicationVersionRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< CreateApplicationVersionOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->CreateApplicationVersion(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::CreateApplicationVersionAsync(const CreateApplicationVersionRequest& request, const CreateApplicationVersionResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->CreateApplicationVersionAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::CreateApplicationVersionAsyncHelper(const CreateApplicationVersionRequest& request, const CreateApplicationVersionResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, CreateApplicationVersion(request), context);
}
// Synchronously creates a CloudFormation change set for an application.
CreateCloudFormationChangeSetOutcome ServerlessApplicationRepositoryClient::CreateCloudFormationChangeSet(const CreateCloudFormationChangeSetRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("CreateCloudFormationChangeSet", "Required field: ApplicationId, is not set");
    return CreateCloudFormationChangeSetOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: POST /applications/{applicationId}/changesets
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/changesets";
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_POST, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return CreateCloudFormationChangeSetOutcome(CreateCloudFormationChangeSetResult(outcome.GetResult()));
  }
  else
  {
    return CreateCloudFormationChangeSetOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
CreateCloudFormationChangeSetOutcomeCallable ServerlessApplicationRepositoryClient::CreateCloudFormationChangeSetCallable(const CreateCloudFormationChangeSetRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< CreateCloudFormationChangeSetOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->CreateCloudFormationChangeSet(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::CreateCloudFormationChangeSetAsync(const CreateCloudFormationChangeSetRequest& request, const CreateCloudFormationChangeSetResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->CreateCloudFormationChangeSetAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::CreateCloudFormationChangeSetAsyncHelper(const CreateCloudFormationChangeSetRequest& request, const CreateCloudFormationChangeSetResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, CreateCloudFormationChangeSet(request), context);
}
// Synchronously creates a CloudFormation template for an application.
CreateCloudFormationTemplateOutcome ServerlessApplicationRepositoryClient::CreateCloudFormationTemplate(const CreateCloudFormationTemplateRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("CreateCloudFormationTemplate", "Required field: ApplicationId, is not set");
    return CreateCloudFormationTemplateOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: POST /applications/{applicationId}/templates
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/templates";
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_POST, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return CreateCloudFormationTemplateOutcome(CreateCloudFormationTemplateResult(outcome.GetResult()));
  }
  else
  {
    return CreateCloudFormationTemplateOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
CreateCloudFormationTemplateOutcomeCallable ServerlessApplicationRepositoryClient::CreateCloudFormationTemplateCallable(const CreateCloudFormationTemplateRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< CreateCloudFormationTemplateOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->CreateCloudFormationTemplate(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::CreateCloudFormationTemplateAsync(const CreateCloudFormationTemplateRequest& request, const CreateCloudFormationTemplateResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->CreateCloudFormationTemplateAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::CreateCloudFormationTemplateAsyncHelper(const CreateCloudFormationTemplateRequest& request, const CreateCloudFormationTemplateResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, CreateCloudFormationTemplate(request), context);
}
// Synchronously deletes an application. Success carries no payload.
DeleteApplicationOutcome ServerlessApplicationRepositoryClient::DeleteApplication(const DeleteApplicationRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("DeleteApplication", "Required field: ApplicationId, is not set");
    return DeleteApplicationOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: DELETE /applications/{applicationId}
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_DELETE, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return DeleteApplicationOutcome(NoResult());
  }
  else
  {
    return DeleteApplicationOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
DeleteApplicationOutcomeCallable ServerlessApplicationRepositoryClient::DeleteApplicationCallable(const DeleteApplicationRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< DeleteApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->DeleteApplication(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::DeleteApplicationAsync(const DeleteApplicationRequest& request, const DeleteApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->DeleteApplicationAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::DeleteApplicationAsyncHelper(const DeleteApplicationRequest& request, const DeleteApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, DeleteApplication(request), context);
}
// Synchronously fetches an application's metadata.
GetApplicationOutcome ServerlessApplicationRepositoryClient::GetApplication(const GetApplicationRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("GetApplication", "Required field: ApplicationId, is not set");
    return GetApplicationOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: GET /applications/{applicationId}
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return GetApplicationOutcome(GetApplicationResult(outcome.GetResult()));
  }
  else
  {
    return GetApplicationOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
GetApplicationOutcomeCallable ServerlessApplicationRepositoryClient::GetApplicationCallable(const GetApplicationRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< GetApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->GetApplication(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::GetApplicationAsync(const GetApplicationRequest& request, const GetApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->GetApplicationAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::GetApplicationAsyncHelper(const GetApplicationRequest& request, const GetApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, GetApplication(request), context);
}
// Synchronously fetches the resource policy attached to an application.
GetApplicationPolicyOutcome ServerlessApplicationRepositoryClient::GetApplicationPolicy(const GetApplicationPolicyRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("GetApplicationPolicy", "Required field: ApplicationId, is not set");
    return GetApplicationPolicyOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: GET /applications/{applicationId}/policy
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/policy";
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return GetApplicationPolicyOutcome(GetApplicationPolicyResult(outcome.GetResult()));
  }
  else
  {
    return GetApplicationPolicyOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
GetApplicationPolicyOutcomeCallable ServerlessApplicationRepositoryClient::GetApplicationPolicyCallable(const GetApplicationPolicyRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< GetApplicationPolicyOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->GetApplicationPolicy(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::GetApplicationPolicyAsync(const GetApplicationPolicyRequest& request, const GetApplicationPolicyResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->GetApplicationPolicyAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::GetApplicationPolicyAsyncHelper(const GetApplicationPolicyRequest& request, const GetApplicationPolicyResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, GetApplicationPolicy(request), context);
}
// Synchronously fetches a previously created CloudFormation template.
// Both ApplicationId and TemplateId are required path parameters.
GetCloudFormationTemplateOutcome ServerlessApplicationRepositoryClient::GetCloudFormationTemplate(const GetCloudFormationTemplateRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("GetCloudFormationTemplate", "Required field: ApplicationId, is not set");
    return GetCloudFormationTemplateOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  if (!request.TemplateIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("GetCloudFormationTemplate", "Required field: TemplateId, is not set");
    return GetCloudFormationTemplateOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [TemplateId]", false));
  }
  // REST path: GET /applications/{applicationId}/templates/{templateId}
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/templates/";
  ss << request.GetTemplateId();
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return GetCloudFormationTemplateOutcome(GetCloudFormationTemplateResult(outcome.GetResult()));
  }
  else
  {
    return GetCloudFormationTemplateOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
GetCloudFormationTemplateOutcomeCallable ServerlessApplicationRepositoryClient::GetCloudFormationTemplateCallable(const GetCloudFormationTemplateRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< GetCloudFormationTemplateOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->GetCloudFormationTemplate(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::GetCloudFormationTemplateAsync(const GetCloudFormationTemplateRequest& request, const GetCloudFormationTemplateResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->GetCloudFormationTemplateAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::GetCloudFormationTemplateAsyncHelper(const GetCloudFormationTemplateRequest& request, const GetCloudFormationTemplateResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, GetCloudFormationTemplate(request), context);
}
// Synchronously lists the applications an application depends on.
ListApplicationDependenciesOutcome ServerlessApplicationRepositoryClient::ListApplicationDependencies(const ListApplicationDependenciesRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("ListApplicationDependencies", "Required field: ApplicationId, is not set");
    return ListApplicationDependenciesOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: GET /applications/{applicationId}/dependencies
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/dependencies";
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return ListApplicationDependenciesOutcome(ListApplicationDependenciesResult(outcome.GetResult()));
  }
  else
  {
    return ListApplicationDependenciesOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
ListApplicationDependenciesOutcomeCallable ServerlessApplicationRepositoryClient::ListApplicationDependenciesCallable(const ListApplicationDependenciesRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< ListApplicationDependenciesOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->ListApplicationDependencies(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::ListApplicationDependenciesAsync(const ListApplicationDependenciesRequest& request, const ListApplicationDependenciesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->ListApplicationDependenciesAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::ListApplicationDependenciesAsyncHelper(const ListApplicationDependenciesRequest& request, const ListApplicationDependenciesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, ListApplicationDependencies(request), context);
}
// Synchronously lists all versions of an application.
ListApplicationVersionsOutcome ServerlessApplicationRepositoryClient::ListApplicationVersions(const ListApplicationVersionsRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("ListApplicationVersions", "Required field: ApplicationId, is not set");
    return ListApplicationVersionsOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: GET /applications/{applicationId}/versions
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/versions";
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return ListApplicationVersionsOutcome(ListApplicationVersionsResult(outcome.GetResult()));
  }
  else
  {
    return ListApplicationVersionsOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
ListApplicationVersionsOutcomeCallable ServerlessApplicationRepositoryClient::ListApplicationVersionsCallable(const ListApplicationVersionsRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< ListApplicationVersionsOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->ListApplicationVersions(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::ListApplicationVersionsAsync(const ListApplicationVersionsRequest& request, const ListApplicationVersionsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->ListApplicationVersionsAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::ListApplicationVersionsAsyncHelper(const ListApplicationVersionsRequest& request, const ListApplicationVersionsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, ListApplicationVersions(request), context);
}
// Synchronously lists applications. No required path parameters, so no
// client-side validation is performed before the request.
ListApplicationsOutcome ServerlessApplicationRepositoryClient::ListApplications(const ListApplicationsRequest& request) const
{
  // REST path: GET /applications
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications";
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return ListApplicationsOutcome(ListApplicationsResult(outcome.GetResult()));
  }
  else
  {
    return ListApplicationsOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
ListApplicationsOutcomeCallable ServerlessApplicationRepositoryClient::ListApplicationsCallable(const ListApplicationsRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< ListApplicationsOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->ListApplications(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::ListApplicationsAsync(const ListApplicationsRequest& request, const ListApplicationsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->ListApplicationsAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::ListApplicationsAsyncHelper(const ListApplicationsRequest& request, const ListApplicationsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, ListApplications(request), context);
}
// Synchronously replaces the resource policy attached to an application.
PutApplicationPolicyOutcome ServerlessApplicationRepositoryClient::PutApplicationPolicy(const PutApplicationPolicyRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("PutApplicationPolicy", "Required field: ApplicationId, is not set");
    return PutApplicationPolicyOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: PUT /applications/{applicationId}/policy
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/policy";
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_PUT, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return PutApplicationPolicyOutcome(PutApplicationPolicyResult(outcome.GetResult()));
  }
  else
  {
    return PutApplicationPolicyOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
PutApplicationPolicyOutcomeCallable ServerlessApplicationRepositoryClient::PutApplicationPolicyCallable(const PutApplicationPolicyRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< PutApplicationPolicyOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->PutApplicationPolicy(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::PutApplicationPolicyAsync(const PutApplicationPolicyRequest& request, const PutApplicationPolicyResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->PutApplicationPolicyAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::PutApplicationPolicyAsyncHelper(const PutApplicationPolicyRequest& request, const PutApplicationPolicyResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, PutApplicationPolicy(request), context);
}
// Synchronously unshares an application. Success carries no payload.
UnshareApplicationOutcome ServerlessApplicationRepositoryClient::UnshareApplication(const UnshareApplicationRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("UnshareApplication", "Required field: ApplicationId, is not set");
    return UnshareApplicationOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: POST /applications/{applicationId}/unshare
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  ss << "/unshare";
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_POST, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return UnshareApplicationOutcome(NoResult());
  }
  else
  {
    return UnshareApplicationOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
UnshareApplicationOutcomeCallable ServerlessApplicationRepositoryClient::UnshareApplicationCallable(const UnshareApplicationRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< UnshareApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->UnshareApplication(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::UnshareApplicationAsync(const UnshareApplicationRequest& request, const UnshareApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->UnshareApplicationAsyncHelper( request, handler, context ); } );
}
// Executor-side trampoline for the async variant above.
void ServerlessApplicationRepositoryClient::UnshareApplicationAsyncHelper(const UnshareApplicationRequest& request, const UnshareApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  handler(this, request, UnshareApplication(request), context);
}
// Synchronously updates mutable fields of an application (HTTP PATCH).
UpdateApplicationOutcome ServerlessApplicationRepositoryClient::UpdateApplication(const UpdateApplicationRequest& request) const
{
  if (!request.ApplicationIdHasBeenSet())
  {
    AWS_LOGSTREAM_ERROR("UpdateApplication", "Required field: ApplicationId, is not set");
    return UpdateApplicationOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false));
  }
  // REST path: PATCH /applications/{applicationId}
  Aws::Http::URI uri = m_uri;
  Aws::StringStream ss;
  ss << "/applications/";
  ss << request.GetApplicationId();
  uri.SetPath(uri.GetPath() + ss.str());
  JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_PATCH, Aws::Auth::SIGV4_SIGNER);
  if(outcome.IsSuccess())
  {
    return UpdateApplicationOutcome(UpdateApplicationResult(outcome.GetResult()));
  }
  else
  {
    return UpdateApplicationOutcome(outcome.GetError());
  }
}
// Returns a future for the outcome; the call runs on the client executor.
UpdateApplicationOutcomeCallable ServerlessApplicationRepositoryClient::UpdateApplicationCallable(const UpdateApplicationRequest& request) const
{
  auto task = Aws::MakeShared< std::packaged_task< UpdateApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->UpdateApplication(request); } );
  auto packagedFunction = [task]() { (*task)(); };
  m_executor->Submit(packagedFunction);
  return task->get_future();
}
// Callback-style async variant: handler is invoked on completion.
void ServerlessApplicationRepositoryClient::UpdateApplicationAsync(const UpdateApplicationRequest& request, const UpdateApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const
{
  m_executor->Submit( [this, request, handler, context](){ this->UpdateApplicationAsyncHelper( request, handler, context ); } );
}
void ServerlessApplicationRepositoryClient::UpdateApplicationAsyncHelper(const UpdateApplicationRequest& request, const UpdateApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const<|fim▁hole|><|fim▁end|> | {
handler(this, request, UpdateApplication(request), context);
} |
<|file_name|>0030_author_displayname.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-22 22:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):<|fim▁hole|> ('socialnet', '0029_auto_20161121_0543'),
]
operations = [
migrations.AddField(
model_name='author',
name='displayname',
field=models.CharField(blank=True, max_length=255, null=True),
),
]<|fim▁end|> |
dependencies = [ |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var express = require('express');
var path = require('path');
var favicon = require('serve-favicon');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var partials = require('express-partials');
var methodOverride = require('method-override');
var session = require('express-session');
var routes = require('./routes/index');
var app = express();
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');
app.use(partials());
// uncomment after placing your favicon in /public
app.use(favicon(__dirname + '/public/favicon.ico'));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded());
app.use(cookieParser('Quiz 2015'));
app.use(session());
app.use(methodOverride('_method'));
app.use(express.static(path.join(__dirname, 'public')));
// Dynamic helpers, run on every request.
app.use(function(req, res, next) {
  // Remember the requested path in session.redir so the user can be sent
  // back there after logging in (login/logout paths are excluded).
  if (!req.path.match(/\/login|\/logout/)) {
    req.session.redir = req.path;
  }
  // Make req.session visible to the views.
  res.locals.session = req.session;
  next();
});
// Auto-logout handling: expire an authenticated session after 2 minutes
// of inactivity, except on the logout route itself.
app.use(function(req, res, next) {
  var current_time = new Date();
  var lastdate = req.session.time ? new Date(req.session.time): new Date();
  if (req.session.user && !req.path.match(/\/logout/)) {
    // Compare absolute timestamps. The previous minute-of-the-hour
    // comparison (getMinutes() - 2 > getMinutes()) broke across hour
    // boundaries: a session last touched at :59 never expired at :01,
    // and one touched at :58 "expired" instantly at :01.
    if (current_time.getTime() - lastdate.getTime() > 2 * 60 * 1000) {
      req.session.errors = [{"message": 'Sesión caducada'}];
      delete req.session.user;
      delete req.session.time;
      res.redirect('/login');
    } else {
      // Still active: refresh the inactivity timer.
      req.session.time = new Date();
      next();
    }
  } else {
    next();
  }
});
// Mount the application routes at the site root.
app.use('/', routes);
// catch 404 and forward to the error handlers below
app.use(function(req, res, next) {
  var err = new Error('Not Found');
  err.status = 404;
  next(err);
});
// development error handler
// will print stacktrace (full error object is passed to the view)
if (app.get('env') === 'development') {
  app.use(function(err, req, res, next) {
    res.status(err.status || 500);
    res.render('error', {
      message: err.message,
      error: err,
      errors: []
    });
  });
}
// production error handler
// no stacktraces leaked to user (empty error object in the view)
app.use(function(err, req, res, next) {
  res.status(err.status || 500);
  res.render('error', {
    message: err.message,
    error: {},
    errors: []
  });
});
module.exports = app;
// error handlers |
<|file_name|>commands.py<|end_file_name|><|fim▁begin|>import subprocess<|fim▁hole|>@only_osx
def systemsetup(*args):
subprocess.call(['systemsetup'] + list(args))<|fim▁end|> |
from genes.mac.traits import only_osx
|
<|file_name|>FolderInfo.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.launcher3;
import android.content.ContentValues;
import android.content.Context;
import com.android.launcher3.compat.UserHandleCompat;
import java.util.ArrayList;
/**
* Represents a folder containing shortcuts or apps.
*/
public class FolderInfo extends ItemInfo {
public static final int NO_FLAGS = 0x00000000;
/**
* The folder is locked in sorted mode
*/
public static final int FLAG_ITEMS_SORTED = 0x00000001;
/**
* It is a work folder
*/
public static final int FLAG_WORK_FOLDER = 0x00000002;
/**
* The multi-page animation has run for this folder
*/
public static final int FLAG_MULTI_PAGE_ANIMATION = 0x00000004;
/**
* Whether this folder has been opened
*/
public boolean opened;
public int options;
/**
* The apps and shortcuts
*/
public ArrayList<ShortcutInfo> contents = new ArrayList<ShortcutInfo>();
ArrayList<FolderListener> listeners = new ArrayList<FolderListener>();
public FolderInfo() {
    // Every folder is a FOLDER favorites item owned by the current user.
    itemType = LauncherSettings.Favorites.ITEM_TYPE_FOLDER;
    user = UserHandleCompat.myUserHandle();
}
/**
 * Add an app or shortcut to this folder and notify all listeners.
 *
 * @param item    the shortcut to add
 * @param animate whether listeners should animate the change
 */
public void add(ShortcutInfo item, boolean animate) {
    contents.add(item);
    // NOTE(review): index-based iteration appears deliberate (listener
    // callbacks may mutate the list) — confirm before converting to for-each.
    for (int i = 0; i < listeners.size(); i++) {
        listeners.get(i).onAdd(item);
    }
    itemsChanged(animate);
}
/**
 * Remove an app or shortcut from this folder and notify all listeners.
 * Does not change the DB.
 *
 * @param item    the shortcut to remove
 * @param animate whether listeners should animate the change
 */
public void remove(ShortcutInfo item, boolean animate) {
    contents.remove(item);
    // NOTE(review): index-based iteration appears deliberate (listener
    // callbacks may mutate the list) — confirm before converting to for-each.
    for (int i = 0; i < listeners.size(); i++) {
        listeners.get(i).onRemove(item);
    }
    itemsChanged(animate);
}
/**
 * Rename the folder and notify all listeners of the new title.
 *
 * @param title the new user-visible folder name
 */
public void setTitle(CharSequence title) {
    this.title = title;
    for (int i = 0; i < listeners.size(); i++) {
        listeners.get(i).onTitleChanged(title);
    }
}
/**
 * Serialize this folder's persistent fields (title and option flags) into
 * {@code values} for the launcher favorites database, on top of the base
 * item fields written by the superclass.
 */
@Override
void onAddToDatabase(Context context, ContentValues values) {
    super.onAddToDatabase(context, values);
    values.put(LauncherSettings.Favorites.TITLE, title.toString());
    values.put(LauncherSettings.Favorites.OPTIONS, options);
}
    /** Registers a listener to be notified of content and title changes. */
    public void addListener(FolderListener listener) {
        listeners.add(listener);
    }
    /** Unregisters a previously added listener; no-op if it was never added. */
    public void removeListener(FolderListener listener) {
        listeners.remove(listener);
    }
public void itemsChanged(boolean animate) {
for (int i = 0; i < listeners.size(); i++) {
listeners.get(i).onItemsChanged(animate);
}
}
    /** Callback interface for observers of this folder's contents and title. */
    public interface FolderListener {
        /** Called after an item has been added to the folder. */
        public void onAdd(ShortcutInfo item);
        /** Called after an item has been removed from the folder. */
        public void onRemove(ShortcutInfo item);
        /** Called after the folder title has been changed. */
        public void onTitleChanged(CharSequence title);
        /** Called whenever the contents change; {@code animate} hints the UI. */
        public void onItemsChanged(boolean animate);
    }
public boolean hasOption(int optionFlag) {
return (options & optionFlag) != 0;
}
/**
* @param option flag to set or clear
* @param isEnabled whether to set or clear the flag
* @param context if not null, save changes to the db.
*/
public void setOption(int option, boolean isEnabled, Context context) {
int oldOptions = options;
if (isEnabled) {<|fim▁hole|> if (context != null && oldOptions != options) {
LauncherModel.updateItemInDatabase(context, this);
}
}
}<|fim▁end|> | options |= option;
} else {
options &= ~option;
} |
<|file_name|>Session.js<|end_file_name|><|fim▁begin|>var mongoose = require('mongoose');
var Shape = require('./Shape');
var User = require('./User');
// Create a session model, _id will be assigned by Mongoose
var CanvasSessionSchema = new mongoose.Schema(
{
_id: String,
users: [User],
dateCreated: Date,
dateUpdated: Date,
// canDraw: Boolean,
// canChat: Boolean,
// maxUsers: Number,
sessionProperties: {
canDraw: Boolean,
canChat: Boolean,
maxUsers: Number
},
//canvasModel: { type: Object },
canvasShapes: { type: Array, unique: true, index: true },
messages: Array
},
{ autoIndex: false }
);<|fim▁hole|><|fim▁end|> |
// Make Session available to rest of the application
module.exports = mongoose.model('Session', CanvasSessionSchema); |
<|file_name|>rapidvideo.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para rapidvideo
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urllib,re
from core import scrapertools
from core import logger
from lib.jsbeautifier.unpackers import packer
def test_video_exists( page_url ):
    # Check whether the clip behind page_url is still hosted on rapidvideo.org.
    # Returns (exists, message) as expected by the connector interface.
    logger.info( "[rapidvideo.py] test_video_exists(page_url='%s')" % page_url )
    clip_id = scrapertools.get_match( page_url, 'org/([A-Za-z0-9]+)' )
    embed_url = 'http://www.rapidvideo.org/embed-%s-607x360.html' % clip_id
    page_html = scrapertools.cache_page( embed_url )
    removed = "The file was removed from RapidVideo" in page_html
    if removed:
        return False, "The file not exists or was removed from RapidVideo."
    return True, ""
def get_video_url( page_url, premium = False, user="", password="", video_password="" ):
    # Resolve the direct media URL(s) for a rapidvideo.org page.
    # Returns a list of [label, url] pairs as expected by the player core.
    # The premium/user/password arguments are part of the connector
    # interface and are unused for this free hoster.
    logger.info( "[rapidvideo.py] url=" + page_url )
    video_id = scrapertools.get_match( page_url, 'org/([A-Za-z0-9]+)' )
    url = 'http://www.rapidvideo.org/embed-%s-607x360.html' % video_id
    data = scrapertools.cache_page( url )
    # The real media URL is hidden inside a p.a.c.k.e.d javascript blob.
    packed = scrapertools.get_match( data, "<script type='text/javascript'>eval.function.p,a,c,k,e,.*?</script>" )
    unpacked = packer.unpack( packed )
    media_url = scrapertools.get_match( unpacked, 'file:"([^"]+)"' )
    # Fix: the original built and logged a throwaway list labelled
    # "[fastvideo.me]" (copy-paste leftover from the fastvideo connector)
    # and then discarded it; only the rapidvideo list was ever returned.
    video_urls = []
    video_urls.append( [ scrapertools.get_filename_from_url( media_url )[-4:] + " [rapidvideo.org]", media_url ] )
    for video_url in video_urls:
        logger.info( "[rapidvideo.py] %s - %s" % ( video_url[0], video_url[1] ) )
    return video_urls
# Finds videos from this server in the given text
def find_videos( data ):
    # Scan a text blob for rapidvideo.org links and return a list of
    # [title, url, server_id] entries, one per unique video id.
    #http://www.rapidvideo.org/xr1nb7cfh58a
    patronvideos = 'rapidvideo.org/([A-Za-z0-9]+)'
    logger.info( "[rapidvideo.py] find_videos #" + patronvideos + "#" )
    seen = set()
    results = []
    for video_id in re.compile( patronvideos, re.DOTALL ).findall( data ):
        url = "http://www.rapidvideo.org/" + video_id
        if url in seen:
            logger.info( "  url duplicada=" + url )
            continue
        logger.info( "  url=" + url )
        results.append( [ "[rapidvideo]", url, 'rapidvideo' ] )
        seen.add( url )
    return results
def test():
<|fim▁hole|>
return len( video_urls ) > 0<|fim▁end|> | video_urls = get_video_url( "http://www.rapidvideo.org/xr1nb7cfh58a" ) |
<|file_name|>scrape.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import logging
import sys
from urllib.parse import urljoin
import requests
from lxml import html
from six.moves.urllib.parse import urlparse
from django_docutils.exc import BasedException
logger = logging.getLogger(__name__)
def _request_favicon(url):
    """Download ``url`` and return its bytes if the server serves an image.

    :raises requests.HTTPError: if the response status is not 2xx.
    :raises BasedException: if the response is not an image content type.
    """
    r = requests.get(url)
    r.raise_for_status()
    # Robustness fix: some servers omit Content-Type entirely; treat a
    # missing header as "not an image" instead of raising a KeyError.
    if 'image' not in r.headers.get('Content-Type', ''):
        raise BasedException('Not an image')
    return r.content
def get_favicon(url):
try:
r = requests.get(url)
r.raise_for_status()
# update url if redirected
if r.url != url:
url = r.url
doc = html.fromstring(r.content)
except requests.exceptions.ConnectionError as e:
raise BasedException(f"The website {url} isn't connecting:", e)
paths = ['//link[@rel="shortcut icon"]/@href', '//link[@rel="icon"]/@href']
for path in paths:
# Method 1: to find favicon via "shortcut icon"
favicons = doc.xpath(path)
if len(favicons): # Is pattern found?<|fim▁hole|> except Exception as e:
logger.debug(
'Could not retrieve {favicon_url}: \n{e}'.format(
favicon_url=favicon_url, e=e
)
)
# Method 2: site root/favicon.ico
try:
parsed = urlparse(url)
parsed = parsed._replace(path='/favicon.ico')
favicon_url = parsed.geturl()
return _request_favicon(favicon_url)
except Exception as e:
logger.debug(
'Could not retrieve {favicon_url}.\n{e}'.format(
favicon_url=favicon_url, e=e
)
)
raise BasedException(
"""
Could not retrieve favicon for {url}. Both strategies failed
""".format(
url=url
)
)
if __name__ == '__main__':
    # CLI helper: fetch the favicon for the URL given as argv[1] and save it.
    favicon = get_favicon(sys.argv[1])
    # Idiom fix: use a context manager so the handle is closed even if the
    # write raises; the manual open/close leaked the handle on failure.
    with open('/Users/me/favicon.ico', 'wb') as file_:
        file_.write(favicon)
favicon_url = favicons[0]
favicon_url = urljoin(url, favicon_url)
return _request_favicon(favicon_url) |
<|file_name|>encode.rs<|end_file_name|><|fim▁begin|>extern crate criterion;
use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main};
use image::{ColorType, bmp::BmpEncoder, jpeg::JpegEncoder};
use std::fs::File;
use std::io::{BufWriter, Write, Seek, SeekFrom};
/// One encoding backend measured against three output sinks: a plain
/// `Vec<u8>`, a `BufWriter`-wrapped `Vec<u8>`, and an on-disk `File`.
trait Encoder {
    /// Encode straight into `into`, clearing it first.
    fn encode_raw(&self, into: &mut Vec<u8>, im: &[u8], dims: u32, color: ColorType);
    /// Encode into `into` through a `BufWriter`.
    fn encode_bufvec(&self, into: &mut Vec<u8>, im: &[u8], dims: u32, color: ColorType);
    /// Encode into `file`, rewinding it to the start first.
    fn encode_file(&self, file: &File, im: &[u8], dims: u32, color: ColorType);
}
/// Declarative description of one benchmark family: which encoder to run
/// and the image sizes / color types it is swept over.
#[derive(Clone, Copy)]
struct BenchDef {
    /// Encoder implementation under test.
    with: &'static dyn Encoder,
    /// Format name used in the benchmark group id (`encode-<name>`).
    name: &'static str,
    /// Square image edge lengths, in pixels.
    sizes: &'static [u32],
    /// Color types to encode with.
    colors: &'static [ColorType],
}
/// Registers every benchmark combination declared in `BENCH_DEFS`:
/// each entry pairs an encoder with the sizes and color types to sweep.
fn encode_all(c: &mut Criterion) {
    // `'static` is implied for references in a `const`, so the explicit
    // lifetime was redundant (clippy::redundant_static_lifetimes).
    const BENCH_DEFS: &[BenchDef] = &[
        BenchDef {
            with: &Bmp,
            name: "bmp",
            sizes: &[100u32, 200, 400],
            colors: &[ColorType::L8, ColorType::Rgb8, ColorType::Rgba8],
        },
        BenchDef {
            with: &Jpeg,
            name: "jpeg",
            sizes: &[64u32, 128, 256],
            colors: &[ColorType::L8, ColorType::Rgb8, ColorType::Rgba8],
        },
    ];
    for definition in BENCH_DEFS {
        encode_definition(c, definition)
    }
}
criterion_group!(benches, encode_all);
criterion_main!(benches);<|fim▁hole|>///
/// For compressed formats this is surely not representative of encoding a normal image but it's a
/// start for benchmarking.
/// Benchmarks encoding an all-zero `size` x `size` image through three
/// sinks: a reused raw `Vec`, a `BufWriter`-wrapped `Vec`, and a file.
fn encode_zeroed(group: &mut BenchGroup, with: &dyn Encoder, size: u32, color: ColorType) {
    // NOTE(review): this allocates (size * bpp)^2 bytes, a factor of bpp
    // larger than the size^2 * bpp the encoders actually read — harmless
    // for zeroed input, but confirm the over-allocation is intentional.
    let bytes = size as usize * usize::from(color.bytes_per_pixel());
    let im = vec![0; bytes * bytes];
    group.bench_with_input(BenchmarkId::new(format!("zero-{:?}-rawvec", color), size), &im, |b, image| {
        // Warm-up call sizes the output buffer so the measured iterations
        // do not include the initial reallocation.
        let mut v = vec![];
        with.encode_raw(&mut v, &im, size, color);
        b.iter(|| with.encode_raw(&mut v, image, size, color));
    });
    group.bench_with_input(BenchmarkId::new(format!("zero-{:?}-bufvec", color), size), &im, |b, image| {
        let mut v = vec![];
        with.encode_raw(&mut v, &im, size, color);
        b.iter(|| with.encode_bufvec(&mut v, image, size, color));
    });
    group.bench_with_input(BenchmarkId::new(format!("zero-{:?}-file", color), size), &im, |b, image| {
        // The scratch file name is fixed regardless of the format under test.
        let file = File::create("temp.bmp").unwrap();
        b.iter(|| with.encode_file(&file, image, size, color));
    });
}
/// Expands one `BenchDef` into a criterion group covering every
/// (color, size) combination it declares.
fn encode_definition(criterion: &mut Criterion, def: &BenchDef) {
    let mut group = criterion.benchmark_group(format!("encode-{}", def.name));
    for &color_type in def.colors {
        for &side in def.sizes {
            encode_zeroed(&mut group, def.with, side, color_type);
        }
    }
}
/// Marker type selecting the BMP encoder backend.
struct Bmp;
/// Marker type selecting the JPEG encoder backend.
struct Jpeg;
/// Lowest-level encoding operation: write `im` (a `dims` x `dims` image of
/// the given `color` type) into an arbitrary writer.
trait EncoderBase {
    fn encode(&self, into: impl Write, im: &[u8], dims: u32, color: ColorType);
}
/// Blanket adapter: any `EncoderBase` (a single `encode` into a writer)
/// gets the three sink-specific `Encoder` entry points for free.
impl<T: EncoderBase> Encoder for T {
    fn encode_raw(&self, into: &mut Vec<u8>, im: &[u8], dims: u32, color: ColorType) {
        // Clearing (not re-allocating) keeps the Vec's capacity across runs.
        into.clear();
        self.encode(into, im, dims, color);
    }
    fn encode_bufvec(&self, into: &mut Vec<u8>, im: &[u8], dims: u32, color: ColorType) {
        into.clear();
        let buf = BufWriter::new(into);
        self.encode(buf, im, dims, color);
    }
    fn encode_file(&self, mut file: &File, im: &[u8], dims: u32, color: ColorType) {
        // Rewind so every iteration overwrites the same file region.
        file.seek(SeekFrom::Start(0)).unwrap();
        let buf = BufWriter::new(file);
        self.encode(buf, im, dims, color);
    }
}
impl EncoderBase for Bmp {
    /// Encode `im` as a `size` x `size` BMP into `into`.
    fn encode(&self, mut into: impl Write, im: &[u8], size: u32, color: ColorType) {
        BmpEncoder::new(&mut into)
            .encode(im, size, size, color)
            .unwrap();
    }
}
impl EncoderBase for Jpeg {
fn encode(&self, mut into: impl Write, im: &[u8], size: u32, color: ColorType) {
let mut x = JpegEncoder::new(&mut into);
x.encode(im, size, size, color).unwrap();
}
}<|fim▁end|> |
type BenchGroup<'a> = criterion::BenchmarkGroup<'a, criterion::measurement::WallTime>;
/// Benchmarks encoding a zeroed image. |
<|file_name|>extension_class_impl.rs<|end_file_name|><|fim▁begin|>/*
* Swaggy Jenkins
*
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
* Generated by: https://openapi-generator.tech
*/
/// Partial model of Jenkins' `ExtensionClassImpl` API resource, generated
/// from the Swagger / OpenAPI specification (v1.1.2-pre.0).
#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
pub struct ExtensionClassImpl {
    /// Jenkins `_class` type discriminator — presumably the Java class
    /// name of the resource; omitted from JSON when `None`.
    #[serde(rename = "_class", skip_serializing_if = "Option::is_none")]
    pub _class: Option<String>,
    /// Related links for this resource (shape defined by
    /// `ExtensionClassImpllinks`); omitted from JSON when `None`.
    #[serde(rename = "_links", skip_serializing_if = "Option::is_none")]
    pub _links: Option<Box<crate::models::ExtensionClassImpllinks>>,
    /// Extension class names; omitted from JSON when `None`.
    #[serde(rename = "classes", skip_serializing_if = "Option::is_none")]
    pub classes: Option<Vec<String>>,
}
impl ExtensionClassImpl {
pub fn new() -> ExtensionClassImpl {
ExtensionClassImpl {
_class: None,<|fim▁hole|> classes: None,
}
}
}<|fim▁end|> | _links: None, |
<|file_name|>memory.py<|end_file_name|><|fim▁begin|>"""
Ecks plugin to collect system memory usage information
Copyright 2011 Chris Read ([email protected])
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def get_memory(parent, host, community):
""" This is a plugin to be loaded by Ecks
return a tuple containing (total_swap, avail_swap, total_real, avail_real, mem_buffer, mem_cached). Values are in kiloBytes
"""
memory = (1,3,6,1,4,1,2021,4) # UCD-SNMP-MIB
data = parent.get_snmp_data(host, community, memory, 1)
if data:
return map(parent._build_answer,
parent._extract(data, int, 3),<|fim▁hole|> parent._extract(data, int, 5),
parent._extract(data, int, 6),
parent._extract(data, int, 14),
parent._extract(data, int, 15),
)[0]<|fim▁end|> | parent._extract(data, int, 4), |
<|file_name|>root.js<|end_file_name|><|fim▁begin|>import {combineEpics} from 'redux-observable';
import {combineReducers} from 'redux';
import {
disposePastebinEpic,
pastebinLayoutEpic,
pastebinEpic,
pastebinTokenEpic,
pastebinTokenRejectedEpic,
pastebinReducer,
pastebinContentEpic,
pastebinContentRejectedEpic,
} from './pastebin';
import {
firecoReducer,
firecoEditorsEpic,
firecoActivateEpic,
firecoEditorEpic,
firecoChatEpic,
firecoPersistableComponentEpic,
} from './fireco';
import {
configureMonacoModelsEpic,
updateMonacoModelsEpic,
configureMonacoThemeSwitchEpic,
monacoReducer,
} from './monaco';
import {
monacoEditorsEpic,
monacoEditorsReducer,
mountedEditorEpic,<|fim▁hole|>import {
updatePlaygroundReducer,
} from './playground';
import {updateBundleReducer} from './liveExpressionStore';
// Single epic handed to redux-observable: combines the pastebin, monaco
// and fireco side-effect streams into one root epic.
export const rootEpic = combineEpics(
    disposePastebinEpic,
    pastebinLayoutEpic,
    pastebinEpic,
    pastebinContentEpic,
    pastebinContentRejectedEpic,
    pastebinTokenEpic,
    pastebinTokenRejectedEpic,
    mountedEditorEpic,
    configureMonacoModelsEpic,
    updateMonacoModelsEpic,
    configureMonacoThemeSwitchEpic,
    monacoEditorsEpic,
    // updatePlaygroundEpic,
    // updatePlaygroundInstrumentationEpic,
    firecoEditorsEpic,
    firecoActivateEpic,
    firecoEditorEpic,
    firecoChatEpic,
    firecoPersistableComponentEpic,
);
// Root reducer: state slice names mirror the reducer function names below.
export const rootReducer = combineReducers({
    pastebinReducer,
    monacoReducer,
    monacoEditorsReducer,
    firecoReducer,
    updateBundleReducer,
    updatePlaygroundReducer,
});
|
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from datetime import datetime
import json
from urlparse import urlparse
from django.core.urlresolvers import reverse
from django.db import reset_queries
from django.http import QueryDict
from django.test.utils import override_settings
import mock
from mock import patch
from nose.tools import eq_, ok_
import mkt
import mkt.regions
from mkt.api.tests.test_oauth import RestOAuth
from mkt.developers.models import ActivityLog
from mkt.prices.models import AddonPurchase
from mkt.ratings.models import Review, ReviewFlag
from mkt.site.fixtures import fixture
from mkt.site.utils import app_factory, version_factory
from mkt.webapps.models import AddonExcludedRegion, AddonUser, Webapp
from mkt.users.models import UserProfile
@mock.patch('mkt.webapps.models.Webapp.get_cached_manifest', mock.Mock)
class TestRatingResource(RestOAuth, mkt.site.tests.MktPaths):
fixtures = fixture('user_2519', 'webapp_337141')
    def setUp(self):
        # Fixture app 337141 plus two users: 2519 (the authenticated client
        # user) and 31337 (a second, unrelated reviewer).
        super(TestRatingResource, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        self.user2 = UserProfile.objects.get(pk=31337)
        self.list_url = reverse('ratings-list')
def _get_url(self, url, client=None, **kwargs):
if client is None:
client = self.client
res = client.get(url, kwargs)
data = json.loads(res.content)
return res, data
def _get_filter(self, client=None, expected_status=200, **params):
res, data = self._get_url(self.list_url, client=client, **params)
eq_(res.status_code, expected_status)
if expected_status == 200:
eq_(len(data['objects']), 1)
return res, data
    def _compare_review_data(self, client, data, review):
        # Check a serialized review dict against its model instance; the
        # `is_author` key is only present for authenticated clients.
        self.assertApiUrlEqual(data['app'], '/apps/app/337141/')
        eq_(data['body'], review.body)
        self.assertCloseToNow(data['created'], now=review.created)
        self.assertCloseToNow(data['modified'], now=review.modified)
        eq_(data['rating'], review.rating)
        eq_(data['report_spam'],
            reverse('ratings-flag', kwargs={'pk': review.pk}))
        eq_(data['resource_uri'],
            reverse('ratings-detail', kwargs={'pk': review.pk}))
        eq_(data['user']['display_name'], review.user.display_name)
        eq_(data['version']['version'], review.version.version)
        eq_(data['version']['resource_uri'],
            reverse('version-detail', kwargs={'pk': review.version.pk}))
        if client != self.anon:
            eq_(data['is_author'], review.user == self.user)
        else:
            ok_('is_author' not in data)
def test_has_cors(self):
self.assertCORS(self.client.get(self.list_url),
'get', 'post', 'put', 'delete')
def test_options(self):
res = self.anon.options(self.list_url)
eq_(res.status_code, 200)
data = json.loads(res.content)
ok_('application/json' in data['renders'])
ok_('application/json' in data['parses'])
def test_get_empty_with_app(self):
AddonUser.objects.create(user=self.user, addon=self.app)
res, data = self._get_url(self.list_url, app=self.app.pk)
eq_(res.status_code, 200)
eq_(data['info']['average'], self.app.average_rating)
eq_(data['info']['slug'], self.app.app_slug)
assert not data['user']['can_rate']
assert not data['user']['has_rated']
def test_get(self, client=None):
first_version = self.app.current_version
rev = Review.objects.create(addon=self.app, user=self.user,
version=first_version,
body=u'I lôve this app',
rating=5)
rev.update(created=self.days_ago(2))
rev2 = Review.objects.create(addon=self.app, user=self.user2,
version=first_version,
body=u'I also lôve this app',<|fim▁hole|> extra_app = app_factory()
Review.objects.create(addon=extra_app, user=self.user,
version=extra_app.current_version,
body=u'I häte this extra app',
rating=1)
self.app.total_reviews = 2
ver = version_factory(addon=self.app, version='2.0',
file_kw=dict(status=mkt.STATUS_PUBLIC))
self.app.update_version()
reset_queries()
res, data = self._get_url(self.list_url, app=self.app.pk,
client=client)
eq_(len(data['objects']), 2)
self._compare_review_data(client, data['objects'][0], rev2)
self._compare_review_data(client, data['objects'][1], rev)
eq_(data['info']['average'], self.app.average_rating)
eq_(data['info']['slug'], self.app.app_slug)
eq_(data['info']['current_version'], ver.version)
if client != self.anon:
eq_(data['user']['can_rate'], True)
eq_(data['user']['has_rated'], True)
return res
def test_get_304(self):
etag = self.test_get(client=self.anon)['ETag']
res = self.anon.get(self.list_url, {'app': self.app.pk},
HTTP_IF_NONE_MATCH='%s' % etag)
eq_(res.status_code, 304)
@override_settings(DEBUG=True)
def test_get_anonymous_queries(self):
first_version = self.app.current_version
Review.objects.create(addon=self.app, user=self.user,
version=first_version,
body=u'I lôve this app',
rating=5)
Review.objects.create(addon=self.app, user=self.user2,
version=first_version,
body=u'I also lôve this app',
rating=4)
self.app.total_reviews = 2
version_factory(addon=self.app, version='2.0',
file_kw=dict(status=mkt.STATUS_PUBLIC))
self.app.update_version()
reset_queries()
with self.assertNumQueries(7):
# 7 queries:
# - 1 SAVEPOINT
# - 2 for the Reviews queryset and the translations
# - 2 for the Version associated to the reviews (qs + translations)
# - 1 for the File attached to the Version
# - 1 RELEASE SAVEPOINT
#
# Notes:
# - In prod, we actually do COMMIT/ROLLBACK and not
# SAVEPOINT/RELEASE SAVEPOINT. It would be nice to avoid those for
# all GET requests in the API, but it's not trivial to do for
# ViewSets which implement multiple actions through the same view
# function (non_atomic_requests() really want to be applied to the
# view function).
#
# - The query count is slightly higher in prod. In tests, we patch
# get_app() to avoid the app queries to pollute the queries count.
#
# Once we are on django 1.7, we'll be able to play with Prefetch
# to reduce the number of queries further by customizing the
# queryset used for the complex related objects like versions and
# webapp.
with patch('mkt.ratings.views.RatingViewSet.get_app') as get_app:
get_app.return_value = self.app
res, data = self._get_url(self.list_url, client=self.anon,
app=self.app.pk)
def test_is_flagged_false(self):
Review.objects.create(addon=self.app, user=self.user2, body='yes')
res, data = self._get_url(self.list_url, app=self.app.pk)
eq_(data['objects'][0]['is_author'], False)
eq_(data['objects'][0]['has_flagged'], False)
def test_is_flagged_is_author(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
res, data = self._get_url(self.list_url, app=self.app.pk)
eq_(data['objects'][0]['is_author'], True)
eq_(data['objects'][0]['has_flagged'], False)
def test_is_flagged_true(self):
rat = Review.objects.create(addon=self.app, user=self.user2, body='ah')
ReviewFlag.objects.create(review=rat, user=self.user,
flag=ReviewFlag.SPAM)
res, data = self._get_url(self.list_url, app=self.app.pk)
eq_(data['objects'][0]['is_author'], False)
eq_(data['objects'][0]['has_flagged'], True)
def test_get_detail(self):
fmt = '%Y-%m-%dT%H:%M:%S'
Review.objects.create(addon=self.app, user=self.user2, body='no')
rev = Review.objects.create(addon=self.app, user=self.user, body='yes')
url = reverse('ratings-detail', kwargs={'pk': rev.pk})
res, data = self._get_url(url)
self.assertCloseToNow(datetime.strptime(data['modified'], fmt))
self.assertCloseToNow(datetime.strptime(data['created'], fmt))
eq_(data['body'], 'yes')
def test_filter_self(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
Review.objects.create(addon=self.app, user=self.user2, body='no')
self._get_filter(user=self.user.pk)
def test_filter_mine(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
Review.objects.create(addon=self.app, user=self.user2, body='no')
self._get_filter(user='mine')
def test_filter_mine_anonymous(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
self._get_filter(user='mine', client=self.anon, expected_status=403)
def test_filter_by_app_slug(self):
self.app2 = app_factory()
Review.objects.create(addon=self.app2, user=self.user, body='no')
Review.objects.create(addon=self.app, user=self.user, body='yes')
res, data = self._get_filter(app=self.app.app_slug)
eq_(data['info']['slug'], self.app.app_slug)
eq_(data['info']['current_version'], self.app.current_version.version)
def test_filter_by_app_pk(self):
self.app2 = app_factory()
Review.objects.create(addon=self.app2, user=self.user, body='no')
Review.objects.create(addon=self.app, user=self.user, body='yes')
res, data = self._get_filter(app=self.app.pk)
eq_(data['info']['slug'], self.app.app_slug)
eq_(data['info']['current_version'], self.app.current_version.version)
def test_filter_by_invalid_app(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
self._get_filter(app='wrongslug', expected_status=404)
self._get_filter(app=2465478, expected_status=404)
@patch('mkt.ratings.views.get_region')
def test_filter_by_nonpublic_app(self, get_region_mock):
Review.objects.create(addon=self.app, user=self.user, body='yes')
self.app.update(status=mkt.STATUS_PENDING)
get_region_mock.return_value = mkt.regions.USA
res, data = self._get_filter(
app=self.app.app_slug, expected_status=403)
eq_(data['detail'], 'The app requested is not public or not available '
'in region "us".')
def test_filter_by_nonpublic_app_admin(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
self.grant_permission(self.user, 'Apps:Edit')
self.app.update(status=mkt.STATUS_PENDING)
self._get_filter(app=self.app.app_slug)
def test_filter_by_nonpublic_app_owner(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
AddonUser.objects.create(user=self.user, addon=self.app)
self.app.update(status=mkt.STATUS_PENDING)
self._get_filter(app=self.app.app_slug)
@patch('mkt.ratings.views.get_region')
def test_filter_by_app_excluded_in_region(self, get_region_mock):
Review.objects.create(addon=self.app, user=self.user, body='yes')
AddonExcludedRegion.objects.create(addon=self.app,
region=mkt.regions.BRA.id)
get_region_mock.return_value = mkt.regions.BRA
res, data = self._get_filter(
app=self.app.app_slug, expected_status=403)
eq_(data['detail'], 'The app requested is not public or not available '
'in region "br".')
@patch('mkt.ratings.views.get_region')
def test_filter_by_app_excluded_in_region_admin(self, get_region_mock):
Review.objects.create(addon=self.app, user=self.user, body='yes')
self.grant_permission(self.user, 'Apps:Edit')
AddonExcludedRegion.objects.create(addon=self.app,
region=mkt.regions.BRA.id)
get_region_mock.return_value = mkt.regions.BRA
self._get_filter(app=self.app.app_slug)
@patch('mkt.ratings.views.get_region')
def test_filter_by_app_excluded_in_region_owner(self, get_region_mock):
Review.objects.create(addon=self.app, user=self.user, body='yes')
AddonUser.objects.create(user=self.user, addon=self.app)
AddonExcludedRegion.objects.create(addon=self.app,
region=mkt.regions.BRA.id)
get_region_mock.return_value = mkt.regions.BRA
self._get_filter(app=self.app.app_slug)
def test_anonymous_get_list_without_app(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
res, data = self._get_url(self.list_url, client=self.anon)
eq_(res.status_code, 200)
assert 'user' not in data
eq_(len(data['objects']), 1)
eq_(data['objects'][0]['body'], 'yes')
def test_anonymous_get_list_app(self):
res, data = self._get_url(self.list_url, app=self.app.app_slug,
client=self.anon)
eq_(res.status_code, 200)
eq_(data['user'], None)
def test_non_owner(self):
res, data = self._get_url(self.list_url, app=self.app.app_slug)
assert data['user']['can_rate']
assert not data['user']['has_rated']
@patch('mkt.webapps.models.Webapp.get_excluded_region_ids')
def test_can_rate_unpurchased(self, exclude_mock):
exclude_mock.return_value = []
self.app.update(premium_type=mkt.ADDON_PREMIUM)
res, data = self._get_url(self.list_url, app=self.app.app_slug)
assert not res.json['user']['can_rate']
@patch('mkt.webapps.models.Webapp.get_excluded_region_ids')
def test_can_rate_purchased(self, exclude_mock):
exclude_mock.return_value = []
self.app.update(premium_type=mkt.ADDON_PREMIUM)
AddonPurchase.objects.create(addon=self.app, user=self.user)
res, data = self._get_url(self.list_url, app=self.app.app_slug)
assert res.json['user']['can_rate']
def test_isowner_true(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
res, data = self._get_url(self.list_url, app=self.app.app_slug)
data = json.loads(res.content)
eq_(data['objects'][0]['is_author'], True)
def test_isowner_false(self):
Review.objects.create(addon=self.app, user=self.user2, body='yes')
res, data = self._get_url(self.list_url, app=self.app.app_slug)
data = json.loads(res.content)
eq_(data['objects'][0]['is_author'], False)
def test_isowner_anonymous(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
res, data = self._get_url(self.list_url, app=self.app.app_slug,
client=self.anon)
data = json.loads(res.content)
self.assertNotIn('is_author', data['objects'][0])
def test_already_rated(self):
Review.objects.create(addon=self.app, user=self.user, body='yes')
res, data = self._get_url(self.list_url, app=self.app.app_slug)
data = json.loads(res.content)
assert data['user']['can_rate']
assert data['user']['has_rated']
def test_already_rated_version(self):
self.app.update(is_packaged=True)
Review.objects.create(addon=self.app, user=self.user, body='yes')
version_factory(addon=self.app, version='3.0')
self.app.update_version()
res, data = self._get_url(self.list_url, app=self.app.app_slug)
data = json.loads(res.content)
assert data['user']['can_rate']
assert not data['user']['has_rated']
def _create(self, data=None, anonymous=False, version=None):
version = version or self.app.current_version
default_data = {
'app': self.app.id,
'body': 'Rocking the free web.',
'rating': 5,
'version': version.id
}
if data:
default_data.update(data)
json_data = json.dumps(default_data)
client = self.anon if anonymous else self.client
res = client.post(self.list_url, data=json_data)
try:
res_data = json.loads(res.content)
except ValueError:
res_data = res.content
return res, res_data
def test_anonymous_create_fails(self):
res, data = self._create(anonymous=True)
eq_(res.status_code, 403)
@patch('mkt.ratings.views.record_action')
def test_create(self, record_action):
log_review_id = mkt.LOG.ADD_REVIEW.id
eq_(ActivityLog.objects.filter(action=log_review_id).count(), 0)
res, data = self._create()
eq_(201, res.status_code)
pk = Review.objects.latest('pk').pk
eq_(data['body'], 'Rocking the free web.')
eq_(data['rating'], 5)
eq_(data['resource_uri'], reverse('ratings-detail', kwargs={'pk': pk}))
eq_(data['report_spam'], reverse('ratings-flag', kwargs={'pk': pk}))
eq_(record_action.call_count, 1)
eq_(record_action.call_args[0][0], 'new-review')
eq_(record_action.call_args[0][2], {'app-id': 337141})
eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
return res, data
def test_create_packaged(self):
self.app.update(is_packaged=True)
res, data = self.test_create()
eq_(data['version']['version'], '1.0')
def test_create_bad_data(self):
res, data = self._create({'body': None})
eq_(400, res.status_code)
assert 'body' in data
def test_create_nonexistent_app(self):
res, data = self._create({'app': -1})
eq_(400, res.status_code)
assert 'app' in data
@patch('mkt.ratings.serializers.get_region')
def test_create_for_nonregion(self, get_region_mock):
AddonExcludedRegion.objects.create(addon=self.app,
region=mkt.regions.BRA.id)
get_region_mock.return_value = mkt.regions.BRA
res, data = self._create()
eq_(403, res.status_code)
def test_create_for_nonpublic(self):
self.app.update(status=mkt.STATUS_PENDING)
res, data = self._create(version=self.app.latest_version)
eq_(403, res.status_code)
def test_create_duplicate_rating(self):
self._create()
res, data = self._create()
eq_(409, res.status_code)
def test_new_rating_for_new_version(self):
self.app.update(is_packaged=True)
self._create()
version = version_factory(addon=self.app, version='3.0')
self.app.update_version()
eq_(self.app.reload().current_version, version)
res, data = self._create()
eq_(201, res.status_code)
eq_(data['version']['version'], '3.0')
def test_create_duplicate_rating_packaged(self):
self.app.update(is_packaged=True)
self._create()
res, data = self._create()
eq_(409, res.status_code)
def test_create_own_app(self):
AddonUser.objects.create(user=self.user, addon=self.app)
res, data = self._create()
eq_(403, res.status_code)
@patch('mkt.webapps.models.Webapp.get_excluded_region_ids')
def test_rate_unpurchased_premium(self, exclude_mock):
exclude_mock.return_value = []
self.app.update(premium_type=mkt.ADDON_PREMIUM)
res, data = self._create()
eq_(403, res.status_code)
@patch('mkt.webapps.models.Webapp.get_excluded_region_ids')
def test_rate_purchased_premium(self, exclude_mock):
exclude_mock.return_value = []
self.app.update(premium_type=mkt.ADDON_PREMIUM)
AddonPurchase.objects.create(addon=self.app, user=self.user)
res, data = self._create()
eq_(201, res.status_code)
def _create_default_review(self):
# Create the original review
default_data = {
'body': 'Rocking the free web.',
'rating': 5
}
res, res_data = self._create(default_data)
return res, res_data
def test_patch_not_implemented(self):
self._create_default_review()
pk = Review.objects.latest('id').pk
json_data = json.dumps({
'body': 'Totally rocking the free web.',
})
res = self.client.patch(reverse('ratings-detail', kwargs={'pk': pk}),
data=json_data)
# Should return a 405 but permission check is done first. It's fine.
eq_(res.status_code, 403)
def _update(self, updated_data, pk=None):
# Update the review
if pk is None:
pk = Review.objects.latest('id').pk
json_data = json.dumps(updated_data)
res = self.client.put(reverse('ratings-detail', kwargs={'pk': pk}),
data=json_data)
try:
res_data = json.loads(res.content)
except ValueError:
res_data = res.content
return res, res_data
def test_update(self):
rev = Review.objects.create(addon=self.app, user=self.user,
body='abcd', ip_address='1.2.3.4')
new_data = {
'body': 'Totally rocking the free web.',
'rating': 4,
}
log_review_id = mkt.LOG.EDIT_REVIEW.id
eq_(ActivityLog.objects.filter(action=log_review_id).count(), 0)
res, data = self._update(new_data)
eq_(res.status_code, 200)
eq_(data['body'], new_data['body'])
eq_(data['rating'], new_data['rating'])
rev.reload()
eq_(rev.body, new_data['body'])
eq_(rev.rating, new_data['rating'])
eq_(rev.user, self.user)
eq_(rev.ip_address, '1.2.3.4')
eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_update_admin(self):
        # An admin with Apps:Edit may edit another user's review; the
        # original authorship (user2) and IP address are preserved.
        self.grant_permission(self.user, 'Apps:Edit')
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='abcd', ip_address='1.2.3.4')
        new_data = {
            'body': 'Edited by admin',
            'rating': 1,
        }
        log_review_id = mkt.LOG.EDIT_REVIEW.id
        res = self.client.put(reverse('ratings-detail', kwargs={'pk': rev.pk}),
                              json.dumps(new_data))
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['body'], new_data['body'])
        eq_(data['rating'], new_data['rating'])
        rev.reload()
        eq_(rev.body, new_data['body'])
        eq_(rev.rating, new_data['rating'])
        eq_(rev.user, self.user2)
        eq_(rev.ip_address, '1.2.3.4')
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
def test_update_bad_data(self):
self._create_default_review()
res, data = self._update({'body': None})
eq_(400, res.status_code)
assert 'body' in data
    def test_update_change_app(self):
        # The 'app' field is effectively read-only on update: the response
        # keeps the original app even when a different app pk is submitted.
        _, previous_data = self._create_default_review()
        self.app2 = app_factory()
        new_data = {
            'body': 'Totally rocking the free web.',
            'rating': 4,
            'app': self.app2.pk
        }
        res, data = self._update(new_data)
        eq_(res.status_code, 200)
        eq_(data['body'], new_data['body'])
        eq_(data['rating'], new_data['rating'])
        eq_(data['app'], previous_data['app'])
    def test_update_comment_not_mine(self):
        # Editing another user's review is forbidden and leaves it unchanged.
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        res = self.client.put(reverse('ratings-detail', kwargs={'pk': rev.pk}),
                              json.dumps({'body': 'no', 'rating': 1}))
        eq_(res.status_code, 403)
        rev.reload()
        eq_(rev.body, 'yes')
    def test_delete_app_mine(self):
        # An app author may delete any review on their own app; the
        # deletion is recorded as a DELETE_REVIEW activity log entry.
        AddonUser.objects.filter(addon=self.app).update(user=self.user)
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 204)
        eq_(Review.objects.count(), 0)
        log_review_id = mkt.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_delete_comment_mine(self):
        # A user may delete their own review; deletion is logged.
        rev = Review.objects.create(addon=self.app, user=self.user, body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 204)
        eq_(Review.objects.count(), 0)
        log_review_id = mkt.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_delete_addons_admin(self):
        # An admin with Apps:Edit may delete any user's review.
        self.grant_permission(self.user, 'Apps:Edit')
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 204)
        eq_(Review.objects.count(), 0)
        log_review_id = mkt.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_delete_users_admin(self):
        # An admin with Users:Edit may also delete any user's review.
        self.grant_permission(self.user, 'Users:Edit')
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 204)
        eq_(Review.objects.count(), 0)
        log_review_id = mkt.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_delete_not_mine(self):
        # Without ownership, authorship or admin rights deletion is
        # forbidden; clearing authors strips the app-author privilege.
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        self.app.authors.clear()
        res = self.client.delete(url)
        eq_(res.status_code, 403)
        eq_(Review.objects.count(), 1)
        log_review_id = mkt.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 0)
def test_delete_not_there(self):
url = reverse('ratings-detail', kwargs={'pk': 123})
res = self.client.delete(url)
eq_(res.status_code, 404)
log_review_id = mkt.LOG.DELETE_REVIEW.id
eq_(ActivityLog.objects.filter(action=log_review_id).count(), 0)
class TestRatingResourcePagination(RestOAuth, mkt.site.tests.MktPaths):
    """Pagination behaviour of the ratings list endpoint."""

    fixtures = fixture('user_2519', 'user_999', 'webapp_337141')

    def setUp(self):
        super(TestRatingResourcePagination, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        self.user2 = UserProfile.objects.get(pk=31337)
        self.user3 = UserProfile.objects.get(pk=999)
        self.url = reverse('ratings-list')

    def test_pagination(self):
        # Three reviews ordered newest-first; with limit=2 the first page
        # holds two reviews and links forward, the second holds one and
        # links back.
        first_version = self.app.current_version
        rev1 = Review.objects.create(addon=self.app, user=self.user,
                                     version=first_version,
                                     body=u'I häte this app',
                                     rating=0)
        rev2 = Review.objects.create(addon=self.app, user=self.user2,
                                     version=first_version,
                                     body=u'I lôve this app',
                                     rating=5)
        rev3 = Review.objects.create(addon=self.app, user=self.user3,
                                     version=first_version,
                                     body=u'Blurp.',
                                     rating=3)
        rev1.update(created=self.days_ago(3))
        rev2.update(created=self.days_ago(2))
        self.app.update(total_reviews=3)
        res = self.client.get(self.url, {'app': self.app.pk, 'limit': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        eq_(data['objects'][0]['body'], rev3.body)
        eq_(data['objects'][1]['body'], rev2.body)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['limit'], 2)
        eq_(data['meta']['previous'], None)
        eq_(data['meta']['offset'], 0)
        next = urlparse(data['meta']['next'])
        eq_(next.path, self.url)
        eq_(QueryDict(next.query).dict(),
            {'app': str(self.app.pk), 'limit': '2', 'offset': '2'})
        res = self.client.get(self.url,
                              {'app': self.app.pk, 'limit': 2, 'offset': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        eq_(data['objects'][0]['body'], rev1.body)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['limit'], 2)
        prev = urlparse(data['meta']['previous'])
        # Bug fix: this assertion previously re-checked ``next.path``; the
        # second page's "previous" link is what must point at the list URL.
        eq_(prev.path, self.url)
        eq_(QueryDict(prev.query).dict(),
            {'app': str(self.app.pk), 'limit': '2', 'offset': '0'})
        eq_(data['meta']['offset'], 2)
        eq_(data['meta']['next'], None)

    def test_total_count(self):
        # total_count is taken from the app's denormalized total_reviews
        # only when the listing is filtered by app.
        Review.objects.create(addon=self.app, user=self.user,
                              version=self.app.current_version,
                              body=u'I häte this app',
                              rating=0)
        self.app.update(total_reviews=42)
        res = self.client.get(self.url)
        data = json.loads(res.content)
        # We are not passing an app, so the app's total_reviews isn't used.
        eq_(data['meta']['total_count'], 1)
        # With an app however, it should be used as the total count.
        res = self.client.get(self.url, data={'app': self.app.pk})
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 42)

    def test_pagination_invalid(self):
        # A garbage offset is ignored rather than causing a server error.
        res = self.client.get(self.url, data={'offset': '%E2%98%83'})
        eq_(res.status_code, 200)
class TestReviewFlagResource(RestOAuth, mkt.site.tests.MktPaths):
    """Flagging reviews through the ratings-flag endpoint."""

    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestReviewFlagResource, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        self.user2 = UserProfile.objects.get(pk=31337)
        self.rating = Review.objects.create(addon=self.app,
                                            user=self.user2, body='yes')
        self.flag_url = reverse('ratings-flag', kwargs={'pk': self.rating.pk})

    def test_has_cors(self):
        self.assertCORS(self.client.post(self.flag_url), 'post')

    def test_flag(self):
        data = json.dumps({'flag': ReviewFlag.SPAM})
        res = self.client.post(self.flag_url, data=data)
        eq_(res.status_code, 201)
        rf = ReviewFlag.objects.get(review=self.rating)
        eq_(rf.user, self.user)
        eq_(rf.flag, ReviewFlag.SPAM)
        eq_(rf.note, '')

    def test_flag_note(self):
        # Supplying a free-form note coerces the stored flag to OTHER,
        # regardless of the flag value that was posted.
        note = 'do not want'
        data = json.dumps({'flag': ReviewFlag.SPAM, 'note': note})
        res = self.client.post(self.flag_url, data=data)
        eq_(res.status_code, 201)
        rf = ReviewFlag.objects.get(review=self.rating)
        eq_(rf.user, self.user)
        eq_(rf.flag, ReviewFlag.OTHER)
        eq_(rf.note, note)

    def test_flag_anon(self):
        # Anonymous flagging is permitted; the flag is stored with no user.
        data = json.dumps({'flag': ReviewFlag.SPAM})
        res = self.anon.post(self.flag_url, data=data)
        eq_(res.status_code, 201)
        rf = ReviewFlag.objects.get(review=self.rating)
        eq_(rf.user, None)
        eq_(rf.flag, ReviewFlag.SPAM)
        eq_(rf.note, '')

    def test_flag_conflict(self):
        # A user may flag a given review only once; a second POST is 409.
        data = json.dumps({'flag': ReviewFlag.SPAM})
        res = self.client.post(self.flag_url, data=data)
        res = self.client.post(self.flag_url, data=data)
        eq_(res.status_code, 409)
<|file_name|>thread_sched.hh<|end_file_name|><|fim▁begin|>/// @file core/thread_sched.hh
// Uniqos -- Unique Operating System
// (C) 2012-2015 KATO Takeshi
//
// Uniqos is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// any later version.
//
// Uniqos is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#ifndef CORE_THREAD_SCHED_HH_
#define CORE_THREAD_SCHED_HH_
#include <core/thread.hh>
class cpu_node;
class mempool;
/// Per-CPU thread scheduler.
/// Owns the ready/sleeping queues for the threads attached to one cpu_node
/// and tracks which thread is currently running on that CPU.
/// (Fix: removed a dataset-extraction sentinel token that was fused into the
/// class header and made it invalid C++.)
class thread_sched
{
public:
	thread_sched(cpu_node* _owner_cpu);

	void init();

	// Start scheduling; returns the first thread to run.
	cause::pair<thread*> start();

	// Thread attachment / lifecycle.
	cause::t attach_boot_thread(thread* t);
	void attach(thread* t);
	void detach(thread* t);

	// Queue transitions. The *_np variants are the non-preemptible forms
	// (callable with preemption disabled), mirroring ready()/sleep.
	thread* sleep_current_thread_np();
	void ready(thread* t);
	void ready_np(thread* t);

	thread* get_running_thread() { return running_thread; }
	void set_running_thread(thread* t);

	// Pick the next runnable thread / retire an exiting one.
	thread* switch_next_thread();
	thread* exit_thread(thread* t);

	void dump();

private:
	void _ready(thread* t);

private:
	cpu_node* const owner_cpu;

	thread* running_thread;

	// Guards the queues and thread state below.
	spin_rwlock thread_state_lock;

	typedef fchain<thread, &thread::thread_sched_chainnode> thread_chain;
	thread_chain ready_queue;
	thread_chain sleeping_queue;
};
#endif // include guard<|fim▁end|> | |
<|file_name|>PublishTest.java<|end_file_name|><|fim▁begin|>package org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.set;
import java.util.ArrayList;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import junit.framework.Assert;
import org.buddycloud.channelserver.Configuration;
import org.buddycloud.channelserver.channel.ChannelManager;
import org.buddycloud.channelserver.channel.validate.AtomEntry;
import org.buddycloud.channelserver.db.exception.NodeStoreException;
import org.buddycloud.channelserver.packetHandler.iq.IQTestHandler;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.JabberPubsub;
import org.buddycloud.channelserver.pubsub.affiliation.Affiliations;
import org.buddycloud.channelserver.pubsub.model.NodeMembership;
import org.buddycloud.channelserver.pubsub.model.NodeSubscription;
import org.buddycloud.channelserver.pubsub.model.impl.GlobalItemIDImpl;
import org.buddycloud.channelserver.pubsub.model.impl.NodeItemImpl;
import org.buddycloud.channelserver.pubsub.model.impl.NodeMembershipImpl;
import org.buddycloud.channelserver.pubsub.model.impl.NodeSubscriptionImpl;
import org.buddycloud.channelserver.pubsub.subscription.Subscriptions;
import org.buddycloud.channelserver.utils.XMLConstants;
import org.dom4j.Element;
import org.dom4j.tree.BaseElement;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
import org.xmpp.packet.Message;
import org.xmpp.packet.Packet;
import org.xmpp.packet.PacketError;
import org.xmpp.resultsetmanagement.ResultSetImpl;
public class PublishTest extends IQTestHandler {
private IQ request;
private ChannelManager channelManager;
private Publish publish;
private JID jid;
private Element element;
private BlockingQueue<Packet> queue = new LinkedBlockingQueue<Packet>();
private String node = "/user/[email protected]/posts";
private String server = "channels.shakespeare.lit";
private AtomEntry validateEntry;
private Element entry;
    @Before
    public void setUp() throws Exception {
        // Build the handler against a mocked channel store: the target node
        // exists, the requesting user is a subscribed publisher, there are no
        // subscription listeners, and the Atom validator accepts the
        // request's entry payload.
        channelManager = Mockito.mock(ChannelManager.class);
        validateEntry = Mockito.mock(AtomEntry.class);
        Configuration.getInstance().putProperty(Configuration.CONFIGURATION_LOCAL_DOMAIN_CHECKER, Boolean.TRUE.toString());
        queue = new LinkedBlockingQueue<Packet>();
        publish = new Publish(queue, channelManager);
        jid = new JID("[email protected]/balcony");
        request = readStanzaAsIq("/iq/pubsub/publish/request.stanza");
        publish.setServerDomain("shakespeare.lit");
        publish.setChannelManager(channelManager);
        publish.setEntryValidator(validateEntry);
        entry = request.getChildElement().element("publish").element("item").element("entry").createCopy();
        element = new BaseElement("publish");
        Mockito.when(channelManager.nodeExists(node)).thenReturn(true);
        NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.subscribed, Affiliations.publisher, null);
        Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership);
        Mockito.when(channelManager.getNodeSubscriptionListeners(Mockito.eq(node))).thenReturn(
                new ResultSetImpl<NodeSubscription>(new ArrayList<NodeSubscription>()));
        validateEntry.setPayload(request.getChildElement().element("publish").element("item").createCopy());
        Mockito.when(validateEntry.getGlobalItemId()).thenReturn(
                new GlobalItemIDImpl(new JID(request.getTo().toBareJID()), node, entry.elementText("id")).toString());
        Mockito.when(validateEntry.getLocalItemId()).thenCallRealMethod();
        Mockito.when(validateEntry.isValid()).thenReturn(true);
        Mockito.when(validateEntry.getPayload()).thenReturn(entry);
    }
@Test
public void passingRetractAsElementNameReturnsTrue() {
Element element = new BaseElement("publish");
Assert.assertTrue(publish.accept(element));
}
@Test
public void passingNotRetractAsElementNameReturnsFalse() {
Element element = new BaseElement("not-publish");
Assert.assertFalse(publish.accept(element));
}
    @Test
    public void passingNoNodeResultsInErrorStanza() throws Exception {
        // Stripping the node attribute must yield modify / nodeid-required.
        IQ request = this.request.createCopy();
        request.getChildElement().element("publish").attribute("node").detach();
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.modify, error.getType());
        Assert.assertEquals(XMLConstants.NODE_ID_REQUIRED, error.getApplicationConditionName());
    }
    @Test
    public void nodeStoreExceptionReturnsErrorStanza() throws Exception {
        // Storage failures surface as wait / internal-server-error.
        Mockito.doThrow(new NodeStoreException()).when(channelManager).nodeExists(Mockito.eq(node));
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Condition.internal_server_error, error.getCondition());
        Assert.assertEquals(PacketError.Type.wait, error.getType());
    }
    @Test
    public void providingNodeWhichDoesntExistReturnsError() throws Exception {
        // Publishing to an unknown node yields cancel / item-not-found.
        Mockito.when(channelManager.nodeExists(node)).thenReturn(false);
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.cancel, error.getType());
        Assert.assertEquals(PacketError.Condition.item_not_found, error.getCondition());
    }
    @Test
    public void requestToRemoteNodeResultsInForwardedPacket() throws Exception {
        // A publish addressed to a node on a remote domain is forwarded to
        // that remote server instead of being handled locally.
        Configuration.getInstance().remove(Configuration.CONFIGURATION_LOCAL_DOMAIN_CHECKER);
        Configuration.getInstance().putProperty(Configuration.CONFIGURATION_SERVER_DOMAIN, "shakespeare.lit");
        Assert.assertEquals(new JID("channels.shakespeare.lit"), request.getTo());
        request.getElement().element("pubsub").element("publish").addAttribute("node", "/user/[email protected]/posts");
        publish.process(element, jid, request, null);
        Assert.assertEquals(1, queue.size());
        Packet response = queue.poll();
        Assert.assertEquals(new JID("barracks.lit"), response.getTo());
    }
    @Test
    public void unsubscribedUserCanNotPublish() throws Exception {
        // No subscription (even with publisher affiliation) -> auth/forbidden.
        NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.none, Affiliations.publisher, null);
        Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership);
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.auth, error.getType());
        Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition());
    }
    @Test
    public void pendingSubscriptionCanNotPublish() throws Exception {
        // A still-pending subscriber may not publish -> auth/forbidden.
        NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.pending, Affiliations.publisher, null);
        Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership);
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.auth, error.getType());
        Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition());
    }
    @Test
    public void noAffiliationCanNotPublish() throws Exception {
        // Subscribed but with no affiliation -> auth/forbidden.
        NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.subscribed, Affiliations.none, null);
        Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership);
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.auth, error.getType());
        Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition());
    }
    @Test
    public void memberAffiliationCanNotPublish() throws Exception {
        // The plain 'member' affiliation lacks publish rights.
        NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.subscribed, Affiliations.member, null);
        Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership);
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.auth, error.getType());
        Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition());
    }
    @Test
    public void outcastAffiliationCanNotPublish() throws Exception {
        // Outcasts may never publish -> auth/forbidden.
        NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.subscribed, Affiliations.outcast, null);
        Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership);
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.auth, error.getType());
        Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition());
    }
    @Test
    public void noItemElementReturnsError() throws Exception {
        // A publish without an <item/> is modify/bad-request (item-required).
        IQ request = this.request.createCopy();
        request.getChildElement().element("publish").element("item").detach();
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.modify, error.getType());
        Assert.assertEquals(PacketError.Condition.bad_request, error.getCondition());
        Assert.assertEquals(XMLConstants.ITEM_REQUIRED_ELEM, error.getApplicationConditionName());
    }
    @Test
    public void invalidEntryReturnsError() throws Exception {
        // Validator rejections are mapped to bad-request, with the
        // validator's message as the application-specific condition.
        String errorMessage = "errorMessage";
        Mockito.when(validateEntry.isValid()).thenReturn(false);
        Mockito.when(validateEntry.getErrorMessage()).thenReturn(errorMessage);
        publish.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.modify, error.getType());
        Assert.assertEquals(PacketError.Condition.bad_request, error.getCondition());
        Assert.assertEquals(errorMessage, error.getApplicationConditionName());
    }
    @Test
    public void itemIsSavedAsExpected() throws Exception {
        // A successful publish persists exactly one node item.
        IQ request = this.request.createCopy();
        publish.process(element, jid, request, null);
        Mockito.verify(channelManager, Mockito.times(1)).addNodeItem(Mockito.any(NodeItemImpl.class));
    }
@Test
public void expectedSuccessResponseReceived() throws Exception {
IQ request = this.request.createCopy();
publish.process(element, jid, request, null);
IQ response = (IQ) queue.poll();
Assert.assertEquals(IQ.Type.result, response.getType());
Assert.assertEquals(request.getFrom(), response.getTo());
Assert.assertEquals(request.getTo(), response.getFrom());
Element pubsub = response.getElement().element("pubsub");
Assert.assertEquals(JabberPubsub.NAMESPACE_URI, pubsub.getNamespaceURI());
Element publish = pubsub.element("publish");
Assert.assertEquals(node, publish.attributeValue("node"));
Element item = publish.element("item");
Assert.assertNotNull(item);<|fim▁hole|> Assert.assertTrue(GlobalItemIDImpl.isGlobalId(item.attributeValue("id")));
}
    @Test
    public void sendsOutExpectedNotifications() throws Exception {
        // Headline notifications go to subscribed listeners (local and via a
        // remote listener JID) but not to pending subscribers. Two further
        // copies go to user1/user2@server1 — presumably JIDs configured as
        // additional admin recipients; confirm against Publish's config.
        NodeSubscription subscriber1 = new NodeSubscriptionImpl(node, new JID("[email protected]"), Subscriptions.subscribed, null);
        // Expect not to see this user (subscription: 'pending')
        NodeSubscription subscriber2 = new NodeSubscriptionImpl(node, new JID("[email protected]"), Subscriptions.pending, null);
        NodeSubscription subscriber3 =
                new NodeSubscriptionImpl(node, new JID("[email protected]"), new JID("channels.marlowe.lit"), Subscriptions.subscribed, null);
        ArrayList<NodeSubscription> subscribers = new ArrayList<NodeSubscription>();
        subscribers.add(subscriber1);
        subscribers.add(subscriber2);
        subscribers.add(subscriber3);
        Mockito.when(channelManager.getNodeSubscriptionListeners(Mockito.eq(node))).thenReturn(new ResultSetImpl<NodeSubscription>(subscribers));
        IQ request = this.request.createCopy();
        publish.process(element, jid, request, null);
        Assert.assertEquals(5, queue.size());
        queue.poll();
        Message notification = (Message) queue.poll();
        Assert.assertEquals(Message.Type.headline, notification.getType());
        Assert.assertEquals(subscriber1.getUser(), notification.getTo());
        Assert.assertEquals(server, notification.getFrom().toString())
;
        Element event = notification.getElement().element("event");
        Assert.assertEquals(JabberPubsub.NS_PUBSUB_EVENT, event.getNamespaceURI());
        Element items = event.element("items");
        Assert.assertEquals(node, items.attributeValue("node"));
        Element item = items.element("item");
        Assert.assertTrue(item.attributeValue("id").length() > 0);
        Assert.assertTrue(GlobalItemIDImpl.isGlobalId(item.attributeValue("id")));
        Element responseEntry = item.element("entry");
        Assert.assertEquals(entry.asXML(), responseEntry.asXML());
        notification = (Message) queue.poll();
        Assert.assertEquals(subscriber3.getListener(), notification.getTo());
        notification = (Message) queue.poll();
        Assert.assertEquals(new JID("user1@server1"), notification.getTo());
        notification = (Message) queue.poll();
        Assert.assertEquals(new JID("user2@server1"), notification.getTo());
    }
    @Test
    public void inReplyToIdIsSavedToDatabase() throws Exception {
        // A reply stores the local form of the referenced parent id with the
        // item, on the correct node.
        IQ request = readStanzaAsIq("/iq/pubsub/publish/reply.stanza");
        Mockito.when(validateEntry.getPayload()).thenReturn(request.getChildElement().element("publish").element("item").element("entry"))
;
        Mockito.when(validateEntry.getInReplyTo()).thenReturn(
                GlobalItemIDImpl.toLocalId(request.getChildElement().element("publish").element("item").element("entry").element("in-reply-to")
                        .attributeValue("ref")));
        publish.process(element, jid, request, null);
        Assert.assertEquals(IQ.Type.result, ((IQ) queue.poll()).getType());
        ArgumentCaptor<NodeItemImpl> argument = ArgumentCaptor.forClass(NodeItemImpl.class);
        Mockito.verify(channelManager, Mockito.times(1)).addNodeItem(argument.capture());
        Assert.assertEquals("fc362eb42085f017ed9ccd9c4004b095", argument.getValue().getInReplyTo());
        Assert.assertEquals(node, argument.getValue().getNodeId());
    }
    @Test
    public void replyUpdatesThreadParentDate() throws Exception {
        // Publishing a reply bumps the referenced thread parent's
        # date via updateThreadParent on the same node.
        IQ request = readStanzaAsIq("/iq/pubsub/publish/reply.stanza");
        Mockito.when(validateEntry.getPayload()).thenReturn(request.getChildElement().element("publish").element("item").element("entry"));
        Mockito.when(validateEntry.getInReplyTo()).thenReturn("fc362eb42085f017ed9ccd9c4004b095");
        publish.process(element, jid, request, null);
        Assert.assertEquals(IQ.Type.result, ((IQ) queue.poll()).getType());
        ArgumentCaptor<String> inReplyTo = ArgumentCaptor.forClass(String.class);
        ArgumentCaptor<String> passedNode = ArgumentCaptor.forClass(String.class);
        Mockito.verify(channelManager, Mockito.times(1)).updateThreadParent(passedNode.capture(), inReplyTo.capture());
        Assert.assertEquals("fc362eb42085f017ed9ccd9c4004b095", inReplyTo.getValue());
        Assert.assertEquals(node, passedNode.getValue());
    }
    @Test
    public void doesNotUpdateParentThreadIfNotReply() throws Exception {
        // Standalone (non-reply) posts leave thread parents untouched.
        IQ request = this.request.createCopy();
        publish.process(element, jid, request, null);
        Mockito.verify(channelManager, Mockito.times(0)).updateThreadParent(Mockito.anyString(), Mockito.anyString());
    }
}<|fim▁end|> |
Assert.assertTrue(item.attributeValue("id").length() > 0); |
<|file_name|>linalg_grad.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in linalg_ops.py.
Useful reference for derivative formulas is
An extended collection of matrix derivative results for forward and reverse
mode algorithmic differentiation by Mike Giles:
http://eprints.maths.ox.ac.uk/1079/1/NA-08-01.pdf
A detailed derivation of formulas for backpropagating through spectral layers
(SVD and Eig) by Ionescu, Vantzos & Sminchisescu:
https://arxiv.org/pdf/1509.07838v4.pdf
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg_impl as _linalg
@ops.RegisterGradient("MatrixInverse")
def _MatrixInverseGrad(op, grad):
"""Gradient for MatrixInverse."""
ainv = op.outputs[0]
return -math_ops.matmul(
ainv, math_ops.matmul(grad, ainv, adjoint_b=True), adjoint_a=True)
@ops.RegisterGradient("MatrixDeterminant")
def _MatrixDeterminantGrad(op, grad):
"""Gradient for MatrixDeterminant."""
a = op.inputs[0]
c = op.outputs[0]
a_adj_inv = linalg_ops.matrix_inverse(a, adjoint=True)
multipliers = array_ops.reshape(grad * c,
array_ops.concat([array_ops.shape(c), [1, 1]],
0))
return multipliers * a_adj_inv
@ops.RegisterGradient("Cholesky")
def _CholeskyGrad(op, grad):
"""Gradient for Cholesky."""
# Gradient is l^{-H} @ ((l^{H} @ grad) * (tril(ones)-1/2*eye)) @ l^{-1}
l = op.outputs[0]
num_rows = array_ops.shape(l)[-1]
batch_shape = array_ops.shape(l)[:-2]
l_inverse = linalg_ops.matrix_triangular_solve(l,
linalg_ops.eye(
num_rows,
batch_shape=batch_shape,
dtype=l.dtype))
middle = math_ops.matmul(l, grad, adjoint_a=True)
middle = array_ops.matrix_set_diag(middle,
0.5 * array_ops.matrix_diag_part(middle))
middle = array_ops.matrix_band_part(middle, -1, 0)
grad_a = math_ops.matmul(
math_ops.matmul(l_inverse, middle, adjoint_a=True), l_inverse)
grad_a += _linalg.adjoint(grad_a)
return grad_a * 0.5
@ops.RegisterGradient("Qr")
def _QrGrad(op, dq, dr):
"""Gradient for Qr."""
q, r = op.outputs
if q.dtype.is_complex:
raise NotImplementedError("QrGrad not implemented for dtype: %s" % q.dtype)
if (r.shape.ndims is None or r.shape.as_list()[-2] is None or
r.shape.as_list()[-1] is None):
raise NotImplementedError("QrGrad not implemented with dynamic shapes.")
if r.shape[-2].value != r.shape[-1].value:
raise NotImplementedError("QrGrad not implemented when ncols > nrows "
"or full_matrices is true and ncols != nrows.")
qdq = math_ops.matmul(q, dq, adjoint_a=True)
qdq_ = qdq - _linalg.adjoint(qdq)
rdr = math_ops.matmul(r, dr, adjoint_b=True)
rdr_ = rdr - _linalg.adjoint(rdr)
tril = array_ops.matrix_band_part(qdq_ + rdr_, -1, 0)
def _TriangularSolve(x, r):
"""Equiv to matmul(x, adjoint(matrix_inverse(r))) if r is upper-tri."""
return _linalg.adjoint(
linalg_ops.matrix_triangular_solve(
r, _linalg.adjoint(x), lower=False, adjoint=False))
grad_a = math_ops.matmul(q, dr + _TriangularSolve(tril, r))
grad_b = _TriangularSolve(dq - math_ops.matmul(q, qdq), r)
return grad_a + grad_b
@ops.RegisterGradient("MatrixSolve")
def _MatrixSolveGrad(op, grad):
"""Gradient for MatrixSolve."""
a = op.inputs[0]
adjoint_a = op.get_attr("adjoint")
c = op.outputs[0]
grad_b = linalg_ops.matrix_solve(a, grad, adjoint=not adjoint_a)
if adjoint_a:
grad_a = -math_ops.matmul(c, grad_b, adjoint_b=True)
else:
grad_a = -math_ops.matmul(grad_b, c, adjoint_b=True)
return (grad_a, grad_b)
@ops.RegisterGradient("MatrixSolveLs")
def _MatrixSolveLsGrad(op, grad):
"""Gradients for MatrixSolveLs."""
# TODO(rmlarsen): The implementation could be more efficient:
# a) Output the Cholesky factorization from forward op instead of
# recomputing it here.
# b) Implement a symmetric rank-k update op instead of computing
# x*z + transpose(x*z). This pattern occurs other places in TensorFlow.
def _Overdetermined(op, grad):
"""Gradients for the overdetermined case of MatrixSolveLs.
This is the backprop for the solution to the normal equations of the first
kind:
X = F(A, B) = (A^T * A + lambda * I)^{-1} * A^T * B
which solve the least squares problem
min ||A * X - B||_F^2 + lambda ||X||_F^2.
"""
a = op.inputs[0]
b = op.inputs[1]
x = op.outputs[0]
l2_regularizer = math_ops.cast(op.inputs[2], a.dtype.base_dtype)
# pylint: disable=protected-access
chol = linalg_ops._RegularizedGramianCholesky(
a, l2_regularizer=l2_regularizer, first_kind=True)
# pylint: enable=protected-access
# Temporary z = (A^T * A + lambda * I)^{-1} * grad.
z = linalg_ops.cholesky_solve(chol, grad)
xzt = math_ops.matmul(x, z, adjoint_b=True)
zx_sym = xzt + array_ops.matrix_transpose(xzt)
grad_a = -math_ops.matmul(a, zx_sym) + math_ops.matmul(b, z, adjoint_b=True)
grad_b = math_ops.matmul(a, z)
return (grad_a, grad_b, None)
def _Underdetermined(op, grad):
"""Gradients for the underdetermined case of MatrixSolveLs.
This is the backprop for the solution to the normal equations of the second
kind:
X = F(A, B) = A * (A*A^T + lambda*I)^{-1} * B
that (for lambda=0) solve the least squares problem
min ||X||_F subject to A*X = B.
"""
a = op.inputs[0]
b = op.inputs[1]
l2_regularizer = math_ops.cast(op.inputs[2], a.dtype.base_dtype)
# pylint: disable=protected-access
chol = linalg_ops._RegularizedGramianCholesky(
a, l2_regularizer=l2_regularizer, first_kind=False)
# pylint: enable=protected-access
grad_b = linalg_ops.cholesky_solve(chol, math_ops.matmul(a, grad))<|fim▁hole|> a2 = grad - math_ops.matmul(a, grad_b, adjoint_a=True)
a2 = math_ops.matmul(tmp, a2, adjoint_b=True)
grad_a = a1 + a2
return (grad_a, grad_b, None)
fast = op.get_attr("fast")
if fast is False:
raise ValueError("Gradient not defined for fast=False")
matrix_shape = op.inputs[0].get_shape()[-2:]
if matrix_shape.is_fully_defined():
if matrix_shape[-2] >= matrix_shape[-1]:
return _Overdetermined(op, grad)
else:
return _Underdetermined(op, grad)
else:
# We have to defer determining the shape to runtime and use
# conditional execution of the appropriate graph.
matrix_shape = array_ops.shape(op.inputs[0])[-2:]
return control_flow_ops.cond(matrix_shape[-2] >= matrix_shape[-1],
lambda: _Overdetermined(op, grad),
lambda: _Underdetermined(op, grad))
@ops.RegisterGradient("MatrixTriangularSolve")
def _MatrixTriangularSolveGrad(op, grad):
"""Gradient for MatrixTriangularSolve."""
a = op.inputs[0]
adjoint_a = op.get_attr("adjoint")
lower_a = op.get_attr("lower")
c = op.outputs[0]
grad_b = linalg_ops.matrix_triangular_solve(
a, grad, lower=lower_a, adjoint=not adjoint_a)
if adjoint_a:
grad_a = -math_ops.matmul(c, grad_b, adjoint_b=True)
else:
grad_a = -math_ops.matmul(grad_b, c, adjoint_b=True)
if lower_a:
grad_a = array_ops.matrix_band_part(grad_a, -1, 0)
else:
grad_a = array_ops.matrix_band_part(grad_a, 0, -1)
return (grad_a, grad_b)
@ops.RegisterGradient("SelfAdjointEigV2")
def _SelfAdjointEigV2Grad(op, grad_e, grad_v):
"""Gradient for SelfAdjointEigV2."""
e = op.outputs[0]
compute_v = op.get_attr("compute_v")
# a = op.inputs[0], which satisfies
# a[...,:,:] * v[...,:,i] = e[...,i] * v[...,i]
with ops.control_dependencies([grad_e, grad_v]):
if compute_v:
v = op.outputs[1]
# Construct the matrix f(i,j) = (i != j ? 1 / (e_i - e_j) : 0).
# Notice that because of the term involving f, the gradient becomes
# infinite (or NaN in practice) when eigenvalues are not unique.
# Mathematically this should not be surprising, since for (k-fold)
# degenerate eigenvalues, the corresponding eigenvectors are only defined
# up to arbitrary rotation in a (k-dimensional) subspace.
f = array_ops.matrix_set_diag(
math_ops.reciprocal(
array_ops.expand_dims(e, -2) - array_ops.expand_dims(e, -1)),
array_ops.zeros_like(e))
grad_a = math_ops.matmul(
v,
math_ops.matmul(
array_ops.matrix_diag(grad_e) +
f * math_ops.matmul(v, grad_v, adjoint_a=True),
v,
adjoint_b=True))
else:
_, v = linalg_ops.self_adjoint_eig(op.inputs[0])
grad_a = math_ops.matmul(v,
math_ops.matmul(
array_ops.matrix_diag(grad_e),
v,
adjoint_b=True))
# The forward op only depends on the lower triangular part of a, so here we
# symmetrize and take the lower triangle
grad_a = array_ops.matrix_band_part(grad_a + _linalg.adjoint(grad_a), -1, 0)
grad_a = array_ops.matrix_set_diag(grad_a,
0.5 * array_ops.matrix_diag_part(grad_a))
return grad_a
@ops.RegisterGradient("Svd")
def _SvdGrad(op, grad_s, grad_u, grad_v):
"""Gradient for the singular value decomposition."""
# The derivation for the compute_uv=False case, and most of
# the derivation for the full_matrices=True case, are in
# Giles' paper (see reference at top of file). A derivation for
# the full_matrices=False case is available at
# https://j-towns.github.io/papers/svd-derivative.pdf
a = op.inputs[0]
a_shape = a.get_shape().with_rank_at_least(2)
grad_s_mat = array_ops.matrix_diag(grad_s)
if not op.get_attr("compute_uv"):
s, u, v = linalg_ops.svd(a, compute_uv=True)
grad_a = math_ops.matmul(u, math_ops.matmul(grad_s_mat, v, adjoint_b=True))
grad_a.set_shape(a_shape)
return grad_a
full_matrices = op.get_attr("full_matrices")
# TODO(rmlarsen): Make this work with complex types.
if a.dtype.is_complex:
raise NotImplementedError(
"SVD gradient is not implemented for complex types and "
"compute_uv=True.")
grad_u_shape = grad_u.get_shape().with_rank_at_least(2)
grad_v_shape = grad_v.get_shape().with_rank_at_least(2)
m = a_shape[-2].merge_with(grad_u_shape[-2])
n = a_shape[-1].merge_with(grad_v_shape[-2])
batch_shape = a_shape[:-2].merge_with(grad_u_shape[:-2]).merge_with(
grad_v_shape[:-2])
a_shape = batch_shape.concatenate([m, n])
m = a_shape[-2].value
n = a_shape[-1].value
# TODO(rmlarsen): Make this work with placeholders.
if m is None or n is None:
raise NotImplementedError(
"SVD gradient has not been implemented for input with unknown "
"inner matrix shape.")
s = op.outputs[0]
u = op.outputs[1]
v = op.outputs[2]
use_adjoint = False
if m > n:
# Compute the gradient for A^H = V * S^T * U^H, and (implicitly) take the
# Hermitian transpose of the gradient at the end.
use_adjoint = True
m, n = n, m
u, v = v, u
grad_u, grad_v = grad_v, grad_u
with ops.control_dependencies([grad_s, grad_u, grad_v]):
if full_matrices and abs(m - n) > 1:
raise NotImplementedError(
"svd gradient is not implemented for abs(m - n) > 1 "
"when full_matrices is True")
s_mat = array_ops.matrix_diag(s)
s2 = math_ops.square(s)
# NOTICE: Because of the term involving f, the gradient becomes
# infinite (or NaN in practice) when singular values are not unique.
# Mathematically this should not be surprising, since for (k-fold)
# degenerate singular values, the corresponding singular vectors are
# only defined up a (k-dimensional) subspace. In practice, this can
# lead to numerical instability when singular values are close but not
# exactly equal.
f = array_ops.matrix_set_diag(
math_ops.reciprocal(
array_ops.expand_dims(s2, -2) - array_ops.expand_dims(s2, -1)),
array_ops.zeros_like(s))
s_inv_mat = array_ops.matrix_diag(math_ops.reciprocal(s))
v1 = v[..., :, :m]
grad_v1 = grad_v[..., :, :m]
u_gu = math_ops.matmul(u, grad_u, adjoint_a=True)
v_gv = math_ops.matmul(v1, grad_v1, adjoint_a=True)
f_u = f * u_gu
f_v = f * v_gv
term1_nouv = (
grad_s_mat + math_ops.matmul(f_u + _linalg.adjoint(f_u), s_mat) +
math_ops.matmul(s_mat, f_v + _linalg.adjoint(f_v)))
term1 = math_ops.matmul(u, math_ops.matmul(term1_nouv, v1, adjoint_b=True))
if m == n:
grad_a_before_transpose = term1
else:
gv1t = array_ops.matrix_transpose(grad_v1)
gv1t_v1 = math_ops.matmul(gv1t, v1)
term2_nous = gv1t - math_ops.matmul(gv1t_v1, v1, adjoint_b=True)
if full_matrices:
v2 = v[..., :, m:n]
grad_v2 = grad_v[..., :, m:n]
v1t_gv2 = math_ops.matmul(v1, grad_v2, adjoint_a=True)
term2_nous -= math_ops.matmul(v1t_gv2, v2, adjoint_b=True)
u_s_inv = math_ops.matmul(u, s_inv_mat)
term2 = math_ops.matmul(u_s_inv, term2_nous)
grad_a_before_transpose = term1 + term2
if use_adjoint:
grad_a = array_ops.matrix_transpose(grad_a_before_transpose)
else:
grad_a = grad_a_before_transpose
grad_a.set_shape(a_shape)
return grad_a<|fim▁end|> | # Temporary tmp = (A * A^T + lambda * I)^{-1} * B.
tmp = linalg_ops.cholesky_solve(chol, b)
a1 = math_ops.matmul(tmp, a, adjoint_a=True)
a1 = -math_ops.matmul(grad_b, a1) |
<|file_name|>iced_error.rs<|end_file_name|><|fim▁begin|>// SPDX-License-Identifier: MIT
// Copyright [email protected]
// Copyright iced contributors
use alloc::borrow::Cow;
use alloc::string::String;
use core::fmt;
#[cfg(feature = "std")]
use std::error::Error;
/// iced error
#[derive(Debug, Clone)]
pub struct IcedError {
error: Cow<'static, str>,
}
impl IcedError {
#[allow(dead_code)]
pub(crate) fn new(error: &'static str) -> Self {
Self { error: error.into() }<|fim▁hole|>
#[allow(dead_code)]
pub(crate) fn with_string(error: String) -> Self {
Self { error: error.into() }
}
}
#[cfg(feature = "std")]
impl Error for IcedError {
// Required since MSRV < 1.42.0
#[allow(clippy::missing_inline_in_public_items)]
fn description(&self) -> &str {
&self.error
}
}
impl fmt::Display for IcedError {
#[allow(clippy::missing_inline_in_public_items)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", &self.error)
}
}<|fim▁end|> | } |
<|file_name|>scrape_google_scholar_from_bing.py<|end_file_name|><|fim▁begin|>import requests
from urllib.parse import parse_qs, urlparse
from lxml.html import fromstring
_HEADERS = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/41.0.2272.76 Chrome/41.0.2272.76 Safari/537.36',
'accept': 'text/html,application/xhtml+xml,application/xml'
}
# get results from search
query = {"q": "site:scholar.google.com \"From Mechanism to Mouse\" "}
url = "https://cn.bing.com/search"
html = requests.get(url, headers=_HEADERS, params=query)
print(html.request.headers)
print(html.url)
print(html.content)
tree = fromstring(html.content)
results = tree.xpath(".//*[@id='b_results']/li/div[1]/h2/a")
print(len(results))
# grab the first link
link = results[0].get('href')<|fim▁hole|># parse the destination url from the querystring
qs = urlparse(link).query
parsed_qs = parse_qs(qs)
print(parsed_qs)
print(parsed_qs.get('user', []))
# as one list
links = []
for result in results:
link = result.get('href')
qs = urlparse(link).query
links.extend(parse_qs(qs).get('user', []))
print(links)<|fim▁end|> | print(link)
|
<|file_name|>regions-free-region-ordering-caller.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test various ways to construct a pointer with a longer lifetime
// than the thing it points at and ensure that they result in
// errors. See also regions-free-region-ordering-callee.rs
struct Paramd<'self> { x: &'self uint }
fn call1<'a>(x: &'a uint) {
let y: uint = 3;
let z: &'a &'blk uint = &(&y);
//~^ ERROR pointer has a longer lifetime than the data it references
}
fn call2<'a, 'b>(a: &'a uint, b: &'b uint) {
let z: Option<&'b &'a uint> = None;
//~^ ERROR pointer has a longer lifetime than the data it references
}
fn call3<'a, 'b>(a: &'a uint, b: &'b uint) {
let y: Paramd<'a> = Paramd { x: a };
let z: Option<&'b Paramd<'a>> = None;
//~^ ERROR pointer has a longer lifetime than the data it references
}
fn call4<'a, 'b>(a: &'a uint, b: &'b uint) {
let z: Option<&fn(&'a &'b uint)> = None;
//~^ ERROR pointer has a longer lifetime than the data it references
}
fn main() {}<|fim▁end|> | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
<|file_name|>advance-tables-widget4.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-advance-tables-widget4',
<|fim▁hole|>
ngOnInit(): void {}
}<|fim▁end|> | templateUrl: './advance-tables-widget4.component.html',
})
export class AdvanceTablesWidget4Component implements OnInit {
constructor() {}
|
<|file_name|>country_page_data.rs<|end_file_name|><|fim▁begin|>use super::country::Country;
use super::year::Year;
/// `CountryPageData`
/// data actually sent to page
#[derive(Debug, Clone, RustcEncodable)]
pub struct CountryPageData {
pub found: bool,
pub name: String,
pub link: String,
pub total_eve: i32,
pub years: Vec<Year>,
}
impl CountryPageData {
pub fn new(t_name: String) -> CountryPageData {<|fim▁hole|> link: t_name.replace(" ", ""),
total_eve: 0,
years: Vec::new(),
}
}
pub fn from_country(ctry: Country) -> CountryPageData {
CountryPageData::new(ctry.name)
}
}<|fim▁end|> | CountryPageData {
found: true,
name: t_name.clone(), |
<|file_name|>development.py<|end_file_name|><|fim▁begin|>from .base import BASE_DIR, INSTALLED_APPS, MIDDLEWARE_CLASSES, REST_FRAMEWORK
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1']
SECRET_KEY = 'secret'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'holonet',
'USER': 'holonet',
'PASSWORD': '',
'HOST': '127.0.0.1',
'PORT': '',
}
}
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': 'redis://127.0.0.1:6379/0',
}
}
EMAIL_BACKEND = 'django.utils.mail.backends.console.EmailBackend'
BROKER_URL = 'redis://127.0.0.1'
ELASTICSEARCH = {<|fim▁hole|> ]
}
}
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] += ['rest_framework.renderers.BrowsableAPIRenderer']
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = ('127.0.0.1', )
POSTFIX_TRANSPORT_MAPS_LOCATION = '{0}/../mta/shared/'.format(BASE_DIR)<|fim▁end|> | 'default': {
'hosts': [
'127.0.0.1:9200' |
<|file_name|>redgem.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. module:: redgem
:platform: Unix, Windows
:synopsis: RedGEM Algorithm
.. moduleauthor:: pyTFA team
Model class
"""
from pytfa.redgem.network_expansion import NetworkExpansion
from pytfa.redgem.lumpgem import LumpGEM
from cobra import Reaction
from .utils import remove_blocked_reactions, set_medium
import yaml
class RedGEM():
def __init__(self, gem, parameters_path, inplace=False):
self.read_parameters(parameters_path)
# If inplace is True, no deepcopy is performed : the modifications are applied directly onto the gem
prepared_gem = set_medium(gem, self.params['medium'], inplace)
self._gem = prepared_gem
# This one is used to perform the lumping
self._source_gem = prepared_gem.copy()
self.logger = self._gem.logger
self.fill_default_params()
self.set_solver()
def read_parameters(self, parameters_path):
with open(parameters_path, 'r') as stream:
try:
self.params = yaml.safe_load(stream)
print("Opened parameters file")
except yaml.YAMLError as exc:
print(exc)
def fill_default_params(self):
# If auto is activated, automatically extracts inorganics from the gem
if "inorganic" not in self.params or self.params["inorganics"] == "auto":
self.logger.info("Automatically computing inorganics to use")
self.params["inorganics"] = self._extract_inorganics()
if "growth_rate" not in self.params or self.params["growth_rate"] == "auto":
self.logger.info("Setting minimal growth rate to 95% of the TFA solution")
obj_val = self._source_gem.slim_optimize()
self.logger.info("Setting minimal growth rate to {}".format(obj_val))
self.params["growth_rate"] = 0.95*obj_val
if "force_solve" not in self.params:
self.params["force_solve"] = False
if "timeout" not in self.params:
self.logger.info("Using default timeout : 3600s")
self.params["timeout"] = 3600
if "feasibility" not in self.params:
self.logger.info("Using default solver feasibility : 1e-9")
self.params["feasibility"] = 1e-9
else:
# numbers like 1e-9 are detected as strings by yaml module
# to enable their use, we cast them into floats
try:
self.params["feasibility"] = float(self.params["feasibility"])
except ValueError as v:
self.logger.error(v)
def set_solver(self):
if "solver" not in self.params or self.params["solver"].lower() == "auto":
return None
elif 'gurobi' in self.params["solver"].lower():
solver = 'gurobi'
elif 'cplex' in self.params["solver"].lower():
solver = 'cplex'
elif 'glpk' in self.params["solver"].lower():
solver = 'glpk'
else:
solver = self.params["solver"]
self._gem.solver = solver
self._source_gem.solver = solver
def run(self):
# Extracting parameters
core_subsystems = self.params["core_subsystems"]
extracellular_system = self.params["extracellular_system"]
biomass_rxn_ids = self.params["biomass_rxns"]
biomass_rxns = [self._gem.reactions.get_by_id(x) for x in biomass_rxn_ids]
main_bio_rxn = biomass_rxns[0]
growth_rate = self.params["growth_rate"]
small_metabolites = self.params["small_metabolites"]
cofactor_pairs = self.params["cofactor_pairs"]
# Flatten cofactor_pairs list
cofactors = [cofactor for pair in cofactor_pairs for cofactor in pair]
inorganics = self.params["inorganics"]
d = self.params["d"]
n = self.params["n"]
lump_method = self.params["lump_method"]
force_solve = self.params["force_solve"]
timeout = self.params["timeout"]
try:
self._gem.solver.configuration.tolerances.feasibility = self.params["feasibility"]
self._gem.solver.configuration.tolerances.integrality = self.params["feasibility"]
except AttributeError as e:
self.logger.error('Solver {} is not compatible with tolerance parameters'.format(self._gem.solver))
try:
self._source_gem.solver.configuration.tolerances.feasibility = self.params["feasibility"]
self._source_gem.solver.configuration.tolerances.integrality = self.params["feasibility"]
except AttributeError as e:
self.logger.error('Solver {} is not compatible with tolerance parameters'.format(self._source_gem.solver))
self.logger.info("Computing network expansion...")
expander = NetworkExpansion(self._gem, core_subsystems, extracellular_system,
cofactors, small_metabolites, inorganics,
d, n)
reduced_gem = expander.run()
self.logger.info("Done.")
# Add the expansion to core reactions
core_reactions = reduced_gem.reactions
self.logger.info("Computing lumps...")
lumper = LumpGEM(self._source_gem, core_reactions, self.params)
lumps = lumper.compute_lumps(force_solve, method = lump_method)
self.logger.info("Done.")
self.logger.info("Create final network...")
to_add = [x for x in biomass_rxns
+lumper._exchanges
+lumper._transports
+lumper._rcore
if not x.id in reduced_gem.reactions]
reduced_gem.add_reactions(to_add)
for rxns in lumps.values():
the_lumps = [add_lump(reduced_gem,rxn,id_suffix='_{}'.format(e))
for e,rxn in enumerate(rxns)]
# reduced_gem.add_reactions(rxns)
self.logger.info("Done.")
reduced_gem.objective = main_bio_rxn
reduced_gem.reactions.get_by_id(main_bio_rxn.id).lower_bound = growth_rate
if self.params['remove_blocked_reactions']:
self.logger.info('Detecting blocked reactions')
# Remove blocked reactions
nrxn_1 = len(reduced_gem.reactions)<|fim▁hole|> self.removed_reactions = remove_blocked_reactions(reduced_gem)
nrxn_2 = len(reduced_gem.reactions)
self.logger.info('Removed {} blocked reaction with '
'FVA post-processing'.format(nrxn_1-nrxn_2))
if main_bio_rxn.id not in reduced_gem.reactions:
raise RuntimeError('Main Biomass reaction appears blocked')
# For debugging purposes
self.lumper = lumper
main_bio_rxn.lower_bound = 0
return reduced_gem
def _extract_inorganics(self):
"""
Extract inorganics from self._gem based on their formula
:return: list of inorganics metabolites
"""
inorganics = []
for met in self._gem.metabolites:
if not met.elements == {}: # Edge case
# met is inorganic if it has 0 carbon in its formula
if (not 'C' in met.elements) or met.elements['C'] <= 0:
inorganics.append(met.id)
return inorganics
def add_lump(model, lump_object, id_suffix=''):
new = Reaction(id = lump_object.id_+id_suffix)
model.add_reaction(new)
new.add_metabolites(lump_object.metabolites)
new.gene_reaction_rule = lump_object.gene_reaction_rule
new.subnetwork = lump_object.subnetwork
return new<|fim▁end|> | |
<|file_name|>useless_chaining.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Chao Lin & William Sergeant (Sorbonne University)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
<|fim▁hole|>use oak::oak;
oak! {
// test1 = !(!"a") // &"a"
// test2 = &(&"a") // &"a"
// test3 = !(&"a") // !"a"
// test4 = &(!"a") // !"a"
// //test5 = ("a"*)* // infinite loop -> already detected by WFA
// test6 = ("a"+)+ // "a"+
// test7 = ("a"+)* // "a"+
// //test8 = ("a"*)+ // infinite loop -> already detected by WFA
//
test9 = !"a"
// test10 = !test9
//
// test11 = &"a"
// test12 = &test11
//
// test13 = !test11
//
// test14 = &test9
//
// test15 = "a"+
// test16 = test15+
//
// test17 = test15*
//
// test18 = &test12
// test19 = test16+
//
// test20 = ((("a")+)+)+
// test21 = &(&(&(&("a"))))
//
// test22 = &"a" / !"b"
// test23 = &test22
//
// test24 = &"a" "b"
// test25 = &test24
//
// test26 = &"a" / &"b"
// test27 = &test26
//
// test28 = &"a" / "b"
// test29 = &test28
//
// test30 = &"a" &(!"b")
// test31 = &test30
//
// test32 = &(&"a") / &(&"b")
// test33 = &test32
}<|fim▁end|> | |
<|file_name|>harfbuzz.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
use app_units::Au;
use euclid::Point2D;
use font::{ShapingFlags, Font, FontTableMethods, FontTableTag, ShapingOptions, KERN};
use harfbuzz::{HB_DIRECTION_LTR, HB_DIRECTION_RTL, HB_MEMORY_MODE_READONLY};
use harfbuzz::{hb_blob_create, hb_face_create_for_tables};
use harfbuzz::{hb_buffer_create, hb_font_destroy};
use harfbuzz::{hb_buffer_get_glyph_infos, hb_shape};
use harfbuzz::{hb_buffer_set_direction, hb_buffer_set_script};
use harfbuzz::{hb_buffer_t, hb_codepoint_t, hb_font_funcs_t};
use harfbuzz::{hb_face_t, hb_font_t};
use harfbuzz::{hb_position_t, hb_tag_t};
use harfbuzz::hb_blob_t;
use harfbuzz::hb_bool_t;
use harfbuzz::hb_buffer_add_utf8;
use harfbuzz::hb_buffer_destroy;
use harfbuzz::hb_buffer_get_glyph_positions;
use harfbuzz::hb_buffer_get_length;
use harfbuzz::hb_face_destroy;
use harfbuzz::hb_feature_t;
use harfbuzz::hb_font_create;
use harfbuzz::hb_font_funcs_create;
use harfbuzz::hb_font_funcs_set_glyph_h_advance_func;
use harfbuzz::hb_font_funcs_set_glyph_h_kerning_func;
use harfbuzz::hb_font_funcs_set_nominal_glyph_func;
use harfbuzz::hb_font_set_funcs;
use harfbuzz::hb_font_set_ppem;
use harfbuzz::hb_font_set_scale;
use harfbuzz::hb_glyph_info_t;
use harfbuzz::hb_glyph_position_t;
use platform::font::FontTable;
use std::{char, cmp, ptr};
use std::os::raw::{c_char, c_int, c_uint, c_void};
use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore};
use text::shaping::ShaperMethods;
use text::util::{fixed_to_float, float_to_fixed, is_bidi_control};
const NO_GLYPH: i32 = -1;
const LIGA: u32 = ot_tag!('l', 'i', 'g', 'a');
pub struct ShapedGlyphData {
count: usize,
glyph_infos: *mut hb_glyph_info_t,
pos_infos: *mut hb_glyph_position_t,
}
pub struct ShapedGlyphEntry {
codepoint: GlyphId,
advance: Au,
offset: Option<Point2D<Au>>,
}
impl ShapedGlyphData {
pub fn new(buffer: *mut hb_buffer_t) -> ShapedGlyphData {
unsafe {
let mut glyph_count = 0;
let glyph_infos = hb_buffer_get_glyph_infos(buffer, &mut glyph_count);
assert!(!glyph_infos.is_null());
let mut pos_count = 0;
let pos_infos = hb_buffer_get_glyph_positions(buffer, &mut pos_count);
assert!(!pos_infos.is_null());
assert_eq!(glyph_count, pos_count);
ShapedGlyphData {
count: glyph_count as usize,
glyph_infos: glyph_infos,
pos_infos: pos_infos,
}
}
}
#[inline(always)]
fn byte_offset_of_glyph(&self, i: usize) -> u32 {
assert!(i < self.count);
unsafe {
let glyph_info_i = self.glyph_infos.offset(i as isize);
(*glyph_info_i).cluster
}
}
pub fn len(&self) -> usize {
self.count
}
/// Returns shaped glyph data for one glyph, and updates the y-position of the pen.
pub fn entry_for_glyph(&self, i: usize, y_pos: &mut Au) -> ShapedGlyphEntry {
assert!(i < self.count);
unsafe {
let glyph_info_i = self.glyph_infos.offset(i as isize);
let pos_info_i = self.pos_infos.offset(i as isize);
let x_offset = Shaper::fixed_to_float((*pos_info_i).x_offset);
let y_offset = Shaper::fixed_to_float((*pos_info_i).y_offset);
let x_advance = Shaper::fixed_to_float((*pos_info_i).x_advance);
let y_advance = Shaper::fixed_to_float((*pos_info_i).y_advance);
let x_offset = Au::from_f64_px(x_offset);
let y_offset = Au::from_f64_px(y_offset);
let x_advance = Au::from_f64_px(x_advance);
let y_advance = Au::from_f64_px(y_advance);
let offset = if x_offset == Au(0) && y_offset == Au(0) && y_advance == Au(0) {
None
} else {
// adjust the pen..
if y_advance > Au(0) {
*y_pos = *y_pos - y_advance;
}
Some(Point2D::new(x_offset, *y_pos - y_offset))
};
ShapedGlyphEntry {
codepoint: (*glyph_info_i).codepoint as GlyphId,
advance: x_advance,
offset: offset,
}
}
}
}
#[derive(Debug)]
pub struct Shaper {
hb_face: *mut hb_face_t,
hb_font: *mut hb_font_t,
font: *const Font,
}
impl Drop for Shaper {
fn drop(&mut self) {
unsafe {
assert!(!self.hb_face.is_null());
hb_face_destroy(self.hb_face);
assert!(!self.hb_font.is_null());
hb_font_destroy(self.hb_font);
}
}
}
impl Shaper {
pub fn new(font: *const Font) -> Shaper {
unsafe {<|fim▁hole|> );
let hb_font: *mut hb_font_t = hb_font_create(hb_face);
// Set points-per-em. if zero, performs no hinting in that direction.
let pt_size = (*font).actual_pt_size.to_f64_px();
hb_font_set_ppem(hb_font, pt_size as c_uint, pt_size as c_uint);
// Set scaling. Note that this takes 16.16 fixed point.
hb_font_set_scale(
hb_font,
Shaper::float_to_fixed(pt_size) as c_int,
Shaper::float_to_fixed(pt_size) as c_int,
);
// configure static function callbacks.
hb_font_set_funcs(
hb_font,
HB_FONT_FUNCS.0,
font as *mut Font as *mut c_void,
None,
);
Shaper {
hb_face: hb_face,
hb_font: hb_font,
font: font,
}
}
}
fn float_to_fixed(f: f64) -> i32 {
float_to_fixed(16, f)
}
fn fixed_to_float(i: hb_position_t) -> f64 {
fixed_to_float(16, i)
}
}
impl ShaperMethods for Shaper {
/// Calculate the layout metrics associated with the given text when painted in a specific
/// font.
fn shape_text(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) {
unsafe {
let hb_buffer: *mut hb_buffer_t = hb_buffer_create();
hb_buffer_set_direction(
hb_buffer,
if options.flags.contains(ShapingFlags::RTL_FLAG) {
HB_DIRECTION_RTL
} else {
HB_DIRECTION_LTR
},
);
hb_buffer_set_script(hb_buffer, options.script.to_hb_script());
hb_buffer_add_utf8(
hb_buffer,
text.as_ptr() as *const c_char,
text.len() as c_int,
0,
text.len() as c_int,
);
let mut features = Vec::new();
if options
.flags
.contains(ShapingFlags::IGNORE_LIGATURES_SHAPING_FLAG)
{
features.push(hb_feature_t {
tag: LIGA,
value: 0,
start: 0,
end: hb_buffer_get_length(hb_buffer),
})
}
if options
.flags
.contains(ShapingFlags::DISABLE_KERNING_SHAPING_FLAG)
{
features.push(hb_feature_t {
tag: KERN,
value: 0,
start: 0,
end: hb_buffer_get_length(hb_buffer),
})
}
hb_shape(
self.hb_font,
hb_buffer,
features.as_mut_ptr(),
features.len() as u32,
);
self.save_glyph_results(text, options, glyphs, hb_buffer);
hb_buffer_destroy(hb_buffer);
}
}
}
impl Shaper {
fn save_glyph_results(
&self,
text: &str,
options: &ShapingOptions,
glyphs: &mut GlyphStore,
buffer: *mut hb_buffer_t,
) {
let glyph_data = ShapedGlyphData::new(buffer);
let glyph_count = glyph_data.len();
let byte_max = text.len();
debug!(
"Shaped text[byte count={}], got back {} glyph info records.",
byte_max, glyph_count
);
// make map of what chars have glyphs
let mut byte_to_glyph = vec![NO_GLYPH; byte_max];
debug!("(glyph idx) -> (text byte offset)");
for i in 0..glyph_data.len() {
let loc = glyph_data.byte_offset_of_glyph(i) as usize;
if loc < byte_max {
byte_to_glyph[loc] = i as i32;
} else {
debug!(
"ERROR: tried to set out of range byte_to_glyph: idx={}, glyph idx={}",
loc, i
);
}
debug!("{} -> {}", i, loc);
}
debug!("text: {:?}", text);
debug!("(char idx): char->(glyph index):");
for (i, ch) in text.char_indices() {
debug!("{}: {:?} --> {}", i, ch, byte_to_glyph[i]);
}
let mut glyph_span = 0..0;
let mut byte_range = 0..0;
let mut y_pos = Au(0);
// main loop over each glyph. each iteration usually processes 1 glyph and 1+ chars.
// in cases with complex glyph-character associations, 2+ glyphs and 1+ chars can be
// processed.
while glyph_span.start < glyph_count {
debug!("Processing glyph at idx={}", glyph_span.start);
glyph_span.end = glyph_span.start;
byte_range.end = glyph_data.byte_offset_of_glyph(glyph_span.start) as usize;
while byte_range.end < byte_max {
byte_range.end += 1;
// Extend the byte range to include any following byte without its own glyph.
while byte_range.end < byte_max && byte_to_glyph[byte_range.end] == NO_GLYPH {
byte_range.end += 1;
}
// Extend the glyph range to include all glyphs covered by bytes processed so far.
let mut max_glyph_idx = glyph_span.end;
for glyph_idx in &byte_to_glyph[byte_range.clone()] {
if *glyph_idx != NO_GLYPH {
max_glyph_idx = cmp::max(*glyph_idx as usize + 1, max_glyph_idx);
}
}
if max_glyph_idx > glyph_span.end {
glyph_span.end = max_glyph_idx;
debug!("Extended glyph span to {:?}", glyph_span);
}
// if there's just one glyph, then we don't need further checks.
if glyph_span.len() == 1 {
break;
}
// if no glyphs were found yet, extend the char byte range more.
if glyph_span.len() == 0 {
continue;
}
// If byte_range now includes all the byte offsets found in glyph_span, then we
// have found a contiguous "cluster" and can stop extending it.
let mut all_glyphs_are_within_cluster: bool = true;
for j in glyph_span.clone() {
let loc = glyph_data.byte_offset_of_glyph(j) as usize;
if !(byte_range.start <= loc && loc < byte_range.end) {
all_glyphs_are_within_cluster = false;
break;
}
}
if all_glyphs_are_within_cluster {
break;
}
// Otherwise, the bytes we have seen so far correspond to a non-contiguous set of
// glyphs. Keep extending byte_range until we fill in all the holes in the glyph
// span or reach the end of the text.
}
assert!(byte_range.len() > 0);
assert!(glyph_span.len() > 0);
// Now byte_range is the ligature clump formed by the glyphs in glyph_span.
// We will save these glyphs to the glyph store at the index of the first byte.
let byte_idx = ByteIndex(byte_range.start as isize);
if glyph_span.len() == 1 {
// Fast path: 1-to-1 mapping of byte offset to single glyph.
//
// TODO(Issue #214): cluster ranges need to be computed before
// shaping, and then consulted here.
// for now, just pretend that every character is a cluster start.
// (i.e., pretend there are no combining character sequences).
// 1-to-1 mapping of character to glyph also treated as ligature start.
//
// NB: When we acquire the ability to handle ligatures that cross word boundaries,
// we'll need to do something special to handle `word-spacing` properly.
let character = text[byte_range.clone()].chars().next().unwrap();
if is_bidi_control(character) {
// Don't add any glyphs for bidi control chars
} else if character == '\t' {
// Treat tabs in pre-formatted text as a fixed number of spaces.
//
// TODO: Proper tab stops.
const TAB_COLS: i32 = 8;
let (space_glyph_id, space_advance) = glyph_space_advance(self.font);
let advance = Au::from_f64_px(space_advance) * TAB_COLS;
let data =
GlyphData::new(space_glyph_id, advance, Default::default(), true, true);
glyphs.add_glyph_for_byte_index(byte_idx, character, &data);
} else {
let shape = glyph_data.entry_for_glyph(glyph_span.start, &mut y_pos);
let advance = self.advance_for_shaped_glyph(shape.advance, character, options);
let data = GlyphData::new(shape.codepoint, advance, shape.offset, true, true);
glyphs.add_glyph_for_byte_index(byte_idx, character, &data);
}
} else {
// collect all glyphs to be assigned to the first character.
let mut datas = vec![];
for glyph_i in glyph_span.clone() {
let shape = glyph_data.entry_for_glyph(glyph_i, &mut y_pos);
datas.push(GlyphData::new(
shape.codepoint,
shape.advance,
shape.offset,
true, // treat as cluster start
glyph_i > glyph_span.start,
));
// all but first are ligature continuations
}
// now add the detailed glyph entry.
glyphs.add_glyphs_for_byte_index(byte_idx, &datas);
}
glyph_span.start = glyph_span.end;
byte_range.start = byte_range.end;
}
// this must be called after adding all glyph data; it sorts the
// lookup table for finding detailed glyphs by associated char index.
glyphs.finalize_changes();
}
fn advance_for_shaped_glyph(
&self,
mut advance: Au,
character: char,
options: &ShapingOptions,
) -> Au {
if let Some(letter_spacing) = options.letter_spacing {
advance = advance + letter_spacing;
};
// CSS 2.1 § 16.4 states that "word spacing affects each space (U+0020) and non-breaking
// space (U+00A0) left in the text after the white space processing rules have been
// applied. The effect of the property on other word-separator characters is undefined."
// We elect to only space the two required code points.
if character == ' ' || character == '\u{a0}' {
// https://drafts.csswg.org/css-text-3/#word-spacing-property
let (length, percent) = options.word_spacing;
advance =
(advance + length) + Au::new((advance.0 as f32 * percent.into_inner()) as i32);
}
advance
}
}
/// Callbacks from Harfbuzz when font map and glyph advance lookup needed.
struct FontFuncs(*mut hb_font_funcs_t);
unsafe impl Sync for FontFuncs {}
lazy_static! {
static ref HB_FONT_FUNCS: FontFuncs = unsafe {
let hb_funcs = hb_font_funcs_create();
hb_font_funcs_set_nominal_glyph_func(hb_funcs, Some(glyph_func), ptr::null_mut(), None);
hb_font_funcs_set_glyph_h_advance_func(
hb_funcs,
Some(glyph_h_advance_func),
ptr::null_mut(),
None,
);
hb_font_funcs_set_glyph_h_kerning_func(
hb_funcs,
Some(glyph_h_kerning_func),
ptr::null_mut(),
None,
);
FontFuncs(hb_funcs)
};
}
/// HarfBuzz callback: look up the glyph id for a Unicode code point.
///
/// `font_data` is the `*const Font` registered with the hb font; `unicode`
/// is the code point to map; on success the glyph id is written through
/// `glyph` and a non-zero `hb_bool_t` is returned.
extern "C" fn glyph_func(
    _: *mut hb_font_t,
    font_data: *mut c_void,
    unicode: hb_codepoint_t,
    glyph: *mut hb_codepoint_t,
    _: *mut c_void,
) -> hb_bool_t {
    let font: *const Font = font_data as *const Font;
    assert!(!font.is_null());

    // `char::from_u32` returns None for surrogates and out-of-range values.
    // The previous code called `.unwrap()` here, which could panic across
    // the C FFI boundary (undefined behavior) if HarfBuzz ever handed us an
    // invalid scalar value; report "no glyph" instead.
    let index = match char::from_u32(unicode) {
        Some(c) => unsafe { (*font).glyph_index(c) },
        None => None,
    };

    match index {
        Some(g) => {
            unsafe { *glyph = g as hb_codepoint_t };
            true as hb_bool_t
        },
        None => false as hb_bool_t,
    }
}
/// HarfBuzz callback: horizontal advance for `glyph`, returned in
/// HarfBuzz's fixed-point position units (see `Shaper::float_to_fixed`).
extern "C" fn glyph_h_advance_func(
    _: *mut hb_font_t,
    font_data: *mut c_void,
    glyph: hb_codepoint_t,
    _: *mut c_void,
) -> hb_position_t {
    // NOTE(review): assumes `font_data` is the `*mut Font` registered when
    // the hb font object was created -- confirm at the creation site.
    let font: *mut Font = font_data as *mut Font;
    assert!(!font.is_null());

    unsafe {
        let advance = (*font).glyph_h_advance(glyph as GlyphId);
        Shaper::float_to_fixed(advance)
    }
}
/// Returns the glyph id and horizontal advance of the space character
/// (U+0020) for `font`. Panics if the font has no glyph for a space.
fn glyph_space_advance(font: *const Font) -> (hb_codepoint_t, f64) {
    let space_glyph: hb_codepoint_t = match unsafe { (*font).glyph_index(' ') } {
        Some(glyph_id) => glyph_id as hb_codepoint_t,
        None => panic!("No space info"),
    };
    let space_advance = unsafe { (*font).glyph_h_advance(space_glyph as GlyphId) };
    (space_glyph, space_advance)
}
/// HarfBuzz callback: kerning adjustment between two adjacent glyphs,
/// returned in HarfBuzz's fixed-point position units.
extern "C" fn glyph_h_kerning_func(
    _: *mut hb_font_t,
    font_data: *mut c_void,
    first_glyph: hb_codepoint_t,
    second_glyph: hb_codepoint_t,
    _: *mut c_void,
) -> hb_position_t {
    // NOTE(review): assumes `font_data` is the `*mut Font` registered when
    // the hb font object was created -- confirm at the creation site.
    let font: *mut Font = font_data as *mut Font;
    assert!(!font.is_null());

    unsafe {
        let advance = (*font).glyph_h_kerning(first_glyph as GlyphId, second_glyph as GlyphId);
        Shaper::float_to_fixed(advance)
    }
}
// Callback to get a font table out of a font.
//
// HarfBuzz calls this whenever it needs raw table data (e.g. GSUB/GPOS) for
// `tag`. We box the `FontTable` and leak it into the blob; the matching
// `destroy_blob_func` below reclaims it when HarfBuzz releases the blob.
extern "C" fn font_table_func(
    _: *mut hb_face_t,
    tag: hb_tag_t,
    user_data: *mut c_void,
) -> *mut hb_blob_t {
    unsafe {
        // NB: These asserts have security implications.
        let font = user_data as *const Font;
        assert!(!font.is_null());

        // TODO(Issue #197): reuse font table data, which will change the unsound trickery here.
        match (*font).table_for_tag(tag as FontTableTag) {
            // No such table in this font: a null blob tells HarfBuzz so.
            None => ptr::null_mut(),
            Some(font_table) => {
                // `Box::into_raw` intentionally leaks the FontTable so we don't destroy the buffer
                // while HarfBuzz is using it. When HarfBuzz is done with the buffer, it will pass
                // this raw pointer back to `destroy_blob_func` which will deallocate the Box.
                let font_table_ptr = Box::into_raw(Box::new(font_table));

                let buf = (*font_table_ptr).buffer();
                // HarfBuzz calls `destroy_blob_func` when the buffer is no longer needed.
                // The blob is read-only and borrows `buf`, which stays alive
                // because the boxed table is not freed until then.
                let blob = hb_blob_create(
                    buf.as_ptr() as *const c_char,
                    buf.len() as c_uint,
                    HB_MEMORY_MODE_READONLY,
                    font_table_ptr as *mut c_void,
                    Some(destroy_blob_func),
                );
                assert!(!blob.is_null());
                blob
            },
        }
    }
}
/// HarfBuzz blob destructor: reclaims the `FontTable` leaked via
/// `Box::into_raw` in `font_table_func`, once HarfBuzz is done with the blob.
extern "C" fn destroy_blob_func(font_table_ptr: *mut c_void) {
    unsafe {
        // Rebuild the Box so the table (and its buffer) is dropped here.
        drop(Box::from_raw(font_table_ptr as *mut FontTable));
    }
}<|fim▁end|> | let hb_face: *mut hb_face_t = hb_face_create_for_tables(
Some(font_table_func),
font as *const c_void as *mut c_void,
None, |
<|file_name|>nfs4.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2018-2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// written by Victor Julien
use nom;
use nom::number::streaming::be_u32;
use crate::core::*;
use crate::nfs::nfs::*;
use crate::nfs::types::*;
use crate::nfs::rpc_records::*;
use crate::nfs::nfs_records::*;
use crate::nfs::nfs4_records::*;
use crate::kerberos::{parse_kerberos5_request, Kerberos5Ticket, SecBlobError};
// Parses a length-prefixed RPCSEC_GSS blob: a big-endian u32 byte count
// followed by exactly that many bytes holding a Kerberos 5 AP-REQ.
named!(parse_req_gssapi<&[u8], Kerberos5Ticket, SecBlobError>,
   do_parse!(
        len: be_u32
    >>  ap: flat_map!(take!(len), parse_kerberos5_request)
    >> ( ap )
));
impl NFSState {
/* normal write: PUTFH (file handle), WRITE (write opts/data). File handle
 * is not part of the write record itself so we pass it in here. */
fn write_v4<'b>(&mut self, r: &RpcPacket<'b>, w: &Nfs4RequestWrite<'b>, fh: &'b[u8])
{
    // for now assume that stable FILE_SYNC flag (value 2) means a single chunk
    let is_last = w.stable == 2;
    SCLogDebug!("is_last {}", is_last);

    // XDR pads opaque data to a 4 byte boundary; compute the trailing fill.
    let mut fill_bytes = 0;
    let pad = w.write_len % 4;
    if pad != 0 {
        fill_bytes = 4 - pad;
    }

    let file_handle = fh.to_vec();
    // Resolve the handle to a name if we have seen it before (e.g. via OPEN).
    let file_name = if let Some(name) = self.namemap.get(fh) {
        SCLogDebug!("WRITE name {:?}", name);
        name.to_vec()
    } else {
        SCLogDebug!("WRITE object {:?} not found", w.stateid.data);
        Vec::new()
    };

    // First try to append this chunk to an existing file transaction
    // for the same handle.
    let found = match self.get_file_tx_by_handle(&file_handle, STREAM_TOSERVER) {
        Some((tx, files, flags)) => {
            if let Some(NFSTransactionTypeData::FILE(ref mut tdf)) = tx.type_data {
                filetracker_newchunk(&mut tdf.file_tracker, files, flags,
                        &file_name, w.data, w.offset,
                        w.write_len, fill_bytes as u8, is_last, &r.hdr.xid);
                tdf.chunk_count += 1;
                if is_last {
                    tdf.file_last_xid = r.hdr.xid;
                    tx.is_last = true;
                    tx.response_done = true;
                }
            }
            true
        },
        None => false,
    };
    // No open transaction for this handle: start a new file transaction.
    if !found {
        let (tx, files, flags) = self.new_file_tx(&file_handle, &file_name, STREAM_TOSERVER);
        if let Some(NFSTransactionTypeData::FILE(ref mut tdf)) = tx.type_data {
            filetracker_newchunk(&mut tdf.file_tracker, files, flags,
                    &file_name, w.data, w.offset,
                    w.write_len, fill_bytes as u8, is_last, &r.hdr.xid);
            tx.procedure = NFSPROC4_WRITE;
            tx.xid = r.hdr.xid;
            tx.is_first = true;
            tx.nfs_version = r.progver as u16;
            if is_last {
                tdf.file_last_xid = r.hdr.xid;
                tx.is_last = true;
                tx.request_done = true;
            }
        }
    }
    // Record how much of this WRITE's payload is still expected in
    // follow-up records on the to-server stream.
    self.ts_chunk_xid = r.hdr.xid;
    let file_data_len = w.data.len() as u32 - fill_bytes as u32;
    self.ts_chunk_left = w.write_len as u32 - file_data_len as u32;
}
/// COMMIT: the client is flushing its writes. Close the file tracker for
/// the transfer on this file handle and mark the transaction finished.
fn commit_v4<'b>(&mut self, r: &RpcPacket<'b>, fh: &'b[u8])
{
    SCLogDebug!("COMMIT, closing shop");

    let file_handle = fh.to_vec();
    if let Some((tx, files, flags)) = self.get_file_tx_by_handle(&file_handle, STREAM_TOSERVER) {
        if let Some(NFSTransactionTypeData::FILE(ref mut tdf)) = tx.type_data {
            tdf.file_tracker.close(files, flags);
            tdf.file_last_xid = r.hdr.xid;
            tx.is_last = true;
            tx.request_done = true;
        }
    }
}
fn new_tx_v4<'b>(&mut self, r: &RpcPacket<'b>,
xidmap: &NFSRequestXidMap, procedure: u32,
_aux_opcodes: &Vec<u32>)<|fim▁hole|> tx.procedure = procedure;
tx.request_done = true;
tx.file_name = xidmap.file_name.to_vec();
tx.nfs_version = r.progver as u16;
tx.file_handle = xidmap.file_handle.to_vec();
tx.auth_type = r.creds_flavor;
match r.creds {
RpcRequestCreds::Unix(ref u) => {
tx.request_machine_name = u.machine_name_buf.to_vec();
tx.request_uid = u.uid;
tx.request_gid = u.gid;
},
_ => { },
}
SCLogDebug!("NFSv4: TX created: ID {} XID {} PROCEDURE {}",
tx.id, tx.xid, tx.procedure);
self.transactions.push(tx);
}
/* A normal READ request looks like: PUTFH (file handle) READ (read opts).
* We need the file handle for the READ.
*/
/// Walks the operations of a v4 COMPOUND request, updating `xidmap` (so the
/// reply side can interpret the response) and driving file tracking for
/// READ/WRITE/COMMIT. PUTFH sets the "current" file handle used by the
/// operations that follow it, mirroring NFSv4 compound semantics.
fn compound_request<'b>(&mut self, r: &RpcPacket<'b>,
        cr: &Nfs4RequestCompoundRecord<'b>,
        xidmap: &mut NFSRequestXidMap)
{
    // Handle set by the most recent PUTFH; consumed by READ/WRITE/etc.
    let mut last_putfh : Option<&'b[u8]> = None;
    // The "main" opcode (CREATE/REMOVE) for which a transaction is created.
    let mut main_opcode : u32 = 0;
    let mut aux_opcodes : Vec<u32> = Vec::new();

    for c in &cr.commands {
        SCLogDebug!("c {:?}", c);
        match c {
            &Nfs4RequestContent::PutFH(ref rd) => {
                last_putfh = Some(rd.value);
                aux_opcodes.push(NFSPROC4_PUTFH);
            }
            &Nfs4RequestContent::Read(ref rd) => {
                SCLogDebug!("READv4: {:?}", rd);
                if let Some(fh) = last_putfh {
                    // Remember offset and handle so the READ reply can be
                    // attributed to the right file.
                    xidmap.chunk_offset = rd.offset;
                    xidmap.file_handle = fh.to_vec();
                    self.xidmap_handle2name(xidmap);
                }
            }
            &Nfs4RequestContent::Open(ref rd) => {
                SCLogDebug!("OPENv4: {}", String::from_utf8_lossy(&rd.filename));
                xidmap.file_name = rd.filename.to_vec();
            }
            &Nfs4RequestContent::Lookup(ref rd) => {
                SCLogDebug!("LOOKUPv4: {}", String::from_utf8_lossy(&rd.filename));
                xidmap.file_name = rd.filename.to_vec();
            }
            &Nfs4RequestContent::Write(ref rd) => {
                SCLogDebug!("WRITEv4: {:?}", rd);
                if let Some(fh) = last_putfh {
                    self.write_v4(r, rd, fh);
                }
            }
            &Nfs4RequestContent::Commit => {
                SCLogDebug!("COMMITv4");
                if let Some(fh) = last_putfh {
                    self.commit_v4(r, fh);
                }
            }
            &Nfs4RequestContent::Close(ref rd) => {
                SCLogDebug!("CLOSEv4: {:?}", rd);
            }
            &Nfs4RequestContent::Create(ref rd) => {
                SCLogDebug!("CREATEv4: {:?}", rd);
                if let Some(fh) = last_putfh {
                    xidmap.file_handle = fh.to_vec();
                }
                xidmap.file_name = rd.filename.to_vec();
                main_opcode = NFSPROC4_CREATE;
            }
            &Nfs4RequestContent::Remove(ref rd) => {
                SCLogDebug!("REMOVEv4: {:?}", rd);
                xidmap.file_name = rd.to_vec();
                main_opcode = NFSPROC4_REMOVE;
            }
            &Nfs4RequestContent::SetClientId(ref rd) => {
                SCLogDebug!("SETCLIENTIDv4: client id {} r_netid {} r_addr {}",
                        String::from_utf8_lossy(&rd.client_id),
                        String::from_utf8_lossy(&rd.r_netid),
                        String::from_utf8_lossy(&rd.r_addr));
            }
            // All other operations are ignored for now.
            &_ => { },
        }
    }

    // CREATE/REMOVE get their own transaction; its status is filled in by
    // the matching COMPOUND reply.
    if main_opcode != 0 {
        self.new_tx_v4(r, &xidmap, main_opcode, &aux_opcodes);
    }
}
/// complete request record
///
/// Entry point for a fully-reassembled NFSv4 RPC request. Unwraps an
/// optional RPCSEC_GSS integrity envelope, parses COMPOUND procedures,
/// and stores the per-XID state so the reply can be matched later.
pub fn process_request_record_v4<'b>(&mut self, r: &RpcPacket<'b>) {
    SCLogDebug!("NFSv4 REQUEST {} procedure {} ({}) blob size {}",
            r.hdr.xid, r.procedure, self.requestmap.len(), r.prog_data.len());

    let mut xidmap = NFSRequestXidMap::new(r.progver, r.procedure, 0);

    if r.procedure == NFSPROC4_NULL {
        // NULL with GSS-API creds procedure 1 carries an RPCSEC_GSS_INIT
        // (Kerberos ticket); parse it for the debug log only.
        if let RpcRequestCreds::GssApi(ref creds) = r.creds {
            if creds.procedure == 1 {
                let _x = parse_req_gssapi(r.prog_data);
                SCLogDebug!("RPCSEC_GSS_INIT {:?}", _x);
            }
        }
    } else if r.procedure == NFSPROC4_COMPOUND {
        let mut data = r.prog_data;
        // GSS-API with service 2 wraps the COMPOUND payload in an
        // integrity envelope; unwrap it first.
        if let RpcRequestCreds::GssApi(ref creds) = r.creds {
            if creds.procedure == 0 && creds.service == 2 {
                SCLogDebug!("GSS INTEGRITIY: {:?}", creds);
                match parse_rpc_gssapi_integrity(r.prog_data) {
                    Ok((_rem, rec)) => {
                        SCLogDebug!("GSS INTEGRITIY wrapper: {:?}", rec);
                        data = rec.data;
                        // store proc and serv for the reply
                        xidmap.gssapi_proc = creds.procedure;
                        xidmap.gssapi_service = creds.service;
                    },
                    Err(nom::Err::Incomplete(_n)) => {
                        SCLogDebug!("NFSPROC4_COMPOUND/GSS INTEGRITIY: INCOMPLETE {:?}", _n);
                        self.set_event(NFSEvent::MalformedData);
                        return;
                    },
                    Err(nom::Err::Error(_e)) |
                    Err(nom::Err::Failure(_e)) => {
                        SCLogDebug!("NFSPROC4_COMPOUND/GSS INTEGRITIY: Parsing failed: {:?}", _e);
                        self.set_event(NFSEvent::MalformedData);
                        return;
                    },
                }
            }
        }
        // Parse and dispatch the COMPOUND operation list.
        match parse_nfs4_request_compound(data) {
            Ok((_, rd)) => {
                SCLogDebug!("NFSPROC4_COMPOUND: {:?}", rd);
                self.compound_request(&r, &rd, &mut xidmap);
            },
            Err(nom::Err::Incomplete(_n)) => {
                SCLogDebug!("NFSPROC4_COMPOUND: INCOMPLETE {:?}", _n);
                self.set_event(NFSEvent::MalformedData);
            },
            Err(nom::Err::Error(_e)) |
            Err(nom::Err::Failure(_e)) => {
                SCLogDebug!("NFSPROC4_COMPOUND: Parsing failed: {:?}", _e);
                self.set_event(NFSEvent::MalformedData);
            },
        };
    }
    // Remember this request by XID so the reply handler can find it.
    self.requestmap.insert(r.hdr.xid, xidmap);
}
/// Walks the operations of a v4 COMPOUND reply. READ results are fed into
/// the generic read-record handling, OPEN+GETFH pairs populate the
/// handle-to-name map, and CREATE/REMOVE status closes the matching tx.
fn compound_response<'b>(&mut self, r: &RpcReplyPacket<'b>,
        cr: &Nfs4ResponseCompoundRecord<'b>,
        xidmap: &mut NFSRequestXidMap)
{
    // Set by OPEN so a following GETFH records the filename for the handle.
    let mut insert_filename_with_getfh = false;
    // Status of the "main" opcode (CREATE/REMOVE), if any.
    let mut main_opcode_status : u32 = 0;
    let mut main_opcode_status_set : bool = false;

    for c in &cr.commands {
        SCLogDebug!("c {:?}", c);
        match c {
            &Nfs4ResponseContent::ReadDir(s, ref rd) => {
                if let &Some(ref rd) = rd {
                    SCLogDebug!("READDIRv4: status {} eof {}", s, rd.eof);

                    for d in &rd.listing {
                        if let &Some(ref d) = d {
                            SCLogDebug!("READDIRv4: dir {}", String::from_utf8_lossy(&d.name));
                        }
                    }
                }
            }
            &Nfs4ResponseContent::Remove(s) => {
                SCLogDebug!("REMOVE4: status {}", s);
                main_opcode_status = s;
                main_opcode_status_set = true;
            },
            &Nfs4ResponseContent::Create(s) => {
                SCLogDebug!("CREATE4: status {}", s);
                main_opcode_status = s;
                main_opcode_status_set = true;
            },
            &Nfs4ResponseContent::Read(s, ref rd) => {
                if let &Some(ref rd) = rd {
                    SCLogDebug!("READ4: xidmap {:?} status {} data {}", xidmap, s, rd.data.len());

                    // convert record to generic read reply
                    let reply = NfsReplyRead {
                        status: s,
                        attr_follows: 0,
                        attr_blob: &[],
                        count: rd.count,
                        eof: rd.eof,
                        data_len: rd.data.len() as u32,
                        data: rd.data,
                    };
                    self.process_read_record(r, &reply, Some(&xidmap));
                }
            },
            &Nfs4ResponseContent::Open(s, ref rd) => {
                if let &Some(ref rd) = rd {
                    SCLogDebug!("OPENv4: status {} opendata {:?}", s, rd);
                    insert_filename_with_getfh = true;
                }
            },
            &Nfs4ResponseContent::GetFH(_s, ref rd) => {
                if let &Some(ref rd) = rd {
                    if insert_filename_with_getfh {
                        // Map the freshly-opened handle to the filename from
                        // the request side.
                        self.namemap.insert(rd.value.to_vec(),
                                xidmap.file_name.to_vec());
                    }
                }
            },
            &Nfs4ResponseContent::PutRootFH(s) => {
                if s == NFS4_OK && xidmap.file_name.len() == 0 {
                    // Give the mount root a synthetic name for logging.
                    xidmap.file_name = b"<mount_root>".to_vec();
                    SCLogDebug!("filename {:?}", xidmap.file_name);
                }
            },
            // All other operations are ignored for now.
            &_ => { },
        }
    }
    if main_opcode_status_set {
        let resp_handle = Vec::new();
        self.mark_response_tx_done(r.hdr.xid, r.reply_state, main_opcode_status, &resp_handle);
    }
}
/// Entry point for a fully-reassembled NFSv4 RPC reply. Unwraps the
/// RPCSEC_GSS integrity envelope when the matching request used one
/// (recorded in `xidmap`), then parses and dispatches the COMPOUND result.
pub fn process_reply_record_v4<'b>(&mut self, r: &RpcReplyPacket<'b>,
        xidmap: &mut NFSRequestXidMap) {
    if xidmap.procedure == NFSPROC4_COMPOUND {
        let mut data = r.prog_data;

        // The request stored gssapi proc/service; service 2 means the
        // reply payload is wrapped in an integrity envelope too.
        if xidmap.gssapi_proc == 0 && xidmap.gssapi_service == 2 {
            SCLogDebug!("GSS INTEGRITIY as set by call: {:?}", xidmap);
            match parse_rpc_gssapi_integrity(r.prog_data) {
                Ok((_rem, rec)) => {
                    SCLogDebug!("GSS INTEGRITIY wrapper: {:?}", rec);
                    data = rec.data;
                },
                Err(nom::Err::Incomplete(_n)) => {
                    SCLogDebug!("NFSPROC4_COMPOUND/GSS INTEGRITIY: INCOMPLETE {:?}", _n);
                    self.set_event(NFSEvent::MalformedData);
                    return;
                },
                Err(nom::Err::Error(_e)) |
                Err(nom::Err::Failure(_e)) => {
                    SCLogDebug!("NFSPROC4_COMPOUND/GSS INTEGRITIY: Parsing failed: {:?}", _e);
                    self.set_event(NFSEvent::MalformedData);
                    return;
                },
            }
        }
        match parse_nfs4_response_compound(data) {
            Ok((_, rd)) => {
                SCLogDebug!("COMPOUNDv4: {:?}", rd);
                self.compound_response(&r, &rd, xidmap);
            },
            Err(nom::Err::Incomplete(_)) => {
                self.set_event(NFSEvent::MalformedData);
            },
            Err(nom::Err::Error(_e)) |
            Err(nom::Err::Failure(_e)) => {
                SCLogDebug!("Parsing failed: {:?}", _e);
                self.set_event(NFSEvent::MalformedData);
            },
        };
    }
}
}<|fim▁end|> | {
let mut tx = self.new_tx();
tx.xid = r.hdr.xid; |
<|file_name|>deque.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A (mostly) lock-free concurrent work-stealing deque
//!
//! This module contains an implementation of the Chase-Lev work stealing deque
//! described in "Dynamic Circular Work-Stealing Deque". The implementation is
//! heavily based on the pseudocode found in the paper.
//!
//! This implementation does not want to have the restriction of a garbage
//! collector for reclamation of buffers, and instead it uses a shared pool of
//! buffers. This shared pool is required for correctness in this
//! implementation.
//!
//! The only lock-synchronized portions of this deque are the buffer allocation
//! and deallocation portions. Otherwise all operations are lock-free.
//!
//! # Example
//!
//! use std::rt::deque::BufferPool;
//!
//! let mut pool = BufferPool::new();
//! let (mut worker, mut stealer) = pool.deque();
//!
//! // Only the worker may push/pop
//! worker.push(1);
//! worker.pop();
//!
//! // Stealers take data from the other end of the deque
//! worker.push(1);
//! stealer.steal();
//!
//! // Stealers can be cloned to have many stealers stealing in parallel
//! worker.push(1);
//! let mut stealer2 = stealer.clone();
//! stealer2.steal();
// NB: the "buffer pool" strategy is not done for speed, but rather for
// correctness. For more info, see the comment on `swap_buffer`
// XXX: all atomic operations in this module use a SeqCst ordering. That is
// probably overkill
use cast;
use clone::Clone;
use iter::range;
use kinds::Send;
use libc;
use mem;
use ops::Drop;
use option::{Option, Some, None};
use ptr;
use unstable::atomics::{AtomicInt, AtomicPtr, SeqCst};
use unstable::sync::{UnsafeArc, Exclusive};
// Once the queue is less than 1/K full, then it will be downsized. Note that
// the deque requires that this number be less than 2.
static K: int = 4;
// Minimum number of bits that a buffer size should be. No buffer will resize to
// under this value, and all deques will initially contain a buffer of this
// size.
//
// The size in question is 1 << MIN_BITS
static MIN_BITS: int = 7;
struct Deque<T> {
bottom: AtomicInt,
top: AtomicInt,
array: AtomicPtr<Buffer<T>>,
pool: BufferPool<T>,
}
/// Worker half of the work-stealing deque. This worker has exclusive access to
/// one side of the deque, and uses `push` and `pop` method to manipulate it.
///
/// There may only be one worker per deque.
pub struct Worker<T> {
priv deque: UnsafeArc<Deque<T>>,
}
/// The stealing half of the work-stealing deque. Stealers have access to the
/// opposite end of the deque from the worker, and they only have access to the
/// `steal` method.
pub struct Stealer<T> {
priv deque: UnsafeArc<Deque<T>>,
}
/// When stealing some data, this is an enumeration of the possible outcomes.
#[deriving(Eq)]
pub enum Stolen<T> {
/// The deque was empty at the time of stealing
Empty,
/// The stealer lost the race for stealing data, and a retry may return more
/// data.
Abort,
/// The stealer has successfully stolen some data.
Data(T),
}
/// The allocation pool for buffers used by work-stealing deques. Right now this
/// structure is used for reclamation of memory after it is no longer in use by
/// deques.
///
/// This data structure is protected by a mutex, but it is rarely used. Deques
/// will only use this structure when allocating a new buffer or deallocating a
/// previous one.
pub struct BufferPool<T> {
priv pool: Exclusive<~[~Buffer<T>]>,
}
/// An internal buffer used by the chase-lev deque. This structure is actually
/// implemented as a circular buffer, and is used as the intermediate storage of
/// the data in the deque.
///
/// This type is implemented with *T instead of ~[T] for two reasons:
///
/// 1. There is nothing safe about using this buffer. This easily allows the
/// same value to be read twice in to rust, and there is nothing to
/// prevent this. The usage by the deque must ensure that one of the
/// values is forgotten. Furthermore, we only ever want to manually run
/// destructors for values in this buffer (on drop) because the bounds
/// are defined by the deque it's owned by.
///
/// 2. We can certainly avoid bounds checks using *T instead of ~[T], although
/// LLVM is probably pretty good at doing this already.
struct Buffer<T> {
storage: *T,
log_size: int,
}
impl<T: Send> BufferPool<T> {
    /// Allocates a new buffer pool which in turn can be used to allocate new
    /// deques.
    pub fn new() -> BufferPool<T> {
        BufferPool { pool: Exclusive::new(~[]) }
    }

    /// Allocates a new work-stealing deque which will send/receiving memory to
    /// and from this buffer pool.
    pub fn deque(&mut self) -> (Worker<T>, Stealer<T>) {
        let (a, b) = UnsafeArc::new2(Deque::new(self.clone()))
;
        (Worker { deque: a }, Stealer { deque: b })
    }

    // Hands out a buffer of at least 1 << bits elements, reusing a pooled
    // buffer when one is big enough (the pool is kept sorted by size, so
    // this is a first-fit scan), otherwise allocating a fresh one.
    fn alloc(&mut self, bits: int) -> ~Buffer<T> {
        unsafe {
            self.pool.with(|pool| {
                match pool.iter().position(|x| x.size() >= (1 << bits)) {
                    Some(i) => pool.remove(i),
                    None => ~Buffer::new(bits)
                }
            })
        }
    }

    // Returns a buffer to the pool, inserting it so the pool stays ordered
    // by ascending size (which alloc's first-fit scan relies on).
    fn free(&mut self, buf: ~Buffer<T>) {
        unsafe {
            let mut buf = Some(buf);
            self.pool.with(|pool| {
                let buf = buf.take_unwrap();
                match pool.iter().position(|v| v.size() > buf.size()) {
                    Some(i) => pool.insert(i, buf),
                    None => pool.push(buf),
                }
            })
        }
    }
}
impl<T: Send> Clone for BufferPool<T> {
fn clone(&self) -> BufferPool<T> { BufferPool { pool: self.pool.clone() } }
}
impl<T: Send> Worker<T> {
/// Pushes data onto the front of this work queue.
pub fn push(&mut self, t: T) {
unsafe { (*self.deque.get()).push(t) }
}
/// Pops data off the front of the work queue, returning `None` on an empty
/// queue.
pub fn pop(&mut self) -> Option<T> {
unsafe { (*self.deque.get()).pop() }
}
/// Gets access to the buffer pool that this worker is attached to. This can
/// be used to create more deques which share the same buffer pool as this
/// deque.
pub fn pool<'a>(&'a mut self) -> &'a mut BufferPool<T> {
unsafe { &mut (*self.deque.get()).pool }
}
}
impl<T: Send> Stealer<T> {
/// Steals work off the end of the queue (opposite of the worker's end)
pub fn steal(&mut self) -> Stolen<T> {
unsafe { (*self.deque.get()).steal() }
}
/// Gets access to the buffer pool that this stealer is attached to. This
/// can be used to create more deques which share the same buffer pool as
/// this deque.
pub fn pool<'a>(&'a mut self) -> &'a mut BufferPool<T> {
unsafe { &mut (*self.deque.get()).pool }
}
}
impl<T: Send> Clone for Stealer<T> {
fn clone(&self) -> Stealer<T> { Stealer { deque: self.deque.clone() } }
}
// Almost all of this code can be found directly in the paper so I'm not
// personally going to heavily comment what's going on here.
// Almost all of this code can be found directly in the paper so I'm not
// personally going to heavily comment what's going on here.
//
// Review notes: `bottom` is only written by the single Worker; `top` is
// advanced by stealers (and by pop when racing for the last element) via
// compare_and_swap. All orderings are SeqCst (see module note: probably
// stronger than required).
impl<T: Send> Deque<T> {
    fn new(mut pool: BufferPool<T>) -> Deque<T> {
        // Start with the minimum-size buffer; it grows on demand in push().
        let buf = pool.alloc(MIN_BITS);
        Deque {
            bottom: AtomicInt::new(0),
            top: AtomicInt::new(0),
            array: AtomicPtr::new(unsafe { cast::transmute(buf) }),
            pool: pool,
        }
    }

    // Worker-only: append at the bottom, growing the buffer when full.
    unsafe fn push(&mut self, data: T) {
        let mut b = self.bottom.load(SeqCst);
        let t = self.top.load(SeqCst);
        let mut a = self.array.load(SeqCst);
        let size = b - t;
        if size >= (*a).size() - 1 {
            // You won't find this code in the chase-lev deque paper. This is
            // alluded to in a small footnote, however. We always free a buffer
            // when growing in order to prevent leaks.
            a = self.swap_buffer(b, a, (*a).resize(b, t, 1));
            b = self.bottom.load(SeqCst);
        }
        (*a).put(b, data);
        self.bottom.store(b + 1, SeqCst);
    }

    // Worker-only: take from the bottom. When only one element remains the
    // worker races stealers for it via CAS on `top`.
    unsafe fn pop(&mut self) -> Option<T> {
        let b = self.bottom.load(SeqCst);
        let a = self.array.load(SeqCst);
        let b = b - 1;
        self.bottom.store(b, SeqCst);
        let t = self.top.load(SeqCst);
        let size = b - t;
        if size < 0 {
            // Deque was empty; restore bottom.
            self.bottom.store(t, SeqCst);
            return None;
        }
        let data = (*a).get(b);
        if size > 0 {
            // More than one element left: no race possible.
            self.maybe_shrink(b, t);
            return Some(data);
        }
        // Exactly one element: win or lose the race against stealers.
        if self.top.compare_and_swap(t, t + 1, SeqCst) == t {
            self.bottom.store(t + 1, SeqCst);
            return Some(data);
        } else {
            self.bottom.store(t + 1, SeqCst);
            cast::forget(data); // someone else stole this value
            return None;
        }
    }

    // Stealer side: take from the top, or report Empty/Abort.
    unsafe fn steal(&mut self) -> Stolen<T> {
        let t = self.top.load(SeqCst);
        let old = self.array.load(SeqCst);
        let b = self.bottom.load(SeqCst);
        let a = self.array.load(SeqCst);
        let size = b - t;
        if size <= 0 { return Empty }
        if size % (*a).size() == 0 {
            // NOTE(review): presumably detects that the buffer was swapped
            // between the loads above; if nothing changed the deque really is
            // empty, otherwise ask the caller to retry. Matches the paper's
            // pseudocode -- confirm against "Dynamic Circular Work-Stealing
            // Deque".
            if a == old && t == self.top.load(SeqCst) {
                return Empty
            }
            return Abort
        }
        let data = (*a).get(t);
        if self.top.compare_and_swap(t, t + 1, SeqCst) == t {
            Data(data)
        } else {
            cast::forget(data); // someone else stole this value
            Abort
        }
    }

    // Shrink when the deque is below 1/K of its capacity (K < 2 required,
    // see the comment on the K constant) but never below the minimum size.
    unsafe fn maybe_shrink(&mut self, b: int, t: int) {
        let a = self.array.load(SeqCst);
        if b - t < (*a).size() / K && b - t > (1 << MIN_BITS) {
            self.swap_buffer(b, a, (*a).resize(b, t, -1));
        }
    }

    // Helper routine not mentioned in the paper which is used in growing and
    // shrinking buffers to swap in a new buffer into place. As a bit of a
    // recap, the whole point that we need a buffer pool rather than just
    // calling malloc/free directly is that stealers can continue using buffers
    // after this method has called 'free' on it. The continued usage is simply
    // a read followed by a forget, but we must make sure that the memory can
    // continue to be read after we flag this buffer for reclamation.
    unsafe fn swap_buffer(&mut self, b: int, old: *mut Buffer<T>,
                          buf: Buffer<T>) -> *mut Buffer<T> {
        let newbuf: *mut Buffer<T> = cast::transmute(~buf);
        self.array.store(newbuf, SeqCst);
        let ss = (*newbuf).size();
        self.bottom.store(b + ss, SeqCst);
        let t = self.top.load(SeqCst);
        if self.top.compare_and_swap(t, t + ss, SeqCst) != t {
            self.bottom.store(b, SeqCst);
        }
        self.pool.free(cast::transmute(old));
        return newbuf;
    }
}
#[unsafe_destructor]
impl<T: Send> Drop for Deque<T> {
    fn drop(&mut self) {
        let t = self.top.load(SeqCst)
;
        let b = self.bottom.load(SeqCst);
        let a = self.array.load(SeqCst);
        // Free whatever is leftover in the dequeue, and then move the buffer
        // back into the pool. Reading each slot and dropping the value runs
        // the element destructors; no other thread can race us here since
        // drop implies exclusive access.
        for i in range(t, b) {
            let _: T = unsafe { (*a).get(i) };
        }
        self.pool.free(unsafe { cast::transmute(a) });
    }
}
impl<T: Send> Buffer<T> {
    // Raw malloc because the contents are manually managed (see the comment
    // on the Buffer type): element lifetimes are controlled by the deque,
    // not by this buffer.
    unsafe fn new(log_size: int) -> Buffer<T> {
        let size = (1 << log_size) * mem::size_of::<T>();
        let buffer = libc::malloc(size as libc::size_t);
        assert!(!buffer.is_null());
        Buffer {
            storage: buffer as *T,
            log_size: log_size,
        }
    }

    // Capacity in elements (always a power of two).
    fn size(&self) -> int { 1 << self.log_size }

    // Apparently LLVM cannot optimize (foo % (1 << bar)) into this implicitly
    fn mask(&self) -> int { (1 << self.log_size) - 1 }

    // This does not protect against loading duplicate values of the same cell,
    // nor does this clear out the contents contained within. Hence, this is a
    // very unsafe method which the caller needs to treat specially in case a
    // race is lost. Indices wrap via mask(), implementing the circular buffer.
    unsafe fn get(&self, i: int) -> T {
        ptr::read_ptr(self.storage.offset(i & self.mask()))
    }

    // Unsafe because this unsafely overwrites possibly uninitialized or
    // initialized data. The forget() prevents t's destructor running: the
    // value now lives in the buffer.
    unsafe fn put(&mut self, i: int, t: T) {
        let ptr = self.storage.offset(i & self.mask());
        ptr::copy_nonoverlapping_memory(ptr as *mut T, &t as *T, 1);
        cast::forget(t);
    }

    // Again, unsafe because this has incredibly dubious ownership violations.
    // It is assumed that this buffer is immediately dropped. Copies the live
    // range [t, b) into a buffer 2^delta times the size of this one.
    unsafe fn resize(&self, b: int, t: int, delta: int) -> Buffer<T> {
        let mut buf = Buffer::new(self.log_size + delta);
        for i in range(t, b) {
            buf.put(i, self.get(i));
        }
        return buf;
    }
}
#[unsafe_destructor]
impl<T: Send> Drop for Buffer<T> {
    fn drop(&mut self) {
        // It is assumed that all buffers are empty on drop: element
        // destructors were already run by the owning Deque, so only the raw
        // storage needs releasing here.
        unsafe { libc::free(self.storage as *libc::c_void) }
    }
}
#[cfg(test)]
mod tests {
use prelude::*;
use super::{Data, BufferPool, Abort, Empty, Worker, Stealer};
use cast;
use rt::thread::Thread;
use rand;
use rand::Rng;
use unstable::atomics::{AtomicBool, INIT_ATOMIC_BOOL, SeqCst,
AtomicUint, INIT_ATOMIC_UINT};
use vec;
#[test]
fn smoke() {
let mut pool = BufferPool::new();
let (mut w, mut s) = pool.deque();
assert_eq!(w.pop(), None);
assert_eq!(s.steal(), Empty);
w.push(1);
assert_eq!(w.pop(), Some(1));
w.push(1);
assert_eq!(s.steal(), Data(1));
w.push(1);
assert_eq!(s.clone().steal(), Data(1));
}
#[test]
fn stealpush() {
static AMT: int = 100000;
let mut pool = BufferPool::<int>::new();
let (mut w, s) = pool.deque();
let t = do Thread::start {
let mut s = s;
let mut left = AMT;
while left > 0 {
match s.steal() {
Data(i) => {
assert_eq!(i, 1);
left -= 1;
}
Abort | Empty => {}
}
}
};
for _ in range(0, AMT) {
w.push(1);
}
t.join();
}
#[test]
fn stealpush_large() {
static AMT: int = 100000;
let mut pool = BufferPool::<(int, int)>::new();
let (mut w, s) = pool.deque();
let t = do Thread::start {
let mut s = s;
let mut left = AMT;
while left > 0 {
match s.steal() {
Data((1, 10)) => { left -= 1; }
Data(..) => fail!(),
Abort | Empty => {}
}
}
};
for _ in range(0, AMT) {
w.push((1, 10));
}
t.join();
}
fn stampede(mut w: Worker<~int>, s: Stealer<~int>,
nthreads: int, amt: uint) {
for _ in range(0, amt) {
w.push(~20);
}
let mut remaining = AtomicUint::new(amt);
let unsafe_remaining: *mut AtomicUint = &mut remaining;
let threads = range(0, nthreads).map(|_| {
let s = s.clone();
do Thread::start {
unsafe {
let mut s = s;
while (*unsafe_remaining).load(SeqCst) > 0 {
match s.steal() {
Data(~20) => {
(*unsafe_remaining).fetch_sub(1, SeqCst);
}
Data(..) => fail!(),
Abort | Empty => {}
}
}
}
}
}).to_owned_vec();
while remaining.load(SeqCst) > 0 {
match w.pop() {
Some(~20) => { remaining.fetch_sub(1, SeqCst); }
Some(..) => fail!(),
None => {}
}
}
for thread in threads.move_iter() {
thread.join();
}
}
#[test]
fn run_stampede() {
let mut pool = BufferPool::<~int>::new();
let (w, s) = pool.deque();
stampede(w, s, 8, 10000);
}
#[test]
fn many_stampede() {
static AMT: uint = 4;
let mut pool = BufferPool::<~int>::new();
let threads = range(0, AMT).map(|_| {
let (w, s) = pool.deque();
do Thread::start {
stampede(w, s, 4, 10000);
}
}).to_owned_vec();
for thread in threads.move_iter() {
thread.join();
}
}
#[test]
fn stress() {
static AMT: int = 100000;
static NTHREADS: int = 8;
static mut DONE: AtomicBool = INIT_ATOMIC_BOOL;
static mut HITS: AtomicUint = INIT_ATOMIC_UINT;
let mut pool = BufferPool::<int>::new();
let (mut w, s) = pool.deque();
let threads = range(0, NTHREADS).map(|_| {
let s = s.clone();
do Thread::start {
unsafe {
let mut s = s;
loop {
match s.steal() {
Data(2) => { HITS.fetch_add(1, SeqCst); }
Data(..) => fail!(),
_ if DONE.load(SeqCst) => break,
_ => {}
}
}
}
}
}).to_owned_vec();
let mut rng = rand::task_rng();
let mut expected = 0;
while expected < AMT {
if rng.gen_range(0, 3) == 2 {
match w.pop() {
None => {}
Some(2) => unsafe { HITS.fetch_add(1, SeqCst); },
Some(_) => fail!(),
}
} else {
expected += 1;
w.push(2);
}
}
unsafe {
while HITS.load(SeqCst) < AMT as uint {
match w.pop() {
None => {}
Some(2) => { HITS.fetch_add(1, SeqCst); },
Some(_) => fail!(),
}
}
DONE.store(true, SeqCst);
}
for thread in threads.move_iter() {
thread.join();
}
assert_eq!(unsafe { HITS.load(SeqCst) }, expected as uint);
}
#[test]
#[ignore(cfg(windows))] // apparently windows scheduling is weird?
fn no_starvation() {<|fim▁hole|> let (mut w, s) = pool.deque();
let (threads, hits) = vec::unzip(range(0, NTHREADS).map(|_| {
let s = s.clone();
let box = ~AtomicUint::new(0);
let thread_box = unsafe {
*cast::transmute::<&~AtomicUint, **mut AtomicUint>(&box)
};
(do Thread::start {
unsafe {
let mut s = s;
loop {
match s.steal() {
Data((1, 2)) => {
(*thread_box).fetch_add(1, SeqCst);
}
Data(..) => fail!(),
_ if DONE.load(SeqCst) => break,
_ => {}
}
}
}
}, box)
}));
let mut rng = rand::task_rng();
let mut myhit = false;
let mut iter = 0;
'outer: loop {
for _ in range(0, rng.gen_range(0, AMT)) {
if !myhit && rng.gen_range(0, 3) == 2 {
match w.pop() {
None => {}
Some((1, 2)) => myhit = true,
Some(_) => fail!(),
}
} else {
w.push((1, 2));
}
}
iter += 1;
debug!("loop iteration {}", iter);
for (i, slot) in hits.iter().enumerate() {
let amt = slot.load(SeqCst);
debug!("thread {}: {}", i, amt);
if amt == 0 { continue 'outer; }
}
if myhit {
break
}
}
unsafe { DONE.store(true, SeqCst); }
for thread in threads.move_iter() {
thread.join();
}
}
}<|fim▁end|> | static AMT: int = 10000;
static NTHREADS: int = 4;
static mut DONE: AtomicBool = INIT_ATOMIC_BOOL;
let mut pool = BufferPool::<(int, uint)>::new(); |
<|file_name|>social_tags.py<|end_file_name|><|fim▁begin|>from django.template import Library
from django.conf import settings
# Pick a strategy for discovering the current site's domain, in order of
# preference: the django.contrib.sites framework, an explicit SITE_DOMAIN
# setting, and finally a placeholder for unconfigured installs.
if "django.contrib.sites" in settings.INSTALLED_APPS:
    from django.contrib.sites.models import Site
    current_domain = lambda: Site.objects.get_current().domain
elif getattr(settings, "SITE_DOMAIN", None):
    current_domain = lambda: settings.SITE_DOMAIN
else:
    # No site configuration available; fall back to a dummy domain.
    current_domain = lambda: "example.com"

# Template-tag registry all of this module's inclusion tags attach to.
register = Library()
def fully_qualified(url):
    """Return ``url`` as an absolute ``http://`` URL.

    - Already-absolute URLs (anything starting with "http") are returned
      unchanged.
    - Site-relative paths ("/...") are prefixed with the current domain.
    - Bare domains get an "http://" prefix.
    - Non-string input yields "" so templates fail soft instead of crashing.
    """
    # Accept both Python 2 (basestring covers str and unicode) and Python 3
    # (str). The original bare `basestring` check raised NameError for every
    # call under Python 3.
    try:
        string_types = basestring  # noqa: F821 -- Python 2 only
    except NameError:
        string_types = str
    # if it's not a string the rest of this fn will bomb
    if not isinstance(url, string_types):
        return ""
    if url.startswith('http'):
        return url
    elif url.startswith("/"):
        return 'http://%s%s' % (current_domain(), url)
    else:
        return 'http://%s' % url
@register.inclusion_tag('social_tags/twitter.html')
def twitter_share(url=None):
    """Render the Twitter share snippet with a fully-qualified ``url``."""
    context = {'url': fully_qualified(url)}
    return context
@register.inclusion_tag('social_tags/facebook.html')
def facebook_share(url=None):
    """Render the Facebook share snippet with a fully-qualified ``url``."""
    context = {'url': fully_qualified(url)}
    return context
@register.inclusion_tag('social_tags/linkedin.html')
def linkedin_share(url=None):
    """Render the LinkedIn share snippet with a fully-qualified ``url``."""
    context = {'url': fully_qualified(url)}
    return context
@register.inclusion_tag('social_tags/email.html')<|fim▁hole|> url = fully_qualified(url)
return locals()
@register.inclusion_tag('social_tags/google.html')
def google_plus(url=None):
url = fully_qualified(url)
return locals()<|fim▁end|> | def email_share(url=None): |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.