text
stringlengths 2
1.04M
| meta
dict |
---|---|
<!doctype html>
<html lang="en">
<head>
<title>Leaflet Provider - layer bounds check page</title>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/leaflet/1.0.0-rc.1/leaflet.css" />
<link rel="stylesheet" href="vendor/leaflet.draw.css" />
<style>
html {
height: 100%;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
box-sizing: border-box;
vertical-align: baseline;
font: 14px/16px Arial, Helvetica, sans-serif;
}
.sidebar {
width: 30%;
}
#table th {
text-align: left;
}
#table td {
cursor: pointer;
}
#table tr:hover {
background-color: #eee;
}
#table tr.active {
background-color: #ffa;
}
#maps {
position: fixed;
width: 70%;
top: 0;
right: 0;
}
.map {
width: 100%; height: 800px;
}
</style>
<!--Fork Me on Github ribbon, we're using the awesome version from simonwhitaker available at https://github.com/simonwhitaker/github-fork-ribbon-css -->
<link rel="stylesheet" href="../css/gh-fork-ribbon.css" />
<!--[if IE]>
<link rel="stylesheet" href="../css/gh-fork-ribbon.ie.css" />
<![endif]-->
</head>
<body>
<div class="github-fork-ribbon-wrapper right">
<div class="github-fork-ribbon">
<a href="https://github.com/leaflet-extras/leaflet-providers">Fork me on GitHub</a>
</div>
</div>
<div class="sidebar">
<h1>Testing provider bounds</h1>
<p>
This page lists the providers with bounds.
Click on a provider name to view a map with the layer and a rectangle
depicting the bounds for this layer.
</p>
<p>
Sometimes you need to zoom in because the layer doesn't provide lower zoom levels.
</p>
<table id="table">
<tr><th>Provider</th></tr>
</table>
<button id="dump-bounds">Update bounds</button>
<div id="result"></div>
</div>
<div id="maps"></div>
<script src="//cdnjs.cloudflare.com/ajax/libs/leaflet/1.0.0-rc.1/leaflet-src.js"></script>
<script src="../leaflet-providers.js"></script>
<script src="vendor/leaflet.draw-src.js"></script>
<script src="shared.js"></script>
<script src="layer-bounds.js"></script>
</body>
</html>
| {
"content_hash": "fa28c172b1679fcce247c71740f316d7",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 154,
"avg_line_length": 26.137931034482758,
"alnum_prop": 0.6512752858399297,
"repo_name": "tomhughes/leaflet-providers",
"id": "50aa246e4d756aec3466479b140f4dedd966f76c",
"size": "2274",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "preview/test-bounds.html",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "5897"
},
{
"name": "HTML",
"bytes": "12274"
},
{
"name": "JavaScript",
"bytes": "31726"
}
],
"symlink_target": ""
} |
namespace Lucene
{
// Minimum number of characters allocated for a term buffer
// (see initTermBuffer / resizeTermBuffer).
const int32_t Token::MIN_BUFFER_SIZE = 10;

/// Construct an empty token: no term text, zero offsets, default type, no flags.
Token::Token()
{
    ConstructToken(0, 0, DEFAULT_TYPE(), 0);
}

/// Construct a token with the given start/end character offsets.
Token::Token(int32_t start, int32_t end)
{
    ConstructToken(start, end, DEFAULT_TYPE(), 0);
}

/// Construct a token with offsets and an explicit lexical type.
Token::Token(int32_t start, int32_t end, const String& type)
{
    ConstructToken(start, end, type, 0);
}

/// Construct a token with offsets and analysis flags.
Token::Token(int32_t start, int32_t end, int32_t flags)
{
    ConstructToken(start, end, DEFAULT_TYPE(), flags);
}

/// Construct a token with term text and offsets.
Token::Token(const String& text, int32_t start, int32_t end)
{
    ConstructToken(start, end, DEFAULT_TYPE(), 0);
    setTermBuffer(text);
}

/// Construct a token with term text, offsets and an explicit type.
Token::Token(const String& text, int32_t start, int32_t end, const String& type)
{
    ConstructToken(start, end, type, 0);
    setTermBuffer(text);
}

/// Construct a token with term text, offsets and flags.
Token::Token(const String& text, int32_t start, int32_t end, int32_t flags)
{
    ConstructToken(start, end, DEFAULT_TYPE(), flags);
    setTermBuffer(text);
}

/// Construct a token whose term text is copied from a slice
/// [termBufferOffset, termBufferOffset + termBufferLength) of an existing buffer.
Token::Token(CharArray startTermBuffer, int32_t termBufferOffset, int32_t termBufferLength, int32_t start, int32_t end)
{
    ConstructToken(start, end, DEFAULT_TYPE(), 0);
    setTermBuffer(startTermBuffer.get(), termBufferOffset, termBufferLength);
}

Token::~Token()
{
}
/// Shared constructor helper: initialize all scalar token state.
/// Does not allocate the term buffer; that happens lazily on first use.
void Token::ConstructToken(int32_t start, int32_t end, const String& type, int32_t flags)
{
    this->_termLength = 0;
    this->_startOffset = start;
    this->_endOffset = end;
    this->_type = type;
    this->flags = flags;
    this->positionIncrement = 1; // default: token directly follows the previous one
}
/// Default lexical type for tokens ("word").
/// Function-local static so the String is constructed on first use.
const String& Token::DEFAULT_TYPE()
{
    static String _DEFAULT_TYPE(L"word");
    return _DEFAULT_TYPE;
}
/// Set the position increment (how many positions this token advances past
/// the previous token; zero stacks it on the previous position).
/// Throws IllegalArgumentException for negative values.
void Token::setPositionIncrement(int32_t positionIncrement)
{
    if (positionIncrement < 0)
        boost::throw_exception(IllegalArgumentException(L"Increment must be zero or greater: " + StringUtils::toString(positionIncrement)));
    this->positionIncrement = positionIncrement;
}

int32_t Token::getPositionIncrement()
{
    return positionIncrement;
}
/// Return the term text as a String (copies out of the internal buffer).
String Token::term()
{
    initTermBuffer();
    return String(_termBuffer.get(), _termLength);
}

/// Copy `length` characters from `buffer` (starting at `offset`) into the term buffer.
void Token::setTermBuffer(const wchar_t* buffer, int32_t offset, int32_t length)
{
    growTermBuffer(length);
    MiscUtils::arrayCopy(buffer, offset, _termBuffer.get(), 0, length);
    _termLength = length;
}

/// Replace the term text with the contents of the given string.
void Token::setTermBuffer(const String& buffer)
{
    int32_t length = (int32_t)buffer.size();
    growTermBuffer(length);
    MiscUtils::arrayCopy(buffer.begin(), 0, _termBuffer.get(), 0, length);
    _termLength = length;
}

/// Replace the term text with the substring [offset, offset + length) of `buffer`.
void Token::setTermBuffer(const String& buffer, int32_t offset, int32_t length)
{
    BOOST_ASSERT(offset <= (int32_t)buffer.length());
    BOOST_ASSERT(offset + length <= (int32_t)buffer.length());
    growTermBuffer(length);
    MiscUtils::arrayCopy(buffer.begin(), offset, _termBuffer.get(), 0, length);
    _termLength = length;
}
/// Return the internal term buffer, allocating it lazily on first access.
CharArray Token::termBuffer()
{
    if (!_termBuffer)
        initTermBuffer();
    return _termBuffer;
}

/// Return the raw wchar_t array backing the term buffer (non-owning pointer).
wchar_t* Token::termBufferArray()
{
    if (!_termBuffer)
        initTermBuffer();
    return _termBuffer.get();
}

/// Ensure the term buffer can hold at least newSize characters and return it.
/// Existing contents are preserved when the buffer is grown.
CharArray Token::resizeTermBuffer(int32_t newSize)
{
    if (!_termBuffer)
    {
        // The buffer is always at least MIN_BUFFER_SIZE
        _termBuffer = CharArray::newInstance(MiscUtils::getNextSize(std::max(newSize, MIN_BUFFER_SIZE)));
    }
    else
    {
        if (_termBuffer.size() < newSize)
        {
            // Not big enough; create a new array with slight over allocation and preserve content
            _termBuffer.resize(MiscUtils::getNextSize(newSize));
        }
    }
    return _termBuffer;
}

/// Grow the term buffer to at least newSize characters (contents preserved).
void Token::growTermBuffer(int32_t newSize)
{
    _termBuffer = resizeTermBuffer(newSize);
}
/// Lazily allocate the term buffer with the minimum capacity.
void Token::initTermBuffer()
{
    if (!_termBuffer)
    {
        _termBuffer = CharArray::newInstance(MiscUtils::getNextSize(MIN_BUFFER_SIZE));
        _termLength = 0;
    }
}

/// Number of valid characters currently in the term buffer.
int32_t Token::termLength()
{
    if (!_termBuffer)
        initTermBuffer();
    return _termLength;
}

/// Declare how many characters of the term buffer are valid.
/// Throws IllegalArgumentException if length exceeds the current capacity;
/// call resizeTermBuffer first to grow the buffer.
void Token::setTermLength(int32_t length)
{
    initTermBuffer();
    if (length > _termBuffer.size())
    {
        boost::throw_exception(IllegalArgumentException(L"length " + StringUtils::toString(length) +
                                                        L" exceeds the size of the termBuffer (" +
                                                        StringUtils::toString(_termBuffer.size()) + L")"));
    }
    _termLength = length;
}
// Simple accessors for the token's character offsets, lexical type and flags.

int32_t Token::startOffset()
{
    return _startOffset;
}

void Token::setStartOffset(int32_t offset)
{
    this->_startOffset = offset;
}

int32_t Token::endOffset()
{
    return _endOffset;
}

void Token::setEndOffset(int32_t offset)
{
    this->_endOffset = offset;
}

/// Set both offsets in a single call.
void Token::setOffset(int32_t startOffset, int32_t endOffset)
{
    this->_startOffset = startOffset;
    this->_endOffset = endOffset;
}

String Token::type()
{
    return _type;
}

void Token::setType(const String& type)
{
    this->_type = type;
}

int32_t Token::getFlags()
{
    return flags;
}

void Token::setFlags(int32_t flags)
{
    this->flags = flags;
}
/// Payload attached to this token (may be a null pointer).
PayloadPtr Token::getPayload()
{
    return this->payload;
}

void Token::setPayload(PayloadPtr payload)
{
    this->payload = payload;
}
/// Render the token as "(term,start,end[,type=...][,posIncr=...])" for debugging.
String Token::toString()
{
    StringStream buffer;
    initTermBuffer();
    buffer << L"(";
    // NOTE(review): initTermBuffer() above guarantees _termBuffer is non-null,
    // so the "null" branch below looks unreachable — kept for parity with the
    // Java original; confirm before removing.
    if (!_termBuffer)
        buffer << L"null";
    else
        buffer << term() << L"," << _startOffset << L"," << _endOffset;
    if (_type != L"word")
        buffer << L",type=" << _type;
    if (positionIncrement != 1)
        buffer << L",posIncr=" << positionIncrement;
    buffer << L")";
    return buffer.str();
}
/// Reset all token state; the term buffer allocation is retained for re-use
/// and the term is logically emptied via _termLength = 0.
void Token::clear()
{
    payload.reset();
    // Leave termBuffer to allow re-use
    _termLength = 0;
    positionIncrement = 1;
    flags = 0;
    _startOffset = 0;
    _endOffset = 0;
    _type = DEFAULT_TYPE();
}
/// Deep-clone this token: term buffer and payload are copied, not shared.
LuceneObjectPtr Token::clone(LuceneObjectPtr other)
{
    LuceneObjectPtr clone = Attribute::clone(other ? other : newLucene<Token>());
    TokenPtr cloneToken(boost::dynamic_pointer_cast<Token>(clone));
    cloneToken->_termLength = _termLength;
    cloneToken->_startOffset = _startOffset;
    cloneToken->_endOffset = _endOffset;
    cloneToken->_type = _type;
    cloneToken->flags = flags;
    cloneToken->positionIncrement = positionIncrement;
    // Do a deep clone
    if (_termBuffer)
    {
        cloneToken->_termBuffer = CharArray::newInstance(_termBuffer.size());
        MiscUtils::arrayCopy(_termBuffer.get(), 0, cloneToken->_termBuffer.get(), 0, _termBuffer.size());
    }
    if (payload)
        cloneToken->payload = boost::dynamic_pointer_cast<Payload>(payload->clone());
    return cloneToken;
}

/// Clone, substituting new term text and offsets; type, flags, position
/// increment and a deep copy of the payload are carried over.
TokenPtr Token::clone(CharArray newTermBuffer, int32_t newTermOffset, int32_t newTermLength, int32_t newStartOffset, int32_t newEndOffset)
{
    TokenPtr clone(newLucene<Token>(newTermBuffer, newTermOffset, newTermLength, newStartOffset, newEndOffset));
    clone->positionIncrement = positionIncrement;
    clone->flags = flags;
    clone->_type = _type;
    if (payload)
        clone->payload = boost::dynamic_pointer_cast<Payload>(payload->clone());
    return clone;
}
/// Structural equality: identical object, or another Token with the same
/// term characters, offsets, flags, position increment, type and payload.
bool Token::equals(LuceneObjectPtr other)
{
    if (LuceneObject::equals(other))
        return true;
    TokenPtr otherToken(boost::dynamic_pointer_cast<Token>(other));
    if (!otherToken)
        return false;
    initTermBuffer();
    otherToken->initTermBuffer();
    // Compare the cheap scalar state first.
    bool sameMeta = (_termLength == otherToken->_termLength &&
                     _startOffset == otherToken->_startOffset &&
                     _endOffset == otherToken->_endOffset &&
                     flags == otherToken->flags &&
                     positionIncrement == otherToken->positionIncrement &&
                     _type == otherToken->_type &&
                     (payload ? payload->equals(otherToken->payload) : !otherToken->payload));
    if (!sameMeta)
        return false;
    // Finally compare the term characters themselves.
    for (int32_t i = 0; i < _termLength; ++i)
    {
        if (_termBuffer[i] != otherToken->_termBuffer[i])
            return false;
    }
    return true;
}
/// Hash over all token state (term chars, offsets, flags, increment, type
/// and payload), mirroring the fields compared by equals().
int32_t Token::hashCode()
{
    initTermBuffer();
    int32_t code = _termLength;
    code = code * 31 + _startOffset;
    code = code * 31 + _endOffset;
    code = code * 31 + flags;
    code = code * 31 + positionIncrement;
    code = code * 31 + StringUtils::hashCode(_type);
    code = payload ? code * 31 + payload->hashCode() : code;
    code = code * 31 + MiscUtils::hashCode(_termBuffer.get(), 0, _termLength);
    return code;
}

/// Like clear(), but leaves the term buffer AND _termLength untouched;
/// used by the reinit() overloads, which overwrite the term immediately after.
void Token::clearNoTermBuffer()
{
    payload.reset();
    positionIncrement = 1;
    flags = 0;
    _startOffset = 0;
    _endOffset = 0;
    _type = DEFAULT_TYPE();
}
/// Fully reset the token and reinitialise it from a buffer slice with an
/// explicit type.  Returns this token to allow call chaining.
/// Fix: dropped the redundant payload.reset() and positionIncrement = 1 —
/// clearNoTermBuffer() already performs both, and the sibling overloads
/// rely on that alone.
TokenPtr Token::reinit(CharArray newTermBuffer, int32_t newTermOffset, int32_t newTermLength, int32_t newStartOffset, int32_t newEndOffset, const String& newType)
{
    clearNoTermBuffer();
    setTermBuffer(newTermBuffer.get(), newTermOffset, newTermLength);
    _startOffset = newStartOffset;
    _endOffset = newEndOffset;
    _type = newType;
    return shared_from_this();
}
/// Reset the token and reinitialise it from a buffer slice, default type.
TokenPtr Token::reinit(CharArray newTermBuffer, int32_t newTermOffset, int32_t newTermLength, int32_t newStartOffset, int32_t newEndOffset)
{
    clearNoTermBuffer();
    setTermBuffer(newTermBuffer.get(), newTermOffset, newTermLength);
    _startOffset = newStartOffset;
    _endOffset = newEndOffset;
    _type = DEFAULT_TYPE();
    return shared_from_this();
}

/// Reset and reinitialise from a string with an explicit type.
TokenPtr Token::reinit(const String& newTerm, int32_t newStartOffset, int32_t newEndOffset, const String& newType)
{
    clearNoTermBuffer();
    setTermBuffer(newTerm);
    _startOffset = newStartOffset;
    _endOffset = newEndOffset;
    _type = newType;
    return shared_from_this();
}

/// Reset and reinitialise from a string slice with an explicit type.
TokenPtr Token::reinit(const String& newTerm, int32_t newTermOffset, int32_t newTermLength, int32_t newStartOffset, int32_t newEndOffset, const String& newType)
{
    clearNoTermBuffer();
    setTermBuffer(newTerm, newTermOffset, newTermLength);
    _startOffset = newStartOffset;
    _endOffset = newEndOffset;
    _type = newType;
    return shared_from_this();
}

/// Reset and reinitialise from a string, default type.
TokenPtr Token::reinit(const String& newTerm, int32_t newStartOffset, int32_t newEndOffset)
{
    clearNoTermBuffer();
    setTermBuffer(newTerm);
    _startOffset = newStartOffset;
    _endOffset = newEndOffset;
    _type = DEFAULT_TYPE();
    return shared_from_this();
}

/// Reset and reinitialise from a string slice, default type.
TokenPtr Token::reinit(const String& newTerm, int32_t newTermOffset, int32_t newTermLength, int32_t newStartOffset, int32_t newEndOffset)
{
    clearNoTermBuffer();
    setTermBuffer(newTerm, newTermOffset, newTermLength);
    _startOffset = newStartOffset;
    _endOffset = newEndOffset;
    _type = DEFAULT_TYPE();
    return shared_from_this();
}
/// Copy all state from prototype into this token.
/// NOTE: the payload pointer is shared, not deep-copied; copyTo()
/// compensates by cloning it afterwards.
void Token::reinit(TokenPtr prototype)
{
    prototype->initTermBuffer();
    setTermBuffer(prototype->_termBuffer.get(), 0, prototype->_termLength);
    positionIncrement = prototype->positionIncrement;
    flags = prototype->flags;
    _startOffset = prototype->_startOffset;
    _endOffset = prototype->_endOffset;
    _type = prototype->_type;
    payload = prototype->payload;
}

/// Copy state from prototype but substitute new term text (payload shared).
void Token::reinit(TokenPtr prototype, const String& newTerm)
{
    setTermBuffer(newTerm);
    positionIncrement = prototype->positionIncrement;
    flags = prototype->flags;
    _startOffset = prototype->_startOffset;
    _endOffset = prototype->_endOffset;
    _type = prototype->_type;
    payload = prototype->payload;
}

/// Copy state from prototype but substitute a slice of newTermBuffer
/// (payload shared).
void Token::reinit(TokenPtr prototype, CharArray newTermBuffer, int32_t offset, int32_t length)
{
    setTermBuffer(newTermBuffer.get(), offset, length);
    positionIncrement = prototype->positionIncrement;
    flags = prototype->flags;
    _startOffset = prototype->_startOffset;
    _endOffset = prototype->_endOffset;
    _type = prototype->_type;
    payload = prototype->payload;
}
/// Copy this token's state into target.
/// If target is itself a Token, everything is copied via reinit() and the
/// payload is then deep-cloned.  Otherwise each attribute interface the
/// target implements is populated individually.
void Token::copyTo(AttributePtr target)
{
    TokenPtr targetToken(boost::dynamic_pointer_cast<Token>(target));
    if (targetToken)
    {
        targetToken->reinit(shared_from_this());
        // reinit shares the payload, so clone it
        if (payload)
            targetToken->payload = boost::dynamic_pointer_cast<Payload>(payload->clone());
    }
    else
    {
        initTermBuffer();
        TermAttributePtr targetTermAttribute(boost::dynamic_pointer_cast<TermAttribute>(target));
        if (targetTermAttribute)
            targetTermAttribute->setTermBuffer(_termBuffer.get(), 0, _termLength);
        OffsetAttributePtr targetOffsetAttribute(boost::dynamic_pointer_cast<OffsetAttribute>(target));
        if (targetOffsetAttribute)
            targetOffsetAttribute->setOffset(_startOffset, _endOffset);
        PositionIncrementAttributePtr targetPositionIncrementAttribute(boost::dynamic_pointer_cast<PositionIncrementAttribute>(target));
        if (targetPositionIncrementAttribute)
            targetPositionIncrementAttribute->setPositionIncrement(positionIncrement);
        // Payload is deep-cloned here (or cleared when this token has none).
        PayloadAttributePtr targetPayloadAttribute(boost::dynamic_pointer_cast<PayloadAttribute>(target));
        if (targetPayloadAttribute)
            targetPayloadAttribute->setPayload(payload ? boost::dynamic_pointer_cast<Payload>(payload->clone()) : PayloadPtr());
        FlagsAttributePtr targetFlagsAttribute(boost::dynamic_pointer_cast<FlagsAttribute>(target));
        if (targetFlagsAttribute)
            targetFlagsAttribute->setFlags(flags);
        TypeAttributePtr targetTypeAttribute(boost::dynamic_pointer_cast<TypeAttribute>(target));
        if (targetTypeAttribute)
            targetTypeAttribute->setType(_type);
    }
}
/// Lazily-created singleton attribute factory producing Token instances.
/// NOTE(review): the lazy initialisation is not synchronised — confirm this
/// is only reached from a single thread during start-up.
AttributeFactoryPtr Token::TOKEN_ATTRIBUTE_FACTORY()
{
    static AttributeFactoryPtr _TOKEN_ATTRIBUTE_FACTORY;
    if (!_TOKEN_ATTRIBUTE_FACTORY)
    {
        _TOKEN_ATTRIBUTE_FACTORY = newLucene<TokenAttributeFactory>(AttributeFactory::DEFAULT_ATTRIBUTE_FACTORY());
        CycleCheck::addStatic(_TOKEN_ATTRIBUTE_FACTORY);
    }
    return _TOKEN_ATTRIBUTE_FACTORY;
}
/// Attribute factory that wraps a delegate factory.
TokenAttributeFactory::TokenAttributeFactory(AttributeFactoryPtr delegate)
{
    this->delegate = delegate;
}

TokenAttributeFactory::~TokenAttributeFactory()
{
}

/// Always return a new Token, regardless of the requested className.
AttributePtr TokenAttributeFactory::createAttributeInstance(const String& className)
{
    return newLucene<Token>();
}

/// Two factories are equal when they are the same object or their delegates
/// compare equal.
bool TokenAttributeFactory::equals(LuceneObjectPtr other)
{
    if (AttributeFactory::equals(other))
        return true;
    TokenAttributeFactoryPtr otherTokenAttributeFactory(boost::dynamic_pointer_cast<TokenAttributeFactory>(other));
    if (otherTokenAttributeFactory)
        return this->delegate->equals(otherTokenAttributeFactory->delegate);
    return false;
}

int32_t TokenAttributeFactory::hashCode()
{
    // Mix the delegate's hash with a fixed salt to distinguish factory types.
    return (delegate->hashCode() ^ 0x0a45aa31);
}
}
| {
"content_hash": "fb328a410573fa91992b28f8007aac71",
"timestamp": "",
"source": "github",
"line_count": 519,
"max_line_length": 166,
"avg_line_length": 32.27938342967245,
"alnum_prop": 0.5889691398555482,
"repo_name": "ustramooner/LucenePlusPlus",
"id": "46404ae30591c203a509230a1d785b5b6c5f2c9c",
"size": "17385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/core/analysis/Token.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2124374"
},
{
"name": "C++",
"bytes": "6187446"
},
{
"name": "Python",
"bytes": "159441"
}
],
"symlink_target": ""
} |
// Utility wrapper exposing a single static ping operation.
class BluePing {
public:
    // Ping the given target (host name or address).
    // NOTE(review): return-code semantics are defined in the implementation
    // file, which is not visible here — confirm before relying on them.
    static int ping(std::string target);
};
#endif
| {
"content_hash": "3111fd5e381b1997502bd2a658848822",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 38,
"avg_line_length": 8.777777777777779,
"alnum_prop": 0.6582278481012658,
"repo_name": "marc-despland/alarm",
"id": "14e7ed24d42bf63a510fef4ab8f0dab629a2caec",
"size": "159",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "include/blueping.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "2083"
},
{
"name": "C",
"bytes": "7909"
},
{
"name": "C++",
"bytes": "114521"
},
{
"name": "Makefile",
"bytes": "1797"
},
{
"name": "Shell",
"bytes": "545"
}
],
"symlink_target": ""
} |
""" unit tests for the model and descriptor packager """
from rdkit import RDConfig
from rdkit.ML.Data import DataUtils
import unittest,os,sys
import io
from rdkit.six.moves import cPickle
from rdkit.ML.ModelPackage import Packager
from rdkit import Chem
import random
def feq(a, b, tol=1e-4):
    """Approximate float equality: True when |a - b| <= tol."""
    delta = abs(a - b)
    return delta <= tol
class TestCase(unittest.TestCase):
def setUp(self):
    # Location of the pickled calculator/model/package fixtures used below.
    self.dataDir =os.path.join(RDConfig.RDCodeDir,'ML/ModelPackage/test_data')
    # (SMILES, expected class, expected confidence) triples screened by _verify.
    self.testD = [
        # NOTE: the confidences here can be twitchy due to changes in descriptors:
        ('Fc1ccc(NC(=O)c2cccnc2Oc3cccc(c3)C(F)(F)F)c(F)c1',0,0.8 ),
        #(r'CN/1(=C\C=C(/C=C1)\C\2=C\C=N(C)(Cl)\C=C2)Cl',0,0.70),
        (r'NS(=O)(=O)c1cc(ccc1Cl)C2(O)NC(=O)c3ccccc32',1,0.70),
        ]
def _verify(self,pkg,testD):
    """Classify each (smiles, prediction, confidence) entry and assert both
    the predicted class and the confidence match."""
    for smi,pred,conf in testD:
        try:
            m = Chem.MolFromSmiles(smi)
        except:
            # NOTE(review): MolFromSmiles normally returns None on failure
            # rather than raising, so this bare except likely never fires —
            # confirm before relying on it.
            sys.stderr.write('SMILES: %s failed\n'%(smi))
        else:
            p,c = pkg.Classify(m)
            assert p==pred,'bad prediction (%d) for smiles %s'%(p,smi)
            assert feq(c,conf),'bad confidence (%f) for smiles %s'%(c,smi)

def _verify2(self,pkg,testD):
    """Like _verify, but classifies each molecule twice to check that a
    repeated call on the same package yields identical results."""
    for smi,pred,conf in testD:
        try:
            m = Chem.MolFromSmiles(smi)
        except:
            # See note in _verify about this bare except.
            sys.stderr.write('SMILES: %s failed\n'%(smi))
        else:
            p,c = pkg.Classify(m)
            assert p==pred,'bad prediction (%d) for smiles %s'%(p,smi)
            assert feq(c,conf),'bad confidence (%f) for smiles %s'%(c,smi)
            # Second classification must be stable.
            p,c = pkg.Classify(m)
            assert p==pred,'bad prediction (%d) for smiles %s'%(p,smi)
            assert feq(c,conf),'bad confidence (%f) for smiles %s'%(c,smi)
def testBuild(self):
    """ tests building and screening a packager """
    # The .dsc fixture may carry DOS line endings; normalize before
    # unpickling the descriptor calculator from an in-memory buffer.
    with open(os.path.join(self.dataDir,'Jan9_build3_calc.dsc'),'r') as calcTF:
        buf = calcTF.read().replace('\r\n', '\n').encode('utf-8')
        calcTF.close()  # NOTE(review): redundant inside `with`
    with io.BytesIO(buf) as calcF:
        calc = cPickle.load(calcF)
    with open(os.path.join(self.dataDir,'Jan9_build3_model.pkl'),'rb') as modelF:
        model = cPickle.load(modelF)
    # Assemble a package from the calculator + model, then screen it.
    pkg = Packager.ModelPackage(descCalc=calc,model=model)
    self._verify(pkg,self.testD)

def testLoad(self):
    """ tests loading and screening a packager """
    with open(os.path.join(self.dataDir,'Jan9_build3_pkg.pkl'),'r') as pkgTF:
        buf = pkgTF.read().replace('\r\n', '\n').encode('utf-8')
        pkgTF.close()  # NOTE(review): redundant inside `with`
    with io.BytesIO(buf) as pkgF:
        pkg = cPickle.load(pkgF)
    self._verify(pkg,self.testD)

def testLoad2(self):
    """ tests loading and screening a packager 2 """
    with open(os.path.join(self.dataDir,'Jan9_build3_pkg.pkl'),'r') as pkgTF:
        buf = pkgTF.read().replace('\r\n', '\n').encode('utf-8')
        pkgTF.close()  # NOTE(review): redundant inside `with`
    with io.BytesIO(buf) as pkgF:
        pkg = cPickle.load(pkgF)
    # Same as testLoad but each molecule is classified twice (_verify2).
    self._verify2(pkg,self.testD)
def testPerm1(self):
    """ tests the descriptor remapping stuff in a packager """
    from rdkit.Chem import Descriptors
    with open(os.path.join(self.dataDir,'Jan9_build3_pkg.pkl'),'r') as pkgTF:
        buf = pkgTF.read().replace('\r\n', '\n').encode('utf-8')
        pkgTF.close()  # NOTE(review): redundant inside `with`
    with io.BytesIO(buf) as pkgF:
        pkg = cPickle.load(pkgF)
    calc = pkg.GetCalculator()
    names = calc.GetDescriptorNames()
    ref = {}
    # Seed the RNG so the shuffles below are reproducible.
    DataUtils.InitRandomNumbers((23,42))
    for smi,pred,conf in self.testD:
        # Compute reference values for every descriptor of this molecule;
        # fall back to a sentinel for names that are not functions on the
        # Descriptors module.
        for desc in names:
            fn = getattr(Descriptors,desc,lambda x:777)
            m = Chem.MolFromSmiles(smi)
            ref[desc] = fn(m)
        # Descriptor values must not depend on evaluation order.
        for i in range(5):
            perm = list(names)
            random.shuffle(perm,random=random.random)
            m = Chem.MolFromSmiles(smi)
            for desc in perm:
                fn = getattr(Descriptors,desc,lambda x:777)
                val = fn(m)
                assert feq(val,ref[desc],1e-4),'%s: %s(%s): %f!=%f'%(str(perm),
                                                                     smi,
                                                                     desc,
                                                                     val,
                                                                     ref[desc])

def testPerm2(self):
    """ tests the descriptor remapping stuff in a packager """
    with open(os.path.join(self.dataDir,'Jan9_build3_pkg.pkl'),'r') as pkgTF:
        buf = pkgTF.read().replace('\r\n', '\n').encode('utf-8')
        pkgTF.close()  # NOTE(review): redundant inside `with`
    with io.BytesIO(buf) as pkgF:
        pkg = cPickle.load(pkgF)
    calc = pkg.GetCalculator()
    names = calc.GetDescriptorNames()
    DataUtils.InitRandomNumbers((23,42))
    # Shuffle the descriptor order inside the calculator and re-screen; the
    # package must still classify correctly after Init() re-syncs it.
    perm = list(names)
    random.shuffle(perm,random=random.random)
    calc.simpleList = perm
    calc.descriptorNames = perm
    pkg.Init()
    self._verify(pkg,self.testD)
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "a862268180ef3e9f03e5a1801a2dba3e",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 81,
"avg_line_length": 35.62406015037594,
"alnum_prop": 0.5844238075137189,
"repo_name": "strets123/rdkit",
"id": "b4dce747630637d9b7597360f47705409dc59925",
"size": "4877",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rdkit/ML/ModelPackage/UnitTestPackage.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "385"
},
{
"name": "C",
"bytes": "203078"
},
{
"name": "C#",
"bytes": "6745"
},
{
"name": "C++",
"bytes": "7068170"
},
{
"name": "CMake",
"bytes": "584702"
},
{
"name": "CSS",
"bytes": "4742"
},
{
"name": "FORTRAN",
"bytes": "7661"
},
{
"name": "HTML",
"bytes": "65468"
},
{
"name": "Java",
"bytes": "248620"
},
{
"name": "JavaScript",
"bytes": "11595"
},
{
"name": "LLVM",
"bytes": "27271"
},
{
"name": "Lex",
"bytes": "4508"
},
{
"name": "Makefile",
"bytes": "15431"
},
{
"name": "Objective-C",
"bytes": "299"
},
{
"name": "Python",
"bytes": "3033212"
},
{
"name": "QMake",
"bytes": "389"
},
{
"name": "SMT",
"bytes": "3010"
},
{
"name": "Shell",
"bytes": "8899"
},
{
"name": "Smarty",
"bytes": "5864"
},
{
"name": "Yacc",
"bytes": "49170"
}
],
"symlink_target": ""
} |
package com.gaojin.test;
/** Minimal scratch entry point: prints two blank lines and exits. */
public class Test {
    public static void main(String[] args) {
        System.out.println();
        System.out.println();
    }
}
class MyThread implements Runnable {
public void run() {
}
} | {
"content_hash": "d5b7a151a44e6274add566bbd787ddcc",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 44,
"avg_line_length": 16.266666666666666,
"alnum_prop": 0.5778688524590164,
"repo_name": "gaojinjake/util",
"id": "a277346cfb240a2772124cda7429aaf12af79d2c",
"size": "244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/gaojin/test/Test.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "137162"
}
],
"symlink_target": ""
} |
'use strict';
const express = require('express');
const router = express.Router();
const bodyparser = require('../bodyparser');
const ensureAuth = require('../ensureAuth');
const token = require('../token.js');
const User = require('../models/user');
module.exports = router
.post('/signup', bodyparser, (req, res) => {
const {email, password, name} = req.body;
delete req.body.password;
//TODO: Add regex for email validity
if(!email || !password || !name) {
// NOTE: Sending back an error state seems to prevent sending a
// useful error message. So don't send an error state.
return res.send({message: 'All fields are required.'});
}
User.findOne({email})
.then( existing => {
if(existing) return res.status(500).json({message: 'That email is already in use.'});
const user = new User(req.body);
user.generateHash(password);
user.role.push('user');
console.log('new User:',user);
return user.save()
.then( user => {
return token.sign(user);
})
.then( token => {
res.json({token:token.token, name:user.name, email:user.email});
});
})
.catch( err => {
console.log('error on user signup');
console.log(err);
next(err);
});
})
.post('/signin', bodyparser, (req, res) => {
const {email, password} = req.body;
delete req.body;
User.findOne({email})
.then( user => {
if(!user || !user.compareHash(password)) {
// NOTE: Sending back an error state seems to prevent sending a
// useful error message. So don't send an error state.
return res.json({error:'Invalid email or password.'});
}
return token.sign(user)
.then( token => {
res.json({token:token.token, name:user.name, email:user.email});
});
})
.catch( err => {
console.log('error on user signin');
console.log(err);
next(err);
});
})
.get('/verify', ensureAuth, (req, res) =>{
res.status(200).send({verified: true});
})
;
| {
"content_hash": "ab852c5f563526b57467feae76fd319f",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 91,
"avg_line_length": 27.026666666666667,
"alnum_prop": 0.592501233349778,
"repo_name": "InsertCleverNameHere/cleverjobserver",
"id": "0aafaae4169980e5e7ad5b4568889b72f98c17ab",
"size": "2027",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/routes/auth.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1642"
},
{
"name": "HTML",
"bytes": "612"
},
{
"name": "JavaScript",
"bytes": "54772"
}
],
"symlink_target": ""
} |
-- Read all of stdin and print the number of input lines, followed by a newline.
main = interact wordCount
    where wordCount input = ((++) . show . length . lines) input "\n"
| {
"content_hash": "58e2b552386285b5d08d1426a247e261",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 68,
"avg_line_length": 47.5,
"alnum_prop": 0.6421052631578947,
"repo_name": "adz/real_world_haskell",
"id": "95e570c9b29dea8a08538cf08d8f67cc1ca2fa4d",
"size": "159",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ch01/line_count2.hs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Haskell",
"bytes": "46076"
},
{
"name": "R",
"bytes": "21"
}
],
"symlink_target": ""
} |
import mock
from neutron.api import extensions as neutron_extensions
from neutron.api.v2 import attributes
from neutron import context
import neutron.db.api as db
from neutron.extensions import portbindings
from neutron import manager
from neutron.plugins.cisco.common import cisco_constants as c_const
from neutron.plugins.cisco.common import cisco_exceptions as c_exc
from neutron.plugins.cisco.db import n1kv_db_v2
from neutron.plugins.cisco.db import n1kv_models_v2
from neutron.plugins.cisco.db import network_db_v2 as cdb
from neutron.plugins.cisco import extensions
from neutron.plugins.cisco.extensions import n1kv
from neutron.plugins.cisco.extensions import network_profile
from neutron.plugins.cisco.n1kv import n1kv_client
from neutron.plugins.cisco.n1kv import n1kv_neutron_plugin
from neutron.tests.unit import _test_extension_portbindings as test_bindings
from neutron.tests.unit.cisco.n1kv import fake_client
from neutron.tests.unit import test_api_v2
from neutron.tests.unit import test_db_plugin as test_plugin
from neutron.tests.unit import test_l3_plugin
from neutron.tests.unit import test_l3_schedulers
# Fixture values for the fake VLAN provider network used throughout the tests.
PHYS_NET = 'some-phys-net'
VLAN_MIN = 100
VLAN_MAX = 110
class FakeResponse(object):
    """
    Stand-in for a ``requests`` response object, returned by the mocked HTTP
    layer instead of a real response.  Initialize it with the status code,
    payload and headers you wish the fake VSM to return.
    """
    def __init__(self, status, response_text, headers):
        self.status_code = status
        self.buffer = response_text
        self.headers = headers

    def json(self, *args, **kwargs):
        # Mimic requests.Response.json() by returning the stored payload;
        # any arguments are accepted and ignored.
        return self.buffer
def _fake_setup_vsm(self):
    """Fake establish Communication with Cisco Nexus1000V VSM."""
    # Pretend the VSM agent is reachable and load the canned policy profiles.
    self.agent_vsm = True
    self._populate_policy_profiles()
class NetworkProfileTestExtensionManager(object):
    """Minimal extension manager exposing only the network_profile resource."""

    def get_resources(self):
        # Add the resources to the global attribute map
        # This is done here as the setup process won't
        # initialize the main API router which extends
        # the global attribute map
        attributes.RESOURCE_ATTRIBUTE_MAP.update(
            network_profile.RESOURCE_ATTRIBUTE_MAP)
        return network_profile.Network_profile.get_resources()

    def get_actions(self):
        # This extension defines no additional actions.
        return []

    def get_request_extensions(self):
        # This extension defines no request extensions.
        return []
class N1kvPluginTestCase(test_plugin.NeutronDbPluginV2TestCase):

    # Fully-qualified path of the plugin under test.
    _plugin_name = ('neutron.plugins.cisco.n1kv.'
                    'n1kv_neutron_plugin.N1kvNeutronPluginV2')

    tenant_id = "some_tenant"

    # Canned VSM response defaults; subclasses may override these BEFORE
    # setUp() runs (see the setUp docstring).
    DEFAULT_RESP_BODY = ""
    DEFAULT_RESP_CODE = 200
    DEFAULT_CONTENT_TYPE = ""
    fmt = "json"
def _make_test_policy_profile(self, name='service_profile'):
    """
    Create a policy profile record for testing purpose.

    :param name: string representing the name of the policy profile to
                 create. Default argument value chosen to correspond to the
                 default name specified in config.py file.
    """
    # Policy profiles normally come from the VSM; tests insert one directly.
    uuid = test_api_v2._uuid()
    profile = {'id': uuid,
               'name': name}
    return n1kv_db_v2.create_policy_profile(profile)
def _make_test_profile(self,
                       name='default_network_profile',
                       segment_type=c_const.NETWORK_TYPE_VLAN,
                       segment_range='386-400'):
    """
    Create a profile record for testing purposes.

    :param name: string representing the name of the network profile to
                 create. Default argument value chosen to correspond to the
                 default name specified in config.py file.
    :param segment_type: string representing the type of network segment.
    :param segment_range: string representing the segment range for network
                          profile.
    """
    db_session = db.get_session()
    profile = {'name': name,
               'segment_type': segment_type,
               'tenant_id': self.tenant_id,
               'segment_range': segment_range}
    # NOTE(review): net_p is unbound for segment types other than OVERLAY
    # and VLAN — callers currently only pass these two.
    if segment_type == c_const.NETWORK_TYPE_OVERLAY:
        # Overlay (VXLAN) profiles also need a sub-type and multicast range;
        # the VXLAN allocation table is synced afterwards.
        profile['sub_type'] = 'unicast'
        profile['multicast_ip_range'] = '0.0.0.0'
        net_p = n1kv_db_v2.create_network_profile(db_session, profile)
        n1kv_db_v2.sync_vxlan_allocations(db_session, net_p)
    elif segment_type == c_const.NETWORK_TYPE_VLAN:
        # VLAN profiles are bound to a physical network; the VLAN allocation
        # table is synced afterwards.
        profile['physical_network'] = PHYS_NET
        net_p = n1kv_db_v2.create_network_profile(db_session, profile)
        n1kv_db_v2.sync_vlan_allocations(db_session, net_p)
    return net_p
def setUp(self):
    """
    Setup method for n1kv plugin tests.

    First step is to define an acceptable response from the VSM to
    our requests. This needs to be done BEFORE the setUp() function
    of the super-class is called.

    This default here works for many cases. If you need something
    extra, please define your own setUp() function in your test class,
    and set your DEFAULT_RESPONSE value also BEFORE calling the
    setUp() of the super-function (this one here). If you have set
    a value already, it will not be overwritten by this code.
    """
    if not self.DEFAULT_RESP_BODY:
        # Canned policy-profile listing returned by the fake VSM.
        self.DEFAULT_RESP_BODY = {
            "icehouse-pp": {"properties": {"name": "icehouse-pp",
                                           "id": "some-uuid-1"}},
            "havana_pp": {"properties": {"name": "havana_pp",
                                         "id": "some-uuid-2"}},
            "dhcp_pp": {"properties": {"name": "dhcp_pp",
                                       "id": "some-uuid-3"}},
        }
    # Creating a mock HTTP connection object for requests lib. The N1KV
    # client interacts with the VSM via HTTP. Since we don't have a VSM
    # running in the unit tests, we need to 'fake' it by patching the HTTP
    # library itself. We install a patch for a fake HTTP connection class.
    # Using __name__ to avoid having to enter the full module path.
    http_patcher = mock.patch(n1kv_client.requests.__name__ + ".request")
    FakeHttpConnection = http_patcher.start()
    # Now define the return values for a few functions that may be called
    # on any instance of the fake HTTP connection class.
    self.resp_headers = {"content-type": "application/json"}
    FakeHttpConnection.return_value = (FakeResponse(
        self.DEFAULT_RESP_CODE,
        self.DEFAULT_RESP_BODY,
        self.resp_headers))

    # Patch some internal functions in a few other parts of the system.
    # These help us move along, without having to mock up even more systems
    # in the background.

    # Return a dummy VSM IP address
    mock.patch(n1kv_client.__name__ + ".Client._get_vsm_hosts",
               new=lambda self: "127.0.0.1").start()
    # Return dummy user profiles
    mock.patch(cdb.__name__ + ".get_credential_name",
               new=lambda self: {"user_name": "admin",
                                 "password": "admin_password"}).start()
    # Skip the real VSM handshake entirely.
    n1kv_neutron_plugin.N1kvNeutronPluginV2._setup_vsm = _fake_setup_vsm

    neutron_extensions.append_api_extensions_path(extensions.__path__)
    ext_mgr = NetworkProfileTestExtensionManager()
    # Save the original RESOURCE_ATTRIBUTE_MAP
    self.saved_attr_map = {}
    for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.items():
        self.saved_attr_map[resource] = attrs.copy()
    # Update the RESOURCE_ATTRIBUTE_MAP with n1kv specific extended attrs.
    attributes.RESOURCE_ATTRIBUTE_MAP["networks"].update(
        n1kv.EXTENDED_ATTRIBUTES_2_0["networks"])
    attributes.RESOURCE_ATTRIBUTE_MAP["ports"].update(
        n1kv.EXTENDED_ATTRIBUTES_2_0["ports"])
    self.addCleanup(self.restore_resource_attribute_map)
    self.addCleanup(db.clear_db)
    super(N1kvPluginTestCase, self).setUp(self._plugin_name,
                                          ext_mgr=ext_mgr)
    # Create some of the database entries that we require.
    self._make_test_profile()
    self._make_test_policy_profile()
    def restore_resource_attribute_map(self):
        # Restore the original RESOURCE_ATTRIBUTE_MAP saved in setUp(),
        # undoing the n1kv-specific extended attributes added there so
        # later test cases see a pristine attribute map.
        attributes.RESOURCE_ATTRIBUTE_MAP = self.saved_attr_map
def test_plugin(self):
self._make_network('json',
'some_net',
True,
tenant_id=self.tenant_id,
set_context=True)
req = self.new_list_request('networks', params="fields=tenant_id")
req.environ['neutron.context'] = context.Context('', self.tenant_id)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
body = self.deserialize('json', res)
self.assertIn('tenant_id', body['networks'][0])
class TestN1kvNetworkProfiles(N1kvPluginTestCase):
    """API-level tests for the n1kv network_profiles extension resource."""

    def _prepare_net_profile_data(self,
                                  segment_type,
                                  sub_type=None,
                                  segment_range=None,
                                  mcast_ip_range=None):
        """Build a network_profile request body for the given segment type.

        Optional fields left unset are filled with defaults appropriate
        to the segment type (VLAN, overlay or trunk).
        """
        netp = {'name': 'netp1',
                'segment_type': segment_type,
                'tenant_id': self.tenant_id}
        if segment_type == c_const.NETWORK_TYPE_VLAN:
            netp['segment_range'] = segment_range or '100-110'
            netp['physical_network'] = PHYS_NET
        elif segment_type == c_const.NETWORK_TYPE_OVERLAY:
            netp['segment_range'] = segment_range or '10000-10010'
            netp['sub_type'] = sub_type or 'enhanced'
            netp['multicast_ip_range'] = (mcast_ip_range or
                                          "224.1.1.1-224.1.1.10")
        elif segment_type == c_const.NETWORK_TYPE_TRUNK:
            netp['sub_type'] = c_const.NETWORK_TYPE_VLAN
        data = {"network_profile": netp}
        return data

    def test_create_network_profile_vlan(self):
        """Create a VLAN network profile; expect 201."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_VLAN)
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)

    def test_create_network_profile_overlay(self):
        """Create an overlay network profile; expect 201."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY)
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)

    def test_create_network_profile_trunk(self):
        """Create a trunk network profile; expect 201."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_TRUNK)
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)

    def test_create_network_profile_trunk_missing_subtype(self):
        """Trunk profile without a sub_type is rejected with 400."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_TRUNK)
        data['network_profile'].pop('sub_type')
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_create_network_profile_overlay_unreasonable_seg_range(self):
        """Overlay profile with an oversized segment range is rejected."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY,
                                              segment_range='10000-1000000001')
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_update_network_profile_plugin(self):
        """Renaming a network profile succeeds with 200."""
        net_p_dict = (self.
                      _prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY))
        net_p_req = self.new_create_request('network_profiles', net_p_dict)
        net_p = self.deserialize(self.fmt,
                                 net_p_req.get_response(self.ext_api))
        data = {'network_profile': {'name': 'netp2'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['network_profile']['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 200)

    def test_update_network_profile_physical_network_fail(self):
        """Updating physical_network after creation is rejected (400)."""
        net_p = self._make_test_profile(name='netp1')
        data = {'network_profile': {'physical_network': PHYS_NET}}
        net_p_req = self.new_update_request('network_profiles',
                                            data,
                                            net_p['id'])
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_update_network_profile_segment_type_fail(self):
        """Updating segment_type after creation is rejected (400)."""
        net_p = self._make_test_profile(name='netp1')
        data = {'network_profile': {
                'segment_type': c_const.NETWORK_TYPE_OVERLAY}}
        net_p_req = self.new_update_request('network_profiles',
                                            data,
                                            net_p['id'])
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_update_network_profile_sub_type_fail(self):
        """Updating sub_type after creation is rejected (400)."""
        net_p_dict = (self.
                      _prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY))
        net_p_req = self.new_create_request('network_profiles', net_p_dict)
        net_p = self.deserialize(self.fmt,
                                 net_p_req.get_response(self.ext_api))
        data = {'network_profile': {'sub_type': c_const.NETWORK_TYPE_VLAN}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['network_profile']['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 400)

    def test_update_network_profiles_with_networks_fail(self):
        """segment_range update is rejected (409) once a network uses it."""
        net_p = self._make_test_profile(name='netp1')
        data = {'network_profile': {'segment_range': '200-210'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 200)
        net_data = {'network': {'name': 'net1',
                                n1kv.PROFILE_ID: net_p['id'],
                                'tenant_id': 'some_tenant'}}
        network_req = self.new_create_request('networks', net_data)
        network_res = network_req.get_response(self.api)
        self.assertEqual(network_res.status_int, 201)
        data = {'network_profile': {'segment_range': '300-310'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 409)

    def test_create_overlay_network_profile_invalid_multicast_fail(self):
        """A single multicast address (no range) is rejected (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY,
                                              sub_type=(c_const.
                                              NETWORK_SUBTYPE_NATIVE_VXLAN),
                                              mcast_ip_range='1.1.1.1')
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_create_overlay_network_profile_no_multicast_fail(self):
        """An empty multicast range for native VXLAN is rejected (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY,
                                              sub_type=(c_const.
                                              NETWORK_SUBTYPE_NATIVE_VXLAN))
        data['network_profile']['multicast_ip_range'] = ''
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_create_overlay_network_profile_wrong_split_multicast_fail(self):
        """A malformed multicast range (missing '-') is rejected (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY,
                                              sub_type=(c_const.
                                              NETWORK_SUBTYPE_NATIVE_VXLAN),
                                              mcast_ip_range=
                                              '224.1.1.1.224.1.1.3')
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_create_overlay_network_profile_invalid_minip_multicast_fail(self):
        """A non-multicast lower bound in the range is rejected (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY,
                                              sub_type=(c_const.
                                              NETWORK_SUBTYPE_NATIVE_VXLAN),
                                              mcast_ip_range=
                                              '10.0.0.1-224.1.1.3')
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_create_overlay_network_profile_invalid_maxip_multicast_fail(self):
        """A non-multicast upper bound in the range is rejected (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY,
                                              sub_type=(c_const.
                                              NETWORK_SUBTYPE_NATIVE_VXLAN),
                                              mcast_ip_range=
                                              '224.1.1.1-20.0.0.1')
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_create_overlay_network_profile_correct_multicast_pass(self):
        """The default multicast range is accepted (201)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY)
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)

    def test_update_overlay_network_profile_correct_multicast_pass(self):
        """A valid multicast range can be updated on an overlay profile."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY)
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)
        net_p = self.deserialize(self.fmt, res)
        data = {'network_profile': {'multicast_ip_range':
                                    '224.0.1.0-224.0.1.100'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['network_profile']['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 200)

    def test_create_overlay_network_profile_reservedip_multicast_fail(self):
        """A range overlapping reserved 224.0.0.x space is rejected (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY,
                                              sub_type=(c_const.
                                              NETWORK_SUBTYPE_NATIVE_VXLAN),
                                              mcast_ip_range=
                                              '224.0.0.100-224.0.1.100')
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)

    def test_update_overlay_network_profile_reservedip_multicast_fail(self):
        """Updating to a reserved 224.0.0.x range is rejected (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_OVERLAY)
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)
        net_p = self.deserialize(self.fmt, res)
        data = {'network_profile': {'multicast_ip_range':
                                    '224.0.0.11-224.0.0.111'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['network_profile']['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 400)

    def test_update_vlan_network_profile_multicast_fail(self):
        """multicast_ip_range is invalid for VLAN profiles (400)."""
        net_p = self._make_test_profile(name='netp1')
        data = {'network_profile': {'multicast_ip_range':
                                    '224.0.1.0-224.0.1.100'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 400)

    def test_update_trunk_network_profile_segment_range_fail(self):
        """segment_range is invalid for trunk profiles (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_TRUNK)
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)
        net_p = self.deserialize(self.fmt, res)
        data = {'network_profile': {'segment_range':
                                    '100-200'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['network_profile']['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 400)

    def test_update_trunk_network_profile_multicast_fail(self):
        """multicast_ip_range is invalid for trunk profiles (400)."""
        data = self._prepare_net_profile_data(c_const.NETWORK_TYPE_TRUNK)
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)
        net_p = self.deserialize(self.fmt, res)
        data = {'network_profile': {'multicast_ip_range':
                                    '224.0.1.0-224.0.1.100'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['network_profile']['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 400)

    def test_create_network_profile_populate_vlan_segment_pool(self):
        """Profile creation seeds vlan allocations for its whole range."""
        db_session = db.get_session()
        net_p_dict = self._prepare_net_profile_data(c_const.NETWORK_TYPE_VLAN)
        net_p_req = self.new_create_request('network_profiles', net_p_dict)
        self.deserialize(self.fmt,
                         net_p_req.get_response(self.ext_api))
        # Every vlan inside the profile range has an (unallocated) row...
        for vlan in range(VLAN_MIN, VLAN_MAX + 1):
            self.assertIsNotNone(n1kv_db_v2.get_vlan_allocation(db_session,
                                                                PHYS_NET,
                                                                vlan))
            self.assertFalse(n1kv_db_v2.get_vlan_allocation(db_session,
                                                            PHYS_NET,
                                                            vlan).allocated)
        # ...and vlans just outside the range are absent.
        self.assertRaises(c_exc.VlanIDNotFound,
                          n1kv_db_v2.get_vlan_allocation,
                          db_session,
                          PHYS_NET,
                          VLAN_MIN - 1)
        self.assertRaises(c_exc.VlanIDNotFound,
                          n1kv_db_v2.get_vlan_allocation,
                          db_session,
                          PHYS_NET,
                          VLAN_MAX + 1)

    def test_delete_network_profile_with_network_fail(self):
        """Deleting a profile still used by a network fails with 409."""
        net_p = self._make_test_profile(name='netp1')
        net_data = {'network': {'name': 'net1',
                                n1kv.PROFILE_ID: net_p['id'],
                                'tenant_id': 'some_tenant'}}
        network_req = self.new_create_request('networks', net_data)
        network_res = network_req.get_response(self.api)
        self.assertEqual(network_res.status_int, 201)
        self._delete('network_profiles', net_p['id'],
                     expected_code=409)

    def test_delete_network_profile_deallocate_vlan_segment_pool(self):
        """Deleting a profile removes its vlan allocation rows."""
        db_session = db.get_session()
        net_p_dict = self._prepare_net_profile_data(c_const.NETWORK_TYPE_VLAN)
        net_p_req = self.new_create_request('network_profiles', net_p_dict)
        net_p = self.deserialize(self.fmt,
                                 net_p_req.get_response(self.ext_api))
        self.assertIsNotNone(n1kv_db_v2.get_vlan_allocation(db_session,
                                                            PHYS_NET,
                                                            VLAN_MIN))
        self._delete('network_profiles', net_p['network_profile']['id'])
        for vlan in range(VLAN_MIN, VLAN_MAX + 1):
            self.assertRaises(c_exc.VlanIDNotFound,
                              n1kv_db_v2.get_vlan_allocation,
                              db_session,
                              PHYS_NET,
                              vlan)

    def test_create_network_profile_rollback_profile_binding(self):
        """Test rollback of profile binding if network profile create fails."""
        db_session = db.get_session()
        client_patch = mock.patch(n1kv_client.__name__ + ".Client",
                                  new=fake_client.TestClientInvalidResponse)
        client_patch.start()
        net_p_dict = self._prepare_net_profile_data(c_const.NETWORK_TYPE_VLAN)
        # NOTE(review): the create request is built but never sent
        # (get_response is not called), so the binding count below is
        # trivially zero -- confirm whether a
        # .get_response(self.ext_api) call is missing. The client_patch
        # is also never stopped, so the fake client may leak into
        # subsequent tests.
        self.new_create_request('network_profiles', net_p_dict)
        bindings = (db_session.query(n1kv_models_v2.ProfileBinding).filter_by(
                    profile_type="network"))
        self.assertEqual(bindings.count(), 0)
class TestN1kvBasicGet(test_plugin.TestBasicGet,
                       N1kvPluginTestCase):
    # Run the standard v2 "basic GET" API tests against the n1kv plugin.
    pass
class TestN1kvHTTPResponse(test_plugin.TestV2HTTPResponse,
                           N1kvPluginTestCase):
    # Run the standard v2 HTTP response-code tests against the n1kv plugin.
    pass
class TestN1kvPorts(test_plugin.TestPortsV2,
                    N1kvPluginTestCase,
                    test_bindings.PortBindingsTestCase):
    """Port API tests (including VIF binding) against the n1kv plugin."""

    # Expected VIF binding attributes for n1kv-created ports.
    VIF_TYPE = portbindings.VIF_TYPE_OVS
    HAS_PORT_FILTER = False

    def test_create_port_with_default_n1kv_policy_profile_id(self):
        """Test port create without passing policy profile id."""
        with self.port() as port:
            db_session = db.get_session()
            pp = n1kv_db_v2.get_policy_profile(
                db_session, port['port'][n1kv.PROFILE_ID])
            self.assertEqual(pp['name'], 'service_profile')

    def test_create_port_with_n1kv_policy_profile_id(self):
        """Test port create with policy profile id."""
        profile_obj = self._make_test_policy_profile(name='test_profile')
        with self.network() as network:
            data = {'port': {n1kv.PROFILE_ID: profile_obj.id,
                             'tenant_id': self.tenant_id,
                             'network_id': network['network']['id']}}
            port_req = self.new_create_request('ports', data)
            port = self.deserialize(self.fmt,
                                    port_req.get_response(self.api))
            self.assertEqual(port['port'][n1kv.PROFILE_ID],
                             profile_obj.id)
            self._delete('ports', port['port']['id'])

    def test_update_port_with_n1kv_policy_profile_id(self):
        """Test port update failure while updating policy profile id."""
        with self.port() as port:
            data = {'port': {n1kv.PROFILE_ID: 'some-profile-uuid'}}
            port_req = self.new_update_request('ports',
                                               data,
                                               port['port']['id'])
            res = port_req.get_response(self.api)
            # Port update should fail to update policy profile id.
            self.assertEqual(res.status_int, 400)

    def test_create_first_port_invalid_parameters_fail(self):
        """Test parameters for first port create sent to the VSM."""
        profile_obj = self._make_test_policy_profile(name='test_profile')
        with self.network() as network:
            client_patch = mock.patch(n1kv_client.__name__ + ".Client",
                                      new=fake_client.TestClientInvalidRequest)
            client_patch.start()
            data = {'port': {n1kv.PROFILE_ID: profile_obj.id,
                             'tenant_id': self.tenant_id,
                             'network_id': network['network']['id'],
                             }}
            port_req = self.new_create_request('ports', data)
            res = port_req.get_response(self.api)
            self.assertEqual(res.status_int, 500)
            # NOTE(review): stop() is unreachable if an assertion above
            # fails; consider self.addCleanup(client_patch.stop).
            client_patch.stop()

    def test_create_next_port_invalid_parameters_fail(self):
        """Test parameters for subsequent port create sent to the VSM."""
        with self.port() as port:
            client_patch = mock.patch(n1kv_client.__name__ + ".Client",
                                      new=fake_client.TestClientInvalidRequest)
            client_patch.start()
            data = {'port': {n1kv.PROFILE_ID: port['port']['n1kv:profile_id'],
                             'tenant_id': port['port']['tenant_id'],
                             'network_id': port['port']['network_id']}}
            port_req = self.new_create_request('ports', data)
            res = port_req.get_response(self.api)
            self.assertEqual(res.status_int, 500)
            client_patch.stop()

    def test_create_first_port_rollback_vmnetwork(self):
        """Test whether VMNetwork is cleaned up if port create fails on VSM."""
        db_session = db.get_session()
        profile_obj = self._make_test_policy_profile(name='test_profile')
        with self.network() as network:
            client_patch = mock.patch(n1kv_client.__name__ + ".Client",
                                      new=fake_client.
                                      TestClientInvalidResponse)
            client_patch.start()
            data = {'port': {n1kv.PROFILE_ID: profile_obj.id,
                             'tenant_id': self.tenant_id,
                             'network_id': network['network']['id'],
                             }}
            # NOTE(review): the request is built but never sent
            # (get_response is not called), so the VMNetworkNotFound
            # assertion below passes trivially -- confirm whether a
            # .get_response(self.api) call is missing here.
            self.new_create_request('ports', data)
            self.assertRaises(c_exc.VMNetworkNotFound,
                              n1kv_db_v2.get_vm_network,
                              db_session,
                              profile_obj.id,
                              network['network']['id'])
            # Explicit stop of failure response mock from controller required
            # for network object clean up to succeed.
            client_patch.stop()

    def test_create_next_port_rollback_vmnetwork_count(self):
        """Test whether VMNetwork count if port create fails on VSM."""
        db_session = db.get_session()
        with self.port() as port:
            pt = port['port']
            old_vmn = n1kv_db_v2.get_vm_network(db_session,
                                                pt['n1kv:profile_id'],
                                                pt['network_id'])
            client_patch = mock.patch(n1kv_client.__name__ + ".Client",
                                      new=fake_client.
                                      TestClientInvalidResponse)
            client_patch.start()
            data = {'port': {n1kv.PROFILE_ID: pt['n1kv:profile_id'],
                             'tenant_id': pt['tenant_id'],
                             'network_id': pt['network_id']}}
            # NOTE(review): request built but never sent (no get_response),
            # so the port count is trivially unchanged -- verify intent.
            self.new_create_request('ports', data)
            new_vmn = n1kv_db_v2.get_vm_network(db_session,
                                                pt['n1kv:profile_id'],
                                                pt['network_id'])
            self.assertEqual(old_vmn.port_count, new_vmn.port_count)
            # Explicit stop of failure response mock from controller required
            # for network object clean up to succeed.
            client_patch.stop()
class TestN1kvPolicyProfiles(N1kvPluginTestCase):
    """Tests for policy-profile synchronization from the VSM into the DB."""

    def test_populate_policy_profile(self):
        """Verify policy profiles are fetched from the VSM into the DB."""
        # Use a context manager so the fake client is unpatched even if an
        # assertion fails; the previous start()/stop() pair leaked the patch
        # on failure and was inconsistent with the sibling test below.
        with mock.patch(n1kv_client.__name__ + ".Client",
                        new=fake_client.TestClient):
            instance = n1kv_neutron_plugin.N1kvNeutronPluginV2()
            instance._populate_policy_profiles()
            db_session = db.get_session()
            profile = n1kv_db_v2.get_policy_profile(
                db_session, '00000000-0000-0000-0000-000000000001')
            self.assertEqual('pp-1', profile['name'])

    def test_populate_policy_profile_delete(self):
        """Verify profiles removed on the VSM are deleted from the DB."""
        # Patch the Client class with the TestClient class
        with mock.patch(n1kv_client.__name__ + ".Client",
                        new=fake_client.TestClient):
            # Patch the _get_total_profiles() method to return a custom value
            with mock.patch(fake_client.__name__ +
                            '.TestClient._get_total_profiles') as obj_inst:
                # Return 3 policy profiles
                obj_inst.return_value = 3
                plugin = manager.NeutronManager.get_plugin()
                plugin._populate_policy_profiles()
                db_session = db.get_session()
                profile = n1kv_db_v2.get_policy_profile(
                    db_session, '00000000-0000-0000-0000-000000000001')
                # Verify that DB contains only 3 policy profiles
                self.assertEqual('pp-1', profile['name'])
                profile = n1kv_db_v2.get_policy_profile(
                    db_session, '00000000-0000-0000-0000-000000000002')
                self.assertEqual('pp-2', profile['name'])
                profile = n1kv_db_v2.get_policy_profile(
                    db_session, '00000000-0000-0000-0000-000000000003')
                self.assertEqual('pp-3', profile['name'])
                self.assertRaises(c_exc.PolicyProfileIdNotFound,
                                  n1kv_db_v2.get_policy_profile,
                                  db_session,
                                  '00000000-0000-0000-0000-000000000004')
                # Return 2 policy profiles
                obj_inst.return_value = 2
                plugin._populate_policy_profiles()
                # Verify that the third policy profile is deleted
                self.assertRaises(c_exc.PolicyProfileIdNotFound,
                                  n1kv_db_v2.get_policy_profile,
                                  db_session,
                                  '00000000-0000-0000-0000-000000000003')
class TestN1kvNetworks(test_plugin.TestNetworksV2,
                       N1kvPluginTestCase):
    """Network API tests run against the n1kv plugin."""

    def _prepare_net_data(self, net_profile_id):
        # Minimal network create body bound to the given network profile.
        return {'network': {'name': 'net1',
                            n1kv.PROFILE_ID: net_profile_id,
                            'tenant_id': self.tenant_id}}

    def test_create_network_with_default_n1kv_network_profile_id(self):
        """Test network create without passing network profile id."""
        with self.network() as network:
            db_session = db.get_session()
            np = n1kv_db_v2.get_network_profile(
                db_session, network['network'][n1kv.PROFILE_ID])
            self.assertEqual(np['name'], 'default_network_profile')

    def test_create_network_with_n1kv_network_profile_id(self):
        """Test network create with network profile id."""
        profile_obj = self._make_test_profile(name='test_profile')
        data = self._prepare_net_data(profile_obj.id)
        network_req = self.new_create_request('networks', data)
        network = self.deserialize(self.fmt,
                                   network_req.get_response(self.api))
        self.assertEqual(network['network'][n1kv.PROFILE_ID],
                         profile_obj.id)

    def test_update_network_with_n1kv_network_profile_id(self):
        """Test network update failure while updating network profile id."""
        with self.network() as network:
            data = {'network': {n1kv.PROFILE_ID: 'some-profile-uuid'}}
            network_req = self.new_update_request('networks',
                                                  data,
                                                  network['network']['id'])
            res = network_req.get_response(self.api)
            # Network update should fail to update network profile id.
            self.assertEqual(res.status_int, 400)

    def test_create_network_rollback_deallocate_vlan_segment(self):
        """Test vlan segment deallocation on network create failure."""
        profile_obj = self._make_test_profile(name='test_profile',
                                              segment_range='20-23')
        data = self._prepare_net_data(profile_obj.id)
        client_patch = mock.patch(n1kv_client.__name__ + ".Client",
                                  new=fake_client.TestClientInvalidResponse)
        client_patch.start()
        # NOTE(review): the create request is built but never sent (no
        # get_response call), and profile creation already leaves the
        # allocation row with allocated=False, so the assertion below
        # passes trivially -- confirm whether a .get_response(self.api)
        # call is missing. The patch is also never stopped.
        self.new_create_request('networks', data)
        db_session = db.get_session()
        self.assertFalse(n1kv_db_v2.get_vlan_allocation(db_session,
                                                        PHYS_NET,
                                                        20).allocated)

    def test_create_network_rollback_deallocate_overlay_segment(self):
        """Test overlay segment deallocation on network create failure."""
        profile_obj = self._make_test_profile('test_np',
                                              c_const.NETWORK_TYPE_OVERLAY,
                                              '10000-10001')
        data = self._prepare_net_data(profile_obj.id)
        client_patch = mock.patch(n1kv_client.__name__ + ".Client",
                                  new=fake_client.TestClientInvalidResponse)
        client_patch.start()
        # NOTE(review): request built but never sent, so rollback is not
        # actually exercised -- verify intent (see vlan variant).
        self.new_create_request('networks', data)
        db_session = db.get_session()
        self.assertFalse(n1kv_db_v2.get_vxlan_allocation(db_session,
                                                         10000).allocated)
class TestN1kvSubnets(test_plugin.TestSubnetsV2,
                      N1kvPluginTestCase):
    """Subnet API tests run against the n1kv plugin."""

    def setUp(self):
        super(TestN1kvSubnets, self).setUp()

    def test_create_subnet_with_invalid_parameters(self):
        """Test subnet creation with invalid parameters sent to the VSM."""
        with self.network() as network:
            client_patch = mock.patch(n1kv_client.__name__ + ".Client",
                                      new=fake_client.TestClientInvalidRequest)
            client_patch.start()
            # Ensure the fake client is unpatched even if an assertion
            # fails; the original never stopped this patch, leaking the
            # fake client into subsequent tests.
            self.addCleanup(client_patch.stop)
            data = {'subnet': {'network_id': network['network']['id'],
                               'cidr': "10.0.0.0/24"}}
            subnet_req = self.new_create_request('subnets', data)
            subnet_resp = subnet_req.get_response(self.api)
            # Subnet creation should fail due to invalid network name
            self.assertEqual(subnet_resp.status_int, 400)
class TestN1kvL3Test(test_l3_plugin.L3NatExtensionTestCase):
    # Run the standard L3 NAT extension API tests against this setup.
    pass
class TestN1kvL3SchedulersTest(test_l3_schedulers.L3SchedulerTestCase):
    # Run the standard L3 scheduler tests against this setup.
    pass
| {
"content_hash": "4c6d21cedf6735c75d620161bc02b7cb",
"timestamp": "",
"source": "github",
"line_count": 808,
"max_line_length": 79,
"avg_line_length": 49.14108910891089,
"alnum_prop": 0.5516546617639652,
"repo_name": "virtualopensystems/neutron",
"id": "0d71bf4ab99e6493507132d9e815e9efb76440d5",
"size": "40462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/cisco/n1kv/test_n1kv_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60527"
},
{
"name": "Python",
"bytes": "9873662"
},
{
"name": "Shell",
"bytes": "9202"
},
{
"name": "XSLT",
"bytes": "50907"
}
],
"symlink_target": ""
} |
<!-- Material dialog for creating/editing an NFC tag; fields are two-way
     bound to data.currentTag supplied by the dialog opener. -->
<h1 mat-dialog-title>{{pageHeader}}</h1>
<div mat-dialog-content>
  <!-- Tag id (required) -->
  <div class="form-group">
    <mat-form-field>
      <input matInput placeholder="TagID" [(ngModel)]="data.currentTag._id" name="tagid" class="form-control" required>
    </mat-form-field>
  </div>
  <!-- Display name shown in the tag list (required) -->
  <div class="form-group">
    <mat-form-field>
      <input matInput placeholder="Anzeigename in Tag-Liste" [(ngModel)]="data.currentTag.alias" name="alias" class="form-control" required>
    </mat-form-field>
  </div>
  <!-- Location this tag is linked to, chosen from the known locations -->
  <div class="form-group">
    <mat-form-field>
      <mat-select placeholder="Verknüpfter Ort" class="form-control" [(ngModel)]="data.currentTag.location" name="locationSelect">
        <mat-option *ngFor="let entry of locations" [value]="entry._id">{{entry._id}}</mat-option>
      </mat-select>
    </mat-form-field>
  </div>
</div>
<!-- Save / cancel actions handled by the dialog component -->
<div mat-dialog-actions>
  <button mat-raised-button color="primary" tabindex="2" (click)="submit()">Speichern</button>
  <button mat-raised-button tabindex="-1" (click)="cancel()">Abbrechen</button>
</div>
| {
"content_hash": "680b8aa2c06f97a47cfbe478921bb7db",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 138,
"avg_line_length": 42.5,
"alnum_prop": 0.6705882352941176,
"repo_name": "hshn-labsw-nfc/schnitzel",
"id": "2d72401cea3d2bc63319fa2bbea63b9ab0b81e0e",
"size": "1021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "client/src/app/admin/menue/tags/tag-detail/tag-detail.component.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "79"
},
{
"name": "CSS",
"bytes": "7598"
},
{
"name": "HTML",
"bytes": "25207"
},
{
"name": "JavaScript",
"bytes": "315512"
},
{
"name": "Shell",
"bytes": "86"
},
{
"name": "TypeScript",
"bytes": "105379"
}
],
"symlink_target": ""
} |
#region License
#endregion
using System;
namespace WebSocketSharp {

  /// <summary>
  /// Provides the credentials for HTTP authentication (Basic/Digest).
  /// </summary>
  public class WsCredential {

    #region Private Fields

    string _domain;
    string _password;
    string _userName;

    #endregion

    #region Internal Constructors

    internal WsCredential()
    {
    }

    internal WsCredential(string userName, string password)
      : this(userName, password, null)
    {
    }

    internal WsCredential(string userName, string password, string domain)
    {
      _userName = userName;
      _password = password;
      _domain = domain;
    }

    #endregion

    #region Public Properties

    /// <summary>
    /// Gets the name of the user domain associated with the credentials.
    /// </summary>
    /// <value>
    /// A <see cref="string"/> containing the user domain name, or an empty
    /// string if none was set. Currently, returns the request uri of a
    /// WebSocket opening handshake.
    /// </value>
    public string Domain {
      get {
        return _domain != null ? _domain : String.Empty;
      }

      internal set {
        _domain = value;
      }
    }

    /// <summary>
    /// Gets the password for the user name associated with the credentials.
    /// </summary>
    /// <value>
    /// A <see cref="string"/> containing the password, or an empty string
    /// if none was set.
    /// </value>
    public string Password {
      get {
        return _password != null ? _password : String.Empty;
      }

      internal set {
        _password = value;
      }
    }

    /// <summary>
    /// Gets the user name associated with the credentials.
    /// </summary>
    /// <value>
    /// A <see cref="string"/> containing the user name, or an empty string
    /// if none was set.
    /// </value>
    public string UserName {
      get {
        return _userName != null ? _userName : String.Empty;
      }

      internal set {
        _userName = value;
      }
    }

    #endregion
  }
}
| {
"content_hash": "6fb712e56c8fc3b2b69c16e01401790f",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 108,
"avg_line_length": 21.094736842105263,
"alnum_prop": 0.593812375249501,
"repo_name": "NIAEFEUP/Kommando",
"id": "918755590565de23cee25dfdf3297c1ca97a4a49",
"size": "3167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "viewer/websocket-sharp/WsCredential.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C#",
"bytes": "1589984"
},
{
"name": "CSS",
"bytes": "4148"
},
{
"name": "JavaScript",
"bytes": "17674"
},
{
"name": "Shell",
"bytes": "1244"
}
],
"symlink_target": ""
} |
package com.amazonaws.codesamples.document;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClient;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.ItemCollection;
import com.amazonaws.services.dynamodbv2.document.ScanOutcome;
import com.amazonaws.services.dynamodbv2.document.Table;
import com.amazonaws.services.dynamodbv2.document.spec.ScanSpec;
import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
import com.amazonaws.services.dynamodbv2.model.KeyType;
import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
public class DocumentAPIParallelScan {
    // total number of sample items
    static int scanItemCount = 300;

    // number of items each scan request should return (page size)
    static int scanItemLimit = 10;

    // number of logical segments for parallel scan
    static int parallelScanThreads = 16;

    // table that will be used for scanning
    static String parallelScanTestTableName = "ParallelScanTest";

    // Document API client; credentials come from the default profile.
    static DynamoDB dynamoDB = new DynamoDB(
        new AmazonDynamoDBClient(new ProfileCredentialsProvider()));
public static void main(String[] args) throws Exception {
try {
// Clean up the table
deleteTable(parallelScanTestTableName);
createTable(parallelScanTestTableName, 10L, 5L, "Id", "N");
// Upload sample data for scan
uploadSampleProducts(parallelScanTestTableName, scanItemCount);
// Scan the table using multiple threads
parallelScan(parallelScanTestTableName, scanItemLimit, parallelScanThreads);
}
catch (AmazonServiceException ase) {
System.err.println(ase.getMessage());
}
}
private static void parallelScan(String tableName, int itemLimit, int numberOfThreads) {
System.out.println("Scanning " + tableName + " using " + numberOfThreads
+ " threads " + itemLimit + " items at a time");
ExecutorService executor = Executors.newFixedThreadPool(numberOfThreads);
// Divide DynamoDB table into logical segments
// Create one task for scanning each segment
// Each thread will be scanning one segment
int totalSegments = numberOfThreads;
for (int segment = 0; segment < totalSegments; segment++) {
// Runnable task that will only scan one segment
ScanSegmentTask task = new ScanSegmentTask(tableName, itemLimit, totalSegments, segment);
// Execute the task
executor.execute(task);
}
shutDownExecutorService(executor);
}
// Runnable task for scanning a single segment of a DynamoDB table
private static class ScanSegmentTask implements Runnable {
// DynamoDB table to scan
private String tableName;
// number of items each scan request should return
private int itemLimit;
// Total number of segments
// Equals to total number of threads scanning the table in parallel
private int totalSegments;
// Segment that will be scanned with by this task
private int segment;
public ScanSegmentTask(String tableName, int itemLimit, int totalSegments, int segment) {
this.tableName = tableName;
this.itemLimit = itemLimit;
this.totalSegments = totalSegments;
this.segment = segment;
}
@Override
public void run() {
System.out.println("Scanning " + tableName + " segment " + segment + " out of " + totalSegments + " segments " + itemLimit + " items at a time...");
int totalScannedItemCount = 0;
Table table = dynamoDB.getTable(tableName);
try {
ScanSpec spec = new ScanSpec()
.withMaxResultSize(itemLimit)
.withTotalSegments(totalSegments)
.withSegment(segment);
ItemCollection<ScanOutcome> items = table.scan(spec);
Iterator<Item> iterator = items.iterator();
Item currentItem = null;
while (iterator.hasNext()) {
totalScannedItemCount++;
currentItem = iterator.next();
System.out.println(currentItem.toString());
}
} catch (Exception e) {
System.err.println(e.getMessage());
} finally {
System.out.println("Scanned " + totalScannedItemCount
+ " items from segment " + segment + " out of "
+ totalSegments + " of " + tableName);
}
}
}
private static void uploadSampleProducts(String tableName, int itemCount) {
System.out.println("Adding " + itemCount + " sample items to " + tableName);
for (int productIndex = 0; productIndex < itemCount; productIndex++) {
uploadProduct(tableName, productIndex);
}
}
private static void uploadProduct(String tableName, int productIndex) {
Table table = dynamoDB.getTable(tableName);
try {
System.out.println("Processing record #" + productIndex);
Item item = new Item()
.withPrimaryKey("Id", productIndex)
.withString("Title", "Book " + productIndex + " Title")
.withString("ISBN", "111-1111111111")
.withStringSet(
"Authors",
new HashSet<String>(Arrays.asList("Author1")))
.withNumber("Price", 2)
.withString("Dimensions", "8.5 x 11.0 x 0.5")
.withNumber("PageCount", 500)
.withBoolean("InPublication", true)
.withString("ProductCategory", "Book");
table.putItem(item);
} catch (Exception e) {
System.err.println("Failed to create item " + productIndex + " in " + tableName);
System.err.println(e.getMessage());
}
}
/**
 * Deletes the named table and blocks until the deletion has completed.
 * Errors are reported to stderr rather than propagated to the caller.
 *
 * @param tableName table to delete
 */
private static void deleteTable(String tableName){
    try {
        Table target = dynamoDB.getTable(tableName);
        target.delete();
        System.out.println("Waiting for " + tableName
            + " to be deleted...this may take a while...");
        // Poll until DynamoDB confirms the table is gone.
        target.waitForDelete();
    } catch (Exception e) {
        System.err.println("Failed to delete table " + tableName);
        e.printStackTrace(System.err);
    }
}
/**
 * Creates a table keyed only by a hash (partition) key.
 * Convenience overload that delegates to the full version with no range key.
 *
 * @param tableName          table to create
 * @param readCapacityUnits  provisioned read capacity
 * @param writeCapacityUnits provisioned write capacity
 * @param hashKeyName        attribute name of the hash key
 * @param hashKeyType        attribute type of the hash key
 */
private static void createTable(
    String tableName, long readCapacityUnits, long writeCapacityUnits,
    String hashKeyName, String hashKeyType) {
    createTable(tableName, readCapacityUnits, writeCapacityUnits,
        hashKeyName, hashKeyType, null, null);
}
/**
 * Creates a table with the given key schema and provisioned throughput, then
 * blocks until the table becomes active. Errors are reported to stderr.
 *
 * @param tableName          table to create
 * @param readCapacityUnits  provisioned read capacity
 * @param writeCapacityUnits provisioned write capacity
 * @param hashKeyName        attribute name of the hash (partition) key
 * @param hashKeyType        attribute type of the hash key
 * @param rangeKeyName       attribute name of the range (sort) key, or null for none
 * @param rangeKeyType       attribute type of the range key; unused when rangeKeyName is null
 */
private static void createTable(
    String tableName, long readCapacityUnits, long writeCapacityUnits,
    String hashKeyName, String hashKeyType,
    String rangeKeyName, String rangeKeyType) {
    try {
        System.out.println("Creating table " + tableName);

        // The hash key always appears in both the key schema and the
        // attribute definitions.
        List<KeySchemaElement> keySchema = new ArrayList<KeySchemaElement>();
        keySchema.add(new KeySchemaElement()
            .withAttributeName(hashKeyName)
            .withKeyType(KeyType.HASH));

        List<AttributeDefinition> attributeDefinitions = new ArrayList<AttributeDefinition>();
        attributeDefinitions.add(new AttributeDefinition()
            .withAttributeName(hashKeyName)
            .withAttributeType(hashKeyType));

        // Optional range (sort) key.
        if (rangeKeyName != null){
            keySchema.add(new KeySchemaElement()
                .withAttributeName(rangeKeyName)
                .withKeyType(KeyType.RANGE));
            attributeDefinitions.add(new AttributeDefinition()
                .withAttributeName(rangeKeyName)
                .withAttributeType(rangeKeyType));
        }

        Table table = dynamoDB.createTable(tableName,
            keySchema,
            attributeDefinitions,
            new ProvisionedThroughput()
                .withReadCapacityUnits(readCapacityUnits)
                .withWriteCapacityUnits(writeCapacityUnits));

        System.out.println("Waiting for " + tableName
            + " to be created...this may take a while...");
        table.waitForActive();
    } catch (Exception e) {
        System.err.println("Failed to create table " + tableName);
        e.printStackTrace(System.err);
    }
}
/**
 * Gracefully stops the executor: waits up to ten seconds for queued tasks to
 * finish, then forces shutdown of anything still running. If the waiting
 * thread is interrupted, the executor is force-stopped and the interrupt
 * status is restored.
 *
 * @param executor service to shut down
 */
private static void shutDownExecutorService(ExecutorService executor) {
    executor.shutdown(); // stop accepting new tasks
    try {
        boolean finished = executor.awaitTermination(10, TimeUnit.SECONDS);
        if (!finished) {
            executor.shutdownNow(); // timed out: cancel in-flight tasks
        }
    } catch (InterruptedException e) {
        executor.shutdownNow();
        // Preserve interrupt status
        Thread.currentThread().interrupt();
    }
}
}
| {
"content_hash": "d247c85308e34e9568a7de6ebb664edf",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 160,
"avg_line_length": 38.549019607843135,
"alnum_prop": 0.6019328585961343,
"repo_name": "awslabs/aws-dynamodb-examples",
"id": "84b7d0988706e2be463a9631441cd3583b22b2c4",
"size": "10401",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/com/amazonaws/codesamples/document/DocumentAPIParallelScan.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "321478"
}
],
"symlink_target": ""
} |
package com.azure.resourcemanager.desktopvirtualization.implementation;
import com.azure.core.management.SystemData;
import com.azure.core.util.Context;
import com.azure.resourcemanager.desktopvirtualization.fluent.models.MsixPackageInner;
import com.azure.resourcemanager.desktopvirtualization.models.MsixPackage;
import com.azure.resourcemanager.desktopvirtualization.models.MsixPackageApplications;
import com.azure.resourcemanager.desktopvirtualization.models.MsixPackageDependencies;
import com.azure.resourcemanager.desktopvirtualization.models.MsixPackagePatch;
import java.time.OffsetDateTime;
import java.util.Collections;
import java.util.List;
/**
 * Fluent implementation of {@link MsixPackage} wrapping a {@link MsixPackageInner}
 * wire model. Also implements the Definition (create) and Update builder
 * interfaces: property getters delegate to the inner model, while the with*
 * setters route to either the inner model (create mode) or a patch object
 * (update mode) depending on {@link #isInCreateMode()}.
 */
public final class MsixPackageImpl implements MsixPackage, MsixPackage.Definition, MsixPackage.Update {
    // Wrapped inner model holding all property values.
    private MsixPackageInner innerObject;

    // Entry point to the service client used for create/update/refresh calls.
    private final com.azure.resourcemanager.desktopvirtualization.DesktopVirtualizationManager serviceManager;

    // ---- read-only properties delegating to the inner model ----

    public String id() {
        return this.innerModel().id();
    }

    public String name() {
        return this.innerModel().name();
    }

    public String type() {
        return this.innerModel().type();
    }

    public SystemData systemData() {
        return this.innerModel().systemData();
    }

    public String imagePath() {
        return this.innerModel().imagePath();
    }

    public String packageName() {
        return this.innerModel().packageName();
    }

    public String packageFamilyName() {
        return this.innerModel().packageFamilyName();
    }

    public String displayName() {
        return this.innerModel().displayName();
    }

    public String packageRelativePath() {
        return this.innerModel().packageRelativePath();
    }

    public Boolean isRegularRegistration() {
        return this.innerModel().isRegularRegistration();
    }

    public Boolean isActive() {
        return this.innerModel().isActive();
    }

    // Returns an unmodifiable view; never null (empty list when unset).
    public List<MsixPackageDependencies> packageDependencies() {
        List<MsixPackageDependencies> inner = this.innerModel().packageDependencies();
        if (inner != null) {
            return Collections.unmodifiableList(inner);
        } else {
            return Collections.emptyList();
        }
    }

    public String version() {
        return this.innerModel().version();
    }

    public OffsetDateTime lastUpdated() {
        return this.innerModel().lastUpdated();
    }

    // Returns an unmodifiable view; never null (empty list when unset).
    public List<MsixPackageApplications> packageApplications() {
        List<MsixPackageApplications> inner = this.innerModel().packageApplications();
        if (inner != null) {
            return Collections.unmodifiableList(inner);
        } else {
            return Collections.emptyList();
        }
    }

    public String resourceGroupName() {
        return resourceGroupName;
    }

    public MsixPackageInner innerModel() {
        return this.innerObject;
    }

    private com.azure.resourcemanager.desktopvirtualization.DesktopVirtualizationManager manager() {
        return this.serviceManager;
    }

    // ---- identity of the resource within its host pool ----
    private String resourceGroupName;

    private String hostPoolName;

    private String msixPackageFullName;

    // Patch payload populated by with* setters while in update mode.
    private MsixPackagePatch updateMsixPackage;

    // Definition stage: binds this package to an existing host pool.
    public MsixPackageImpl withExistingHostPool(String resourceGroupName, String hostPoolName) {
        this.resourceGroupName = resourceGroupName;
        this.hostPoolName = hostPoolName;
        return this;
    }

    public MsixPackage create() {
        this.innerObject =
            serviceManager
                .serviceClient()
                .getMsixPackages()
                .createOrUpdateWithResponse(
                    resourceGroupName, hostPoolName, msixPackageFullName, this.innerModel(), Context.NONE)
                .getValue();
        return this;
    }

    public MsixPackage create(Context context) {
        this.innerObject =
            serviceManager
                .serviceClient()
                .getMsixPackages()
                .createOrUpdateWithResponse(
                    resourceGroupName, hostPoolName, msixPackageFullName, this.innerModel(), context)
                .getValue();
        return this;
    }

    // Constructor used when defining a new (not yet created) package.
    MsixPackageImpl(
        String name, com.azure.resourcemanager.desktopvirtualization.DesktopVirtualizationManager serviceManager) {
        this.innerObject = new MsixPackageInner();
        this.serviceManager = serviceManager;
        this.msixPackageFullName = name;
    }

    // Switches the builder into update mode with a fresh patch payload.
    public MsixPackageImpl update() {
        this.updateMsixPackage = new MsixPackagePatch();
        return this;
    }

    public MsixPackage apply() {
        this.innerObject =
            serviceManager
                .serviceClient()
                .getMsixPackages()
                .updateWithResponse(
                    resourceGroupName, hostPoolName, msixPackageFullName, updateMsixPackage, Context.NONE)
                .getValue();
        return this;
    }

    public MsixPackage apply(Context context) {
        this.innerObject =
            serviceManager
                .serviceClient()
                .getMsixPackages()
                .updateWithResponse(resourceGroupName, hostPoolName, msixPackageFullName, updateMsixPackage, context)
                .getValue();
        return this;
    }

    // Constructor used when wrapping an existing resource; identity parts are
    // parsed out of the resource id.
    MsixPackageImpl(
        MsixPackageInner innerObject,
        com.azure.resourcemanager.desktopvirtualization.DesktopVirtualizationManager serviceManager) {
        this.innerObject = innerObject;
        this.serviceManager = serviceManager;
        this.resourceGroupName = Utils.getValueFromIdByName(innerObject.id(), "resourcegroups");
        this.hostPoolName = Utils.getValueFromIdByName(innerObject.id(), "hostPools");
        this.msixPackageFullName = Utils.getValueFromIdByName(innerObject.id(), "msixPackages");
    }

    public MsixPackage refresh() {
        this.innerObject =
            serviceManager
                .serviceClient()
                .getMsixPackages()
                .getWithResponse(resourceGroupName, hostPoolName, msixPackageFullName, Context.NONE)
                .getValue();
        return this;
    }

    public MsixPackage refresh(Context context) {
        this.innerObject =
            serviceManager
                .serviceClient()
                .getMsixPackages()
                .getWithResponse(resourceGroupName, hostPoolName, msixPackageFullName, context)
                .getValue();
        return this;
    }

    // ---- fluent setters; create-only properties go straight to the inner
    // model, while updatable ones branch on create vs. update mode ----

    public MsixPackageImpl withImagePath(String imagePath) {
        this.innerModel().withImagePath(imagePath);
        return this;
    }

    public MsixPackageImpl withPackageName(String packageName) {
        this.innerModel().withPackageName(packageName);
        return this;
    }

    public MsixPackageImpl withPackageFamilyName(String packageFamilyName) {
        this.innerModel().withPackageFamilyName(packageFamilyName);
        return this;
    }

    public MsixPackageImpl withDisplayName(String displayName) {
        if (isInCreateMode()) {
            this.innerModel().withDisplayName(displayName);
            return this;
        } else {
            this.updateMsixPackage.withDisplayName(displayName);
            return this;
        }
    }

    public MsixPackageImpl withPackageRelativePath(String packageRelativePath) {
        this.innerModel().withPackageRelativePath(packageRelativePath);
        return this;
    }

    public MsixPackageImpl withIsRegularRegistration(Boolean isRegularRegistration) {
        if (isInCreateMode()) {
            this.innerModel().withIsRegularRegistration(isRegularRegistration);
            return this;
        } else {
            this.updateMsixPackage.withIsRegularRegistration(isRegularRegistration);
            return this;
        }
    }

    public MsixPackageImpl withIsActive(Boolean isActive) {
        if (isInCreateMode()) {
            this.innerModel().withIsActive(isActive);
            return this;
        } else {
            this.updateMsixPackage.withIsActive(isActive);
            return this;
        }
    }

    public MsixPackageImpl withPackageDependencies(List<MsixPackageDependencies> packageDependencies) {
        this.innerModel().withPackageDependencies(packageDependencies);
        return this;
    }

    public MsixPackageImpl withVersion(String version) {
        this.innerModel().withVersion(version);
        return this;
    }

    public MsixPackageImpl withLastUpdated(OffsetDateTime lastUpdated) {
        this.innerModel().withLastUpdated(lastUpdated);
        return this;
    }

    public MsixPackageImpl withPackageApplications(List<MsixPackageApplications> packageApplications) {
        this.innerModel().withPackageApplications(packageApplications);
        return this;
    }

    // A resource with no id has never been created on the service side.
    private boolean isInCreateMode() {
        return this.innerModel().id() == null;
    }
}
| {
"content_hash": "880f77ad2f5403186e6494d11e2e6ddf",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 117,
"avg_line_length": 32.26739926739927,
"alnum_prop": 0.6613690543762062,
"repo_name": "Azure/azure-sdk-for-java",
"id": "ec574fed95777ff5426e117d070a1d33579afba8",
"size": "8966",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/desktopvirtualization/azure-resourcemanager-desktopvirtualization/src/main/java/com/azure/resourcemanager/desktopvirtualization/implementation/MsixPackageImpl.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "8762"
},
{
"name": "Bicep",
"bytes": "15055"
},
{
"name": "CSS",
"bytes": "7676"
},
{
"name": "Dockerfile",
"bytes": "2028"
},
{
"name": "Groovy",
"bytes": "3237482"
},
{
"name": "HTML",
"bytes": "42090"
},
{
"name": "Java",
"bytes": "432409546"
},
{
"name": "JavaScript",
"bytes": "36557"
},
{
"name": "Jupyter Notebook",
"bytes": "95868"
},
{
"name": "PowerShell",
"bytes": "737517"
},
{
"name": "Python",
"bytes": "240542"
},
{
"name": "Scala",
"bytes": "1143898"
},
{
"name": "Shell",
"bytes": "18488"
},
{
"name": "XSLT",
"bytes": "755"
}
],
"symlink_target": ""
} |
import argparse
import os
from scheduled_bots.utils import make_deletion_templates, create_rfd
from wikidataintegrator import wdi_core, wdi_helpers
# Wikidata bot credentials: prefer the local settings module, fall back to
# environment variables so the bot can run in CI without a local.py.
try:
    from scheduled_bots.local import WDUSER, WDPASS
except ImportError:
    if "WDUSER" in os.environ and "WDPASS" in os.environ:
        WDUSER = os.environ['WDUSER']
        WDPASS = os.environ['WDPASS']
    else:
        raise ValueError("WDUSER and WDPASS must be specified in local.py or as environment variables")
def get_deprecated_items(releases):
    """Return the set of item QIDs to delete for deprecated InterPro releases.

    Finds all items whose InterPro ID statement (P2926) carries a reference
    "stated in" (P248) one of the given release items.

    :param releases: iterable of QIDs of deprecated InterPro release items
    :return: set of item QID strings (e.g. {'Q123', ...})
    """
    query = """
    SELECT ?item ?itemLabel ?iprurl WHERE {
      ?item p:P2926 ?s .
      ?s ps:P2926 ?ipr .
      ?s prov:wasDerivedFrom ?ref .
      ?ref pr:P248 ?release .
      values ?release **releases_str** .
      BIND(IRI(REPLACE(?ipr, '^(.+)$', ?formatterurl)) AS ?iprurl).
      wd:P2926 wdt:P1630 ?formatterurl .
    }"""
    # Splice the release QIDs into the VALUES clause, e.g. "{wd:Q1 wd:Q2}".
    releases_str = '{' + " ".join('wd:' + x for x in releases) + '}'
    query = query.replace("**releases_str**", releases_str)
    print(query)  # debug aid: show the SPARQL actually executed

    bindings = wdi_core.WDItemEngine.execute_sparql_query(query)['results']['bindings']
    # Item values are full entity URIs; keep only the trailing QID.
    return {x['item']['value'].rsplit("/")[-1] for x in bindings}
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("current_release", help="The current release. (e.g.: '64.0')")
    parser.add_argument('--title', help='deletion request title', type=str, default="Delete deprecated Interpro Items")
    parser.add_argument('--reason', help='deletion request reason', type=str,
                        default="These items are deprecated")
    parser.add_argument('--force', help='force run if deleting a large number of genes', action='store_true')
    parser.add_argument('--dummy', help='dont actually create the deletion request', action='store_true')
    args = parser.parse_args()
    current_release = args.current_release

    # Map of release version string -> release item QID for all InterPro
    # releases (items whose P629 is Q3047275).
    release_qid = wdi_helpers.id_mapper('P393', (('P629', "Q3047275"),))  # interpro releases
    # Every release except the current one is treated as deprecated.
    to_remove = {v for k, v in release_qid.items() if k != current_release}
    print(to_remove)

    qids = get_deprecated_items(to_remove)
    print("|".join(qids))
    print(len(qids))

    # Safety valve: deleting many items requires an explicit --force flag.
    if len(qids) > 200 and not args.force:
        raise ValueError(
            "Trying to delete {} items. If you really want to do this, re run with --force".format(len(qids)))

    if len(qids) > 0:
        s = make_deletion_templates(qids, args.title, args.reason)
        # --dummy builds the request text but does not post it.
        if not args.dummy:
            create_rfd(s, WDUSER, WDPASS)
        # Keep a local record of the QIDs requested for deletion.
        log_path = "deletion_log.txt"
        with open(log_path, 'w') as f:
            f.write("\n".join(qids))
| {
"content_hash": "f08707a635b3f7451d8d4e25fbc46853",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 119,
"avg_line_length": 38.71084337349398,
"alnum_prop": 0.6308745720510427,
"repo_name": "SuLab/scheduled-bots",
"id": "f2c0ae848262c8778c9d6e227c6658ad132d6865",
"size": "3213",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "scheduled_bots/interpro/DeleteBot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1296"
},
{
"name": "Jupyter Notebook",
"bytes": "1049300"
},
{
"name": "Python",
"bytes": "709603"
},
{
"name": "Shell",
"bytes": "5313"
}
],
"symlink_target": ""
} |
// Forward declarations (full definitions come from headers included above).
class CBlockIndex;
class CZMQAbstractNotifier;

// Factory function pointer producing a new notifier instance; see
// CZMQAbstractNotifier::Create<T>() which matches this signature.
typedef CZMQAbstractNotifier* (*CZMQNotifierFactory)();
/** Abstract base class for ZMQ event notifiers.
 *
 *  Subclasses implement Initialize()/Shutdown() for socket management and
 *  override the Notify* hooks they care about. The Notify* base
 *  implementations are defined out of line (NOTE(review): see the matching
 *  .cpp for their default behavior).
 */
class CZMQAbstractNotifier
{
public:
    CZMQAbstractNotifier() : psocket(0) { }
    virtual ~CZMQAbstractNotifier();

    //! Factory helper returning a heap-allocated instance of notifier type T;
    //! matches the CZMQNotifierFactory signature. Caller owns the result.
    template <typename T>
    static CZMQAbstractNotifier* Create()
    {
        return new T();
    }

    std::string GetType() const { return type; }
    void SetType(const std::string &t) { type = t; }
    std::string GetAddress() const { return address; }
    void SetAddress(const std::string &a) { address = a; }

    //! Set up the notifier using the given ZMQ context; returns false on failure.
    virtual bool Initialize(void *pcontext) = 0;
    //! Tear down any resources created by Initialize().
    virtual void Shutdown() = 0;

    // Event hooks invoked on new blocks / transactions; return false on
    // notification failure.
    virtual bool NotifyBlock(const CBlockIndex *pindex);
    virtual bool NotifyBlock(const CBlock& pblock);
    virtual bool NotifyTransaction(const CTransaction &transaction);

protected:
    void *psocket;       // socket handle (opaque void*; managed by subclasses)
    std::string type;    // notifier type identifier
    std::string address; // endpoint address to publish on
};
#endif // BITCOIN_ZMQ_ZMQABSTRACTNOTIFIER_H
| {
"content_hash": "d470534e08c6ff357ebe18bcce136b71",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 68,
"avg_line_length": 25.916666666666668,
"alnum_prop": 0.6913183279742765,
"repo_name": "litecoinz-project/litecoinz",
"id": "8873b71d80477c489e357c110d95c9e2a1183f7c",
"size": "1232",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/zmq/zmqabstractnotifier.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28453"
},
{
"name": "C",
"bytes": "730195"
},
{
"name": "C++",
"bytes": "6761814"
},
{
"name": "HTML",
"bytes": "20970"
},
{
"name": "Java",
"bytes": "30291"
},
{
"name": "M4",
"bytes": "226422"
},
{
"name": "Makefile",
"bytes": "154159"
},
{
"name": "Objective-C",
"bytes": "6536"
},
{
"name": "Objective-C++",
"bytes": "7240"
},
{
"name": "Python",
"bytes": "906305"
},
{
"name": "Shell",
"bytes": "103060"
}
],
"symlink_target": ""
} |
'use strict';

/**
 * Grunt build file: configures linting (jshint + jscs) and automated
 * version bumping (grunt-bump).
 *
 * @param {Object} grunt - the Grunt instance supplied by the task runner
 */
module.exports = function(grunt) {

  //--------------------------------------------------------------------------
  // SETUP CONFIG
  //--------------------------------------------------------------------------

  // Task configuration object passed to grunt.initConfig below.
  var config = {

    // Arrays of relevant code classified by type
    files: {
      js: {
        src: [
          'lib/*.js',
          '*.js'
        ]
      }
    },

    // Automatic version bumping (used by CI); commits and tags locally
    // but does not push (push: false).
    bump: {
      options: {
        files: ['package.json'],
        updateConfigs: [],
        commit: true,
        commitMessage: 'Release v%VERSION%',
        commitFiles: ['package.json', 'bower.json'],
        createTag: true,
        tagName: 'v%VERSION%',
        tagMessage: 'Version %VERSION%',
        push: false
      }
    },

    // Linting and code-standard checks.
    jshint: {
      options: {
        jshintrc: '.jshintrc',
        reporter: require('jshint-stylish')
      },
      all: ['Gruntfile.js', '<%= files.js.src %>']
    },
    jscs: {
      src: ['Gruntfile.js', '<%= files.js.src %>'],
      options: {
        config: '.jscsrc'
      }
    }

  };

  //--------------------------------------------------------------------------
  // LOAD TASKS
  //--------------------------------------------------------------------------

  // load task config
  grunt.initConfig(config);

  // load external tasks
  //grunt.loadTasks('tasks');

  // Load every grunt-* task found in package.json dependencies.
  require('matchdep').filterAll('grunt-*').forEach(grunt.loadNpmTasks);

  //--------------------------------------------------------------------------
  // SETUP WORKFLOWS
  //--------------------------------------------------------------------------

  // Bump only the patch version component.
  grunt.registerTask('bump-patch', [
    'bump-only:patch'
  ]);

  // Verification workflow: run both linters.
  grunt.registerTask('test', [
    'jshint',
    'jscs'
  ]);

};
| {
"content_hash": "bb00d46cdcadc9525b0e651fd753dd54",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 78,
"avg_line_length": 23.55,
"alnum_prop": 0.3970276008492569,
"repo_name": "gremy/kalabox-app-pantheon",
"id": "6afb25797677e1fc81694ac9ec86c6e1bed13def",
"size": "1884",
"binary": false,
"copies": "6",
"ref": "refs/heads/v0.10",
"path": "app/node_modules/kalabox-plugin-drush/Gruntfile.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "103591"
},
{
"name": "Nginx",
"bytes": "1189"
},
{
"name": "PHP",
"bytes": "2958"
},
{
"name": "Perl",
"bytes": "14800"
},
{
"name": "Shell",
"bytes": "17143"
}
],
"symlink_target": ""
} |
package org.apache.ignite.loadtests.cache;
import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.cache.store.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
import org.apache.ignite.testframework.junits.common.*;
import javax.cache.configuration.*;
import java.util.*;
import java.util.concurrent.atomic.*;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.*;
/**
* Basic store test.
*/
public class GridCacheWriteBehindStoreLoadTest extends GridCommonAbstractTest {
    /** Flush frequency (ms) for the write-behind store buffer. */
    private static final int WRITE_FROM_BEHIND_FLUSH_FREQUENCY = 1000;

    /**
     * Run time is 24 hours, in milliseconds.
     * Fixed: the previous value {@code 24L * 60 * 60 * 60 * 1000} contained an
     * extra factor of 60, which made the test run for ~60 days instead of the
     * documented 24 hours.
     */
    private static final long runTime = 24L * 60 * 60 * 1000;

    /** Specify if test keys should be randomly generated. */
    private boolean rndKeys;

    /** Number of distinct keys if they are generated randomly. */
    private int keysCnt = 20 * 1024;

    /** Number of threads that concurrently update cache. */
    private int threadCnt;

    /** No-op cache store (load returns null; write/delete do nothing). */
    private static final CacheStore store = new CacheStoreAdapter() {
        /** {@inheritDoc} */
        @Override public Object load(Object key) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void write(javax.cache.Cache.Entry e) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void delete(Object key) {
            // No-op.
        }
    };

    /**
     * Constructor. Starts the grid.
     */
    public GridCacheWriteBehindStoreLoadTest() {
        super(true /*start grid. */);
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        IgniteCache<Object, Object> cache = jcache();

        if (cache != null)
            cache.clear();
    }

    /**
     * @return Caching mode.
     */
    protected CacheMode cacheMode() {
        return CacheMode.PARTITIONED;
    }

    /** {@inheritDoc} */
    @SuppressWarnings({"unchecked"})
    @Override protected final IgniteConfiguration getConfiguration() throws Exception {
        IgniteConfiguration c = super.getConfiguration();

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(new TcpDiscoveryVmIpFinder(true));

        c.setDiscoverySpi(disco);

        CacheConfiguration cc = defaultCacheConfiguration();

        cc.setCacheMode(cacheMode());
        cc.setWriteSynchronizationMode(FULL_SYNC);
        cc.setSwapEnabled(false);

        cc.setCacheStoreFactory(new FactoryBuilder.SingletonFactory(store));
        cc.setReadThrough(true);
        cc.setWriteThrough(true);
        cc.setLoadPreviousValue(true);

        // Enable write-behind batching with the configured flush frequency.
        cc.setWriteBehindEnabled(true);
        cc.setWriteBehindFlushFrequency(WRITE_FROM_BEHIND_FLUSH_FREQUENCY);

        c.setCacheConfiguration(cc);

        return c;
    }

    /**
     * @throws Exception If failed.
     */
    public void testLoadCacheSequentialKeys() throws Exception {
        rndKeys = false;

        threadCnt = 10;

        loadCache();
    }

    /**
     * @throws Exception If failed.
     */
    public void testLoadCacheRandomKeys() throws Exception {
        rndKeys = true;

        threadCnt = 10;

        loadCache();
    }

    /**
     * Starts {@code threadCnt} writer threads putting values into the cache and
     * reports put throughput once a minute until {@code runTime} elapses.
     *
     * @throws Exception If failed.
     */
    private void loadCache() throws Exception {
        final AtomicBoolean running = new AtomicBoolean(true);

        final IgniteCache<Object, Object> cache = jcache();

        final AtomicLong keyCntr = new AtomicLong();

        long start = System.currentTimeMillis();

        IgniteInternalFuture<?> fut = multithreadedAsync(new Runnable() {
            @SuppressWarnings({"NullableProblems"})
            @Override public void run() {
                Random rnd = new Random();

                while (running.get()) {
                    long putNum = keyCntr.incrementAndGet();

                    // Sequential mode uses the monotonically increasing counter;
                    // random mode draws keys from [0, keysCnt).
                    long key = rndKeys ? rnd.nextInt(keysCnt) : putNum;

                    cache.put(key, "val" + key);
                }
            }
        }, threadCnt, "put");

        long prevPutCnt = 0;

        while (System.currentTimeMillis() - start < runTime) {
            // Print stats every minute.
            U.sleep(60 * 1000);

            long cnt = keyCntr.get();
            long secondsElapsed = (System.currentTimeMillis() - start) / 1000;

            info(">>> Running for " + secondsElapsed + " seconds");
            info(">>> Puts: [total=" + cnt + ", avg=" + (cnt / secondsElapsed) + " (ops/sec), lastMinute=" +
                ((cnt - prevPutCnt) / 60) + "(ops/sec)]");

            prevPutCnt = cnt;
        }

        running.set(false);

        fut.get();
    }

    /**
     * @return Will return 0 to disable timeout.
     */
    @Override protected long getTestTimeout() {
        return 0;
    }
}
| {
"content_hash": "2382529e35ba63d9938560f8e57a96d5",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 108,
"avg_line_length": 26.823529411764707,
"alnum_prop": 0.6008771929824561,
"repo_name": "gridgain/apache-ignite",
"id": "de703acb454424f1cff6a22e210564d262be2682",
"size": "5818",
"binary": false,
"copies": "1",
"ref": "refs/heads/sprint-2",
"path": "modules/core/src/test/java/org/apache/ignite/loadtests/cache/GridCacheWriteBehindStoreLoadTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "4522"
},
{
"name": "C++",
"bytes": "28098"
},
{
"name": "CSS",
"bytes": "17209"
},
{
"name": "HTML",
"bytes": "260837"
},
{
"name": "Java",
"bytes": "17999177"
},
{
"name": "JavaScript",
"bytes": "1085"
},
{
"name": "PHP",
"bytes": "18446"
},
{
"name": "Scala",
"bytes": "732170"
},
{
"name": "Scilab",
"bytes": "3923545"
},
{
"name": "Shell",
"bytes": "407266"
}
],
"symlink_target": ""
} |
# OSS-Fuzz build script for jsoncpp.
# NOTE(review): assumes the OSS-Fuzz build environment provides $CXX,
# $CXXFLAGS, $LIB_FUZZING_ENGINE, $OUT and $SRC — confirm against the
# project's Dockerfile.

# Build the static jsoncpp library out-of-tree.
mkdir -p build
cd build
cmake -DCMAKE_CXX_COMPILER=$CXX -DCMAKE_CXX_FLAGS="$CXXFLAGS" \
      -DBUILD_SHARED_LIBS=OFF -G "Unix Makefiles" ..
make

# Compile fuzzer.
$CXX $CXXFLAGS -I../include $LIB_FUZZING_ENGINE \
    ../src/test_lib_json/fuzz.cpp -o $OUT/jsoncpp_fuzzer \
    lib/libjsoncpp.a

# Add dictionary.
cp $SRC/jsoncpp/src/test_lib_json/fuzz.dict $OUT/jsoncpp_fuzzer.dict
| {
"content_hash": "ddb7d00edb9485962008077a91c06a12",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 68,
"avg_line_length": 29.46153846153846,
"alnum_prop": 0.7023498694516971,
"repo_name": "FeliciaLim/oss-fuzz",
"id": "6c55653c10724eb1cf727e60e2ebd282728437a7",
"size": "1059",
"binary": false,
"copies": "1",
"ref": "refs/heads/opus",
"path": "projects/jsoncpp/build.sh",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "7113"
},
{
"name": "C++",
"bytes": "29021"
},
{
"name": "Groovy",
"bytes": "8689"
},
{
"name": "HTML",
"bytes": "603"
},
{
"name": "Python",
"bytes": "25585"
},
{
"name": "Shell",
"bytes": "70002"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!--[if IE]><![endif]-->
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title>JK.Common Documentation </title>
<meta name="viewport" content="width=device-width">
<meta name="title" content="JK.Common Documentation ">
<meta name="generator" content="docfx 2.55.0.0">
<link rel="shortcut icon" href="../../favicon.ico">
<link rel="stylesheet" href="../../styles/docfx.vendor.css">
<link rel="stylesheet" href="../../styles/docfx.css">
<link rel="stylesheet" href="../../styles/main.css">
<meta property="docfx:navrel" content="../../toc.html">
<meta property="docfx:tocrel" content="toc.html">
<meta property="docfx:rel" content="../../">
</head>
<body data-spy="scroll" data-target="#affix" data-offset="120">
<div id="wrapper">
<header>
<nav id="autocollapse" class="navbar navbar-inverse ng-scope" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#navbar">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="../../index.html" style="font-size:2em;line-height: inherit;margin-right: 1em;">JK</a>
</div>
<div class="collapse navbar-collapse" id="navbar">
<form class="navbar-form navbar-right" role="search" id="search">
<div class="form-group">
<input type="text" class="form-control" id="search-query" placeholder="Search" autocomplete="off">
</div>
</form>
</div>
</div>
</nav>
<div class="subnav navbar navbar-default">
<div class="container hide-when-search" id="breadcrumb">
<ul class="breadcrumb">
<li></li>
</ul>
</div>
</div>
</header>
<div class="container body-content">
<div id="search-results">
<div class="search-list"></div>
<div class="sr-items">
<p><i class="glyphicon glyphicon-refresh index-loading"></i></p>
</div>
<ul id="pagination"></ul>
</div>
</div>
<div role="main" class="container body-content hide-when-search">
<div class="sidenav hide-when-search">
<a class="btn toc-toggle collapse" data-toggle="collapse" href="#sidetoggle" aria-expanded="false" aria-controls="sidetoggle">Show / Hide Table of Contents</a>
<div class="sidetoggle collapse" id="sidetoggle">
<div id="sidetoc"></div>
</div>
</div>
<div class="article row grid-right">
<div class="col-md-10">
<article class="content wrap" id="_content" data-uid="JK.Common.ListItem`1">
<h1 id="JK_Common_ListItem_1" data-uid="JK.Common.ListItem`1" class="text-break">Class ListItem<T>
</h1>
<div class="markdown level0 summary"><p>Class meant to represent data to be displayed in any of
the .NET list controls such as the System.Web.UI.WebControls DropDownList,
the System.Windows.Controls ComboBox, etc.</p>
</div>
<div class="markdown level0 conceptual"></div>
<div class="inheritance">
<h5>Inheritance</h5>
<div class="level0"><span class="xref">System.Object</span></div>
<div class="level1"><span class="xref">ListItem<T></span></div>
<div class="level2"><a class="xref" href="JK.Common.ListItem.html">ListItem</a></div>
</div>
<div class="inheritedMembers">
<h5>Inherited Members</h5>
<div>
<span class="xref">System.Object.Equals(System.Object)</span>
</div>
<div>
<span class="xref">System.Object.Equals(System.Object, System.Object)</span>
</div>
<div>
<span class="xref">System.Object.GetHashCode()</span>
</div>
<div>
<span class="xref">System.Object.GetType()</span>
</div>
<div>
<span class="xref">System.Object.MemberwiseClone()</span>
</div>
<div>
<span class="xref">System.Object.ReferenceEquals(System.Object, System.Object)</span>
</div>
<div>
<span class="xref">System.Object.ToString()</span>
</div>
</div>
<h6><strong>Namespace</strong>: <a class="xref" href="JK.Common.html">JK.Common</a></h6>
<h6><strong>Assembly</strong>: JK.Common.dll</h6>
<h5 id="JK_Common_ListItem_1_syntax">Syntax</h5>
<div class="codewrapper">
<pre><code class="lang-csharp hljs">public class ListItem<T></code></pre>
</div>
<h5 class="typeParameters">Type Parameters</h5>
<table class="table table-bordered table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><span class="parametername">T</span></td>
<td><p>Type of the object's identifier</p>
</td>
</tr>
</tbody>
</table>
<h3 id="properties">Properties
</h3>
<span class="small pull-right mobile-hide">
<span class="divider">|</span>
<a href="https://github.com/jeremyknight-me/JK.Common/new/moveGhPagesToDocsFolder/apiSpec/new?filename=JK_Common_ListItem_1_Text.md&value=---%0Auid%3A%20JK.Common.ListItem%601.Text%0Asummary%3A%20'*You%20can%20override%20summary%20for%20the%20API%20here%20using%20*MARKDOWN*%20syntax'%0A---%0A%0A*Please%20type%20below%20more%20information%20about%20this%20API%3A*%0A%0A">Improve this Doc</a>
</span>
<span class="small pull-right mobile-hide">
<a href="https://github.com/jeremyknight-me/JK.Common/blob/moveGhPagesToDocsFolder/src/JK.Common/ListItemT.cs/#L12">View Source</a>
</span>
<a id="JK_Common_ListItem_1_Text_" data-uid="JK.Common.ListItem`1.Text*"></a>
<h4 id="JK_Common_ListItem_1_Text" data-uid="JK.Common.ListItem`1.Text">Text</h4>
<div class="markdown level1 summary"></div>
<div class="markdown level1 conceptual"></div>
<h5 class="decalaration">Declaration</h5>
<div class="codewrapper">
<pre><code class="lang-csharp hljs">public string Text { get; set; }</code></pre>
</div>
<h5 class="propertyValue">Property Value</h5>
<table class="table table-bordered table-striped table-condensed">
<thead>
<tr>
<th>Type</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><span class="xref">System.String</span></td>
<td></td>
</tr>
</tbody>
</table>
<span class="small pull-right mobile-hide">
<span class="divider">|</span>
<a href="https://github.com/jeremyknight-me/JK.Common/new/moveGhPagesToDocsFolder/apiSpec/new?filename=JK_Common_ListItem_1_Value.md&value=---%0Auid%3A%20JK.Common.ListItem%601.Value%0Asummary%3A%20'*You%20can%20override%20summary%20for%20the%20API%20here%20using%20*MARKDOWN*%20syntax'%0A---%0A%0A*Please%20type%20below%20more%20information%20about%20this%20API%3A*%0A%0A">Improve this Doc</a>
</span>
<span class="small pull-right mobile-hide">
<a href="https://github.com/jeremyknight-me/JK.Common/blob/moveGhPagesToDocsFolder/src/JK.Common/ListItemT.cs/#L11">View Source</a>
</span>
<a id="JK_Common_ListItem_1_Value_" data-uid="JK.Common.ListItem`1.Value*"></a>
<h4 id="JK_Common_ListItem_1_Value" data-uid="JK.Common.ListItem`1.Value">Value</h4>
<div class="markdown level1 summary"></div>
<div class="markdown level1 conceptual"></div>
<h5 class="decalaration">Declaration</h5>
<div class="codewrapper">
<pre><code class="lang-csharp hljs">public T Value { get; set; }</code></pre>
</div>
<h5 class="propertyValue">Property Value</h5>
<table class="table table-bordered table-striped table-condensed">
<thead>
<tr>
<th>Type</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><span class="xref">T</span></td>
<td></td>
</tr>
</tbody>
</table>
<h3 id="extensionmethods">Extension Methods</h3>
<div>
<a class="xref" href="JK.Common.Extensions.ClassExtensions.html#JK_Common_Extensions_ClassExtensions_IsNull__1___0_">ClassExtensions.IsNull<T>(T)</a>
</div>
<div>
<a class="xref" href="JK.Common.Extensions.ClassExtensions.html#JK_Common_Extensions_ClassExtensions_IsNotNull__1___0_">ClassExtensions.IsNotNull<T>(T)</a>
</div>
</article>
</div>
<div class="hidden-sm col-md-2" role="complementary">
<div class="sideaffix">
<div class="contribution">
<ul class="nav">
<li>
<a href="https://github.com/jeremyknight-me/JK.Common/new/moveGhPagesToDocsFolder/apiSpec/new?filename=JK_Common_ListItem_1.md&value=---%0Auid%3A%20JK.Common.ListItem%601%0Asummary%3A%20'*You%20can%20override%20summary%20for%20the%20API%20here%20using%20*MARKDOWN*%20syntax'%0A---%0A%0A*Please%20type%20below%20more%20information%20about%20this%20API%3A*%0A%0A" class="contribution-link">Improve this Doc</a>
</li>
<li>
<a href="https://github.com/jeremyknight-me/JK.Common/blob/moveGhPagesToDocsFolder/src/JK.Common/ListItemT.cs/#L9" class="contribution-link">View Source</a>
</li>
</ul>
</div>
<nav class="bs-docs-sidebar hidden-print hidden-xs hidden-sm affix" id="affix">
<!-- <p><a class="back-to-top" href="#top">Back to top</a><p> -->
</nav>
</div>
</div>
</div>
</div>
<footer>
<div class="grad-bottom"></div>
<div class="footer">
<div class="container">
<span class="pull-right">
<a href="#top">Back to top</a>
</span>
<span>Copyright © Jeremy Knight. Generated with <strong>DocFX</strong></span>
</div>
</div>
</footer>
</div>
<script type="text/javascript" src="../../styles/docfx.vendor.js"></script>
<script type="text/javascript" src="../../styles/docfx.js"></script>
<script type="text/javascript" src="../../styles/main.js"></script>
</body>
</html>
| {
"content_hash": "b4e02ee16a0bca28763352c77313a195",
"timestamp": "",
"source": "github",
"line_count": 249,
"max_line_length": 432,
"avg_line_length": 41.98795180722892,
"alnum_prop": 0.6035389765662362,
"repo_name": "digital-lagniappe/DL.Common",
"id": "6153895ec4fe72a4c1332b4e201e9b7eaa4beea0",
"size": "10457",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/api/common/JK.Common.ListItem-1.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "94"
},
{
"name": "C#",
"bytes": "195710"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>KNX - Log</title>
<link href="css/style.css" rel="stylesheet" type="text/css">
<script type="text/javascript" src="js/jquery-1.9.1.min.js"></script>
<script type="text/javascript" src="js/jquery.cookie.js"></script>
<script type="text/javascript" src="js/script.js"></script>
<script type="text/javascript">
$(document).ready(function() {
    // Session token stored in a cookie at login time.
    var xToken = $.cookie("knxcookie");

    // Show the loading progress bar while the request is in flight.
    $('#popupBg').css("display", "block");

    // Fetch the gateway log.
    // input : -
    // output: log text (payload), rendered into #logArea
    $.ajax({
        type: 'GET',
        url: '/admin/log',
        headers: {
            'X-Token': xToken
        },
        statusCode: {
            // Unauthorized or server error: send the user back to the login page.
            401: function () {
                window.location = "index.html";
            },
            500: function () {
                window.location = "index.html";
            }
        },
        success: function(data, status, response) {
            var output = data;
            $('#logArea').html(output);
            // Hide the progress bar once the log is displayed.
            $('#popupBg').css("display", "none");
        },
        error: function (response, status, error) {
            //alert('Error ' + error);
        },
    });
});
</script>
</head>
<body>
<div id="popupBg">
<div id="popupContainer">
<h2>It's loading ...</h2>
<progress id="progressBar" max="100">
<strong>It's loading ....</strong>
</progress>
</div>
</div>
<div id="container"><!-- début de la page -->
<!-- titre et navigation -->
<header>
<img src="img/logo_knx.gif" height="40" alt="KNX">
<img src="img/eif.png" height="40" alt="EIF">
<h1>KNX - WoT Gateway</h1>
</header>
<nav>
<ul>
<li><a href="configuration.html">Configuration</a></li>
<li><a href="data_subscribers.html">Data subscribers</a></li>
<li><a class="selected" href="log.html">Log</a></li>
<li><a id="logout" href="index.html">Logout</a></li>
</ul>
<div class="clear"></div>
</nav>
<!-- contenu de la page -->
<div id="content">
<h2>Log</h2>
<textarea name="logArea" id="logArea" placeholder="The log data ..."></textarea>
</div>
</div><!-- fin de la page -->
</body>
</html>
| {
"content_hash": "e3cdf9f2d97d6fcad3a3d6168985d1e3",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 93,
"avg_line_length": 30.012820512820515,
"alnum_prop": 0.5296881674498077,
"repo_name": "heia-fr/wot_gateways",
"id": "b95924657a20598b665c6a4f18c7f0a848190965",
"size": "2345",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ga-web/src/main/webapp/knx/log.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7043"
},
{
"name": "HTML",
"bytes": "68275"
},
{
"name": "Java",
"bytes": "502858"
},
{
"name": "JavaScript",
"bytes": "114355"
},
{
"name": "XSLT",
"bytes": "8127"
}
],
"symlink_target": ""
} |
<?php
namespace app;
use alkemann\h2l\{ Request, Response, Router, Log };
use alkemann\h2l\exceptions\InvalidUrl;
use alkemann\h2l\util\Http;
use alkemann\jsonapi\exceptions\InvalidRequestContainer;
use alkemann\jsonapi\response\{Result, Error};
use alkemann\jsonapi\Controller;
use app\People;
/**
 * JSON:API controller for the example "people" resource.
 *
 * Maps URL patterns to handler methods via the static $routes table;
 * each handler receives the framework Request and returns a Response
 * (or null).
 */
class Api extends Controller
{
    static $routes = [
        // url pattern                    handler           HTTP method
        ['%^/api/v1/people/(?<id>\d+)%', 'person',        Http::GET],
        ['%^/api/v1/people/(?<id>\d+)%', 'update_person', Http::PATCH],
        ['/api/v1/people',               'new_person',    Http::POST],
        ['/api/v1/people',               'people',        Http::GET],
        ['/api/v1/version',              'version',       Http::GET],
    ];

    /**
     * POST /api/v1/people — create a person from the request payload.
     * Responds 201 Created with a Location header for the new resource.
     */
    public function new_person(Request $request): ?Response
    {
        $person = $this->populateModelFromRequest(People::class, $request);
        $person->save();
        $location = $request->fullUrl('/api/v1/people/' . $person->id);
        return (new Result($person, Http::CODE_CREATED))->withLocation($location);
    }

    /**
     * PATCH /api/v1/people/{id} — partial update of an existing person.
     * Responds 404 when the id does not resolve to a record.
     */
    public function update_person(Request $request): ?Response
    {
        $person = People::get($request->param('id'));
        if ($person) {
            $data = $this->getValidatedRequestDataForModel(People::class, $request);
            $person->save($data);
            return new Result($person);
        }
        return new Error([], Http::CODE_NOT_FOUND);
    }

    /**
     * GET /api/v1/people/{id} — fetch a single person.
     */
    public function person(Request $r): ?Response
    {
        return new Result(People::get($r->param('id')));
    }

    /**
     * GET /api/v1/people — list all people, with a self link.
     */
    public function people(Request $r): ?Response
    {
        return (new Result(People::find()))->withLinks(['self' => $r->fullUrl()]);
    }

    /**
     * GET /api/v1/version — report the API version.
     */
    public static function version(Request $r): Response
    {
        return new Result(['v' => '1.0']);
    }
}
| {
"content_hash": "9f1698be2b5bdb15b7290ee99ee705a7",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 80,
"avg_line_length": 31.758064516129032,
"alnum_prop": 0.5403758252920264,
"repo_name": "alkemann/jsonapi",
"id": "2f8b451ef4ef40bcf85cff3e1a591aaabc4c3842",
"size": "1969",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/peoples/app/Api.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "30111"
}
],
"symlink_target": ""
} |
{% extends "blogs/base.html" %}
{% block ext_head %}
<title>易先生的世界</title>
<meta name="description" content="">
<meta name="author" content="">
<!-- Le styles -->
<link href="/static/bootstrap/css/bootstrap.css" rel="stylesheet">
<style type="text/css">
body {
padding-top: 60px;
padding-bottom: 40px;
}
.hero-unit {
padding: 10px;
}
.sidebar-nav {
padding: 9px 0;
}
</style>
<link href="/static/bootstrap/css/bootstrap-responsive.css" rel="stylesheet">
<!-- HTML5 shim, for IE6-8 support of HTML5 elements -->
<!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<!-- Fav and touch icons -->
<link rel="apple-touch-icon-precomposed" sizes="144x144" href="../assets/ico/apple-touch-icon-144-precomposed.png">
<link rel="apple-touch-icon-precomposed" sizes="114x114" href="../assets/ico/apple-touch-icon-114-precomposed.png">
<link rel="apple-touch-icon-precomposed" sizes="72x72" href="../assets/ico/apple-touch-icon-72-precomposed.png">
<link rel="apple-touch-icon-precomposed" href="../assets/ico/apple-touch-icon-57-precomposed.png">
<link rel="shortcut icon" href="../assets/ico/favicon.png">
<script type='text/javascript'>
(function (d, t) {
var bh = d.createElement(t), s = d.getElementsByTagName(t)[0];
bh.type = 'text/javascript';
bh.src = '//www.bugherd.com/sidebarv2.js?apikey=nr5zwnalnnlydiuby5dxnw';
s.parentNode.insertBefore(bh, s);
})(document, 'script');
</script>
{% endblock %}
{% block navbar %}
{% include "blogs/nav.html" with active_nav="contact" %}
{% endblock %}
{% block content %}
<div class="row-fluid">
<div class="span12">
<h1 align="left">
我的微博:<a href="http://weibo.com/u/1708225792" target="_blank">http://weibo.com/u/1708225792</a>
</h1>
</div><!--/span-->
</div><!--/row-->
{% endblock %}
| {
"content_hash": "8767e532edf74eb40f3e127eaac4db49",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 117,
"avg_line_length": 34.41379310344828,
"alnum_prop": 0.6117234468937875,
"repo_name": "yijingping/django-blogs",
"id": "a7aeb84c9d01744db7463cf2fede24f761eb2a36",
"size": "2018",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blogs/templates/blogs/contact.html",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "8029"
}
],
"symlink_target": ""
} |
<div class="pagination-container">
<ul>
<li class="prev-btn" (click)="onPreviousClick()"><a href="#"><</a></li>
<li><input name="pages" [(ngModel)]="page" (change)="onPageChange($event)"><span>  /  {{noOfPages}}</span></li>
<li class="next-btn" (click)="onNextClick()"><a href="#">></a></li>
</ul>
</div>
| {
"content_hash": "37be8ad9ea4dc3db959db9675ae1cf25",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 139,
"avg_line_length": 50.142857142857146,
"alnum_prop": 0.584045584045584,
"repo_name": "sanuradhag/angular-switchable-grid",
"id": "ce90e64f24b4e6381e0796b7ed0bd1161f91a6fd",
"size": "351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app/grid/pagination/pagination.component.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "35421"
},
{
"name": "HTML",
"bytes": "39362"
},
{
"name": "JavaScript",
"bytes": "1876"
},
{
"name": "TypeScript",
"bytes": "51077"
}
],
"symlink_target": ""
} |
import { addType } from 'src/util/typerenderer';
// Lookup table of stock status codes (keys step by 100 along the product
// lifecycle). Each entry carries a human-readable description and the
// background color used when the status is rendered with `withColor`.
const status = {
  100: {
    description: 'Order to confirm',
    color: 'orange'
  },
  200: {
    description: 'Product to order',
    color: 'lightblue'
  },
  300: {
    description: 'Product ordered',
    color: 'lightblue'
  },
  400: {
    description: 'Product arrived',
    color: 'lightgray'
  },
  500: {
    description: 'Product released',
    color: 'lightgreen'
  },
  600: {
    description: 'Product to revalidate',
    color: 'orange'
  },
  700: {
    description: 'Product refused',
    color: 'pink'
  },
  800: {
    description: 'Product expired',
    color: 'pink'
  },
  900: {
    description: 'Product lost',
    color: 'pink'
  },
  1000: {
    description: 'Product empty',
    color: 'pink'
  }
};
// Renderer for the custom 'stockstatus' type: writes the human-readable
// status text into the element and, when options.withColor is set, paints
// the element's background with the status color.
function toscreen($element, value, root, options) {
  const code = +value;
  $element.html(getStatusDescription(code));
  if (options.withColor) {
    $element.css('background-color', getStatusColor(code));
  }
}

addType('stockstatus', { toscreen });
// Return the human-readable description for a status code, or a
// fallback message for unknown codes.
export function getStatusDescription(code) {
  const entry = status[code];
  return entry ? entry.description : 'Status does not exist';
}
// Return the display color for a status code; white for unknown codes.
export function getStatusColor(code) {
  const entry = status[code];
  return entry ? entry.color : '#FFFFFF';
}
// Return a deep copy of the whole status table so callers cannot
// mutate the module-level original.
export function getAllStatus() {
  const snapshot = JSON.stringify(status);
  return JSON.parse(snapshot);
}
| {
"content_hash": "470368322361024a62b84bc0099c0df5",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 61,
"avg_line_length": 20.08823529411765,
"alnum_prop": 0.6332357247437774,
"repo_name": "cheminfo-js/visualizer-helper",
"id": "70af2c3c1099d4d57c74defb173db46f2035c1aa",
"size": "1367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eln/StockHelper.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1742"
},
{
"name": "HTML",
"bytes": "1899"
},
{
"name": "JavaScript",
"bytes": "813508"
}
],
"symlink_target": ""
} |
# Compass configuration: maps the project's asset directories and
# selects the CSS output style.
http_path = "/"
css_dir = ""
# Sass sources live in sass/, compiled CSS lands in the project root.
sass_dir = "sass"
images_dir = "img"
javascripts_dir = "js"
# You can select your preferred output style here (can be overridden via the command line):
output_style = :compressed
# To enable relative paths to assets via compass helper functions. Uncomment:
# relative_assets = true
# To disable debugging comments that display the original location of your selectors. Uncomment:
# line_comments = false
# If you prefer the indented syntax, you might want to regenerate this
# project again passing --syntax sass, or you can uncomment this:
# preferred_syntax = :sass
# and then run:
# sass-convert -R --from scss --to sass sass scss && rm -rf sass && mv scss sass
| {
"content_hash": "963b188774ee233eafe0733f21359c28",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 96,
"avg_line_length": 33.23809523809524,
"alnum_prop": 0.7320916905444126,
"repo_name": "fmitchell/d8apifuncnyccamp2014",
"id": "b434946b07990ce53b3f3dfb243bbbf3a0f0c949",
"size": "800",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "phase2/config.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "128759"
},
{
"name": "JavaScript",
"bytes": "185686"
},
{
"name": "Ruby",
"bytes": "800"
}
],
"symlink_target": ""
} |
// Greasemonkey-compatible API shims. Each GM_* function delegates to the
// UpgradrShell host object (provided by the browser add-on at runtime).

// Log a message through the host shell.
window.GM_Log = function(message)
{
	return UpgradrShell.Log(message);
}

// Persist a value under the given key in the host's key/value store.
window.GM_SetValue = function(key, value)
{
	return UpgradrShell.SetValue(key, value);
}

// Read a stored value; returns defaultValue when the key is absent.
window.GM_GetValue = function(key, defaultValue)
{
	if (!UpgradrShell.HasValue(key)) return defaultValue;
	return UpgradrShell.GetValue(key);
}
"content_hash": "76efc9ace7960385c52741cfa78f7aa0",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 55,
"avg_line_length": 20.8125,
"alnum_prop": 0.6936936936936937,
"repo_name": "darwin/upgradr",
"id": "041738cd7d4bacf0070b0eecd49b80c44ffac3e0",
"size": "508",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ieaddon/Setup/InstallFiles/SystemScripts/MonkeyAPI.js",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
module Azure::ServiceFabric::V6_4_0_36
  module Models
    #
    # Information about service package deployed on a Service Fabric node.
    #
    # NOTE: auto-generated MsRest model; the mapper hash below drives
    # serialization/deserialization and must mirror the service wire format.
    #
    class DeployedServicePackageInfo

      include MsRestAzure

      # @return [String] The name of the service package as specified in the
      # service manifest.
      attr_accessor :name

      # @return [String] The version of the service package specified in
      # service manifest.
      attr_accessor :version

      # @return [DeploymentStatus] Specifies the status of a deployed
      # application or service package on a Service Fabric node. Possible
      # values include: 'Invalid', 'Downloading', 'Activating', 'Active',
      # 'Upgrading', 'Deactivating'
      attr_accessor :status

      # @return [String] The ActivationId of a deployed service package. If
      # ServicePackageActivationMode specified at the time of creating the
      # service
      # is 'SharedProcess' (or if it is not specified, in which case it
      # defaults to 'SharedProcess'), then value of ServicePackageActivationId
      # is always an empty string.
      attr_accessor :service_package_activation_id


      #
      # Mapper for DeployedServicePackageInfo class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'DeployedServicePackageInfo',
          type: {
            name: 'Composite',
            class_name: 'DeployedServicePackageInfo',
            model_properties: {
              name: {
                client_side_validation: true,
                required: false,
                serialized_name: 'Name',
                type: {
                  name: 'String'
                }
              },
              version: {
                client_side_validation: true,
                required: false,
                serialized_name: 'Version',
                type: {
                  name: 'String'
                }
              },
              status: {
                client_side_validation: true,
                required: false,
                serialized_name: 'Status',
                type: {
                  name: 'String'
                }
              },
              service_package_activation_id: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ServicePackageActivationId',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| {
"content_hash": "258f39cff86a617d62b8a10d39ae3b63",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 78,
"avg_line_length": 31.511904761904763,
"alnum_prop": 0.5323007177937288,
"repo_name": "Azure/azure-sdk-for-ruby",
"id": "97bec42407fef4432b4d00120c789b781083e14e",
"size": "2811",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/azure_service_fabric/lib/6.4.0.36/generated/azure_service_fabric/models/deployed_service_package_info.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "345216400"
},
{
"name": "Shell",
"bytes": "305"
}
],
"symlink_target": ""
} |
<?php
// No direct access
defined('_HZEXEC_') or die();
/**
* Helper class for reviews
*/
class PlgPublicationsReviewsHelper extends \Hubzero\Base\Obj
{
	/**
	 * Execute an action
	 *
	 * Dispatches the incoming 'action' request parameter to the matching
	 * handler. Any action requires an authenticated user; guests only get
	 * $this->loggedin flipped to false.
	 *
	 * @return  void
	 */
	public function execute()
	{
		// Incoming action
		$action = Request::getString('action', '');

		$this->loggedin = true;

		if ($action)
		{
			// Check the user's logged-in status
			if (User::isGuest())
			{
				$this->loggedin = false;
				return;
			}
		}

		// Perform an action
		switch ($action)
		{
			case 'addreview':
			case 'editreview':
				// Adding and editing share the same form-population logic
				$this->editreview();
				break;

			case 'savereview':
				$this->savereview();
				break;

			case 'deletereview':
				$this->deletereview();
				break;

			case 'savereply':
				$this->savereply();
				break;

			case 'deletereply':
				$this->deletereply();
				break;

			case 'rateitem':
				$this->rateitem();
				break;
		}
	}

	/**
	 * Save a reply (a comment attached to a review)
	 *
	 * Reads the 'comment' array from POST, sanitizes it, saves it and
	 * redirects back to the reviews view with a status message.
	 *
	 * @return  void
	 */
	private function savereply()
	{
		// Check for request forgeries
		Request::checkToken();

		// Is the user logged in?
		if (User::isGuest())
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_LOGIN_NOTICE'));
			return;
		}

		$publication =& $this->publication;

		// Trim and addslashes all posted items
		$comment = Request::getArray('comment', array(), 'post');

		if (!$publication->exists())
		{
			// Cannot proceed
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_COMMENT_ERROR_NO_REFERENCE_ID'));
			return;
		}

		$row = \Hubzero\Item\Comment::blank()->set($comment);

		// Pick the flash message before saving: existing id means an edit
		$message = $row->id ? Lang::txt('PLG_PUBLICATIONS_REVIEWS_EDITS_SAVED') : Lang::txt('PLG_PUBLICATIONS_REVIEWS_COMMENT_POSTED');

		// Perform some text cleaning, etc.
		$row->set('content', \Hubzero\Utility\Sanitize::clean($row->get('content')));
		$row->set('anonymous', ($row->get('anonymous') ? $row->get('anonymous') : 0));
		$row->set('state', ($row->get('id') ? $row->get('state') : 0));

		// Save the data
		if (!$row->save())
		{
			$this->setError($row->getError());
			return;
		}

		// Redirect
		App::redirect(Route::url($publication->link('reviews')), $message);
	}

	/**
	 * Delete a reply
	 *
	 * Soft-deletes (state change) a comment owned by the current user.
	 *
	 * @return  void
	 */
	public function deletereply()
	{
		$publication =& $this->publication;

		// Incoming
		$replyid = Request::getInt('comment', 0);

		// Do we have a review ID?
		if (!$replyid)
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_COMMENT_ERROR_NO_REFERENCE_ID'));
			return;
		}

		// Do we have a publication ID?
		if (!$publication->exists())
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_NO_RESOURCE_ID'));
			return;
		}

		// Delete the review
		$reply = \Hubzero\Item\Comment::oneOrFail($replyid);

		// Permissions check: only the author may delete
		if ($reply->get('created_by') != User::get('id'))
		{
			return;
		}

		$reply->set('state', $reply::STATE_DELETED);
		$reply->save();

		// Redirect
		App::redirect(
			Route::url($publication->link('reviews')),
			Lang::txt('PLG_PUBLICATIONS_REVIEWS_COMMENT_DELETED')
		);
	}

	/**
	 * Rate an item (record a helpful/not-helpful vote on a review)
	 *
	 * When called via AJAX ('no_html'), renders the updated rating widget
	 * and exits; otherwise redirects back to the reviews view.
	 *
	 * @return  void
	 */
	public function rateitem()
	{
		$database = App::get('db');

		$publication =& $this->publication;

		$id   = Request::getInt('refid', 0);
		$ajax = Request::getInt('no_html', 0);
		$cat  = Request::getString('category', 'pubreview');
		$vote = Request::getString('vote', '');
		$ip   = Request::ip();

		if (!$id || !$publication->exists())
		{
			// Cannot proceed
			return;
		}

		// Is the user logged in?
		if (User::isGuest())
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_LOGIN_NOTICE'));
			return;
		}

		// Load the review being voted on
		$rev = new \Components\Publications\Tables\Review($database);
		$rev->load($id);

		if ($vote)
		{
			require_once __DIR__ . DS . 'models' . DS . 'vote.php';

			// One vote record per user+review; re-voting overwrites it
			$v = \Plugins\Publications\Reviews\Models\Vote::oneByUserAndPublication(User::get('id'), $id);
			$v->set(array(
				'referenceid' => $id,
				'category'    => $cat,
				'voter'       => User::get('id'),
				'ip'          => $ip,
				'voted'       => Date::toSql(),
				'helpful'     => $vote
			));
			if (!$v->save())
			{
				$this->setError($v->getError());
				return;
			}
		}

		// Update display
		if ($ajax)
		{
			$response = $rev->getRating($publication->get('id'), User::get('id'));

			$view = new \Hubzero\Plugin\View(
				array(
					'folder'  => 'publications',
					'element' => 'reviews',
					'name'    => 'browse',
					'layout'  => '_rateitem'
				)
			);
			$view->option = $this->_option;
			$view->item   = new PublicationsModelReview($response[0]);
			$view->rid    = $publication->get('id');
			$view->display();
			exit();
		}

		// FIX: was Route::url($publication->get('reviews')) — 'reviews' is not
		// a publication property; use link('reviews') like every other method
		App::redirect(Route::url($publication->link('reviews')));
	}

	/**
	 * Edit a review
	 *
	 * Loads (or initializes) the current user's review for this publication
	 * version and stores it in $this->myreview for the form view.
	 *
	 * @return  void
	 */
	public function editreview()
	{
		// Is the user logged in?
		if (User::isGuest())
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_LOGIN_NOTICE'));
			return;
		}

		$publication =& $this->publication;

		// Do we have an ID?
		if (!$publication->exists())
		{
			// No - fail! Can't do anything else without an ID
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_NO_RESOURCE_ID'));
			return;
		}

		// Incoming rating preset (from the star widget)
		$myr = Request::getInt('myrating', 0);

		$database = App::get('db');

		$review = new \Components\Publications\Tables\Review($database);
		$review->loadUserReview($publication->get('id'), User::get('id'), $publication->get('version_id'));

		if (!$review->id)
		{
			// New review, get the user's ID and seed defaults
			$review->created_by             = User::get('id');
			$review->publication_id         = $publication->get('id');
			$review->publication_version_id = $publication->get('version_id');
			$review->tags                   = '';
			$review->rating                 = 3;
		}
		else
		{
			// Editing a review, do some prep work
			$review->comment = str_replace('<br />', '', $review->comment);

			$this->publication->getTagsForEditing($review->created_by);
			$review->tags = ($this->publication->_tagsForEditing) ? $this->publication->_tagsForEditing : '';
		}
		$review->rating = ($myr) ? $myr : $review->rating;

		// Store the object in our registry
		$this->myreview = $review;
		return;
	}

	/**
	 * Save a review
	 *
	 * Validates and stores the review, recomputes the publication rating,
	 * tags the publication, notifies the version authors, and redirects
	 * back to the reviews view with a status message.
	 *
	 * @return  void
	 */
	public function savereview()
	{
		// Is the user logged in?
		if (User::isGuest())
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_LOGIN_NOTICE'));
			return;
		}

		$publication =& $this->publication;

		// Do we have a publication ID?
		if (!$publication->exists())
		{
			// No ID - fail! Can't do anything else without an ID
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_NO_RESOURCE_ID'));
			return;
		}

		$database = App::get('db');

		// Bind the form data to our object
		$row = new \Components\Publications\Tables\Review($database);
		if (!$row->bind($_POST))
		{
			$this->setError($row->getError());
			return;
		}

		// Perform some text cleaning, etc.
		$row->id         = Request::getInt('reviewid', 0);
		$row->state      = 1;
		$row->comment    = \Hubzero\Utility\Sanitize::stripAll($row->comment);
		$row->anonymous  = ($row->anonymous == 1 || $row->anonymous == '1') ? $row->anonymous : 0;
		$row->created    = ($row->created) ? $row->created : Date::toSql();
		$row->created_by = User::get('id');

		$message = $row->id ? Lang::txt('PLG_PUBLICATIONS_REVIEWS_EDITS_SAVED') : Lang::txt('PLG_PUBLICATIONS_REVIEWS_REVIEW_POSTED');

		// Check for missing (required) fields
		if (!$row->check())
		{
			$this->setError($row->getError());
			return;
		}
		// Save the data
		if (!$row->store())
		{
			$this->setError($row->getError());
			return;
		}

		// Calculate the new average rating for the parent publication
		$publication->table()->calculateRating();
		$publication->table()->updateRating();

		// Process tags
		$tags = trim(Request::getString('review_tags', ''));
		if ($tags)
		{
			$rt = new \Components\Publications\Helpers\Tags($database);
			$rt->tag_object($row->created_by, $publication->get('id'), $tags, 1, 0);
		}

		// Get version authors
		$users = $publication->table('Author')->getAuthors($publication->get('version_id'), 1, 1, true);

		// Build the subject
		$subject = Config::get('sitename') . ' ' . Lang::txt('PLG_PUBLICATIONS_REVIEWS_CONTRIBUTIONS');

		// Build the notification e-mail body.
		// FIX: previously the body was assigned to $message, clobbering the
		// user-facing flash message so the redirect displayed the raw e-mail
		// template. Keep the body in its own variable.
		$eview = new \Hubzero\Plugin\View(
			array(
				'folder'  => 'publications',
				'element' => 'reviews',
				'name'    => 'emails'
			)
		);
		$eview->option      = $this->_option;
		$eview->juser       = User::getInstance();
		$eview->publication = $publication;

		$body = $eview->loadTemplate();
		$body = str_replace("\n", "\r\n", $body);

		// Build the "from" data for the e-mail
		$from = array();
		$from['name']  = Config::get('sitename').' '.Lang::txt('PLG_PUBLICATIONS_REVIEWS_CONTRIBUTIONS');
		$from['email'] = Config::get('mailfrom');

		// Send message
		if (!Event::trigger('xmessage.onSendMessage', array(
				'publications_new_comment',
				$subject,
				$body,
				$from,
				$users,
				$this->_option
			)
		))
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_FAILED_TO_MESSAGE'));
		}

		App::redirect(Route::url($publication->link('reviews')), $message);
		return;
	}

	/**
	 * Delete a review
	 *
	 * Soft-deletes the current user's review and its comment thread, then
	 * recomputes the publication's average rating.
	 *
	 * @return  void
	 */
	public function deletereview()
	{
		$database = App::get('db');

		$publication =& $this->publication;

		// Incoming
		$reviewid = Request::getInt('comment', 0);

		// Do we have a review ID?
		if (!$reviewid)
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_NO_ID'));
			return;
		}

		// Do we have a publication ID?
		if (!$publication->exists())
		{
			$this->setError(Lang::txt('PLG_PUBLICATIONS_REVIEWS_NO_RESOURCE_ID'));
			return;
		}

		$review = new \Components\Publications\Tables\Review($database);
		$review->load($reviewid);

		// Permissions check: only the author may delete
		if ($review->created_by != User::get('id'))
		{
			return;
		}

		// state 2 = deleted
		$review->state = 2;
		$review->store();

		// Delete the review's comments
		$comments1 = \Hubzero\Item\Comment::all()
			->whereEquals('parent', $reviewid)
			->whereEquals('item_id', $publication->get('id'))
			->whereEquals('item_type', 'pubreview')
			->ordered()
			->rows();
		foreach ($comments1 as $comment1)
		{
			$comment1->set('state', $comment1::STATE_DELETED);
			$comment1->save();
		}

		// Recalculate the average rating for the parent publication
		$publication->table()->calculateRating();
		$publication->table()->updateRating();

		App::redirect(
			Route::url($publication->link('reviews')),
			Lang::txt('PLG_PUBLICATIONS_REVIEWS_REVIEW_DELETED')
		);
		return;
	}
}
| {
"content_hash": "5c03555c8e64c268c982cce1de2cfa1c",
"timestamp": "",
"source": "github",
"line_count": 465,
"max_line_length": 129,
"avg_line_length": 22.974193548387095,
"alnum_prop": 0.5964616680707666,
"repo_name": "zooley/hubzero-cms",
"id": "5bb1175d20a77b2e3b16010a85580f82b74f2bb4",
"size": "10831",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "core/plugins/publications/reviews/helper.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "171251"
},
{
"name": "AngelScript",
"bytes": "1638"
},
{
"name": "CSS",
"bytes": "2719736"
},
{
"name": "HTML",
"bytes": "1289374"
},
{
"name": "JavaScript",
"bytes": "12613354"
},
{
"name": "PHP",
"bytes": "24941743"
},
{
"name": "Shell",
"bytes": "10678"
},
{
"name": "TSQL",
"bytes": "572"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<ivy-module version="2.0" xmlns:e="http://ant.apache.org/ivy/extra">
<info organisation="default"
module="fpps-week04-assign01-build"
revision="0.1-SNAPSHOT"
status="integration"
publication="20170106145812"
e:sbtVersion="0.13" e:scalaVersion="2.10"
>
<description>
fpps-week04-assign01-build
</description>
</info>
<configurations>
<conf name="compile" visibility="public" description=""/>
<conf name="runtime" visibility="public" description="" extends="compile"/>
<conf name="test" visibility="public" description="" extends="runtime"/>
<conf name="provided" visibility="public" description=""/>
<conf name="optional" visibility="public" description=""/>
<conf name="compile-internal" visibility="private" description="" extends="compile,optional,provided"/>
<conf name="runtime-internal" visibility="private" description="" extends="runtime,optional"/>
<conf name="test-internal" visibility="private" description="" extends="test,optional,provided"/>
<conf name="plugin" visibility="private" description=""/>
<conf name="sources" visibility="public" description=""/>
<conf name="docs" visibility="public" description=""/>
<conf name="pom" visibility="public" description=""/>
<conf name="scala-tool" visibility="private" description=""/>
</configurations>
<publications>
<artifact name="fpps-week04-assign01-build" type="pom" ext="pom" conf="pom"/>
<artifact name="fpps-week04-assign01-build" type="jar" ext="jar" conf="compile"/>
<artifact name="fpps-week04-assign01-build" type="src" ext="jar" conf="sources" e:classifier="sources"/>
<artifact name="fpps-week04-assign01-build" type="doc" ext="jar" conf="docs" e:classifier="javadoc"/>
</publications>
<dependencies>
<dependency org="org.scala-lang" name="scala-compiler" rev="2.10.5" conf="scala-tool->default,optional(default)"/>
<dependency org="org.scala-lang" name="scala-library" rev="2.10.5" conf="scala-tool->default,optional(default);provided->default(compile)"/>
<dependency org="org.scala-sbt" name="sbt" rev="0.13.9" conf="provided->default(compile)"/>
<dependency org="org.scalastyle" name="scalastyle_2.10" rev="0.8.0" conf="compile->default(compile)"/>
<dependency org="org.scalaj" name="scalaj-http_2.10" rev="2.2.1" conf="compile->default(compile)"/>
<dependency org="commons-codec" name="commons-codec" rev="1.10" conf="compile->default(compile)"/>
<dependency org="org.apache.commons" name="commons-lang3" rev="3.4" conf="compile->default(compile)"/>
<dependency org="com.typesafe.sbteclipse" name="sbteclipse-plugin" rev="4.0.0" conf="compile->default(compile)" e:sbtVersion="0.13" e:scalaVersion="2.10"/>
<override org="org.scala-lang" module="scala-library" matcher="exact" rev="2.10.5"/>
<override org="org.scala-lang" module="scala-compiler" matcher="exact" rev="2.10.5"/>
<override org="org.scala-lang" module="scala-reflect" matcher="exact" rev="2.10.5"/>
</dependencies>
</ivy-module>
| {
"content_hash": "f57a6cb39fc8f150c33226748759e34c",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 157,
"avg_line_length": 62.25,
"alnum_prop": 0.7111780455153949,
"repo_name": "juliocnsouzadev/scala_datascience",
"id": "621b9aab21607ff05c5669277e3bcec6cea564e8",
"size": "2988",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fuctional_programming_principles/fpps-week04-assign01/project/target/resolution-cache/default/fpps-week04-assign01-build/scala_2.10/sbt_0.13/0.1-SNAPSHOT/resolved.xml.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "78678"
},
{
"name": "JavaScript",
"bytes": "237778"
},
{
"name": "Scala",
"bytes": "656149"
},
{
"name": "XSLT",
"bytes": "377874"
}
],
"symlink_target": ""
} |
package org.antlr.intellij.plugin.profiler;
import com.intellij.openapi.editor.CaretModel;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.ScrollingModel;
import com.intellij.openapi.editor.event.EditorMouseEvent;
import com.intellij.openapi.editor.markup.EffectType;
import com.intellij.openapi.editor.markup.HighlighterLayer;
import com.intellij.openapi.editor.markup.HighlighterTargetArea;
import com.intellij.openapi.editor.markup.MarkupModel;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.JBColor;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.table.JBTable;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.uiDesigner.core.Spacer;
import org.antlr.intellij.plugin.ANTLRv4PluginController;
import org.antlr.intellij.plugin.preview.PreviewState;
import org.antlr.runtime.CommonToken;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.AmbiguityInfo;
import org.antlr.v4.runtime.atn.ContextSensitivityInfo;
import org.antlr.v4.runtime.atn.DecisionEventInfo;
import org.antlr.v4.runtime.atn.DecisionInfo;
import org.antlr.v4.runtime.atn.DecisionState;
import org.antlr.v4.runtime.atn.LookaheadEventInfo;
import org.antlr.v4.runtime.atn.ParseInfo;
import org.antlr.v4.runtime.atn.PredicateEvalInfo;
import org.antlr.v4.runtime.atn.SemanticContext;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.Rule;
import javax.swing.BorderFactory;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.JTableHeader;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Insets;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
public class ProfilerPanel {
	// Highlight colors for the decision events surfaced in the grammar editor
	public static final Color AMBIGUITY_COLOR = new Color(138, 0, 0);
	public static final Color FULLCTX_COLOR = new Color(255, 128, 0);
	public static final Color PREDEVAL_COLOR = new Color(110, 139, 61);
	public static final Color DEEPESTLOOK_COLOR = new Color(0, 128, 128);

	// User-data key used to attach a DecisionEventInfo to editor highlighters
	public static final Key<DecisionEventInfo> DECISION_EVENT_INFO_KEY = Key.create("DECISION_EVENT_INFO");

	public Project project;
	public PreviewState previewState;

	// UI components bound by the GUI designer form
	protected JPanel outerPanel;
	protected JPanel statsPanel;
	protected JLabel parseTimeField;
	protected JLabel predictionTimeField;
	protected JLabel lookaheadBurdenField;
	protected JLabel cacheMissRateField;
	protected JLabel inputSizeField;
	protected JLabel numTokensField;
	protected JCheckBox expertCheckBox;
	protected JLabel ambiguityColorLabel;
	protected JLabel contextSensitivityColorLabel;
	protected JLabel predEvaluationColorLabel;
	protected JBTable profilerDataTable;
	protected JLabel deepestLookaheadLabel;
	/** Called when the grammar file is saved; intentionally a no-op so the existing profiler data stays visible. */
	public void grammarFileSaved(PreviewState previewState, VirtualFile grammarFile) {
		// leave model and such alone.
	}
public void switchToGrammar(PreviewState previewState, VirtualFile grammarFile) {
DefaultTableModel model = new DefaultTableModel();
profilerDataTable.setModel(model);
profilerDataTable.setRowSorter(new TableRowSorter<AbstractTableModel>(model));
}
public void mouseEnteredGrammarEditorEvent(VirtualFile vfile, EditorMouseEvent e) {
MarkupModel markupModel = e.getEditor().getMarkupModel();
markupModel.removeAllHighlighters();
}
public JPanel getComponent() {
return outerPanel;
}
public JBTable getProfilerDataTable() {
return profilerDataTable;
}
public ProfilerPanel(Project project) {
this.project = project;
}
public void setProfilerData(PreviewState previewState,
long parseTime_ns) {
this.previewState = previewState;
Parser parser = previewState.parsingResult.parser;
ParseInfo parseInfo = parser.getParseInfo();
updateTableModelPerExpertCheckBox(parseInfo);
long parseTimeMS = (long)(parseTime_ns/(1000.0*1000.0));
parseTimeField.setText(String.valueOf(parseTimeMS));
int predTimeMS = (int)(parseInfo.getTotalTimeInPrediction()/(1000.0*1000.0));
predictionTimeField.setText(
String.format("%d = %3.2f%%", predTimeMS, 100*((double)predTimeMS)/parseTimeMS)
);
TokenStream tokens = parser.getInputStream();
int numTokens = tokens.size();
Token lastToken = tokens.get(numTokens-1);
int numChar = lastToken.getStopIndex();
int numLines = lastToken.getLine();
if ( lastToken.getType()==Token.EOF ) {
if ( numTokens<=1 ) {
numLines = 0;
}
else {
Token secondToLastToken = tokens.get(numTokens-2);
numLines = secondToLastToken.getLine();
}
}
inputSizeField.setText(String.format("%d char, %d lines",
numChar,
numLines));
numTokensField.setText(String.valueOf(numTokens));
double look =
parseInfo.getTotalSLLLookaheadOps()+
parseInfo.getTotalLLLookaheadOps();
lookaheadBurdenField.setText(
String.format("%d/%d = %3.2f", (long)look, numTokens, look/numTokens)
);
double atnLook = parseInfo.getTotalATNLookaheadOps();
cacheMissRateField.setText(
String.format("%d/%d = %3.2f%%", (long)atnLook, (long)look, atnLook*100.0/look)
);
}
public void updateTableModelPerExpertCheckBox(ParseInfo parseInfo) {
AbstractTableModel model;
if ( expertCheckBox.isSelected() ) {
model = new ExpertProfilerTableDataModel(parseInfo);
}
else {
model = new SimpleProfilerTableDataModel(parseInfo);
}
profilerDataTable.setModel(model);
profilerDataTable.setRowSorter(new TableRowSorter<AbstractTableModel>(model));
}
public void selectDecisionInGrammar(PreviewState previewState, int decision) {
ANTLRv4PluginController controller = ANTLRv4PluginController.getInstance(project);
Editor grammarEditor = controller.getCurrentGrammarEditor();
DecisionState decisionState = previewState.g.atn.getDecisionState(decision);
Interval region = previewState.g.getStateToGrammarRegion(decisionState.stateNumber);
if ( region==null ) {
System.err.println("decision "+decision+" has state "+decisionState.stateNumber+" but no region");
return;
}
MarkupModel markupModel = grammarEditor.getMarkupModel();
markupModel.removeAllHighlighters();
org.antlr.runtime.TokenStream tokens = previewState.g.tokenStream;
if ( region.a>=tokens.size() || region.b>=tokens.size() ) {
// System.out.println("out of range: " + region + " tokens.size()=" + tokens.size());
return;
}
CommonToken startToken = (CommonToken)tokens.get(region.a);
CommonToken stopToken = (CommonToken)tokens.get(region.b);
JBColor effectColor = JBColor.darkGray;
DecisionInfo decisionInfo = previewState.parsingResult.parser.getParseInfo().getDecisionInfo()[decision];
if ( decisionInfo.predicateEvals.size()>0 ) {
effectColor = new JBColor(PREDEVAL_COLOR, AMBIGUITY_COLOR);
}
if ( decisionInfo.contextSensitivities.size()>0 ) {
effectColor = new JBColor(FULLCTX_COLOR, AMBIGUITY_COLOR);
}
if ( decisionInfo.ambiguities.size()>0 ) {
effectColor = new JBColor(AMBIGUITY_COLOR, AMBIGUITY_COLOR);
}
TextAttributes attr =
new TextAttributes(JBColor.BLACK, JBColor.WHITE, effectColor,
EffectType.ROUNDED_BOX, Font.PLAIN);
markupModel.addRangeHighlighter(
startToken.getStartIndex(),
stopToken.getStopIndex()+1,
HighlighterLayer.SELECTION, // layer
attr,
HighlighterTargetArea.EXACT_RANGE
);
// System.out.println("dec " + decision + " from " + startToken + " to " + stopToken);
ScrollingModel scrollingModel = grammarEditor.getScrollingModel();
CaretModel caretModel = grammarEditor.getCaretModel();
caretModel.moveToOffset(startToken.getStartIndex());
scrollingModel.scrollToCaret(ScrollType.MAKE_VISIBLE);
}
public void tagAmbiguousDecisionsInGrammar(PreviewState previewState) {
// ANTLRv4PluginController controller = ANTLRv4PluginController.getInstance(project);
// Editor grammarEditor = controller.getCurrentGrammarEditor();
//
// ParseInfo parseInfo = previewState.parsingResult.parser.getParseInfo();
// DecisionInfo[] decisionInfo = parseInfo.getDecisionInfo();
//
// for (DecisionState decisionState : previewState.g.atn.decisionToState) {
// if ( decisionInfo[decisionState.decision].ambiguities.size()==0 ) {
// continue;
// }
// Interval region = previewState.g.getStateToGrammarRegion(decisionState.stateNumber);
// if ( region==null ) {
// System.err.println("decision "+decisionState.decision+" has state "+decisionState.stateNumber+" but no region");
// return;
// }
// MarkupModel markupModel = grammarEditor.getMarkupModel();
// markupModel.removeAllHighlighters();
// org.antlr.runtime.TokenStream tokens = previewState.g.tokenStream;
// if ( region.a>=tokens.size()||region.b>=tokens.size() ) {
//// System.out.println("out of range: " + region + " tokens.size()=" + tokens.size());
// return;
// }
// CommonToken startToken = (CommonToken)tokens.get(region.a);
// CommonToken stopToken = (CommonToken)tokens.get(region.b);
// TextAttributes attr =
// new TextAttributes(JBColor.BLACK, JBColor.WHITE, JBColor.darkGray,
// EffectType.ROUNDED_BOX, Font.PLAIN);
// markupModel.addRangeHighlighter(
// startToken.getStartIndex(),
// stopToken.getStopIndex()+1,
// HighlighterLayer.SELECTION, // layer
// attr,
// HighlighterTargetArea.EXACT_RANGE
// );
//
//// System.out.println("dec " + decision + " from " + startToken + " to " + stopToken);
//
// ScrollingModel scrollingModel = grammarEditor.getScrollingModel();
// CaretModel caretModel = grammarEditor.getCaretModel();
// caretModel.moveToOffset(startToken.getStartIndex());
// scrollingModel.scrollToCaret(ScrollType.MAKE_VISIBLE);
// }
}
public void highlightPhrases(PreviewState previewState, int decision) {
if ( previewState.parsingResult==null ) {
return;
}
ParseInfo parseInfo = previewState.parsingResult.parser.getParseInfo();
Editor editor = previewState.getEditor();
ScrollingModel scrollingModel = editor.getScrollingModel();
CaretModel caretModel = editor.getCaretModel();
MarkupModel markupModel = editor.getMarkupModel();
markupModel.removeAllHighlighters();
DecisionInfo decisionInfo = parseInfo.getDecisionInfo()[decision];
Token firstToken = null;
// deepest lookahead
long maxLook = Math.max(decisionInfo.LL_MaxLook, decisionInfo.SLL_MaxLook);
if ( maxLook>1 ) // ignore k=1
{
LookaheadEventInfo maxLookEvent = decisionInfo.SLL_MaxLookEvent;
if ( decisionInfo.LL_MaxLook>decisionInfo.SLL_MaxLook ) {
maxLookEvent = decisionInfo.LL_MaxLookEvent;
}
Token t = addDecisionEventHighlighter(previewState, markupModel,
maxLookEvent,
DEEPESTLOOK_COLOR,
EffectType.BOLD_DOTTED_LINE);
firstToken = t;
}
// pred evals
for (PredicateEvalInfo predEvalInfo : decisionInfo.predicateEvals) {
Token t = addDecisionEventHighlighter(previewState, markupModel, predEvalInfo, PREDEVAL_COLOR, EffectType.ROUNDED_BOX);
if ( firstToken==null ) firstToken = t;
}
// context-sensitivities
for (ContextSensitivityInfo ctxSensitivityInfo : decisionInfo.contextSensitivities) {
Token t = addDecisionEventHighlighter(previewState, markupModel, ctxSensitivityInfo, FULLCTX_COLOR, EffectType.ROUNDED_BOX);
if ( firstToken==null ) firstToken = t;
}
// ambiguities (might overlay context-sensitivities)
for (AmbiguityInfo ambiguityInfo : decisionInfo.ambiguities) {
Token t = addDecisionEventHighlighter(previewState, markupModel, ambiguityInfo, AMBIGUITY_COLOR, EffectType.ROUNDED_BOX);
if ( firstToken==null ) firstToken = t;
}
if ( firstToken!=null ) {
caretModel.moveToOffset(firstToken.getStartIndex());
scrollingModel.scrollToCaret(ScrollType.MAKE_VISIBLE);
}
}
public Token addDecisionEventHighlighter(PreviewState previewState, MarkupModel markupModel,
DecisionEventInfo info, Color errorStripeColor,
EffectType effectType) {
TokenStream tokens = previewState.parsingResult.parser.getInputStream();
Token startToken = tokens.get(info.startIndex);
Token stopToken = tokens.get(info.stopIndex);
TextAttributes textAttributes =
new TextAttributes(JBColor.BLACK, JBColor.WHITE, errorStripeColor,
effectType, Font.PLAIN);
textAttributes.setErrorStripeColor(errorStripeColor);
final RangeHighlighter rangeHighlighter =
markupModel.addRangeHighlighter(
startToken.getStartIndex(), stopToken.getStopIndex()+1,
HighlighterLayer.ADDITIONAL_SYNTAX, textAttributes,
HighlighterTargetArea.EXACT_RANGE);
rangeHighlighter.putUserData(DECISION_EVENT_INFO_KEY, info);
rangeHighlighter.setErrorStripeMarkColor(errorStripeColor);
return startToken;
}
public static String getSemanticContextDisplayString(PredicateEvalInfo pred,
PreviewState previewState,
SemanticContext semctx,
int alt,
boolean result) {
Grammar g = previewState.g;
String semanticContextDisplayString = g.getSemanticContextDisplayString(semctx);
if ( semctx instanceof SemanticContext.PrecedencePredicate ) {
int ruleIndex = previewState.parsingResult.parser.getATN().decisionToState.get(pred.decision).ruleIndex;
Rule rule = g.getRule(ruleIndex);
int precedence = ((SemanticContext.PrecedencePredicate)semctx).precedence;
// precedence = n - originalAlt + 1, So:
int originalAlt = rule.getOriginalNumberOfAlts()-precedence+1;
alt = originalAlt;
}
return semanticContextDisplayString+" => alt "+alt+" is "+result;
}
{
// GUI initializer generated by IntelliJ IDEA GUI Designer
// >>> IMPORTANT!! <<<
// DO NOT EDIT OR ADD ANY CODE HERE!
$$$setupUI$$$();
}
/**
* Method generated by IntelliJ IDEA GUI Designer
* >>> IMPORTANT!! <<<
* DO NOT edit this method OR call it in your code!
*
* @noinspection ALL
*/
private void $$$setupUI$$$() {
createUIComponents();
outerPanel = new JPanel();
outerPanel.setLayout(new BorderLayout(0, 0));
statsPanel = new JPanel();
statsPanel.setLayout(new GridLayoutManager(12, 3, new Insets(0, 5, 0, 0), -1, -1));
outerPanel.add(statsPanel, BorderLayout.EAST);
final JLabel label1 = new JLabel();
label1.setText("Parse time (ms):");
statsPanel.add(label1, new GridConstraints(2, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, null, new Dimension(130, 16), null, 0, false));
final JLabel label2 = new JLabel();
label2.setText("Prediction time (ms):");
statsPanel.add(label2, new GridConstraints(3, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(130, 16), null, 0, false));
final JLabel label3 = new JLabel();
label3.setText("Lookahead burden:");
statsPanel.add(label3, new GridConstraints(4, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, null, new Dimension(130, 16), null, 0, false));
final JLabel label4 = new JLabel();
label4.setText("DFA cache miss rate:");
statsPanel.add(label4, new GridConstraints(5, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, null, new Dimension(130, 16), null, 0, false));
final Spacer spacer1 = new Spacer();
statsPanel.add(spacer1, new GridConstraints(11, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_VERTICAL, 1, GridConstraints.SIZEPOLICY_WANT_GROW, null, new Dimension(-1, 14), null, 0, false));
final Spacer spacer2 = new Spacer();
statsPanel.add(spacer2, new GridConstraints(2, 2, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, 1, null, null, null, 0, false));
parseTimeField = new JLabel();
parseTimeField.setText("0");
statsPanel.add(parseTimeField, new GridConstraints(2, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
predictionTimeField = new JLabel();
predictionTimeField.setText("0");
statsPanel.add(predictionTimeField, new GridConstraints(3, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
lookaheadBurdenField = new JLabel();
lookaheadBurdenField.setText("0");
statsPanel.add(lookaheadBurdenField, new GridConstraints(4, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
cacheMissRateField = new JLabel();
cacheMissRateField.setText("0");
statsPanel.add(cacheMissRateField, new GridConstraints(5, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final JLabel label5 = new JLabel();
label5.setText("Input size:");
statsPanel.add(label5, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(130, 16), null, 0, false));
inputSizeField = new JLabel();
inputSizeField.setText("0");
statsPanel.add(inputSizeField, new GridConstraints(0, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final JLabel label6 = new JLabel();
label6.setText("Number of tokens:");
statsPanel.add(label6, new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
numTokensField = new JLabel();
numTokensField.setText("0");
statsPanel.add(numTokensField, new GridConstraints(1, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final JPanel panel1 = new JPanel();
panel1.setLayout(new GridLayoutManager(4, 1, new Insets(0, 0, 0, 0), -1, -1));
statsPanel.add(panel1, new GridConstraints(7, 0, 4, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
panel1.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), null));
ambiguityColorLabel.setText("Ambiguity");
panel1.add(ambiguityColorLabel, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
contextSensitivityColorLabel.setText("Context-sensitivity");
panel1.add(contextSensitivityColorLabel, new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
predEvaluationColorLabel.setText("Predicate evaluation");
panel1.add(predEvaluationColorLabel, new GridConstraints(2, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
deepestLookaheadLabel.setText("Deepest lookahead");
panel1.add(deepestLookaheadLabel, new GridConstraints(3, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
expertCheckBox.setText("Show expert columns");
statsPanel.add(expertCheckBox, new GridConstraints(6, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK|GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final JScrollPane scrollPane1 = new JScrollPane();
outerPanel.add(scrollPane1, BorderLayout.CENTER);
profilerDataTable.setPreferredScrollableViewportSize(new Dimension(800, 400));
scrollPane1.setViewportView(profilerDataTable);
}
/**
* @noinspection ALL
*/
public JComponent $$$getRootComponent$$$() {
return outerPanel;
}
class ProfileTableCellRenderer extends DefaultTableCellRenderer {
public Component getTableCellRendererComponent(JTable table, Object value,
boolean isSelected, boolean hasFocus,
int row, int column) {
Component c = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
if ( previewState==null || previewState.parsingResult==null ) {
return c;
}
ParseInfo parseInfo = previewState.parsingResult.parser.getParseInfo();
int decision = profilerDataTable.convertRowIndexToModel(row);
DecisionInfo[] decisions = parseInfo.getDecisionInfo();
if ( decision>=decisions.length ) {
return c;
}
DecisionInfo decisionInfo = decisions[decision];
if ( decisionInfo.ambiguities.size()>0 ) {
setForeground(AMBIGUITY_COLOR);
}
else if ( decisionInfo.contextSensitivities.size()>0 ) {
setForeground(FULLCTX_COLOR);
}
else if ( decisionInfo.predicateEvals.size()>0 ) {
setForeground(PREDEVAL_COLOR);
}
return c;
}
}
private void createUIComponents() {
expertCheckBox = new JBCheckBox();
expertCheckBox.setSelected(false);
expertCheckBox.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
ParseInfo parseInfo = previewState.parsingResult.parser.getParseInfo();
updateTableModelPerExpertCheckBox(parseInfo);
}
});
profilerDataTable = new JBTable() {
@Override
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
public String getToolTipText(MouseEvent e) {
Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
TableModel model = getModel();
if ( model instanceof ProfilerTableDataModel ) {
return ((ProfilerTableDataModel)model).getColumnToolTips()[realIndex];
}
return model.getColumnName(realIndex);
}
};
}
@Override
public TableCellRenderer getDefaultRenderer(Class<?> columnClass) {
return new ProfileTableCellRenderer();
}
};
ListSelectionModel selectionModel = profilerDataTable.getSelectionModel();
selectionModel.addListSelectionListener(
new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
// previewState, project set later
if ( e.getValueIsAdjusting() ) {
return; // this seems to be "mouse down" but not mouse up
}
// get state for current grammar editor tab
if ( project==null ) {
return;
}
PreviewState previewState = ANTLRv4PluginController.getInstance(project).getPreviewState();
if ( previewState!=null && profilerDataTable.getModel().getClass()!=DefaultTableModel.class ) {
int selectedRow = profilerDataTable.getSelectedRow();
if ( selectedRow==-1 ) {
selectedRow = 0;
}
int decision = profilerDataTable.convertRowIndexToModel(selectedRow);
int numberOfDecisions = previewState.g.atn.getNumberOfDecisions();
if ( decision<=numberOfDecisions ) {
selectDecisionInGrammar(previewState, decision);
highlightPhrases(previewState, decision);
}
}
}
}
);
selectionModel.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
ambiguityColorLabel = new JBLabel("Ambiguity");
ambiguityColorLabel.setForeground(AMBIGUITY_COLOR);
contextSensitivityColorLabel = new JBLabel("Context sensitivity");
contextSensitivityColorLabel.setForeground(FULLCTX_COLOR);
predEvaluationColorLabel = new JBLabel("Predicate evaluation");
predEvaluationColorLabel.setForeground(PREDEVAL_COLOR);
deepestLookaheadLabel = new JBLabel("Deepest lookahead");
deepestLookaheadLabel.setForeground(DEEPESTLOOK_COLOR);
}
}
| {
"content_hash": "77ad70b9027ea9aa1e4aa2ff922a1200",
"timestamp": "",
"source": "github",
"line_count": 552,
"max_line_length": 310,
"avg_line_length": 46.130434782608695,
"alnum_prop": 0.7604853911404336,
"repo_name": "rojaster/intellij-plugin-v4",
"id": "a7cc56a98ed06262c87dcc81ce040466f2d9781f",
"size": "25464",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/java/org/antlr/intellij/plugin/profiler/ProfilerPanel.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ANTLR",
"bytes": "16387"
},
{
"name": "Java",
"bytes": "293176"
},
{
"name": "Python",
"bytes": "1096"
}
],
"symlink_target": ""
} |
#ifndef MAP_AREA_HPP
#define MAP_AREA_HPP
#include <SFML/System.hpp>
#include <SFML/Graphics.hpp>
/* regroupe the tile of a common zone */
/* T is a tile class */
#define MAP_AREA_SIZE 8//2 4 8 16 32 64 128
namespace map
{
// Forward declarations; both classes get friend access to Area below.
template<class T> class AreaManager;
template<class T> class AreaLoader;
/* One MAP_AREA_SIZE x MAP_AREA_SIZE chunk of tiles of type T.
 * Non-copyable. Method bodies live in Area.tpl. */
template<class T>
class Area
{
public:
/* X,Y = map coords (to multiply with MAP_AREA_SIZE)*/
explicit Area(const int& X, const int& Y);
~Area();
// Areas own their tile grid; copying is disabled.
Area(const Area&) = delete;
Area& operator=(const Area&) = delete;
/* X,Y = map coords*/
// Tile accessor. Note the [Y][X] (row-major) indexing and that every
// access restarts 'clock' — presumably a last-access timestamp used by
// AreaManager to decide which areas to unload (TODO confirm in Area.tpl).
// No bounds checking is performed.
inline T* operator()(const int& X,const int& Y){
clock.restart();
return tiles[Y][X];
};
void draw(sf::RenderTarget& target, sf::RenderStates states= sf::RenderStates::Default);
// Conversions between pixel positions and tile coordinates (static helpers).
static sf::Vector2i mapPixelToCoords(const int& X,const int& Y);
static sf::Vector2i mapPixelToCoords(const sf::Vector2i& pos);
static sf::Vector2i mapCoordsToPixel(const int& X,const int& Y);
private:
friend class AreaManager<T>;
friend class AreaLoader<T>;
// Tile grid, indexed tiles[Y][X]. Ownership of the T* pointers is not
// visible here — presumably managed by AreaLoader/Area.tpl; verify there.
T* tiles[MAP_AREA_SIZE][MAP_AREA_SIZE];
// Restarted on every operator() access; see note above.
sf::Clock clock;
};
};
#include "Area.tpl"
#endif
| {
"content_hash": "8bc85dbe31e5f87b689d56b425f861de",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 100,
"avg_line_length": 25.471698113207548,
"alnum_prop": 0.562962962962963,
"repo_name": "Krozark/2D-infinite-map",
"id": "2d01d71f8b915bce5f0104f560f41949434e09a0",
"size": "1350",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Map/Area.hpp",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C++",
"bytes": "15762"
},
{
"name": "Makefile",
"bytes": "1161"
},
{
"name": "Smarty",
"bytes": "16143"
}
],
"symlink_target": ""
} |
<Type Name="TempFileMFDStream" FullName="Manos.Server.TempFileMFDStream">
<TypeSignature Language="C#" Value="public class TempFileMFDStream : Manos.Server.IMFDStream" />
<AssemblyInfo>
<AssemblyName>Manos</AssemblyName>
<AssemblyVersion>0.0.4.0</AssemblyVersion>
</AssemblyInfo>
<Base>
<BaseTypeName>System.Object</BaseTypeName>
</Base>
<Interfaces>
<Interface>
<InterfaceName>Manos.Server.IMFDStream</InterfaceName>
</Interface>
</Interfaces>
<Docs>
<summary>To be added.</summary>
<remarks>To be added.</remarks>
</Docs>
<Members>
<Member MemberName=".ctor">
<MemberSignature Language="C#" Value="public TempFileMFDStream (Manos.Server.IOStream iostream);" />
<MemberType>Constructor</MemberType>
<AssemblyInfo>
<AssemblyVersion>0.0.4.0</AssemblyVersion>
</AssemblyInfo>
<Parameters>
<Parameter Name="iostream" Type="Manos.Server.IOStream" />
</Parameters>
<Docs>
<param name="iostream">To be added.</param>
<summary>To be added.</summary>
<remarks>To be added.</remarks>
</Docs>
</Member>
<Member MemberName="BeginFile">
<MemberSignature Language="C#" Value="public void BeginFile (string name);" />
<MemberType>Method</MemberType>
<AssemblyInfo>
<AssemblyVersion>0.0.4.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>System.Void</ReturnType>
</ReturnValue>
<Parameters>
<Parameter Name="name" Type="System.String" />
</Parameters>
<Docs>
<param name="name">To be added.</param>
<summary>To be added.</summary>
<remarks>To be added.</remarks>
</Docs>
</Member>
<Member MemberName="EndFile">
<MemberSignature Language="C#" Value="public Manos.Server.UploadedFile EndFile ();" />
<MemberType>Method</MemberType>
<AssemblyInfo>
<AssemblyVersion>0.0.4.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>Manos.Server.UploadedFile</ReturnType>
</ReturnValue>
<Parameters />
<Docs>
<summary>To be added.</summary>
<returns>To be added.</returns>
<remarks>To be added.</remarks>
</Docs>
</Member>
<Member MemberName="ReadBytes">
<MemberSignature Language="C#" Value="public void ReadBytes (int count, Manos.Server.IMFDReadCallback callback);" />
<MemberType>Method</MemberType>
<AssemblyInfo>
<AssemblyVersion>0.0.4.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>System.Void</ReturnType>
</ReturnValue>
<Parameters>
<Parameter Name="count" Type="System.Int32" />
<Parameter Name="callback" Type="Manos.Server.IMFDReadCallback" />
</Parameters>
<Docs>
<param name="count">To be added.</param>
<param name="callback">To be added.</param>
<summary>To be added.</summary>
<remarks>To be added.</remarks>
</Docs>
</Member>
<Member MemberName="ReadUntil">
<MemberSignature Language="C#" Value="public void ReadUntil (string delimiter, Manos.Server.IMFDReadCallback callback);" />
<MemberType>Method</MemberType>
<AssemblyInfo>
<AssemblyVersion>0.0.4.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>System.Void</ReturnType>
</ReturnValue>
<Parameters>
<Parameter Name="delimiter" Type="System.String" />
<Parameter Name="callback" Type="Manos.Server.IMFDReadCallback" />
</Parameters>
<Docs>
<param name="delimiter">To be added.</param>
<param name="callback">To be added.</param>
<summary>To be added.</summary>
<remarks>To be added.</remarks>
</Docs>
</Member>
</Members>
</Type>
| {
"content_hash": "edf916e3f6180ae654d55a8f1cc09975",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 129,
"avg_line_length": 35.08181818181818,
"alnum_prop": 0.6284011401917595,
"repo_name": "jacksonh/manos",
"id": "a9c041d2885af3169ff567309297925d326dd4ad",
"size": "3859",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "docs/api/en/Manos.Server/TempFileMFDStream.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "84"
},
{
"name": "C",
"bytes": "277639"
},
{
"name": "C#",
"bytes": "860154"
},
{
"name": "C++",
"bytes": "21096"
},
{
"name": "CSS",
"bytes": "7697"
},
{
"name": "Groff",
"bytes": "406198"
},
{
"name": "HTML",
"bytes": "3395"
},
{
"name": "JavaScript",
"bytes": "13637"
},
{
"name": "Makefile",
"bytes": "1599"
},
{
"name": "Objective-C",
"bytes": "403"
},
{
"name": "Perl",
"bytes": "214405"
},
{
"name": "Ruby",
"bytes": "673"
},
{
"name": "Shell",
"bytes": "21533"
}
],
"symlink_target": ""
} |
#ifndef __PYX_HAVE__lxml__etree
#define __PYX_HAVE__lxml__etree
/* NOTE(review): the "/path/to/lxml.etree.pyx:NNN" comments below indicate
 * this header appears to be auto-generated by Cython from lxml.etree.pyx;
 * prefer regenerating over hand-editing. */

/* Forward declarations of the public object structs exported by lxml.etree. */
struct LxmlDocument;
struct LxmlElement;
struct LxmlElementTree;
struct LxmlElementTagMatcher;
struct LxmlElementIterator;
struct LxmlElementBase;
struct LxmlElementClassLookup;
struct LxmlFallbackElementClassLookup;

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/lxml.etree.pyx":302
 * cdef class _BaseParser
 * cdef class QName
 * ctypedef public xmlNode* (*_node_to_node_function)(xmlNode*)             # <<<<<<<<<<<<<<
 * 
 * ################################################################################
 */
/* Tree-navigation callback type, e.g. next/previous element (see iterator below). */
typedef xmlNode *(*_node_to_node_function)(xmlNode *);

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/lxml.etree.pyx":298
 * 
 * # forward declarations
 * cdef public class _Document [ type LxmlDocumentType, object LxmlDocument ]             # <<<<<<<<<<<<<<
 * cdef public class _Element [ type LxmlElementType, object LxmlElement ]
 * cdef class _BaseParser
 */
/* Python object layout for lxml's _Document: wraps a libxml2 xmlDoc. */
struct LxmlDocument {
  PyObject_HEAD
  struct __pyx_vtabstruct_4lxml_5etree__Document *__pyx_vtab;
  int _ns_counter;
  PyObject *_prefix_tail;
  xmlDoc *_c_doc;
  struct __pyx_obj_4lxml_5etree__BaseParser *_parser;
};

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/lxml.etree.pyx":299
 * # forward declarations
 * cdef public class _Document [ type LxmlDocumentType, object LxmlDocument ]
 * cdef public class _Element [ type LxmlElementType, object LxmlElement ]             # <<<<<<<<<<<<<<
 * cdef class _BaseParser
 * cdef class QName
 */
/* Python object layout for lxml's _Element: a proxy for a libxml2 xmlNode
 * plus a reference to its owning _Document. */
struct LxmlElement {
  PyObject_HEAD
  PyObject *_gc_doc;
  struct LxmlDocument *_doc;
  xmlNode *_c_node;
  PyObject *_tag;
};

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/lxml.etree.pyx":1735
 * 
 * 
 * cdef public class _ElementTree [ type LxmlElementTreeType,             # <<<<<<<<<<<<<<
 *                                  object LxmlElementTree ]:
 *     cdef _Document _doc
 */
/* Object layout for _ElementTree: a document plus its context (root) node. */
struct LxmlElementTree {
  PyObject_HEAD
  struct __pyx_vtabstruct_4lxml_5etree__ElementTree *__pyx_vtab;
  struct LxmlDocument *_doc;
  struct LxmlElement *_context_node;
};

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/lxml.etree.pyx":2396
 * 
 * 
 * cdef public class _ElementTagMatcher [ object LxmlElementTagMatcher,             # <<<<<<<<<<<<<<
 *                                        type LxmlElementTagMatcherType ]:
 *     """
 */
/* Tag filter state: node type plus namespace href/name to match against. */
struct LxmlElementTagMatcher {
  PyObject_HEAD
  struct __pyx_vtabstruct_4lxml_5etree__ElementTagMatcher *__pyx_vtab;
  PyObject *_pystrings;
  int _node_type;
  char *_href;
  char *_name;
};

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/lxml.etree.pyx":2427
 *     self._name = NULL
 * 
 * cdef public class _ElementIterator(_ElementTagMatcher) [             # <<<<<<<<<<<<<<
 *                         object LxmlElementIterator, type LxmlElementIteratorType ]:
 *     """
 */
/* Iterator over elements: extends the tag matcher with a current node and
 * a step function (_node_to_node_function) that advances to the next node. */
struct LxmlElementIterator {
  struct LxmlElementTagMatcher __pyx_base;
  struct LxmlElement *_node;
  _node_to_node_function _next_element;
};

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/classlookup.pxi":6
 * # Custom Element classes
 * 
 * cdef public class ElementBase(_Element) [ type LxmlElementBaseType,             # <<<<<<<<<<<<<<
 *                                           object LxmlElementBase ]:
 *     u"""ElementBase(*children, attrib=None, nsmap=None, **_extra)
 */
/* Base class layout for user-defined custom element classes; no extra fields. */
struct LxmlElementBase {
  struct LxmlElement __pyx_base;
};

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/classlookup.pxi":211
 * # Element class lookup
 * 
 * ctypedef public object (*_element_class_lookup_function)(object, _Document, xmlNode*)             # <<<<<<<<<<<<<<
 * 
 * # class to store element class lookup functions
 */
/* Callback deciding which Python class to instantiate for a given xmlNode. */
typedef PyObject *(*_element_class_lookup_function)(PyObject *, struct LxmlDocument *, xmlNode *);

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/classlookup.pxi":214
 * 
 * # class to store element class lookup functions
 * cdef public class ElementClassLookup [ type LxmlElementClassLookupType,             # <<<<<<<<<<<<<<
 *                                        object LxmlElementClassLookup ]:
 *     u"""ElementClassLookup(self)
 */
/* Holder for an element-class lookup callback. */
struct LxmlElementClassLookup {
  PyObject_HEAD
  _element_class_lookup_function _lookup_function;
};

/* "/home/stefan/source/Python/lxml/lxml-release/src/lxml/classlookup.pxi":223
 *         self._lookup_function = NULL # use default lookup
 * 
 * cdef public class FallbackElementClassLookup(ElementClassLookup) \             # <<<<<<<<<<<<<<
 *     [ type LxmlFallbackElementClassLookupType,
 *       object LxmlFallbackElementClassLookup ]:
 */
/* Lookup that can delegate to a fallback lookup object/function when its
 * own lookup declines to choose a class. */
struct LxmlFallbackElementClassLookup {
  struct LxmlElementClassLookup __pyx_base;
  struct __pyx_vtabstruct_4lxml_5etree_FallbackElementClassLookup *__pyx_vtab;
  struct LxmlElementClassLookup *fallback;
  _element_class_lookup_function _fallback_function;
};
#ifndef __PYX_HAVE_API__lxml__etree
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
#define __PYX_EXTERN_C extern "C"
#else
#define __PYX_EXTERN_C extern
#endif
#endif
__PYX_EXTERN_C DL_IMPORT(PyTypeObject) LxmlElementType;
__PYX_EXTERN_C DL_IMPORT(PyTypeObject) LxmlElementTagMatcherType;
__PYX_EXTERN_C DL_IMPORT(PyTypeObject) LxmlElementClassLookupType;
__PYX_EXTERN_C DL_IMPORT(PyTypeObject) LxmlFallbackElementClassLookupType;
__PYX_EXTERN_C DL_IMPORT(PyTypeObject) LxmlDocumentType;
__PYX_EXTERN_C DL_IMPORT(PyTypeObject) LxmlElementTreeType;
__PYX_EXTERN_C DL_IMPORT(PyTypeObject) LxmlElementIteratorType;
__PYX_EXTERN_C DL_IMPORT(PyTypeObject) LxmlElementBaseType;
__PYX_EXTERN_C DL_IMPORT(struct LxmlElement) *deepcopyNodeToDocument(struct LxmlDocument *, xmlNode *);
__PYX_EXTERN_C DL_IMPORT(struct LxmlElementTree) *elementTreeFactory(struct LxmlElement *);
__PYX_EXTERN_C DL_IMPORT(struct LxmlElementTree) *newElementTree(struct LxmlElement *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(struct LxmlElement) *elementFactory(struct LxmlDocument *, xmlNode *);
__PYX_EXTERN_C DL_IMPORT(struct LxmlElement) *makeElement(PyObject *, struct LxmlDocument *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(struct LxmlElement) *makeSubElement(struct LxmlElement *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(void) setElementClassLookupFunction(_element_class_lookup_function, PyObject *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *lookupDefaultElementClass(PyObject *, PyObject *, xmlNode *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *lookupNamespaceElementClass(PyObject *, PyObject *, xmlNode *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *callLookupFallback(struct LxmlFallbackElementClassLookup *, struct LxmlDocument *, xmlNode *);
__PYX_EXTERN_C DL_IMPORT(int) tagMatches(xmlNode *, const xmlChar *, const xmlChar *);
__PYX_EXTERN_C DL_IMPORT(struct LxmlDocument) *documentOrRaise(PyObject *);
__PYX_EXTERN_C DL_IMPORT(struct LxmlElement) *rootNodeOrRaise(PyObject *);
__PYX_EXTERN_C DL_IMPORT(int) hasText(xmlNode *);
__PYX_EXTERN_C DL_IMPORT(int) hasTail(xmlNode *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *textOf(xmlNode *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *tailOf(xmlNode *);
__PYX_EXTERN_C DL_IMPORT(int) setNodeText(xmlNode *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(int) setTailText(xmlNode *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *attributeValue(xmlNode *, xmlAttr *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *attributeValueFromNsName(xmlNode *, const xmlChar *, const xmlChar *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *getAttributeValue(struct LxmlElement *, PyObject *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *iterattributes(struct LxmlElement *, int);
__PYX_EXTERN_C DL_IMPORT(PyObject) *collectAttributes(xmlNode *, int);
__PYX_EXTERN_C DL_IMPORT(int) setAttributeValue(struct LxmlElement *, PyObject *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(int) delAttribute(struct LxmlElement *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(int) delAttributeFromNsName(xmlNode *, const xmlChar *, const xmlChar *);
__PYX_EXTERN_C DL_IMPORT(int) hasChild(xmlNode *);
__PYX_EXTERN_C DL_IMPORT(xmlNode) *findChild(xmlNode *, Py_ssize_t);
__PYX_EXTERN_C DL_IMPORT(xmlNode) *findChildForwards(xmlNode *, Py_ssize_t);
__PYX_EXTERN_C DL_IMPORT(xmlNode) *findChildBackwards(xmlNode *, Py_ssize_t);
__PYX_EXTERN_C DL_IMPORT(xmlNode) *nextElement(xmlNode *);
__PYX_EXTERN_C DL_IMPORT(xmlNode) *previousElement(xmlNode *);
__PYX_EXTERN_C DL_IMPORT(void) appendChild(struct LxmlElement *, struct LxmlElement *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *pyunicode(const xmlChar *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *utf8(PyObject *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *getNsTag(PyObject *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *getNsTagWithEmptyNs(PyObject *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *namespacedName(xmlNode *);
__PYX_EXTERN_C DL_IMPORT(PyObject) *namespacedNameFromNsName(const xmlChar *, const xmlChar *);
__PYX_EXTERN_C DL_IMPORT(void) iteratorStoreNext(struct LxmlElementIterator *, struct LxmlElement *);
__PYX_EXTERN_C DL_IMPORT(void) initTagMatch(struct LxmlElementTagMatcher *, PyObject *);
__PYX_EXTERN_C DL_IMPORT(xmlNs) *findOrBuildNodeNsPrefix(struct LxmlDocument *, xmlNode *, const xmlChar *, const xmlChar *);
#endif /* !__PYX_HAVE_API__lxml__etree */
#if PY_MAJOR_VERSION < 3
PyMODINIT_FUNC initetree(void);
#else
PyMODINIT_FUNC PyInit_etree(void);
#endif
#endif /* !__PYX_HAVE__lxml__etree */
| {
"content_hash": "baeb133379685af1f6cd7a8c0e685570",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 153,
"avg_line_length": 44.009389671361504,
"alnum_prop": 0.6992745892895242,
"repo_name": "neumerance/cloudloon2",
"id": "40b01b6c5bebf3b9a30f975b69a11e94675d3bbc",
"size": "9374",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": ".venv/lib/python2.7/site-packages/lxml/lxml.etree.h",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "178040"
},
{
"name": "JavaScript",
"bytes": "460971"
},
{
"name": "Perl",
"bytes": "1954"
},
{
"name": "Python",
"bytes": "3227734"
},
{
"name": "Ruby",
"bytes": "76"
},
{
"name": "Shell",
"bytes": "14108"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8" standalone="yes" ?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>钱国正博客</title>
<link>http://example.org/</link>
<description>Recent content on 钱国正博客</description>
<generator>Hugo -- gohugo.io</generator>
<language>en-us</language>
<atom:link href="http://example.org/index.xml" rel="self" type="application/rss+xml" />
</channel>
</rss> | {
"content_hash": "46b1d6c8bb085981ceaba11de3140258",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 91,
"avg_line_length": 36.583333333333336,
"alnum_prop": 0.6514806378132119,
"repo_name": "qianguozheng/qianguozheng.github.io",
"id": "e0b31ffd9654673c23d3b74fe8271b5c21f8bf9c",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/index.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "47840"
},
{
"name": "HTML",
"bytes": "234187"
},
{
"name": "Shell",
"bytes": "403"
}
],
"symlink_target": ""
} |
// Import the bullshit module
var bs = require('nabg');
// Import the Twitter API module - see https://www.npmjs.com/package/twitter
var Twitter = require('twit');
// Import the Cron module for post scheduling.
var cron = require('cron').CronJob;
// HTTP server module to serve profile page redirect
var http = require('http');
/* NABGBot Configuration:
 * TWITTER_API_KEYS - Fill this in with the API keys you get from https://apps.twitter.com/
 * RESPONSE_API_KEYS - You can generate another set of API keys for the response stream, or you can
 * use the first set of keys, instead. I like to use 2 different sets so that if the response keys hit the quota
 * and become disabled temporarily, the bot can still post statuses without interruption.
 * IP and PORT - These are automatically determined while on Openshift, but will default to 127.0.0.1:8082
 * TWITTER_URL - The full URL to your bot's twitter page. Used for the redirect
 * BOT_NAME: the Twitter username of your bot. This is needed to track replies.
 * CRON - Posting schedule (standard Cron syntax)
 */
var CONFIG = {
    // Placeholder credentials: the single-space strings must be replaced
    // with real keys before the bot can authenticate.
    TWITTER_API_KEYS: {
        consumer_key: ' ',
        consumer_secret: ' ',
        access_token: ' ',
        access_token_secret: ' '
    },
    RESPONSE_API_KEYS: {
        consumer_key: ' ',
        consumer_secret: ' ',
        access_token: ' ',
        access_token_secret: ' '
    },
    // Placeholder: set to the bot account's actual Twitter handle.
    BOT_NAME: 'changeme',
    IP: process.env.OPENSHIFT_NODEJS_IP || "127.0.0.1",
    PORT: process.env.OPENSHIFT_NODEJS_PORT || process.env.PORT || 8082,
    TWITTER_URL: "https://twitter.com/nabgbot",
    CRON: {
        TIME: '00 0,30 * * * *',
        TIMEZONE: 'America/New_York'
    }
};
console.info('NABG_INFO: Starting '+CONFIG.TWITTER_URL+' on '+CONFIG.IP+':'+CONFIG.PORT);
// Create Twitter API client instances
var client = new Twitter(CONFIG.TWITTER_API_KEYS);
var tweetResponses = new Twitter(CONFIG.RESPONSE_API_KEYS);
// Stream every tweet mentioning the bot; each one is handed to postReply.
var responseStream = tweetResponses.stream('statuses/filter', {
    track: '@' + CONFIG.BOT_NAME
});
responseStream.on('tweet', postReply);
responseStream.on('disconnect', function(disconnectMessage) {
    // Log the server-supplied disconnect reason, then reconnect immediately.
    console.info('RESPONSE STREAM DISCONNECT: ' + JSON.stringify(disconnectMessage));
    responseStream.start();
});
// Generates a post that is less than 140 chars long.
function generatePost() {
    // Generate a post with 1 or 2 sentences by default.
    var post = bs.ionize(Math.floor(Math.random()*2)+1);
    // Regenerate until the post is a defined, non-empty string that fits in
    // a tweet.
    // BUGFIX: the undefined/empty checks must short-circuit BEFORE the
    // length check — in the original order, an undefined result from
    // bs.ionize() threw a TypeError on `.length` instead of triggering a
    // regeneration.
    while (typeof post === 'undefined' || post.length === 0 || post.length > 140)
        post = bs.ionize(1);
    return post;
}
// Builds the @-mention prefix for a reply: the tweet's author first, then
// every other mentioned user except the bot itself. Every handle is
// followed by a single space.
function getUsers(tweet) {
    var mentions = '@' + tweet.user.screen_name + ' ';
    tweet.entities.user_mentions.forEach(function(mention) {
        if (mention.screen_name != CONFIG.BOT_NAME)
            mentions += '@' + mention.screen_name + ' ';
    });
    return mentions;
}
/* Posts one freshly generated status via the Twitter API.
 * NOTE(review): contrary to what this comment originally claimed, a failed
 * post is only LOGGED here — it is not retried. The cron schedule simply
 * generates a brand-new post on its next tick, and Twitter-side rate
 * limiting (see https://dev.twitter.com/rest/public/rate-limiting/) bounds
 * how often this can fire in any case.
 */
function sendPost() {
    client.post('statuses/update', {status: generatePost()}, function(error, tweet, response){
        // Log the raw API response on failure; the next cron tick tries again.
        if (error)
            console.error('NABG_DEBUG: '+CONFIG.TWITTER_URL+' Twitter returned an error: '+JSON.stringify(response));
    });
}
// Replies to a tweet that mentioned the bot, addressing the author and all
// other mentioned users. Falls back to a canned line if no generated reply
// fits within the tweet length limit.
function postReply(tweet) {
    try {
        // Never reply to self.
        if (tweet.user.screen_name == CONFIG.BOT_NAME)
            return;
        var mentions = getUsers(tweet);
        var attemptsLeft = 30; // max # of generation attempts
        var tweetContent = mentions + generatePost();
        // Keep regenerating until the mentions + message fit in a tweet.
        while (tweetContent.length > 140 && attemptsLeft > 0) {
            tweetContent = mentions + generatePost();
            attemptsLeft--;
        }
        if (attemptsLeft <= 0)
            tweetContent = mentions + ", open your heart to spiritualism";
        tweetResponses.post('statuses/update', {
            status: tweetContent,
            in_reply_to_status_id: tweet.id_str
        }, function(err, data, response) {
            if (err)
                console.info('NABGBOT_DEBUG: ' + JSON.stringify(err));
        });
    } catch (e) {
        console.info('NABGBOT_DEBUG: ' + CONFIG.TWITTER_URL + ' Caught ' + e.name + ': ' + e.message);
    }
}
// Make Twitter API calls at a configurable interval, with graceful error handling and logging
var sched = new cron({
    cronTime: CONFIG.CRON.TIME,
    onTick: function() {
        try {
            sendPost();
        } catch(err) {
            console.error('NABG_DEBUG: '+CONFIG.TWITTER_URL+' Caught '+err.name+': '+err.message);
        }
    },
    // Created stopped; started explicitly below once everything is wired up.
    start: false,
    timeZone: CONFIG.CRON.TIMEZONE
});
sched.start();
// Post once immediately at startup rather than waiting for the first tick.
sendPost();
// HTTP requests made to the application's external URL will redirect to the bot's twitter profile.
http.createServer(function(req, res) {
    res.writeHead(301, {Location: CONFIG.TWITTER_URL});
    res.end();
}).listen(CONFIG.PORT, CONFIG.IP);
| {
"content_hash": "39325a71e4e9e4315e1becccbfeb1a35",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 150,
"avg_line_length": 37.308724832214764,
"alnum_prop": 0.6337470768123763,
"repo_name": "ctrezevant/new-age-bs-bot",
"id": "22d78695db9b2e98bcab22214184cfe257610c50",
"size": "5563",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "5563"
}
],
"symlink_target": ""
} |
package supermarket.model.checkout;
import java.math.BigDecimal;
import java.util.List;
/**
 * A checkout receipt: the booked line items, the tax lines applied to them,
 * and the grand total.
 */
public class Receipt {
    private List<BookingLine> bookings;
    private List<TaxLine> taxLines;
    private BigDecimal total;

    /** @return the booked line items on this receipt */
    public List<BookingLine> getBookings() {
        return this.bookings;
    }

    /** @param bookings the booked line items on this receipt */
    public void setBookings(List<BookingLine> bookings) {
        this.bookings = bookings;
    }

    /** @return the tax lines applied to this receipt */
    public List<TaxLine> getTaxLines() {
        return this.taxLines;
    }

    /** @param taxLines the tax lines applied to this receipt */
    public void setTaxLines(List<TaxLine> taxLines) {
        this.taxLines = taxLines;
    }

    /** @return the grand total of this receipt */
    public BigDecimal getTotal() {
        return this.total;
    }

    /** @param total the grand total of this receipt */
    public void setTotal(BigDecimal total) {
        this.total = total;
    }
}
| {
"content_hash": "ae97a15f5010482283b05f98647909e8",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 54,
"avg_line_length": 18.470588235294116,
"alnum_prop": 0.7372611464968153,
"repo_name": "paulroho/SupermarketKata",
"id": "6905495388190d79b369a4cdab005f1e38b439b4",
"size": "628",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/src/main/java/supermarket/model/checkout/Receipt.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "27864"
},
{
"name": "Gherkin",
"bytes": "6819"
},
{
"name": "Go",
"bytes": "9494"
},
{
"name": "Java",
"bytes": "13906"
}
],
"symlink_target": ""
} |
module PrestaShop
  # Model wrapper for the PrestaShop "contacts" web-service resource.
  class Contact < Model
    resource(:contacts)
  end
end
"content_hash": "0036be710e11325dbb147ec572621bf0",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 26,
"avg_line_length": 12,
"alnum_prop": 0.6547619047619048,
"repo_name": "iaintshine/presta_shop",
"id": "81f23cdfee2d1bfc05e64d614fd65a9130e995c7",
"size": "84",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/presta_shop/models/contact.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "73877"
}
],
"symlink_target": ""
} |
<?php
namespace Cerad\Bundle\GameBundle\Tests\EntityRepository;
use Symfony\Bundle\FrameworkBundle\Test\WebTestCase;
/**
 * Integration tests for the "cerad.project.repository" container service.
 *
 * NOTE(review): these tests are order-dependent and share database state —
 * testResetDatabase wipes the schema, testCreate persists the project that
 * testProjectLoad / testProjectQuery then read back. They only pass when
 * PHPUnit runs the methods in declaration order; confirm before reordering.
 */
class ProjectRepositoryTest extends WebTestCase
{
    // Container service id of the repository under test.
    protected $managerId = 'cerad.project.repository';
    /** The service resolves to the expected repository class and entity class. */
    public function testService()
    {
        $client = static::createClient();
        $manager = $client->getContainer()->get($this->managerId);
        $this->assertEquals('Cerad\Bundle\GameBundle\EntityRepository\ProjectRepository', get_class($manager));
        $this->assertEquals('Cerad\Bundle\GameBundle\Entity\Project', $manager->getClassName());
    }
    /** Drops and recreates the schema so the following tests start clean. */
    public function testResetDatabase()
    {
        $client = static::createClient();
        $manager = $client->getContainer()->get($this->managerId);
        $manager->resetDatabase();
    }
    /** Creates and persists one project; later tests rely on this row. */
    public function testCreate()
    {
        $client = static::createClient();
        $manager = $client->getContainer()->get($this->managerId);
        $params = array('sport' => 'Soccer','season' => 'SP2013','domain' => 'NASOA','domainSub' => 'MSSL');
        $project = $manager->createEntity($params);
        $this->assertEquals('Cerad\Bundle\GameBundle\Entity\Project', get_class($project));
        $this->assertEquals('SOCCERNASOAMSSLSP2013', $project->getHash());
        $manager->persist($project);
        $manager->flush ($project);
        $this->assertEquals(1, $manager->getCount());
    }
    /** Loads the project persisted above by id and by hash; a bad hash yields false. */
    public function testProjectLoad()
    {
        $client = static::createClient();
        $manager = $client->getContainer()->get($this->managerId);
        $project = $manager->find('1');
        $this->assertTrue(is_object($project));
        $this->assertEquals('Cerad\Bundle\GameBundle\Entity\Project', get_class($project));
        $project = $manager->loadForHash('SOCCERNASOAMSSLSP2013');
        $this->assertEquals('Cerad\Bundle\GameBundle\Entity\Project', get_class($project));
        $project = $manager->loadForHash('SOCCERSP2013NASOAMSSLxxx');
        $this->assertFalse(is_object($project));
        $this->assertEquals(false, $project);
    }
    /** Two loads of the same hash issue only one SQL query (repository cache). */
    public function testProjectQuery()
    {
        $client = static::createClient();
        $manager = $client->getContainer()->get($this->managerId);
        $logger = new \Doctrine\DBAL\Logging\DebugStack();
        $manager
            ->getDatabaseConnection()
            ->getConfiguration()
            ->setSQLLogger($logger);
        $hash = 'SOCCERNASOAMSSLSP2013';
        $project1 = $manager->loadForHash($hash);
        $project2 = $manager->loadForHash($hash);
        // Equals 1 only because of the manager's cache
        $this->assertEquals(1,count($logger->queries));
        $this->assertEquals($hash,$project2->getHash());
        // This does not work without a request
        // $profile = $client->getProfile();
    }
    /** processEntity persists a second project, bringing the row count to 2. */
    public function testProjectProcess()
    {
        $client = static::createClient();
        $manager = $client->getContainer()->get($this->managerId);
        $params = array('sport' => 'Soccer','season' => 'SP2013','domain' => 'NASOA','domainSub' => 'AHSAA');
        $project1 = $manager->processEntity($params,true);
        $hash1 = $project1->getHash();
        // NOTE(review): $project2 and $project1x are never asserted, and
        // 'SOCCERSP2013NASOAMSSL' orders its segments differently from the
        // hashes created above — this lookup presumably returns false; verify intent.
        $project2 = $manager->loadForHash('SOCCERSP2013NASOAMSSL');
        $project1x = $manager->loadForHash($hash1);
        // Need a simple way to get the counts
        $this->assertEquals(2,$manager->getCount());
    }
}
?>
| {
"content_hash": "cf4da8a6dc269beeeb69e62aea9a842c",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 111,
"avg_line_length": 32.13157894736842,
"alnum_prop": 0.5809445809445809,
"repo_name": "cerad/cerad",
"id": "ad8eee5ce21a0fa9344b603d36f9c16506d9db9e",
"size": "3663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Cerad/Bundle/GameBundle/Tests/EntityRepository/ProjectRepositoryTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12343"
},
{
"name": "JavaScript",
"bytes": "2092"
},
{
"name": "PHP",
"bytes": "847017"
}
],
"symlink_target": ""
} |
/* MACHINE GENERATED FILE, DO NOT EDIT */
#include <jni.h>
#include "extgl.h"
typedef void (APIENTRY *glDrawElementsBaseVertexPROC) (GLenum mode, GLsizei count, GLenum type, const GLvoid * indices, GLint basevertex);
typedef void (APIENTRY *glDrawRangeElementsBaseVertexPROC) (GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const GLvoid * indices, GLint basevertex);
typedef void (APIENTRY *glDrawElementsInstancedBaseVertexPROC) (GLenum mode, GLsizei count, GLenum type, const GLvoid * indices, GLsizei primcount, GLint basevertex);
typedef void (APIENTRY *glProvokingVertexPROC) (GLenum mode);
typedef void (APIENTRY *glTexImage2DMultisamplePROC) (GLenum target, GLsizei samples, GLint internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations);
typedef void (APIENTRY *glTexImage3DMultisamplePROC) (GLenum target, GLsizei samples, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations);
typedef void (APIENTRY *glGetMultisamplefvPROC) (GLenum pname, GLuint index, GLfloat * val);
typedef void (APIENTRY *glSampleMaskiPROC) (GLuint index, GLbitfield mask);
typedef void (APIENTRY *glFramebufferTexturePROC) (GLenum target, GLenum attachment, GLuint texture, GLint level);
typedef GLsync (APIENTRY *glFenceSyncPROC) (GLenum condition, GLbitfield flags);
typedef GLboolean (APIENTRY *glIsSyncPROC) (GLsync sync);
typedef void (APIENTRY *glDeleteSyncPROC) (GLsync sync);
typedef GLenum (APIENTRY *glClientWaitSyncPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout);
typedef void (APIENTRY *glWaitSyncPROC) (GLsync sync, GLbitfield flags, GLuint64 timeout);
typedef void (APIENTRY *glGetInteger64vPROC) (GLenum pname, GLint64 * data);
typedef void (APIENTRY *glGetInteger64i_vPROC) (GLenum value, GLuint index, GLint64 * data);
typedef void (APIENTRY *glGetSyncivPROC) (GLsync sync, GLenum pname, GLsizei bufSize, GLsizei * length, GLint * values);
/* JNI bridges for the *BaseVertex draw calls. Each Java-side native method
 * passes the GL entry point's address explicitly in 'function_pointer'; the
 * plain variants receive a direct-buffer address in 'indices', while the
 * "BO" variants receive a buffer-object offset converted via offsetToPointer. */
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglDrawElementsBaseVertex(JNIEnv *env, jclass clazz, jint mode, jint count, jint type, jlong indices, jint basevertex, jlong function_pointer) {
	const GLvoid *indices_address = (const GLvoid *)(intptr_t)indices;
	glDrawElementsBaseVertexPROC glDrawElementsBaseVertex = (glDrawElementsBaseVertexPROC)((intptr_t)function_pointer);
	glDrawElementsBaseVertex(mode, count, type, indices_address, basevertex);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglDrawElementsBaseVertexBO(JNIEnv *env, jclass clazz, jint mode, jint count, jint type, jlong indices_buffer_offset, jint basevertex, jlong function_pointer) {
	const GLvoid *indices_address = (const GLvoid *)(intptr_t)offsetToPointer(indices_buffer_offset);
	glDrawElementsBaseVertexPROC glDrawElementsBaseVertex = (glDrawElementsBaseVertexPROC)((intptr_t)function_pointer);
	glDrawElementsBaseVertex(mode, count, type, indices_address, basevertex);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglDrawRangeElementsBaseVertex(JNIEnv *env, jclass clazz, jint mode, jint start, jint end, jint count, jint type, jlong indices, jint basevertex, jlong function_pointer) {
	const GLvoid *indices_address = (const GLvoid *)(intptr_t)indices;
	glDrawRangeElementsBaseVertexPROC glDrawRangeElementsBaseVertex = (glDrawRangeElementsBaseVertexPROC)((intptr_t)function_pointer);
	glDrawRangeElementsBaseVertex(mode, start, end, count, type, indices_address, basevertex);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglDrawRangeElementsBaseVertexBO(JNIEnv *env, jclass clazz, jint mode, jint start, jint end, jint count, jint type, jlong indices_buffer_offset, jint basevertex, jlong function_pointer) {
	const GLvoid *indices_address = (const GLvoid *)(intptr_t)offsetToPointer(indices_buffer_offset);
	glDrawRangeElementsBaseVertexPROC glDrawRangeElementsBaseVertex = (glDrawRangeElementsBaseVertexPROC)((intptr_t)function_pointer);
	glDrawRangeElementsBaseVertex(mode, start, end, count, type, indices_address, basevertex);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglDrawElementsInstancedBaseVertex(JNIEnv *env, jclass clazz, jint mode, jint count, jint type, jlong indices, jint primcount, jint basevertex, jlong function_pointer) {
	const GLvoid *indices_address = (const GLvoid *)(intptr_t)indices;
	glDrawElementsInstancedBaseVertexPROC glDrawElementsInstancedBaseVertex = (glDrawElementsInstancedBaseVertexPROC)((intptr_t)function_pointer);
	glDrawElementsInstancedBaseVertex(mode, count, type, indices_address, primcount, basevertex);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglDrawElementsInstancedBaseVertexBO(JNIEnv *env, jclass clazz, jint mode, jint count, jint type, jlong indices_buffer_offset, jint primcount, jint basevertex, jlong function_pointer) {
	const GLvoid *indices_address = (const GLvoid *)(intptr_t)offsetToPointer(indices_buffer_offset);
	glDrawElementsInstancedBaseVertexPROC glDrawElementsInstancedBaseVertex = (glDrawElementsInstancedBaseVertexPROC)((intptr_t)function_pointer);
	glDrawElementsInstancedBaseVertex(mode, count, type, indices_address, primcount, basevertex);
}
/* JNI bridges for provoking-vertex, multisample-texture, multisample-query
 * and framebuffer-texture entry points. The GL function pointer arrives as
 * a jlong and is called through after an intptr_t cast; buffer arguments
 * arrive as raw addresses packed into jlongs. */
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglProvokingVertex(JNIEnv *env, jclass clazz, jint mode, jlong function_pointer) {
	glProvokingVertexPROC glProvokingVertex = (glProvokingVertexPROC)((intptr_t)function_pointer);
	glProvokingVertex(mode);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglTexImage2DMultisample(JNIEnv *env, jclass clazz, jint target, jint samples, jint internalformat, jint width, jint height, jboolean fixedsamplelocations, jlong function_pointer) {
	glTexImage2DMultisamplePROC glTexImage2DMultisample = (glTexImage2DMultisamplePROC)((intptr_t)function_pointer);
	glTexImage2DMultisample(target, samples, internalformat, width, height, fixedsamplelocations);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglTexImage3DMultisample(JNIEnv *env, jclass clazz, jint target, jint samples, jint internalformat, jint width, jint height, jint depth, jboolean fixedsamplelocations, jlong function_pointer) {
	glTexImage3DMultisamplePROC glTexImage3DMultisample = (glTexImage3DMultisamplePROC)((intptr_t)function_pointer);
	glTexImage3DMultisample(target, samples, internalformat, width, height, depth, fixedsamplelocations);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglGetMultisamplefv(JNIEnv *env, jclass clazz, jint pname, jint index, jlong val, jlong function_pointer) {
	GLfloat *val_address = (GLfloat *)(intptr_t)val;
	glGetMultisamplefvPROC glGetMultisamplefv = (glGetMultisamplefvPROC)((intptr_t)function_pointer);
	glGetMultisamplefv(pname, index, val_address);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglSampleMaski(JNIEnv *env, jclass clazz, jint index, jint mask, jlong function_pointer) {
	glSampleMaskiPROC glSampleMaski = (glSampleMaskiPROC)((intptr_t)function_pointer);
	glSampleMaski(index, mask);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglFramebufferTexture(JNIEnv *env, jclass clazz, jint target, jint attachment, jint texture, jint level, jlong function_pointer) {
	glFramebufferTexturePROC glFramebufferTexture = (glFramebufferTexturePROC)((intptr_t)function_pointer);
	glFramebufferTexture(target, attachment, texture, level);
}
/* JNI bridges for the GL sync-object API. GLsync handles cross the JNI
 * boundary packed into jlong values and are recovered with
 * (GLsync)(intptr_t) casts; glFenceSync's result travels back the same way. */
JNIEXPORT jlong JNICALL Java_org_lwjgl_opengl_GL32_nglFenceSync(JNIEnv *env, jclass clazz, jint condition, jint flags, jlong function_pointer) {
	glFenceSyncPROC glFenceSync = (glFenceSyncPROC)((intptr_t)function_pointer);
	GLsync __result = glFenceSync(condition, flags);
	return (intptr_t)__result;
}
JNIEXPORT jboolean JNICALL Java_org_lwjgl_opengl_GL32_nglIsSync(JNIEnv *env, jclass clazz, jlong sync, jlong function_pointer) {
	glIsSyncPROC glIsSync = (glIsSyncPROC)((intptr_t)function_pointer);
	GLboolean __result = glIsSync((GLsync)(intptr_t)sync);
	return __result;
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglDeleteSync(JNIEnv *env, jclass clazz, jlong sync, jlong function_pointer) {
	glDeleteSyncPROC glDeleteSync = (glDeleteSyncPROC)((intptr_t)function_pointer);
	glDeleteSync((GLsync)(intptr_t)sync);
}
JNIEXPORT jint JNICALL Java_org_lwjgl_opengl_GL32_nglClientWaitSync(JNIEnv *env, jclass clazz, jlong sync, jint flags, jlong timeout, jlong function_pointer) {
	glClientWaitSyncPROC glClientWaitSync = (glClientWaitSyncPROC)((intptr_t)function_pointer);
	GLenum __result = glClientWaitSync((GLsync)(intptr_t)sync, flags, timeout);
	return __result;
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglWaitSync(JNIEnv *env, jclass clazz, jlong sync, jint flags, jlong timeout, jlong function_pointer) {
	glWaitSyncPROC glWaitSync = (glWaitSyncPROC)((intptr_t)function_pointer);
	glWaitSync((GLsync)(intptr_t)sync, flags, timeout);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglGetInteger64v(JNIEnv *env, jclass clazz, jint pname, jlong data, jlong function_pointer) {
	GLint64 *data_address = (GLint64 *)(intptr_t)data;
	glGetInteger64vPROC glGetInteger64v = (glGetInteger64vPROC)((intptr_t)function_pointer);
	glGetInteger64v(pname, data_address);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglGetInteger64i_v(JNIEnv *env, jclass clazz, jint value, jint index, jlong data, jlong function_pointer) {
	GLint64 *data_address = (GLint64 *)(intptr_t)data;
	glGetInteger64i_vPROC glGetInteger64i_v = (glGetInteger64i_vPROC)((intptr_t)function_pointer);
	glGetInteger64i_v(value, index, data_address);
}
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_GL32_nglGetSynciv(JNIEnv *env, jclass clazz, jlong sync, jint pname, jint bufSize, jlong length, jlong values, jlong function_pointer) {
	GLsizei *length_address = (GLsizei *)(intptr_t)length;
	GLint *values_address = (GLint *)(intptr_t)values;
	glGetSyncivPROC glGetSynciv = (glGetSyncivPROC)((intptr_t)function_pointer);
	glGetSynciv((GLsync)(intptr_t)sync, pname, bufSize, length_address, values_address);
}
| {
"content_hash": "65e85dd087db512b5b8c0151c826b213",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 243,
"avg_line_length": 72.74452554744525,
"alnum_prop": 0.793096528195866,
"repo_name": "mk12/mycraft",
"id": "0b5c20f62f570a65d6f18a799a7cc9788d6adeb6",
"size": "9966",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "lib/LWJGL/lwjgl-source-2/src/native/generated/opengl/org_lwjgl_opengl_GL32.c",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1980831"
},
{
"name": "C++",
"bytes": "14305"
},
{
"name": "Java",
"bytes": "7505246"
},
{
"name": "Objective-C",
"bytes": "32654"
}
],
"symlink_target": ""
} |
package dk.statsbiblioteket.doms.transformers.fileenricher;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
public class ChecksumParserTest {

    @Before
    public void setUp() throws Exception {
    }

    @After
    public void tearDown() throws Exception {
    }

    /**
     * Parses the bundled "checksumTestFile" classpath fixture and verifies
     * that three entries land in both the name-to-checksum map and the size map.
     */
    @Test
    public void parseFile() throws URISyntaxException, IOException {
        InputStreamReader fixture = new InputStreamReader(
                Thread.currentThread().getContextClassLoader().getResourceAsStream("checksumTestFile"));
        ChecksumParser checksums = new ChecksumParser(new BufferedReader(fixture));

        assertThat(checksums.getNameChecksumsMap().size(), is(3));
        assertThat(checksums.getSizeMap().size(), is(3));
    }
}
| {
"content_hash": "e9e4079e32b02c183ac3b5c77c9f0b3a",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 126,
"avg_line_length": 28.194444444444443,
"alnum_prop": 0.6758620689655173,
"repo_name": "statsbiblioteket/doms-transformers",
"id": "9e8a0de96235f2af7ac840d30c61ba1b46cdfc21",
"size": "1015",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fileenricher/src/test/java/dk/statsbiblioteket/doms/transformers/fileenricher/ChecksumParserTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "202449"
},
{
"name": "Python",
"bytes": "1832"
},
{
"name": "Shell",
"bytes": "6910"
},
{
"name": "XSLT",
"bytes": "12318"
}
],
"symlink_target": ""
} |
'''
Clipboard Gtk3: an implementation of the Clipboard using Gtk3.
'''

__all__ = ('ClipboardGtk3',)

from kivy.utils import platform
from kivy.support import install_gobject_iteration
from kivy.core.clipboard import ClipboardBase

# Fail fast at import time: this backend is Linux-only.
if platform != 'linux':
    raise SystemError('unsupported platform for gtk3 clipboard')

from gi.repository import Gtk, Gdk
# Module-wide handle to the desktop CLIPBOARD selection (not PRIMARY).
clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
class ClipboardGtk3(ClipboardBase):
    '''Clipboard provider backed by Gtk3's CLIPBOARD selection.

    Only the 'text/plain;charset=utf-8' mimetype is supported; every other
    mimetype is ignored.
    '''

    # Tracks whether gobject iteration has been installed for this process.
    _is_init = False

    def init(self):
        '''Install gobject iteration on first use; later calls are no-ops.'''
        if not self._is_init:
            install_gobject_iteration()
            self._is_init = True

    def get(self, mimetype='text/plain;charset=utf-8'):
        '''Return the clipboard text, or an empty string when there is none
        (or the mimetype is unsupported).'''
        self.init()
        if mimetype != 'text/plain;charset=utf-8':
            return ''
        text = clipboard.wait_for_text()
        return text if text else ''

    def put(self, data, mimetype='text/plain;charset=utf-8'):
        '''Decode ``data`` with the base class encoding and store it on the
        clipboard; unsupported mimetypes are silently ignored.'''
        self.init()
        if mimetype != 'text/plain;charset=utf-8':
            return
        clipboard.set_text(data.decode(self._encoding), -1)
        clipboard.store()

    def get_types(self):
        '''Return the list of mimetypes this provider can serve.'''
        self.init()
        return ['text/plain;charset=utf-8']
"content_hash": "4107dbb2447fb6e25252963072b0c7e7",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 64,
"avg_line_length": 26.6,
"alnum_prop": 0.6265664160401002,
"repo_name": "el-ethan/kivy",
"id": "ce53a3d1b974bc971adda77832c3a46728650d4b",
"size": "1197",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "kivy/core/clipboard/clipboard_gtk3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "340566"
},
{
"name": "Emacs Lisp",
"bytes": "9695"
},
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "HTML",
"bytes": "19384"
},
{
"name": "Makefile",
"bytes": "4202"
},
{
"name": "Objective-C",
"bytes": "14779"
},
{
"name": "Python",
"bytes": "3679127"
},
{
"name": "Vim script",
"bytes": "1123"
}
],
"symlink_target": ""
} |
title: graduation
---
{% include authoring/image.html
image_path = "assets/collections/photography/preview/graduation.jpeg"
link = "assets/collections/photography/full-res/graduation.jpeg"
caption = "The Grand County High School Class of 2020 graduated in [an unusual ceremony](https://www.moabtimes.com/articles/class-of-20-graduates-in-style-despite-pandemic/)."
alt = "Grand County High School seniors sit atop and in their decorated vehicles, in line in front of the school during the Class of 2020 graduation ceremony"
is_decorative = false
credit = "Photo by Carter Pape"
license = "exclusive use"
%}
| {
"content_hash": "5566fd81e000777f1a667adab542248f",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 179,
"avg_line_length": 53.416666666666664,
"alnum_prop": 0.7410296411856474,
"repo_name": "CarterPape/CarterPape.github.io",
"id": "96d0e6fe85ab332f375c313756b82d6ceb7be569",
"size": "645",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_photography/graduation.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18570"
},
{
"name": "HTML",
"bytes": "12004"
},
{
"name": "JavaScript",
"bytes": "536"
},
{
"name": "Python",
"bytes": "214"
},
{
"name": "Ruby",
"bytes": "6985"
}
],
"symlink_target": ""
} |
<?php
namespace yii\log;
use Yii;
use yii\base\InvalidConfigException;
use yii\di\Instance;
use yii\mail\MailerInterface;
/**
* EmailTarget sends selected log messages to the specified email addresses.
*
* You may configure the email to be sent by setting the [[message]] property, through which
* you can set the target email addresses, subject, etc.:
*
* ```php
* 'components' => [
* 'log' => [
* 'targets' => [
* [
* 'class' => 'yii\log\EmailTarget',
* 'mailer' =>'mailer',
* 'levels' => ['error', 'warning'],
* 'message' => [
* 'from' => ['[email protected]'],
* 'to' => ['[email protected]', '[email protected]'],
* 'subject' => 'Log message',
* ],
* ],
* ],
* ],
* ],
* ```
*
* In the above `mailer` is ID of the component that sends email and should be already configured.
*
* @author Qiang Xue <[email protected]>
* @since 2.0
*/
class EmailTarget extends Target
{
    /**
     * @var array configuration used to build the [[\yii\mail\MessageInterface|message]] object.
     * The "to" key is mandatory and holds the destination address(es).
     */
    public $message = [];
    /**
     * @var MailerInterface|array|string the mailer object, the application component ID
     * of the mailer, or a configuration array for creating one.
     */
    public $mailer = 'mailer';

    /**
     * Initializes the target: validates [[message]] and resolves [[mailer]]
     * into a concrete MailerInterface instance.
     * @throws InvalidConfigException if no recipient address is configured.
     */
    public function init()
    {
        parent::init();
        if (empty($this->message['to'])) {
            throw new InvalidConfigException('The "to" option must be set for EmailTarget::message.');
        }
        $this->mailer = Instance::ensure($this->mailer, 'yii\mail\MailerInterface');
    }

    /**
     * Sends the collected log messages to the configured email addresses.
     */
    public function export()
    {
        // The default subject is assigned lazily here rather than in init()
        // because of https://github.com/yiisoft/yii2/issues/1446
        if (empty($this->message['subject'])) {
            $this->message['subject'] = 'Application Log';
        }
        $lines = array_map([$this, 'formatMessage'], $this->messages);
        $this->composeMessage(wordwrap(implode("\n", $lines), 70))->send($this->mailer);
    }

    /**
     * Builds a mail message carrying the given text body.
     * @param string $body the body content
     * @return \yii\mail\MessageInterface the composed message
     */
    protected function composeMessage($body)
    {
        $mail = $this->mailer->compose();
        Yii::configure($mail, $this->message);
        $mail->setTextBody($body);
        return $mail;
    }
}
| {
"content_hash": "2066a3ea43d0b4c19ddb844084fe4ec5",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 112,
"avg_line_length": 31.432989690721648,
"alnum_prop": 0.5746146277468023,
"repo_name": "meishichao/yii2test",
"id": "0e2b1b8a9044d038515bbf195a5c6edab12ad492",
"size": "3200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/yiisoft/yii2/log/EmailTarget.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3154"
},
{
"name": "JavaScript",
"bytes": "55"
},
{
"name": "PHP",
"bytes": "144876"
},
{
"name": "Shell",
"bytes": "1541"
}
],
"symlink_target": ""
} |
/* Override widget.css */
.top-bar.expanded {
width: 100%;
}
/* Alerts */
.alert.alert-error,
.alert.alert-info,
.alert.alert-warning,
.alert.alert-success {
border: 1px solid #b2b2b2;
box-shadow: 0 1px 1px #aaaaa7;
margin-left: auto;
margin-right: auto;
margin-top: 10px;
padding: 8px 35px 8px 14px;
position: relative;
width: auto;
z-index: 999;
border-radius: 4px;
}
/* Sections */
.form-container form,
#datalist-filter-form,
#datatable-filter-form,
#summary-filter-form,
#summary-sections,
.thumbnail {
border: 1px solid #CCC;
}
/* Links should ALWAYS be unvisited */
#content a.help:link {
color: #fff;
text-decoration: none;
margin-right: 10px;
}
#content a.help:hover {
background-color: #336699;
text-decoration: underline;
}
#content a.help:visited {
font-weight: normal;
color: #666;
}
#content h1,
#content h2 {
font-size: 1.3em;
font-weight: bolder;
background: #F7F8F9;
padding: 5px 5px 5px 5px;
}
#content h2 {
margin-top: 10px;
padding-left: 10px;
}
#content h3 {
font-size: 1.1em;
padding-bottom: 5px;
}
/* General forms */
form {
font-size: 0.8em;
}
form .form-row {
margin-top: 5px !important;
}
form .form-row > .columns > label {
font-weight: bold;
}
form label.inline {
padding: 0;
}
form .gis_loc_select_btn {
font-size: 0.8em;
}
form .gis_loc_select_btn i {
font-size: 1.0em;
margin-right: 4px;
}
form .error {
display: block;
padding: 0.375rem 0.5625rem 0.5625rem;
margin-top: -1px;
margin-bottom: 1rem;
font-size: 0.75rem;
font-style: italic;
background: #c60f13;
color: white;
}
form .date-clear-btn {
font-size: 0.75rem;
margin-left: 0.15rem !important;
}
form .action-lnk {
font-weight: normal;
font-size: 0.75rem;
}
form table.embeddedComponent {
border-collapse: separate;
}
form.auth_login #submit_record__row {
white-space: pre;
}
/* General Widgets */
input[type="text"],
input[type="password"],
input[type="date"],
input[type="datetime"],
input[type="datetime-local"],
input[type="month"],
input[type="week"],
input[type="email"],
input[type="number"],
input[type="search"],
input[type="tel"],
input[type="time"],
input[type="url"],
textarea,
select {
height: 1.75rem;
padding: 0.25rem;
margin: 0;
display: inline-block;
font-weight: normal;
}
input[type="text"],
input[type="password"],
input[type="date"],
input[type="datetime"],
input[type="datetime-local"],
input[type="month"],
input[type="week"],
input[type="email"],
input[type="number"],
input[type="search"],
input[type="tel"],
input[type="time"],
input[type="url"],
select {
width: auto !important;
}
/* Form and Item containers */
#last_update {
float: none;
}
.form-container,
.item-container {
width: auto;
overflow: inherit;
}
.form-container form,
.item-container form {
background: #fefefe;
padding: 5px 10px;
}
.form-container .controls,
.item-container .controls {
display: inline-block;
}
.form-container form select,
.form-container form input.string:not(.date),
.form-container form textarea {
margin: 0;
}
.form-container form .ui-multiselect {
max-width: 100%;
}
.form-container form:not(.filter-form):not(.auth_login):not(.auth_register) input[type="text"]:not(.double):not(.datetimepicker):not(.date) {
width: 500px !important;
}
.item-container .controls {
background: #fafafa;
padding: 0.35rem;
min-height: 1.7rem;
}
/* Filter forms */
.filter-form table tr,
.filter-form .ui-multiselect {
max-width: 60%;
}
.filter-form .inline-tooltip {
padding-top: 0.2rem;
}
/* Login/Registration box */
#login_form,
#register_form,
#login_box {
clear: none !important;
}
/* Help Popup */
.inline-tooltip {
display: inline-block;
vertical-align: top;
padding-left: 0.75rem;
}
.inline-tooltip .tooltip,
.inline-tooltip .tooltipbody,
.inline-tooltip .stickytip,
.inline-tooltip .ajaxtip {
display: block;
padding: 0;
width: 60px;
}
/* Rheader */
#rheader a.th {
margin-right: 10px;
}
#rheader a.th img {
height: 60px;
}
/* RHeader Tabs */
div.tabs {
border-bottom: 1px solid #2ba6cb;
clear: left;
height: 2.125em;
margin: 5px 0 0;
padding: 0.5em 0;
text-align: left;
width: 100%;
line-height: 1em;
}
div.tabs span {
float: left;
border-radius: 3px 3px 0 0;
padding: 0.15em 6px 0.25em 5px;
}
div.tabs span a {
color: #ffffff !important;
text-decoration: none !important;
background: transparent !important;
}
div.tabs span a:hover {
background: transparent !important;
}
div.tabs span.tab_here {
background: #ffffff;
border-color: #2ba6cb;
border-style: solid;
border-width: 2px 2px 0 3px;
border-bottom-width: 1px;
border-bottom-style: solid;
border-bottom-color: #ffffff;
bottom: 0;
display: inline;
font-weight: bold;
margin-right: 5px;
padding: 0.2em 6px 0.25em 5px;
position: relative;
}
div.tabs span.tab_here:hover {
background: #f1edec;
}
div.tabs span.tab_here a {
color: #666666 !important;
}
div.tabs span.tab_here a:hover {
color: #666666 !important;
}
div.tabs span.tab_last,
div.tabs span.tab_other {
font-size: 90%;
background: #2ba6cb;
border-color: #2ba6cb;
border-width: 1px 1px 0 2px;
border-style: solid;
margin-right: 3px;
margin-top: 0.25em;
}
div.tabs span.tab_last:hover,
div.tabs span.tab_other:hover {
background: #1f7994;
border-color: #1f7994;
}
div.tabs span.tab_last a,
div.tabs span.tab_other a {
color: #ffffff !important;
}
div.tabs span.tab_last a:hover,
div.tabs span.tab_other a:hover {
color: #ffffff !important;
}
/* Action buttons */
.action-btn,
.delete-btn-ajax,
.delete-btn,
.selected-action {
font-size: 0.6875rem;
border: 0;
line-height: 1;
margin-bottom: inherit;
padding: 0.25rem 0.5rem 0.375rem;
cursor: pointer;
text-decoration: none !important;
display: inline-block;
}
.action-btn[disabled], .action-btn[disabled]:hover,
.delete-btn-ajax[disabled],
.delete-btn-ajax[disabled]:hover,
.delete-btn[disabled],
.delete-btn[disabled]:hover,
.selected-action[disabled],
.selected-action[disabled]:hover {
color: #ffffff;
background-color: #c0c0c0;
}
/* Default action buttons */
.action-btn,
.selected-action {
background-color: #2ba6cb;
color: #ffffff !important;
}
.action-btn:hover,
.selected-action:hover {
background-color: #1f7994;
color: #ffffff !important;
}
/* Alert-style action buttons (e.g. delete) */
.delete-btn-ajax,
.delete-btn {
background-color: #c60f13;
color: #ffffff !important;
}
.delete-btn-ajax:hover,
.delete-btn:hover {
background-color: #a20c10 !important;
color: #ffffff !important;
}
/* Action buttons in datatables */
.dataTable .action-btn,
.dataTable .selected-action {
color: #ffffff;
}
.dataTable .delete-btn,
.dataTable .delete-btn-ajax {
color: #ffffff;
}
.dataTable td.actions {
white-space: nowrap;
}
/* Footer */
#footer {
margin-top: 20px;
padding-top: 20px;
border-top: 1px solid #EEEEEE;
}
#footer button.btn {
font-size: 0.6875rem;
border: 0;
line-height: 1;
margin-bottom: inherit;
padding: 0.25rem 0.5rem 0.375rem;
cursor: pointer;
text-decoration: none !important;
display: inline-block;
margin-left: 2px;
margin-right: 2px;
color: #ffffff;
background: #dddddd;
}
#footer button.btn:hover {
color: #ffffff;
background: #a0a0a0;
}
/* Menu Logo */
.S3menulogo {
  background: url(img/athewaas_logo_whiteSmall.png) left top no-repeat;
  text-shadow: none;
  /* Fix: 'none' is not a valid value for the padding property; 0 is the
     intended reset. */
  padding: 0;
  margin-left: 5px;
  margin-top: 10px;
  width: 35px;
  height: 28px;
  display: inline-block;
}
/* Top bar */
.top-bar {
color: #ffffff;
background-color: #333333;
z-index: 1000;
}
.top-bar .menu-toggle input {
margin-right: 5px;
}
.top-bar-section ul li:not(.has-form) a:not(.button) {
color: #ffffff;
background-color: #333333;
}
.top-bar-section ul li:not(.has-form) a:not(.button):hover {
color: #ffffff;
background-color: #2ba6cb;
}
.top-bar-section ul li.active:not(.has-form) a:not(.button) {
color: #ffffff;
background-color: #555555;
}
.top-bar-section ul li.active:not(.has-form) a:not(.button):hover {
color: #ffffff;
background-color: #2ba6cb;
}
.top-bar-section ul li.has-dropdown:hover > a {
background-color: #2ba6cb;
}
.top-bar-section ul.dropdown li:not(.has-form) a:not(.button) {
color: #ffffff;
background-color: #777879;
}
.top-bar-section ul.dropdown li:not(.has-form) a:not(.button):hover {
color: #ffffff;
background-color: #2ba6cb;
}
.top-bar-section ul.dropdown li.active:not(.has-form) a:not(.button) {
color: #ffffff;
background-color: #979899;
}
.top-bar-section ul.dropdown li.active:not(.has-form) a:not(.button):hover {
color: #ffffff;
background-color: #2ba6cb;
}
.top-bar-section li.menu-home a {
font-weight: bold !important;
font-size: 1.4em !important;
text-transform: capitalize !important;
}
/* Side Menu */
.sidebar {
background: none repeat scroll 0 0 #d7d8d9;
margin-top: 10px;
}
.side-nav li {
line-height: 1.2;
list-style-type: none;
list-style-position: inside;
margin: 0;
padding: 0;
}
.side-nav li:last-child {
margin-bottom: 4px;
}
.side-nav li > a:not(.button) {
padding: 3px;
color: #666666;
background-color: #d7d8d9;
}
.side-nav li:hover > a:not(.button) {
color: #ffffff;
background-color: #2ba6cb;
}
.side-nav li.heading {
text-transform: uppercase;
font-weight: bold;
border-top: 1px solid #c0c1c2;
}
.side-nav li.heading > a:not(button) {
padding: 6px 3px 3px 6px;
color: #666666;
background-color: #d7d8d9;
}
.side-nav li.active > a:first-child:not(.button) {
font-weight: bold;
color: #666666;
background-color: #e0e1e2;
}
.side-nav li.active > a:first-child:not(.button):hover {
color: #ffffff;
background-color: #2ba6cb;
}
.side-nav li.divider {
border-top: 1px solid #ffffff;
height: 0;
list-style: none;
}
/* Datatables */
.dataTables_length {
float: left !important;
}
.dataTables_length label {
font-size: 0.75rem;
white-space: nowrap;
margin-right: 10em;
margin-bottom: 0.3em;
}
.dataTables_length select {
height: auto;
padding: 2px;
font-size: 0.75rem;
}
table.dataTable thead th, table.dataTable th, table.dataTable td {
border: 1px solid #cccccc;
padding: 0.2em 1.5em 0.2em 0.5em;
}
table.dataTable thead th, table.dataTable thead td {
background-color: #ffffff;
}
table.dataTable tbody tr.even {
background-color: #ffffff;
}
table.dataTable tbody tr.even td.sorting_1 {
background-color: #fafafa;
}
table.dataTable tbody tr.odd {
background-color: #f7f8f9;
}
table.dataTable tbody tr.odd td {
border-color: #cccccc;
}
table.dataTable tbody tr.odd td.sorting_1 {
background-color: #f0f1f2;
}
table.dataTable tfoot th, table.dataTable tfoot td {
background-color: #f7f8f9;
border-top: 2px solid #cccccc;
padding: 0.5em;
}
table.dataTable .selected-action {
margin: 5px 0 5px;
}
.dataTables_processing {
padding: 14px 0 28px;
}
.dataTables_filter {
text-align: left;
font-size: 0.75rem;
margin-right: 3rem;
}
.dt-export-options {
float: right;
}
.dt-export-options .list_formats {
padding-top: 0;
margin-left: 5px;
}
.dt-export-options .dt-export {
margin: 0 0.085rem;
}
/* Empty-sections */
.empty {
font-style: italic;
font-size: 0.8em;
}
/* jQuery UI widgets */
.ui-widget-header {
background: none;
border: 1px solid #ADA6A0;
}
.ui-widget-header a {
color: #222222 !important;
border-color: #C7C7C7;
margin-bottom: 0px;
font-weight: bold;
text-decoration: none !important;
font-size: 0.7em;
}
.ui-widget-header .ui-icon-check,
.ui-widget-header .ui-icon-closethick {
background: none;
height: 0px;
width: 0px;
}
.ui-state-active,
.ui-widget-content .ui-state-active,
.ui-widget-header .ui-state-active {
border-color: #ADA6A0;
}
/* Summary tabs */
.ui-tabs .ui-tabs-panel {
padding: 0.5em;
}
#summary-tabs {
margin-top: 10px;
}
#summary-tabs.ui-widget-content {
background: none;
border: none;
}
#summary-tab-headers .ui-tabs-nav {
border: none;
margin: 0 10px;
}
#summary-tab-headers a {
border-color: #CCC;
}
#summary-tab-headers .ui-tabs-active a {
border-bottom: 1px solid white;
}
#summary-tab-headers li {
margin-right: 5px;
box-shadow: none;
}
#summary-sections .x-panel-body {
border: none;
border-radius: 4px;
}
/* Date picker */
input.date.hasDatepicker {
display: inline-block;
}
.ui-datepicker-trigger {
background-color: transparent;
background-image: url("../../img/bootstrap/calendar.gif");
margin-left: 10px;
}
/* Range filter widgets */
.range-filter-label label {
font-size: 1.0em;
}
.range-filter-widget input.date-filter-input {
width: auto;
float: left;
margin-right: 2px;
}
.range-filter-widget button.date-clear-btn {
font-size: 0.85em;
padding: 0.325em;
margin-top: 0.5em;
}
/* ImageCropWidget */
.imagecrop-btn {
font-size: 1em;
margin-left: 0.2em;
padding: 0.3em;
}
/* Miscellaneous */
| {
"content_hash": "4f22cc4db0964e070b89cabc0639b4d7",
"timestamp": "",
"source": "github",
"line_count": 664,
"max_line_length": 141,
"avg_line_length": 19.329819277108435,
"alnum_prop": 0.6782236073237242,
"repo_name": "julianprabhakar/eden_car",
"id": "6bb21c62d6ba034ad829192e3d83ea989c248d44",
"size": "12835",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "static/themes/Kashmir/style.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2030949"
},
{
"name": "JavaScript",
"bytes": "19162817"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Perl",
"bytes": "666"
},
{
"name": "Python",
"bytes": "28361616"
},
{
"name": "Ruby",
"bytes": "2051"
},
{
"name": "Shell",
"bytes": "4846"
},
{
"name": "XSLT",
"bytes": "2644035"
}
],
"symlink_target": ""
} |
package model
import (
"fmt"
"time"
"github.com/insionng/zenpress/helper"
"github.com/casbin/casbin"
)
// User is the basic user account table; it stores the core profile data
// for every user in the system.
// (Original comment translated from Chinese: "user basic information table".)
type User struct {
	ID                uint64    `gorm:"primary_key"`
	Token             string    //`orm:"unique"`
	UserLogin         string    `gorm:"not null default '' index VARCHAR(60)"`
	UserPass          string    `gorm:"not null default '' VARCHAR(255)"`
	UserNicename      string    `gorm:"not null default '' index VARCHAR(50)"`
	UserEmail         string    `gorm:"not null default '' index VARCHAR(100)"`
	UserURL           string    `gorm:"not null default '' VARCHAR(100)"`
	UserRegistered    time.Time `gorm:"not null default '0000-00-00 00:00:00' DATETIME"`
	UserActivationKey string    `gorm:"not null default '' VARCHAR(255)"`
	UserStatus        int       `gorm:"not null default 0 INT(11)"`
	DisplayName       string    `gorm:"not null default '' VARCHAR(250)"`
	Spam              int       `gorm:"not null default 0 TINYINT(2)"`
	Deleted           int       `gorm:"not null default 0 TINYINT(2)"`
	Roles             []Role    `gorm:"many2many:user_roles;"`
}
// Enforcer is the shared casbin enforcer; it is populated by Init.
var Enforcer *casbin.Enforcer
// getAttr resolves an action attribute for the casbin enforcer. Only the
// "url" attribute is supported; name is a permission ID rendered as a
// string, and the matching permission's URL is returned ("" if absent).
func getAttr(name string, attr string) string {
	if attr != "url" {
		return ""
	}
	for _, p := range FindPermissions() {
		if fmt.Sprintf("%v", p.ID) == name {
			return p.URL
		}
	}
	return ""
}
// getAttrFunc adapts getAttr to the variadic signature expected by
// casbin's AddActionAttributeFunction.
func getAttrFunc(args ...interface{}) (interface{}, error) {
	return getAttr(args[0].(string), args[1].(string)), nil
}
// Init creates the global casbin Enforcer from the RBAC model file and
// seeds it with the user-role and role-permission assignments currently
// stored in the database.
func Init() {
	Enforcer = &casbin.Enforcer{}
	Enforcer.InitWithFile("content/config/rbac_model.conf", "")
	Enforcer.AddActionAttributeFunction(getAttrFunc)

	// Load user -> role links.
	type userRoleRow struct {
		UserID uint64
		RoleID uint64
	}
	var userRoles []userRoleRow
	Database.Raw("select user_id, role_id from user_roles").Scan(&userRoles)
	for _, ur := range userRoles {
		Enforcer.AddRoleForUser(fmt.Sprintf("%v", ur.UserID), fmt.Sprintf("%v", ur.RoleID))
	}

	// Load role -> permission links.
	type rolePermissionRow struct {
		RoleID       uint64
		PermissionID uint64
	}
	var rolePerms []rolePermissionRow
	Database.Raw("select role_id, permission_id from role_permissions").Scan(&rolePerms)
	for _, rp := range rolePerms {
		Enforcer.AddPermissionForUser(fmt.Sprintf("%v", rp.RoleID), fmt.Sprintf("%v", rp.PermissionID))
	}
}
// FindUserById fetches a user by primary key. The boolean reports
// whether a matching row was found (the query produced no error).
func FindUserById(id int) (bool, User) {
	var found User
	err := Database.Where("id = ?", id).First(&found).Error
	return err == nil, found
}
// FindUserByToken fetches the user owning the given session token. The
// boolean reports whether a matching row was found.
func FindUserByToken(token string) (bool, User) {
	var found User
	err := Database.Where("token = ?", token).First(&found).Error
	return err == nil, found
}
// Login authenticates a user by login name and password, returning the
// matching record and whether authentication succeeded.
//
// Fix: the User model has no Username/Password fields; gorm maps
// UserLogin/UserPass to the columns user_login/user_pass, so the old
// filter on "username"/"password" referenced non-existent columns and
// could never match.
//
// SECURITY NOTE(review): this compares the supplied password verbatim
// against the stored value; if user_pass holds a hash, hash the input
// before calling (confirm against the registration code).
func Login(username string, password string) (bool, User) {
	var user User
	db := Database.Where("user_login = ? and user_pass = ?", username, password).First(&user)
	return db.Error == nil, user
}
// FindUserByUserName looks a user up by login name.
//
// Fix: filter on user_login (gorm's default column for the UserLogin
// field); the model defines no "username" column, so the previous query
// could never match.
func FindUserByUserName(username string) (bool, User) {
	var user User
	db := Database.Where("user_login = ?", username).First(&user)
	return db.Error == nil, user
}
// SaveUser inserts the user and returns the ID assigned by the database.
func SaveUser(user *User) uint64 {
	Database.Create(user)
	return user.ID
}
// UpdateUser persists changes to an existing user.
// NOTE(review): gorm's Update normally expects a Model() receiver or
// column/value arguments; confirm Database.Update(user) does what is
// intended here — Database.Save(user) is the usual idiom for writing a
// full record.
func UpdateUser(user *User) {
	Database.Update(user)
}
// PageUser returns one page of users ordered by registration time.
// p is the 1-based page number and size the page length.
func PageUser(p int, size int) helper.Page {
	var user User
	var list []User
	qs := Database.Find(&user)
	var count int
	// Fix: gorm's Count requires a pointer destination; the previous
	// call passed the int by value, so count always stayed 0.
	qs.Count(&count)
	qs.Related(&user).Order("user_registered").Limit(size).Offset((p - 1) * size).Find(&list)
	return helper.PageUtil(count, p, size, list)
}
// FindPermissionByUserIdAndPermissionName reports whether the given
// user may exercise the named permission: the permission's URL is
// resolved and the casbin enforcer is consulted. Unknown permission
// names yield false.
func FindPermissionByUserIdAndPermissionName(userId int, name string) bool {
	for _, p := range FindPermissions() {
		if p.Name == name {
			return Enforcer.Enforce(fmt.Sprintf("%v", userId), p.URL)
		}
	}
	return false
}
// DeleteUser removes the user from both the casbin enforcer (dropping
// its role links) and the database.
func DeleteUser(user *User) {
	Enforcer.DeleteUser(fmt.Sprintf("%v", user.ID))
	Database.Delete(user)
}
// DeleteUserRolesByUserId removes every role assignment of the user,
// keeping the enforcer and the user_roles table in sync.
func DeleteUserRolesByUserId(user_id int) {
	Enforcer.DeleteRolesForUser(fmt.Sprintf("%v", user_id))
	Database.Exec("delete from user_roles where user_id = ?", user_id)
}
// SaveUserRole grants role_id to user_id, recording the link both in
// the enforcer and in the user_roles table.
func SaveUserRole(user_id int, role_id int) {
	Enforcer.AddRoleForUser(fmt.Sprintf("%v", user_id), fmt.Sprintf("%v", role_id))
	Database.Exec("insert into user_roles (user_id, role_id) values (?, ?)", user_id, role_id)
}
// UserRoleResult is one row of the user_roles join table.
type UserRoleResult struct {
	ID     uint64
	UserID uint64
	RoleID uint64
}
// FindUserRolesByUserId returns all role assignments for the given user.
func FindUserRolesByUserId(user_id int) []UserRoleResult {
	var res []UserRoleResult
	Database.Raw("select id, user_id, role_id from user_roles where user_id = ?", user_id).Scan(&res)
	return res
}
| {
"content_hash": "8cb44093a2f7f8b874b9ec585dbecd9a",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 103,
"avg_line_length": 27.393939393939394,
"alnum_prop": 0.6754424778761062,
"repo_name": "insionng/zenpress",
"id": "f633eb2813565f45228bebde62082a7ec761bd62",
"size": "4562",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/user.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "916"
},
{
"name": "CSS",
"bytes": "345464"
},
{
"name": "CoffeeScript",
"bytes": "161106"
},
{
"name": "Go",
"bytes": "22037570"
},
{
"name": "Makefile",
"bytes": "794"
},
{
"name": "Shell",
"bytes": "14822"
}
],
"symlink_target": ""
} |
// Typings for the CSS-module class names defined in FilterHelp.m.scss.
// NOTE(review): *.m.scss.d.ts files are typically tool-generated
// (typed-css-modules style tooling) — confirm before hand-editing.
interface CssExports {
  'entry': string;
  'filterView': string;
  'search': string;
  'separator': string;
}
export const cssExports: CssExports;
export default cssExports;
| {
"content_hash": "931cb659353d9c04cd83f4c3dfcff5d8",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 36,
"avg_line_length": 21.875,
"alnum_prop": 0.7257142857142858,
"repo_name": "DestinyItemManager/DIM",
"id": "1b309cc040fd29e468fafe1b41ba91e40c5ad846",
"size": "251",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/app/search/FilterHelp.m.scss.d.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "361"
},
{
"name": "HTML",
"bytes": "4081"
},
{
"name": "JavaScript",
"bytes": "42997"
},
{
"name": "SCSS",
"bytes": "229517"
},
{
"name": "Shell",
"bytes": "2277"
},
{
"name": "TypeScript",
"bytes": "2941689"
}
],
"symlink_target": ""
} |
import { Path } from 'slate'
// Fixture paths handed to the test function below.
export const input = {
  path: [0, 1, 2],
  another: [1],
}

// Exercise Path.isChild with the fixture's two paths.
export const test = ({ path, another }) => Path.isChild(path, another)

// Expected: [0, 1, 2] is not a child of [1].
export const output = false
| {
"content_hash": "4f25aeda4efe692a114cc7517d463841",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 44,
"avg_line_length": 20.2,
"alnum_prop": 0.6237623762376238,
"repo_name": "ianstormtaylor/slate",
"id": "0960d6612401f96363d5b4665d1f6166c8182689",
"size": "202",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "packages/slate/test/interfaces/Path/isChild/before.tsx",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1015"
},
{
"name": "JavaScript",
"bytes": "26950"
},
{
"name": "TypeScript",
"bytes": "870599"
}
],
"symlink_target": ""
} |
package main
import (
"encoding/json"
"github.com/garyburd/redigo/redis"
"log"
"time"
)
// ErrCacheNotFound is returned by Get when the key has no value; it
// aliases redigo's redis.ErrNil.
var ErrCacheNotFound = redis.ErrNil
// Cache is a small key/value store whose values are JSON-encoded.
type Cache interface {
	// Set stores the value under the key.
	Set(string, interface{}) error
	// Get decodes the value at the key into the given container.
	Get(string, interface{}) error
	// Del removes the key.
	Del(string) error
	// Init prepares the backend for use.
	Init() error
}
// RedisCache implements Cache on top of a redigo connection pool.
type RedisCache struct {
	Prefix string `json:"prefix"` // prepended to every key
	Host string `json:"host"` // TCP address of the redis server
	pool *redis.Pool // created by Init
}
// Init builds the redigo connection pool. Idle connections are verified
// with PING when they are borrowed again.
func (r *RedisCache) Init() error {
	r.pool = &redis.Pool{
		MaxIdle:     3,
		IdleTimeout: 240 * time.Second,
		Dial: func() (redis.Conn, error) {
			c, err := redis.Dial("tcp", r.Host)
			if err != nil {
				// Fix: previously log.Fatal terminated the whole
				// process on a transient dial failure. Log and return
				// the error instead; the pool surfaces it to callers
				// and retries on the next Get.
				log.Printf("redis dial %s failed: %v", r.Host, err)
				return nil, err
			}
			return c, nil
		},
		TestOnBorrow: func(c redis.Conn, t time.Time) error {
			_, err := c.Do("PING")
			return err
		},
	}
	return nil
}
// Set JSON-encodes data and stores it under the prefixed key.
func (r *RedisCache) Set(key string, data interface{}) error {
	conn := r.pool.Get()
	defer conn.Close()
	encoded, err := json.Marshal(data)
	if err != nil {
		return err
	}
	if _, err = conn.Do("SET", r.Prefix+key, encoded); err != nil {
		return err
	}
	return nil
}
// Get fetches the prefixed key and JSON-decodes it into container.
// It returns ErrCacheNotFound when the key does not exist.
func (r *RedisCache) Get(key string, container interface{}) error {
	conn := r.pool.Get()
	defer conn.Close()
	// Fix: redis.Bytes converts the reply without the unchecked
	// data.([]byte) assertion (which could panic on an unexpected reply
	// type) and maps a nil reply to redis.ErrNil — the same value as
	// ErrCacheNotFound, so the contract is unchanged.
	data, err := redis.Bytes(conn.Do("GET", r.Prefix+key))
	if err != nil {
		return err
	}
	return json.Unmarshal(data, container)
}
// Del removes the prefixed key from redis.
func (r *RedisCache) Del(key string) error {
	conn := r.pool.Get()
	defer conn.Close()
	if _, err := conn.Do("DEL", r.Prefix+key); err != nil {
		return err
	}
	return nil
}
| {
"content_hash": "3b93221289cbfe5637df907a353c0fcf",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 67,
"avg_line_length": 18.958904109589042,
"alnum_prop": 0.634393063583815,
"repo_name": "hyperboloide/fe",
"id": "e8ad2aa43f9af87726b2586a480f84921783aa7c",
"size": "1605",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cache.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "55589"
},
{
"name": "Makefile",
"bytes": "389"
}
],
"symlink_target": ""
} |
\hypertarget{classIir_1_1LowPassTransform}{}\doxysubsection{Iir\+::Low\+Pass\+Transform Class Reference}
\label{classIir_1_1LowPassTransform}\index{Iir::LowPassTransform@{Iir::LowPassTransform}}
{\ttfamily \#include $<$Pole\+Filter.\+h$>$}
\doxysubsubsection{Detailed Description}
s-\/plane to z-\/plane transforms
For pole filters, an analog prototype is created via placement of poles and zeros in the s-\/plane. The analog prototype is either a halfband low pass or a halfband low shelf. The poles, zeros, and normalization parameters are transformed into the z-\/plane using variants of the bilinear transformation. low pass to low pass
The documentation for this class was generated from the following files\+:\begin{DoxyCompactItemize}
\item
iir/Pole\+Filter.\+h\item
iir/Pole\+Filter.\+cpp\end{DoxyCompactItemize}
| {
"content_hash": "5954fe216ea052e734028313f507f8b0",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 326,
"avg_line_length": 48.88235294117647,
"alnum_prop": 0.779783393501805,
"repo_name": "berndporr/iir1",
"id": "50a748bf676629623bfb8fa798664fa716f8d8ff",
"size": "831",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/pdf/classIir_1_1LowPassTransform.tex",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1866"
},
{
"name": "C++",
"bytes": "177828"
},
{
"name": "CMake",
"bytes": "7018"
},
{
"name": "Python",
"bytes": "353"
},
{
"name": "Shell",
"bytes": "129"
}
],
"symlink_target": ""
} |
#include "StackedSet.h"
#include "OverlayItemInternal.h"
#include "MapProjection.h"
#include "OverlayItemZoomSpec.h"
#include "OverlayViewLayerInterface.h"
#include "OverlayItemVisualSpec.h"
#include "ImageSpec.h"
#include "PrecisionScalingConstants.h"
#include "OverlayItemUtil.h"
#include "MapProjectionAdapter.h"
// To get the distance between two coordinates
#include "GfxUtility.h"
/**
 * Constructs the stack manager. Automatic highlighting starts disabled
 * and no closest item is tracked yet.
 */
StackedSet::StackedSet(MapProjectionAdapter& projection,
                       OverlayViewLayerInterface& layerInterface)
   : m_projection(projection),
     m_layerInterface(layerInterface),
     m_automaticHighlightMode(false)
{
   m_closest.item = NULL;
}
/**
 * Resets an item's stacking state before a new overlap pass:
 * empties its stack, restores the adjusted position to the real
 * position, clears the highlight flag and refreshes the pixel box
 * from the visual spec for the current zoom level.
 */
void StackedSet::initItem(OverlayItemInternal* item) {
   item->m_stack.clearStack();
   // Overlap calculations start from the unadjusted placement.
   item->m_adjustedPosition = item->getPosition();
   item->m_isHighlighted = false;

   WFAPI::wf_uint32 pixelScale =
      static_cast<WFAPI::wf_uint32>(m_projection.getPixelScale());
   const WFAPI::OverlayItemVisualSpec* spec =
      OverlayItemUtil::getCurrentVisualSpec(item, pixelScale);
   if (spec) {
      updateItemPixelBox(item, spec, item->getPosition());
   }
}
/**
 * Inserts an item into the stack set. Unstackable items always form a
 * singleton stack; stackable items are merged into the first existing
 * stack whose head pixel box they overlap, or start a new stack when
 * no overlap is found.
 */
void StackedSet::addToStackSet(OverlayItemInternal* item)
{
   if (!item->isStackable()) {
      m_stacks.push_back(item);
      return;
   }

   PixelBox itemBox(item->getPrecisionScaledPixelBox());

   for (StackVec::iterator it = m_stacks.begin();
        it != m_stacks.end(); ++it) {
      OverlayItemStackInfo& headStack = (*it)->m_stack;
      PixelBox headBox((*it)->getPrecisionScaledPixelBox());
      if ((*it)->isStackable() && itemBox.overlaps(headBox)) {
         // Merge into this stack and stop searching.
         headStack.addToStack(item);
         if (headStack.getSize() == 2) {
            // The head was a lone item until now; its pixel box must be
            // refreshed to match the stacked visual spec.
            WFAPI::wf_uint32 pixelScale = static_cast<WFAPI::wf_uint32>(
               m_projection.getPixelScale());
            const WFAPI::OverlayItemVisualSpec* spec =
               OverlayItemUtil::getCurrentVisualSpec(*it, pixelScale);
            updateItemPixelBox(*it, spec, (*it)->getPosition());
         }
         return;
      }
   }

   // No overlap with any existing stack: start a new one.
   m_stacks.push_back(item);
}
/**
 * Moves every stack so that its screen position is the average of the
 * positions of all items it contains, then writes the corresponding
 * adjusted world coordinate back to each item. All screen arithmetic
 * is done in precision-scaled dimensions (see SCALING_CONSTANT).
 */
void StackedSet::smoothPositions()
{
   for (StackVec::iterator itr = m_stacks.begin(); itr != m_stacks.end();
        itr++)
   {
      const OverlayItemInternal* curItem = *itr;
      MC2Point average(0, 0);
      int numItems = 0;
      while (curItem) {
         // average accumulates the sum of all item positions and is
         // divided by the number of items below.
         average = average +
            getPrecisionScaledScreenPosition(curItem);
         curItem = curItem->m_stack.getNext();
         numItems++;
      }
      // After these divisions average will actually contain the average
      // position (in precision scaled dimensions).
      // (numItems >= 1 here since the stack head itself was counted.)
      average.getX() /= numItems;
      average.getY() /= numItems;
      curItem = *itr;
      while (curItem) {
         // Use the offset to move pixel box.
         MC2Point offset = average -
            getPrecisionScaledScreenPosition(curItem);
         curItem->getPrecisionScaledPixelBox().move(offset);
         // Use the offset to move the world position.
         MC2Point tmpPoint;
         MC2Coordinate tmpCoord(curItem->m_adjustedPosition);
         WFAPI::wf_float64 x;
         WFAPI::wf_float64 y;
         // We get the screen position for the coordinate.
         m_projection.transformf(x,y, tmpCoord);
         // We scale the screen position into precision scaled dimensions.
         tmpPoint = MC2Point(static_cast<int>(x * SCALING_CONSTANT),
                             static_cast<int>(y * SCALING_CONSTANT));
         tmpPoint += offset;
         // We have now offset the position and want to translate it
         // back to a world coordinate. Thus we need to scale it back
         // to normal screen space.
         tmpPoint.getX() /= SCALING_CONSTANT;
         tmpPoint.getY() /= SCALING_CONSTANT;
         // Get the corresponding world coordinate.
         m_projection.inverseTransform(tmpCoord, tmpPoint);
         curItem->m_adjustedPosition = tmpCoord;
         // Move to next item
         curItem = curItem->m_stack.getNext();
      }
   }
}
/// @return the current stacks, one entry per stack head item.
StackedSet::StackVec& StackedSet::getStackedItems()
{
   return m_stacks;
}
/// Empties the stack vector. The item objects themselves are not
/// deleted (only the pointers are dropped).
void StackedSet::clear()
{
   m_stacks.clear();
}
// #define USE_SMOOTHING
// #define USE_ITERATIVE_SMOOTHING
// #define ITERATIVE_SMOOTHING_CAP 2
/**
 * Rebuilds the stack set from all visible layers: every on-screen item
 * is reset and merged into the overlap stacks, optional smoothing
 * passes are applied (compile-time flags), and — when automatic
 * highlight mode is on — the item closest to the screen center (within
 * a fixed pixel radius) is highlighted.
 */
void StackedSet::update()
{
   clear();
   // Since overlap should be calculated using the precision scaled dimensions
   // we need to precision scale the bounding box for the screen.
   PixelBox screenBox = m_projection.getPixelBoundingBox();
   MC2Point topLeft(screenBox.getTopLeft().getX() * SCALING_CONSTANT,
                    screenBox.getTopLeft().getY() * SCALING_CONSTANT);
   MC2Point bottomRight(screenBox.getBottomRight().getX() * SCALING_CONSTANT,
                        screenBox.getBottomRight().getY() * SCALING_CONSTANT);
   PixelBox screenBoxScaled(topLeft, bottomRight);
   for( OverlayViewLayerInterface::LayerMap::iterator itr =
           m_layerInterface.begin();
        itr != m_layerInterface.end(); itr++)
   {
      Layer& l = *itr->second;
      if(!l.getVisible()) {
         // We don't need to detect any overlaps with invisible layers.
         continue;
      }
      for(Layer::const_iterator itemItr = l.items_begin();
          itemItr != l.items_end(); itemItr++)
      {
         OverlayItemInternal* item = *itemItr;
         initItem(item);
         if (!screenBoxScaled.overlaps(item->getPrecisionScaledPixelBox())) {
            // The item lies outside the screen, do NOT add it to the stacks.
            continue;
         }
         addToStackSet(item);
      }
   }
#ifdef USE_SMOOTHING
   // Smooth the positions of the stacks to represent the average
   // of the contained items' positions.
   smoothPositions();
#endif
#ifdef USE_SMOOTHING
#ifdef USE_ITERATIVE_SMOOTHING
   // Since we have changed the position of the stacks, we run another
   // pass of overlap detection.
   unsigned int prevSize = 0;
   unsigned int iterationCap = ITERATIVE_SMOOTHING_CAP;
   // If the number of stacks has not changed, no stacks overlapped
   // so the iteration should terminate.
   while (prevSize != m_stacks.size() && iterationCap > 0) {
      StackVec tempVec = m_stacks;
      prevSize = m_stacks.size();
      clear();
      for(StackVec::iterator itr = tempVec.begin();
          itr != tempVec.end(); itr++) {
         addToStackSet(*itr);
      }
      smoothPositions();
      --iterationCap;
   }
#endif
#endif
   OverlayItemInternal* currClosest = NULL;
   if ( m_stacks.size() != 0 && m_automaticHighlightMode){
      // Find out which item is closest to the center of the screen
      MC2Point screenCenter;
      m_projection.transform(screenCenter, m_projection.getCenter());
      WFAPI::wf_float64 closestDistSq = 0;
      currClosest = *m_stacks.begin();
      closestDistSq = getSquareScreenDistance(screenCenter, currClosest);
      for(StackedSet::StackVec::const_iterator itr = m_stacks.begin();
          itr != m_stacks.end(); itr++)
      {
         WFAPI::wf_float64 currDistSq = getSquareScreenDistance(screenCenter,
                                                                *itr);
         if (currDistSq < closestDistSq){
            currClosest = *itr;
            closestDistSq = currDistSq;
         }
      }
      // If the currently closest item is further away than RADIUS pixels,
      // disregard it.
      const unsigned int RADIUS = 75;
      if (closestDistSq > RADIUS * RADIUS){
         currClosest = NULL;
         closestDistSq = 0;
      }
   }
   updateClosest(currClosest);
   if (m_closest.item && automaticHighlightEnabled()){
      m_closest.item->m_isHighlighted = true;
      // Since we don't care pixel scale the item is on, we
      // use 0 as the current pixel scale.
      const WFAPI::OverlayItemVisualSpec* highlighted =
         OverlayItemUtil::getCurrentVisualSpec(m_closest.item, 0);
      if(highlighted) {
         updateItemPixelBox(m_closest.item, highlighted,
                            m_closest.item->m_adjustedPosition);
      }
   }
}
// Computes and stores the precision scaled pixel box for an item.
//
// The box is derived from the background image of the given visual spec,
// offset by the spec's focus point and anchored at the screen projection
// of the supplied coordinate. Without a visual spec or background image
// the item gets a degenerate (empty) box.
void
StackedSet::updateItemPixelBox(const OverlayItemInternal* item,
                               const WFAPI::OverlayItemVisualSpec* visualSpec,
                               const WFAPI::WGS84Coordinate& coord)
{
   // Project the coordinate and scale it up into precision space.
   WFAPI::wf_float64 projX;
   WFAPI::wf_float64 projY;
   m_projection.transformf(projX, projY, coord);
   MC2Point anchor(static_cast<int>(projX * SCALING_CONSTANT),
                   static_cast<int>(projY * SCALING_CONSTANT));
   // Without a visual spec there is no extent to measure.
   if(!visualSpec) {
      item->getPrecisionScaledPixelBox() = PixelBox(MC2Point(0, 0),
                                                    MC2Point(0, 0));
      return;
   }
   const WFAPI::ImageSpec* background = visualSpec->getBackgroundImage();
   // Likewise when the spec carries no background image.
   if(!background) {
      item->getPrecisionScaledPixelBox() = PixelBox(MC2Point(0, 0),
                                                    MC2Point(0, 0));
      return;
   }
   // The focus point and image dimensions need to be scaled in order
   // to gain sufficient precision.
   WFAPI::ScreenPoint focus = visualSpec->getFocusPoint();
   focus.getX() *= SCALING_CONSTANT;
   focus.getY() *= SCALING_CONSTANT;
   unsigned int scaledWidth = background->getWidth() * SCALING_CONSTANT;
   unsigned int scaledHeight = background->getHeight() * SCALING_CONSTANT;
   // Build the box around the focus point, then translate it to the
   // item's anchor position on screen.
   PixelBox scaledBox( MC2Point(-focus.getX(), -focus.getY()),
                       MC2Point(-focus.getX() + scaledWidth,
                                -focus.getY() + scaledHeight));
   scaledBox.move(anchor);
   // Store the box so that other items can check for overlap against it.
   item->getPrecisionScaledPixelBox() = scaledBox;
}
void StackedSet::updateClosestItemPoint()
{
if (m_closest.item == NULL){
return;
}
MC2Point screenPoint;
m_projection.transform(screenPoint,
m_closest.item->m_adjustedPosition);
m_closest.point = screenPoint;
}
// Returns the cached screen position of the closest item, or the
// point (0,0) when no closest item is tracked.
MC2Point StackedSet::getClosestItemPoint()
{
   if(m_closest.item != NULL){
      return m_closest.point;
   }
   return MC2Point(0,0);
}
// Returns the adjusted world coordinate of the closest item, or the
// coordinate (0,0) when no closest item is tracked.
MC2Coordinate StackedSet::getClosestItemCoord()
{
   if(m_closest.item != NULL){
      return m_closest.item->m_adjustedPosition;
   }
   return MC2Coordinate(0,0);
}
// Returns true when an item is currently tracked as closest to the
// screen center.
bool StackedSet::hasValidClosestItem()
{
   return m_closest.item != NULL;
}
// Updates the m_closest bookkeeping to track newClosest.
//
// Passing NULL resets the bookkeeping. Otherwise the struct is only
// rewritten when the item pointer, the stack size or the stack contents
// actually changed, which avoids rebuilding the stored stack needlessly.
void StackedSet::updateClosest(const OverlayItemInternal* newClosest)
{
   if (newClosest == NULL){
      // Clear the m_closest struct
      m_closest.item = NULL;
      m_closest.stack.clear();
      m_closest.point = WFAPI::ScreenPoint();
      return;
   }
   // To determine whether the m_closest struct should be updated or not,
   // we examine the pointer itself, the size of its stack - and if
   // both are the same, we examine the contents of the stack.
   bool shouldUpdate = false;
   if (newClosest != m_closest.item ||
       m_closest.stack.size() != newClosest->m_stack.getSize()-1){
      // Either the pointer differs, or the size of the stack.
      shouldUpdate = true;
   } else {
      // The pointer and the size is the same as previously,
      // so we examine the contents of the stack.
      // NOTE: the size equality above guarantees curIndex stays within
      // the bounds of m_closest.stack during this walk.
      const OverlayItemInternal* cur = newClosest->m_stack.getNext();
      unsigned int curIndex = 0;
      while (cur && !shouldUpdate){
         if (cur != m_closest.stack[curIndex]){
            shouldUpdate = true;
         }
         cur = cur->m_stack.getNext();
         curIndex++;
      }
   }
   if (!shouldUpdate){
      return;
   } else {
      // Now we update the m_closest struct.
      m_closest.item = newClosest;
      m_closest.stack.clear();
      const OverlayItemInternal* cur = newClosest->m_stack.getNext();
      while (cur){
         m_closest.stack.push_back(cur);
         cur = cur->m_stack.getNext();
      }
      // This call sets the m_closest.point correctly for the current
      // projection state.
      updateClosestItemPoint();
   }
}
// Returns the squared distance, in screen pixels, between screenCenter
// and the screen projection of the item's adjusted position.
//
// The squared distance is returned (rather than the distance itself)
// so callers can compare magnitudes without paying for a sqrt.
WFAPI::wf_float64
StackedSet::getSquareScreenDistance(MC2Point screenCenter,
                                    const OverlayItemInternal* item)
{
   // Project the item's world coordinate onto the screen.
   MC2Point screenItem;
   m_projection.transform(screenItem, item->m_adjustedPosition);
   // Compute each delta once instead of repeating the subexpressions.
   const WFAPI::wf_float64 dx = screenItem.getX() - screenCenter.getX();
   const WFAPI::wf_float64 dy = screenItem.getY() - screenCenter.getY();
   return dx * dx + dy * dy;
}
// Returns the item's adjusted position projected to screen space and
// scaled up by SCALING_CONSTANT for precision arithmetic.
MC2Point
StackedSet::getPrecisionScaledScreenPosition(const OverlayItemInternal* item)
{
   WFAPI::wf_float64 projX;
   WFAPI::wf_float64 projY;
   // Project the world coordinate onto the screen.
   m_projection.transformf(projX, projY, item->m_adjustedPosition);
   // Scale the screen position into precision scaled dimensions.
   return MC2Point(static_cast<int>(projX * SCALING_CONSTANT),
                   static_cast<int>(projY * SCALING_CONSTANT));
}
// Enables or disables automatic highlighting of the item closest to the
// screen center. Disabling also resets the tracked closest item.
void StackedSet::enableAutomaticHighlight(bool enable)
{
   m_automaticHighlightMode = enable;
   if (!enable){
      // Drop the tracked item so nothing stays highlighted.
      updateClosest(NULL);
   }
   updateClosestItemPoint();
}
// Returns whether automatic highlighting of the closest item is enabled.
bool StackedSet::automaticHighlightEnabled()
{
   return m_automaticHighlightMode;
}
// Returns the item currently tracked as closest to the screen center,
// or NULL when none is tracked.
const OverlayItemInternal* StackedSet::getClosestItem()
{
   return m_closest.item;
}
| {
"content_hash": "889b8af98b42ace9581192dcc8bf286c",
"timestamp": "",
"source": "github",
"line_count": 488,
"max_line_length": 78,
"avg_line_length": 29.868852459016395,
"alnum_prop": 0.6208836443468716,
"repo_name": "wayfinder/Wayfinder-CppCore-v2",
"id": "45549cdf258530de0b68ff87786d003d632fea36",
"size": "16104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cpp/Targets/MapLib/Shared/src/StackedSet.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "7768867"
},
{
"name": "C++",
"bytes": "10721081"
},
{
"name": "Objective-C",
"bytes": "320116"
},
{
"name": "Shell",
"bytes": "7147"
}
],
"symlink_target": ""
} |
"""
Maximum cardinality matching in general graphs.
This module implements the general matching algorithm given in "The General
Maximum Matching Algorithm of Micali and Vazirani" by Paul A. Peterson and
Michael C. Loui, Algorithmica, 1988.
Many terms used in the code comments are explained in the paper by Peterson
and Loui. The paper could prove necessary in making sense of this code.
:filename matching.py
"""
# Authorship information
__author__ = "Alexander Soloviev"
__email__ = "[email protected]"
__date__ = "04/04/2015"
__all__ = [ 'max_cardinality_matching' ]
# Necessary imports
import structures
def max_cardinality_matching( G ):
    """Compute a maximum cardinality matching in a general graph G.
    A matching is a subset of edges in which no node occurs more than once.
    The cardinality of a matching is the number of matched edges.
    The maximum matching is a matching of maximum cardinality.
    :param G - the NetworkX graph given
        Undirected graph (NetworkX 1.x API: nodes_iter/edges_iter).
    :return mate - dictionary
        The matching is returned as a dictionary such that
        mate[v] == w if node v is matched to node w. Unmatched
        nodes do not occur as a key in mate.
    :notes
        This function takes time O(sqrt(number_of_nodes) * number_of_edges).
        This method is based on the "blossom" method for finding augmenting
        paths.
    :references
        ..[1] "The General Maximum Matching Algorithm of Micali and Vazirani",
        Paul A. Peterson and Michael C. Loui, Algorithmica, 1988
    """
    # Sentinel values used to initialize per-node attributes.
    INFINITY = len( G ) + 1   # Odd and even level attribute value
    UNERASED = False          # Erase attribute value
    ERASED = True
    UNVISITED = False         # Visit attribute value
    VISITED = True
    LEFT = -1                 # Left and right attribute value
    UNMARKED = 0
    RIGHT = 1
    # Sentinel values used to initialize per-edge attributes.
    UNUSED = False
    USED = True
    class Bloom:
        """ Representation of a bloom (a generalization of a blossom).
        A blossom is a circuit of odd length, say 2k+1, that has k matched
        edges. This class stores only the peak vertices and the base vertex
        of the bloom.
        """
        # __slots__ avoids a per-instance dict; blooms are created often.
        __slots__ = [ 'peaks', 'base' ]
    class DfsInfo:
        """ The information needed by the left and right depth first searches.
        In calling leftDfs and rightDfs, the vertices could get updated or
        modified. This class stores all of the parameters that could be
        altered.
        """
        def __init__(self, s, t, vL, vR, dcv, barrier):
            self.s = s              # left endpoint of the bridge
            self.t = t              # right endpoint of the bridge
            self.vL = vL            # current vertex of the left DFS
            self.vR = vR            # current vertex of the right DFS
            self.dcv = dcv          # deepest common vertex found so far
            self.barrier = barrier  # barrier vertex for the right DFS
    # Get a list of vertices
    gnodes = G.nodes()
    if not gnodes:
        return { }  # Ignore empty graphs
    # Initialize the top-level data structures for node attributes.
    # Each of these is a dictionary indexed by the node.
    nodeEvenLevel = { }
    nodeOddLevel = { }
    nodeBloom = { }
    nodePredecessors = { }
    nodeSuccessors = { }
    nodeAnomalies = { }
    nodeCount = { }
    nodeErase = { }
    nodeVisit = { }
    nodeMark = { }
    nodeParent = { }
    # Path compression:
    #nodeBaseStar = { }
    # Initialize the top-level data structure for nodes marked
    # left or right during the current call to augmentBlossom. If a
    # bloom is found, these nodes will be a part of the bloom.
    bloomNodes = [ ]
    # Initialize the top-level data structure for candidates.
    # Candidates is constructed so that candidates[i] contains all of the
    # vertices to search at the current level i.
    candidates = { }
    # Initialize the top-level data structure for bridges.
    # Bridges is constructed so that bridges[i] contains all bridges at
    # level i. A bridge is an edge whose removal leaves a disconnected graph.
    bridges = { }
    # If v is a matched vertex, mate[v] is its partner vertex.
    # If v is a single vertex, v does not occur as a key in mate.
    # Initially all vertices are single and are updated during augmentation.
    mate = { }
    def search():
        """ The search subroutine.
        Find all augmenting paths of minimal length and increase the current
        matching along these paths. Call the augmentBlossom function with
        each bridge found.
        :return augmented - True if matching was augmented, False otherwise
        """
        i = 0  # Counter for the current level
        # Insert each exposed vertex into candidates
        for v in G.nodes_iter():
            if v not in mate:
                nodeEvenLevel[v] = 0
                candidates[0].append( v )
        # Perform a breadth-first search through each of the vertices.
        # Continue iteration while candidates is not empty and no augmentation
        # occurred at level i-1.
        augmented = False
        while (i < len( gnodes ) + 1) and not augmented:
            if i % 2 == 0:  # If level i is even
                for v in candidates[i]:
                    # For each unerased and unmatched neighbor u of node v,
                    # determine whether the edge (u, v) is a bridge.
                    for u in G.neighbors_iter( v ):
                        if u == v: continue  # Ignore self-loops
                        if mate.get(v) != u and nodeErase[u] == UNERASED:
                            assert mate.get(u) != v
                            if nodeEvenLevel[u] < INFINITY:
                                # NOTE(review): Python-2 integer division;
                                # on Python 3 this needs // to stay an int key.
                                j = (nodeEvenLevel[u] + nodeEvenLevel[v]) / 2
                                bridges[j].add( tuple( sorted( [u, v] ) ) )
                            else:
                                if nodeOddLevel[u] == INFINITY:
                                    nodeOddLevel[u] = i + 1
                                if nodeOddLevel[u] == i + 1:
                                    nodeCount[u] += 1
                                    nodePredecessors[u].append( v )
                                    nodeSuccessors[v].append( u )
                                    candidates[i + 1].append( u )
                                elif nodeOddLevel[u] < i:
                                    nodeAnomalies[u].append( v )
            else:  # If level i is odd
                for v in candidates[i]:
                    # For each node v in candidates such that v belongs to no
                    # bloom, determine whether the edge (u, v) is a bridge,
                    # where u is the mate of v.
                    if nodeBloom[v] == None:
                        u = mate[v]
                        if nodeOddLevel[u] < INFINITY:
                            j = (nodeOddLevel[u] + nodeOddLevel[v]) / 2
                            bridges[j].add( tuple( sorted( [u, v] ) ) )
                        elif nodeEvenLevel[u] == INFINITY:
                            nodePredecessors[u] = [v]
                            nodeSuccessors[v] = [u]
                            nodeCount[u] = 1
                            nodeEvenLevel[u] = i + 1
                            candidates[i + 1].append( u )
            # Call augmentBlossom for each edge in bridges
            for s, t in bridges[i]:
                if nodeErase[s] == UNERASED and nodeErase[t] == UNERASED:
                    augmented = augmentBlossom(s, t, i)
            i += 1  # Increment the level counter
        return augmented
    def augmentBlossom(s, t, i):
        """ The augmentBlossom subroutine, or blossAug.
        Either define a new blossom, discover that s and t are in the same
        blossom, or find an augmenting path by using a double depth-first
        search. Use the functions leftDfs, rightDfs, and erasePath. Upon
        return, augmented is True if an augmenting path was found.
        :param s - first node of a bridge
        :param t - second node of a bridge
        :param i - the current level
        :return augmented - True if augmenting path was found, False otherwise
        """
        # Boolean flags for whether a bloom was found or augmentation occurred
        foundBloom = False
        augmented = False
        # Start from the compressed bloom bases of the bridge endpoints.
        vL = baseStar(s) if nodeBloom[s] else s
        vR = baseStar(t) if nodeBloom[t] else t
        if vL == vR:
            return False  # Exit if s and t belong to same compressed bloom
        # Set the parent nodes accordingly
        if nodeBloom[s]:
            nodeParent[vL] = s
        if nodeBloom[t]:
            nodeParent[vR] = t
        # Mark vL left and vR right
        nodeMark[vL] = LEFT
        nodeMark[vR] = RIGHT
        bloomNodes.append( vL )
        bloomNodes.append( vR )
        # DfsInfo stores the s, t, vL, vR, dcv, and barrier vertices
        dfsInfo = DfsInfo(s, t, vL, vR, None, vR)
        # While a bloom has not been found and no augmentation has occurred,
        # perform the double depth-first search.
        while not foundBloom and not augmented:
            # Get the levels of both vL and vR
            if dfsInfo.vL == None or dfsInfo.vR == None: return False
            level_vL = min(nodeEvenLevel[dfsInfo.vL], nodeOddLevel[dfsInfo.vL])
            level_vR = min(nodeEvenLevel[dfsInfo.vR], nodeOddLevel[dfsInfo.vR])
            # Increase the matching if vL and vR are both exposed
            if dfsInfo.vL not in mate and dfsInfo.vR not in mate:
                pathL = findPath(dfsInfo.s, dfsInfo.vL, None)
                pathR = findPath(dfsInfo.t, dfsInfo.vR, None)
                path = connectPath(pathL, pathR, dfsInfo.s, dfsInfo.t)
                augmentMatching(dfsInfo.vL, dfsInfo.vR)
                erasePath(path)
                augmented = True
                break
            elif level_vL >= level_vR:
                foundBloom = leftDfs( dfsInfo )  # Call leftDfs
            else:
                foundBloom = rightDfs( dfsInfo )  # Call rightDfs
        # Create a new bloom if a bloom is found by the depth-first search.
        if foundBloom and dfsInfo.dcv != None:
            nodeMark[dfsInfo.dcv] = UNMARKED  # dcv cannot be in the bloom
            b = Bloom()                       # Create a new bloom
            b.peaks = (dfsInfo.s, dfsInfo.t)  # Assign it the peak vertices
            b.base = dfsInfo.dcv              # Assign it a base vertex
            # Path compression
            #baseStardcv = baseStar( dfsInfo.dcv )
            #assert baseStardcv != None
            # Put each vertex marked left or right during this call in the
            # new bloom
            for v in bloomNodes:
                if nodeMark[v] == UNMARKED or nodeBloom[v] != None: continue
                # Set the bloom attribute of the vertex
                nodeBloom[v] = b
                # Path compression
                # Set the base* attribute of the vertex
                #nodeBaseStar[v] = baseStardcv
                level_v = min(nodeEvenLevel[v], nodeOddLevel[v])
                if level_v % 2 == 0:  # Check if v is outer
                    nodeOddLevel[v] = 2*i + 1 - nodeEvenLevel[v]
                else:  # Else v is inner
                    nodeEvenLevel[v] = 2*i + 1 - nodeOddLevel[v]
                    candidates[ nodeEvenLevel[v] ].append( v )
                    # Anomaly edges of v become bridges at their level.
                    for z in nodeAnomalies[v]:
                        j = (nodeEvenLevel[v] + nodeEvenLevel[z]) / 2
                        bridges[j].add( tuple( sorted( [v, z] ) ) )
                        G[v][z]['use'] = USED
        # Clear the bloomNodes list
        del bloomNodes[:]
        return augmented
def connectPath(pathL, pathR, s, t):
""" Connect two paths into a single path.
:param pathL - the left path given as a list
:param pathR - the right path given as a list
:param s - first node of a bridge
:param t - second node of a bridge
:return path - the combination of both paths
"""
reverseL = True if s == pathL[0] else False
reverseR = True if t == pathR[-1] else False
# Reverse the parent pointers of pathL
if reverseL:
nodeParent[ pathL[0] ] = None
prevv = None
currentv = pathL[-1]
nextv = None
while currentv != None:
nextv = nodeParent[currentv]
nodeParent[currentv] = prevv
prevv = currentv
currentv = nextv
# Reverse the list pathL
pathL.reverse()
# Reverse the parent pointers of pathR
if reverseR:
nodeParent[ pathR[0] ] = None
prevv = None
currentv = pathR[-1]
nextv = None
while currentv != None:
nextv = nodeParent[currentv]
nodeParent[currentv] = prevv
prevv = currentv
currentv = nextv
# Reverse the list pathR
pathR.reverse()
# Initialize the combined path
path = [ ]
path.extend( pathL )
path.extend( pathR )
# Connect the parent pointers of the path nodes
nodeParent[ pathR[0] ] = pathL[-1]
return path
    def augmentMatching(lv, rv):
        """ Augment the matching by the path from vertex lv to vertex rv.
        Walks the parent pointers from rv to lv, flipping every currently
        unmatched edge on the alternating path into the matching.
        :param lv - the left vertex
        :param rv - the right vertex
        """
        # Iterate through the path by following the parent pointers
        firstv = rv
        secondv = None
        while firstv != lv:
            # Get the parent node of firstv
            secondv = nodeParent[firstv]
            if mate.get(secondv) != firstv:
                assert mate.get(firstv) != secondv
                # Add the vertices to mate
                mate[firstv] = secondv
                mate[secondv] = firstv
            firstv = secondv
    def leftDfs(dfsInfo):
        """ The leftDfs subroutine.
        One step of the left depth-first search process. This step either
        advances vL to a predecessor, backtracks, or signals the discovery
        of a bloom.
        :param dfsInfo - the information stored for the depth-first searches
        :return bool - True if a bloom was found, False otherwise
        """
        # Search through all unused and unerased predecessor edges of vL
        for uL in nodePredecessors[dfsInfo.vL]:
            # Skip the edge (vL, uL) if it is used or erased
            if G[dfsInfo.vL][uL]['use'] == USED or nodeErase[uL] == ERASED:
                continue
            # Mark the edge (vL, uL) as used
            G[dfsInfo.vL][uL]['use'] = USED
            # If uL belongs to a bloom, jump to the bloom's base*
            if nodeBloom[uL]:
                uL = baseStar(uL)
            # If uL is unmarked, set its mark and exit
            if nodeMark[uL] == UNMARKED:
                nodeMark[uL] = LEFT
                nodeParent[uL] = dfsInfo.vL
                dfsInfo.vL = uL
                bloomNodes.append( uL )
                return False
            # Otherwise if uL is equal to vR, set the dcv equal to uL
            elif uL == dfsInfo.vR:
                dfsInfo.dcv = uL
        # Every predecessor was marked, so leftDfs is backtracking
        if dfsInfo.vL == dfsInfo.s:
            return True  # Signal discovery of a bloom
        elif nodeParent[dfsInfo.vL] != None:
            dfsInfo.vL = nodeParent[dfsInfo.vL]  # Keep backtracking
        return False
    def rightDfs(dfsInfo):
        """ The rightDfs subroutine.
        One step of the right depth-first search process. This step either
        advances vR to a predecessor or backtracks, or signals the discovery
        of a bloom.
        :param dfsInfo - the information stored for the depth-first searches
        :return bool - True if a bloom was found, False otherwise
        """
        # Search through all unused and unerased predecessor edges of vR
        for uR in nodePredecessors[dfsInfo.vR]:
            # Skip the edge (vR, uR) if it is used or erased
            if G[dfsInfo.vR][uR]['use'] == USED or nodeErase[uR] == ERASED:
                continue
            # Mark the edge (vR, uR) as used
            G[dfsInfo.vR][uR]['use'] = USED
            # If uR belongs to a bloom, jump to the bloom's base*
            if nodeBloom[uR]:
                uR = baseStar(uR)
            # If uR is unmarked, set its mark and exit
            if nodeMark[uR] == UNMARKED:
                nodeMark[uR] = RIGHT
                nodeParent[uR] = dfsInfo.vR
                dfsInfo.vR = uR
                bloomNodes.append( uR )
                return False
            # Otherwise if uR is equal to vL, set the dcv equal to uR
            elif uR == dfsInfo.vL:
                dfsInfo.dcv = uR
        # The vertex vR has no more unused predecessor edges
        if dfsInfo.vR == dfsInfo.barrier:
            dfsInfo.vR = dfsInfo.dcv
            dfsInfo.barrier = dfsInfo.dcv
            nodeMark[dfsInfo.vR] = RIGHT
            if nodeParent[dfsInfo.vL] != None:
                # Force leftDfs to backtrack from vL = dcv
                dfsInfo.vL = nodeParent[dfsInfo.vL]
        elif nodeParent[dfsInfo.vR] != None:
            dfsInfo.vR = nodeParent[dfsInfo.vR]  # Keep backtracking
        return False
    def erasePath(path):
        """ The erasePath subroutine (erase).
        Set the erase attribute for all vertices in the input path to
        erased. Once all predecessors of a vertex have been erased, the
        vertex itself is erased too.
        :param path - the list of vertices to be erased (consumed in place)
        """
        # While there are vertices left in the path
        while path:
            # Get a vertex from the path
            y = path.pop()
            nodeErase[y] = ERASED
            # Iterate through each of its successors
            for z in nodeSuccessors[y]:
                if nodeErase[z] == UNERASED:
                    nodeCount[z] -= 1
                    # Once its last predecessor is erased, erase z as well
                    if nodeCount[z] == 0:
                        path.append( z )
    def findPath(high, low, b):
        """ The findPath subroutine.
        Find an alternating path from vertex high to vertex low through
        the predecessor vertices. Note that the level of high is greater
        or equal to the level of low. Call openBloom to find paths through
        blooms other than bloom b.
        :param high - the high vertex
        :param low - the low vertex
        :param b - the bloom given
        :return path - the alternating path found
        """
        # Determine the level of the vertices high and low
        level_high = min(nodeEvenLevel[high], nodeOddLevel[high])
        level_low = min(nodeEvenLevel[low], nodeOddLevel[low])
        assert level_high >= level_low
        # If the vertices are equivalent, return a single node path
        if high == low:
            return [high]
        # Initialize the alternating path
        path = [ ]
        # Depth-first search for the vertex low starting from vertex high
        v = high
        u = high
        while u != low:
            # Check whether v has unvisited predecessor edges
            hasUnvisitedPredecessor = False
            for p in nodePredecessors[v]:
                # Stop at the first unvisited edge (p, v)
                if G[p][v]['visit'] == UNVISITED:
                    hasUnvisitedPredecessor = True
                    # Check whether v belongs to a bloom, set u accordingly
                    if nodeBloom[v] == None or nodeBloom[v] == b:
                        G[p][v]['visit'] = VISITED
                        u = p
                    else:
                        u = nodeBloom[v].base
                    break
            # There are no unvisited predecessor edges, so backtrack
            if not hasUnvisitedPredecessor:
                assert nodeParent[v] != None
                v = nodeParent[v]
            else:
                # Get the level of node u
                level_u = min(nodeEvenLevel[u], nodeOddLevel[u])
                # Mark u visited and set the parent pointers
                if nodeErase[u] == UNERASED and level_u >= level_low \
                        and ( u == low or ( nodeVisit[u] == UNVISITED \
                        and ( nodeMark[u] == nodeMark[high] != UNMARKED \
                        or ( nodeBloom[u] != None and nodeBloom[u] != b ) ) ) ):
                    nodeVisit[u] = VISITED
                    nodeParent[u] = v
                    v = u
        # Compute the path by walking the parent pointers back to high
        while u != high:
            path.append(u)
            u = nodeParent[u]
        path.append( u )
        path.reverse()
        # The path has been found, except for blooms other than bloom b
        # These blooms must be opened using openBloom
        j = 0
        while j < len(path) - 1:
            xj = path[j]
            # Replace the part of the path by the output of openBloom
            if nodeBloom[xj] != None and nodeBloom[xj] != b:
                nodeVisit[xj] = UNVISITED
                path[j : j + 2], pathLength = openBloom( xj )
                nodeParent[ xj ] = path[j - 1] if j > 0 else None
                # Skip past the vertices spliced in by openBloom
                j += pathLength - 1
            j += 1
        return path
    def openBloom(x):
        """ The openBloom subroutine (open).
        Return an alternating path from vertex x through the bloom of
        x to the base of the bloom. Call findPath to get this alternating
        path.
        :param x - the vertex given
        :return (path, length) - the alternating path through the bloom
            together with its length
        """
        # Get the bloom that vertex x corresponds to
        bloom = nodeBloom[x]
        base = bloom.base
        level_x = min(nodeEvenLevel[x], nodeOddLevel[x])
        path = [ ]
        if level_x % 2 == 0:  # If x is outer
            path = findPath(x, base, bloom)
        else:  # Else x is inner
            # Get the peaks of the bloom
            (leftPeak, rightPeak) = bloom.peaks
            if nodeMark[x] == LEFT:  # If x is marked left
                pathLeft = findPath(leftPeak, x, bloom)
                pathRight = findPath(rightPeak, base, bloom)
                path = connectPath(pathLeft, pathRight, leftPeak, rightPeak)
            elif nodeMark[x] == RIGHT:  # Else x is marked right
                pathLeft = findPath(rightPeak, x, bloom)
                pathRight = findPath(leftPeak, base, bloom)
                path = connectPath(pathLeft, pathRight, rightPeak, leftPeak)
        return ( path, len(path) )
    def baseStar(v):
        """ The base* function.
        Return the base* of the vertex v, i.e. the base reached by
        repeatedly following bloom bases until a vertex outside any
        bloom is found.
        :param v - the vertex given
        :return base - the base* of v
        """
        base = v
        while nodeBloom[base] != None:
            # A bloom's base must lie outside the bloom itself.
            assert nodeBloom[base].base != base
            base = nodeBloom[base].base
        # Path compression (disabled):
        #while nodeBaseStar[n] != None:
        #    n = nodeBaseStar[n]
        #while v != n:
        #    vNext = nodeBaseStar[v]
        #    nodeBaseStar[v] = n
        #    v = vNext
        return base
    # Main loop: continue iteration until no further augmentation is possible.
    augmented = True
    while augmented:
        # Initialize/reset the per-node attributes for this phase
        for v in G.nodes_iter():
            nodeEvenLevel[v] = INFINITY
            nodeOddLevel[v] = INFINITY
            nodeBloom[v] = None
            nodePredecessors[v] = [ ]
            nodeSuccessors[v] = [ ]
            nodeAnomalies[v] = [ ]
            nodeCount[v] = 0
            nodeErase[v] = UNERASED
            nodeVisit[v] = UNVISITED
            nodeMark[v] = UNMARKED
            nodeParent[v] = None
            # Path compression
            #nodeBaseStar[v] = None
        # Initialize/reset the per-edge attributes
        for u, v, d in G.edges_iter( data=True ):
            if u == v: continue  # Ignore self-loops
            d['use'] = UNUSED
            d['visit'] = UNVISITED
        # Initialize/reset the candidates and bridges
        for i in range( len( gnodes ) + 1 ):
            candidates[i] = [ ]
            bridges[i] = structures.OrderedSet()
        # Call the search subroutine
        augmented = search()
    # Paranoia check that the matching is symmetric
    for v in mate:
        assert mate[ mate[v] ] == v
    # Delete the scratch edge attributes from graph G
    for u, v, d in G.edges_iter( data=True ):
        if u == v: continue  # Ignore self-loops
        del d['use']
        del d['visit']
    return mate
#end | {
"content_hash": "bc9bfed16d142bd439f98a724bfcc0d1",
"timestamp": "",
"source": "github",
"line_count": 702,
"max_line_length": 123,
"avg_line_length": 38.13247863247863,
"alnum_prop": 0.5050244686017408,
"repo_name": "AlexanderSoloviev/mv-matching",
"id": "b41e3290386625dea1cf5827618413c1ca75606f",
"size": "26792",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "matching/matching.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "82960"
}
],
"symlink_target": ""
} |
<?php
namespace VerticalResponse\Client;
use Psr\Http\Message\ResponseInterface;
class HttpException extends Exception
{
    /**
     * HttpException constructor.
     *
     * Builds an exception describing a failed HTTP exchange, using the
     * response's status code both in the message and as the error code.
     *
     * @param ResponseInterface $response The failed HTTP response.
     * @param Exception|null    $previous Optional underlying exception.
     */
    public function __construct(ResponseInterface $response, Exception $previous = null)
    {
        $statusCode = $response->getStatusCode();
        $message = 'HTTP Error communicating with API: ' . $statusCode;

        parent::__construct($message, $response, $statusCode, $previous);
    }
}
| {
"content_hash": "f5187443f6e511c65ee1db91e01a31a6",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 88,
"avg_line_length": 28.157894736842106,
"alnum_prop": 0.685981308411215,
"repo_name": "navarr/verticalresponse",
"id": "1c513d2f678b6e3541bb47883b9b1a2fe1ba49d0",
"size": "535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Client/HttpException.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "6527"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "62918ca33437ddf51df4c163a9f4481a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "03e242c168fb6fe01f7da935dca54f7f9cde5647",
"size": "171",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Brassicales/Brassicaceae/Sinapis/Sinapis pusilla/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import {
SchematicTestRunner,
UnitTestTree,
} from '@angular-devkit/schematics/testing';
import { getFileContent } from '@schematics/angular/utility/test';
import * as path from 'path';
import { Schema as StoreDevtoolsOptions } from './schema';
import {
getTestProjectPath,
createWorkspace,
} from '@ngrx/schematics-core/testing';
describe('Store-Devtools ng-add Schematic', () => {
const schematicRunner = new SchematicTestRunner(
'@ngrx/store-devtools',
path.join(__dirname, '../collection.json')
);
const defaultOptions: StoreDevtoolsOptions = {
skipPackageJson: false,
project: 'bar',
module: 'app',
};
const projectPath = getTestProjectPath();
let appTree: UnitTestTree;
beforeEach(async () => {
appTree = await createWorkspace(schematicRunner, appTree);
});
it('should update package.json', async () => {
const options = { ...defaultOptions };
const tree = await schematicRunner
.runSchematicAsync('ng-add', options, appTree)
.toPromise();
const packageJson = JSON.parse(tree.readContent('/package.json'));
expect(packageJson.dependencies['@ngrx/store-devtools']).toBeDefined();
});
it('should skip package.json update', async () => {
const options = { ...defaultOptions, skipPackageJson: true };
const tree = await schematicRunner
.runSchematicAsync('ng-add', options, appTree)
.toPromise();
const packageJson = JSON.parse(tree.readContent('/package.json'));
expect(packageJson.dependencies['@ngrx/store-devtools']).toBeUndefined();
});
it('should be provided by default', async () => {
const options = { ...defaultOptions };
const tree = await schematicRunner
.runSchematicAsync('ng-add', options, appTree)
.toPromise();
const content = tree.readContent(`${projectPath}/src/app/app.module.ts`);
expect(content).toMatch(
/import { StoreDevtoolsModule } from '@ngrx\/store-devtools';/
);
expect(content).toMatch(
/StoreDevtoolsModule.instrument\({ maxAge: 25, logOnly: environment.production }\)/
);
});
it('should import into a specified module', async () => {
const options = { ...defaultOptions };
const tree = await schematicRunner
.runSchematicAsync('ng-add', options, appTree)
.toPromise();
const content = tree.readContent(`${projectPath}/src/app/app.module.ts`);
expect(content).toMatch(
/import { StoreDevtoolsModule } from '@ngrx\/store-devtools';/
);
});
it('should import the environments correctly', async () => {
const options = { ...defaultOptions, module: 'app.module.ts' };
const tree = await schematicRunner
.runSchematicAsync('ng-add', options, appTree)
.toPromise();
const content = tree.readContent(`${projectPath}/src/app/app.module.ts`);
expect(content).toMatch(
/import { environment } from '..\/environments\/environment';/
);
});
it('should fail if specified module does not exist', async () => {
const options = { ...defaultOptions, module: '/src/app/app.moduleXXX.ts' };
let thrownError: Error | null = null;
try {
await schematicRunner.runSchematicAsync('ng-add', options, appTree);
} catch (err) {
thrownError = err;
}
expect(thrownError).toBeDefined();
});
it('should fail if negative maxAges', async () => {
const options = { ...defaultOptions, maxAge: -4 };
let thrownError: Error | null = null;
try {
await schematicRunner.runSchematicAsync('ng-add', options, appTree);
} catch (err) {
thrownError = err;
}
expect(thrownError).toBeDefined();
});
it('should fail if maxAge of 1', async () => {
const options = { ...defaultOptions, maxAge: -4 };
let thrownError: Error | null = null;
try {
await schematicRunner.runSchematicAsync('ng-add', options, appTree);
} catch (err) {
thrownError = err;
}
expect(thrownError).toBeDefined();
});
it('should support a custom maxAge', async () => {
const options = {
...defaultOptions,
name: 'State',
maxAge: 5,
};
const tree = await schematicRunner
.runSchematicAsync('ng-add', options, appTree)
.toPromise();
const content = tree.readContent(`${projectPath}/src/app/app.module.ts`);
expect(content).toMatch(/maxAge: 5/);
});
});
| {
"content_hash": "eecd7f8100adecb42dd077ac3fac4bba",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 89,
"avg_line_length": 30.886524822695037,
"alnum_prop": 0.6473019517795637,
"repo_name": "brandonroberts/platform",
"id": "5f6c1c3b56cab423322b408504be8f105c34de6c",
"size": "4355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/store-devtools/schematics/ng-add/index.spec.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "85301"
},
{
"name": "Dockerfile",
"bytes": "255"
},
{
"name": "HTML",
"bytes": "68906"
},
{
"name": "JavaScript",
"bytes": "376131"
},
{
"name": "Shell",
"bytes": "12950"
},
{
"name": "Starlark",
"bytes": "32977"
},
{
"name": "TypeScript",
"bytes": "2492617"
}
],
"symlink_target": ""
} |
/*
* MinIO Cloud Storage, (C) 2019-2020 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package madmin
import (
"context"
"math/rand"
"net"
"net/http"
"net/url"
"strings"
"sync"
"time"
)
// MaxRetry is the maximum number of retries before stopping.
var MaxRetry = 10

// MaxJitter will randomize over the full exponential backoff time
const MaxJitter = 1.0

// NoJitter disables the use of jitter for randomizing the exponential backoff time
const NoJitter = 0.0

// DefaultRetryUnit - default unit multiplicative per retry.
// defaults to 1 second.
const DefaultRetryUnit = time.Second

// DefaultRetryCap - each retry attempt never waits longer than
// this maximum time duration.
const DefaultRetryCap = time.Second * 30
// lockedRandSource provides protected rand source, implements rand.Source interface.
type lockedRandSource struct {
lk sync.Mutex
src rand.Source
}
// Int63 returns a non-negative pseudo-random 63-bit integer as an int64.
func (r *lockedRandSource) Int63() (n int64) {
r.lk.Lock()
n = r.src.Int63()
r.lk.Unlock()
return
}
// Seed uses the provided seed value to initialize the generator to a
// deterministic state.
func (r *lockedRandSource) Seed(seed int64) {
r.lk.Lock()
r.src.Seed(seed)
r.lk.Unlock()
}
// newRetryTimer creates a timer with exponentially increasing
// delays until the maximum retry attempts are reached.
//
// It returns a receive-only channel that yields the attempt number
// (starting at 1) before each round; the channel is closed after
// maxRetry attempts have been delivered or when ctx is canceled.
func (adm AdminClient) newRetryTimer(ctx context.Context, maxRetry int, unit time.Duration, cap time.Duration, jitter float64) <-chan int {
	attemptCh := make(chan int)

	// computes the exponential backoff duration according to
	// https://www.awsarchitectureblog.com/2015/03/backoff.html
	exponentialBackoffWait := func(attempt int) time.Duration {
		// normalize jitter to the range [0, 1.0]
		if jitter < NoJitter {
			jitter = NoJitter
		}
		if jitter > MaxJitter {
			jitter = MaxJitter
		}

		//sleep = random_between(0, min(cap, base * 2 ** attempt))
		sleep := unit * time.Duration(1<<uint(attempt))
		if sleep > cap {
			sleep = cap
		}
		if jitter != NoJitter {
			// Shave off a random fraction of the wait so concurrent
			// retriers do not all wake at the same moment.
			sleep -= time.Duration(adm.random.Float64() * float64(sleep) * jitter)
		}
		return sleep
	}

	go func() {
		defer close(attemptCh)
		for i := 0; i < maxRetry; i++ {
			// Attempts start from 1.
			select {
			case attemptCh <- i + 1:
			case <-ctx.Done():
				// Stop the routine.
				return
			}

			// Sleep out the backoff delay before announcing the next
			// attempt; cancellation short-circuits the wait.
			select {
			case <-time.After(exponentialBackoffWait(i)):
			case <-ctx.Done():
				// Stop the routine.
				return
			}
		}
	}()
	return attemptCh
}
// isHTTPReqErrorRetryable - is http requests error retryable, such
// as i/o timeout, connection broken etc..
func isHTTPReqErrorRetryable(err error) bool {
if err == nil {
return false
}
switch e := err.(type) {
case *url.Error:
switch e.Err.(type) {
case *net.DNSError, *net.OpError, net.UnknownNetworkError:
return true
}
if strings.Contains(err.Error(), "Connection closed by foreign host") {
return true
} else if strings.Contains(err.Error(), "net/http: TLS handshake timeout") {
// If error is - tlsHandshakeTimeoutError, retry.
return true
} else if strings.Contains(err.Error(), "i/o timeout") {
// If error is - tcp timeoutError, retry.
return true
} else if strings.Contains(err.Error(), "connection timed out") {
// If err is a net.Dial timeout, retry.
return true
} else if strings.Contains(err.Error(), "net/http: HTTP/1.x transport connection broken") {
// If error is transport connection broken, retry.
return true
}
}
return false
}
// List of AWS S3 error codes which are retryable.
var retryableS3Codes = map[string]struct{}{
	"RequestError":         {},
	"RequestTimeout":       {},
	"Throttling":           {},
	"ThrottlingException":  {},
	"RequestLimitExceeded": {},
	"RequestThrottled":     {},
	"InternalError":        {},
	"SlowDown":             {},
	// Add more AWS S3 codes here.
}

// isS3CodeRetryable reports whether the given S3 error code denotes a
// transient condition worth retrying.
func isS3CodeRetryable(s3Code string) (ok bool) {
	_, found := retryableS3Codes[s3Code]
	return found
}
// List of HTTP status codes which are retryable.
var retryableHTTPStatusCodes = map[int]struct{}{
http.StatusRequestTimeout: {},
http.StatusTooManyRequests: {},
http.StatusInternalServerError: {},
http.StatusBadGateway: {},
http.StatusServiceUnavailable: {},
// Add more HTTP status codes here.
}
// isHTTPStatusRetryable - is HTTP error code retryable.
func isHTTPStatusRetryable(httpStatusCode int) (ok bool) {
_, ok = retryableHTTPStatusCodes[httpStatusCode]
return ok
}
| {
"content_hash": "a78cd9c2f7d14733492ec7bcb36ccc5d",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 139,
"avg_line_length": 27.65573770491803,
"alnum_prop": 0.6953171310017783,
"repo_name": "fwessels/minio",
"id": "0f6ff9c610c4b8fd49df04760ef641dcee2d0083",
"size": "5061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pkg/madmin/retry.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "51736"
},
{
"name": "Dockerfile",
"bytes": "941"
},
{
"name": "Go",
"bytes": "4795943"
},
{
"name": "HTML",
"bytes": "2242"
},
{
"name": "JavaScript",
"bytes": "277190"
},
{
"name": "Makefile",
"bytes": "3797"
},
{
"name": "Shell",
"bytes": "41050"
}
],
"symlink_target": ""
} |
// This file has been auto-generated by code_generator_v8.py. DO NOT MODIFY!
#ifndef MediaKeySystemMediaCapability_h
#define MediaKeySystemMediaCapability_h
#include "modules/ModulesExport.h"
#include "platform/heap/Handle.h"
#include "wtf/text/WTFString.h"
namespace blink {
// Generated by code_generator_v8.py (see file header) — presumably the
// C++ representation of the MediaKeySystemMediaCapability IDL dictionary
// from Encrypted Media Extensions; confirm against the .idl source.
// Each member is considered "present" only while its backing String is
// non-null (see the has*() accessors below).
class MODULES_EXPORT MediaKeySystemMediaCapability {
    ALLOW_ONLY_INLINE_ALLOCATION();
public:
    MediaKeySystemMediaCapability();

    // contentType member: absent while m_contentType is the null String.
    bool hasContentType() const { return !m_contentType.isNull(); }
    String contentType() const { return m_contentType; }
    void setContentType(String value) { m_contentType = value; }

    // robustness member: absent while m_robustness is the null String.
    bool hasRobustness() const { return !m_robustness.isNull(); }
    String robustness() const { return m_robustness; }
    void setRobustness(String value) { m_robustness = value; }

    DECLARE_VIRTUAL_TRACE();

private:
    String m_contentType;
    String m_robustness;

    // Generated V8 binding code reads the private members directly.
    friend class V8MediaKeySystemMediaCapability;
};
} // namespace blink
#endif // MediaKeySystemMediaCapability_h
| {
"content_hash": "2a0d7398d625e55131b9de4dec18536b",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 76,
"avg_line_length": 28.054054054054053,
"alnum_prop": 0.7138728323699421,
"repo_name": "weolar/miniblink49",
"id": "1fd792adedde4b91fc105f23bb6a2840eb7840a0",
"size": "1206",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "gen/blink/modules/encryptedmedia/MediaKeySystemMediaCapability.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "11324372"
},
{
"name": "Batchfile",
"bytes": "52488"
},
{
"name": "C",
"bytes": "32157305"
},
{
"name": "C++",
"bytes": "280103993"
},
{
"name": "CMake",
"bytes": "88548"
},
{
"name": "CSS",
"bytes": "20839"
},
{
"name": "DIGITAL Command Language",
"bytes": "226954"
},
{
"name": "HTML",
"bytes": "202637"
},
{
"name": "JavaScript",
"bytes": "32539485"
},
{
"name": "Lua",
"bytes": "32432"
},
{
"name": "M4",
"bytes": "125191"
},
{
"name": "Makefile",
"bytes": "1517330"
},
{
"name": "NASL",
"bytes": "42"
},
{
"name": "Objective-C",
"bytes": "5320"
},
{
"name": "Objective-C++",
"bytes": "35037"
},
{
"name": "POV-Ray SDL",
"bytes": "307541"
},
{
"name": "Perl",
"bytes": "3283676"
},
{
"name": "Prolog",
"bytes": "29177"
},
{
"name": "Python",
"bytes": "4331616"
},
{
"name": "R",
"bytes": "10248"
},
{
"name": "Scheme",
"bytes": "25457"
},
{
"name": "Shell",
"bytes": "264021"
},
{
"name": "TypeScript",
"bytes": "166033"
},
{
"name": "Vim Script",
"bytes": "11362"
},
{
"name": "XS",
"bytes": "4319"
},
{
"name": "eC",
"bytes": "4383"
}
],
"symlink_target": ""
} |
package net.orange_box.storebox.example.proguard;
import net.orange_box.storebox.annotations.method.ClearMethod;
import net.orange_box.storebox.annotations.method.DefaultValue;
import net.orange_box.storebox.annotations.method.KeyByResource;
import net.orange_box.storebox.annotations.method.KeyByString;
import net.orange_box.storebox.annotations.method.RegisterChangeListenerMethod;
import net.orange_box.storebox.annotations.method.UnregisterChangeListenerMethod;
import net.orange_box.storebox.annotations.type.ActivityPreferences;
import net.orange_box.storebox.listeners.OnPreferenceValueChangedListener;
/**
 * StoreBox-backed preferences interface used by the ProGuard example.
 * Deliberately mixes {@link KeyByString} and {@link KeyByResource} for
 * the same underlying key so that both annotation styles are exercised
 * under obfuscation — presumably "int" and R.string.key_int resolve to
 * the same key; confirm against the string resources.
 */
@ActivityPreferences
interface Preferences {

    /** Reads the value stored under key "int"; falls back to R.integer.default_int. */
    @KeyByString("int")
    @DefaultValue(R.integer.default_int)
    int getInt();

    /** Stores {@code value} under the key named by R.string.key_int. */
    @KeyByResource(R.string.key_int)
    void setInt(int value);

    /** Registers a change listener for the "int" key. */
    @RegisterChangeListenerMethod
    @KeyByString("int")
    void regIntListener(OnPreferenceValueChangedListener<Integer> listener);

    /** Unregisters a change listener for the key named by R.string.key_int. */
    @UnregisterChangeListenerMethod
    @KeyByResource(R.string.key_int)
    void unregIntListener(OnPreferenceValueChangedListener<Integer> listener);

    /** Removes all stored values. */
    @ClearMethod
    void clear();
}
| {
"content_hash": "ed2bcb8d50d729b4ca07361383ffb6f8",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 81,
"avg_line_length": 36.09375,
"alnum_prop": 0.7948051948051948,
"repo_name": "martino2k6/StoreBox",
"id": "f83d9a91d5ae8636d0acbeba6c53a8e330847d2c",
"size": "1155",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/proguard/src/main/java/net/orange_box/storebox/example/proguard/Preferences.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "154972"
},
{
"name": "Kotlin",
"bytes": "3120"
}
],
"symlink_target": ""
} |
Refactor a tree building algorithm.
Some web-forums have a tree layout, so posts are presented as a tree. However
the posts are typically stored in a database as an unsorted set of records. Thus
when presenting the posts to the user the tree structure has to be
reconstructed.
Your job will be to refactor a working but slow and ugly piece of code that
implements the tree building logic for highly abstracted records. The records
only contain an ID number and a parent ID number. The ID number is always
between 0 (inclusive) and the length of the record list (exclusive). All records
have a parent ID lower than their own ID, except for the root record, which has
a parent ID that's equal to its own ID.
An example tree:
<pre>
root (ID: 0, parent ID: 0)
|-- child1 (ID: 1, parent ID: 0)
| |-- grandchild1 (ID: 2, parent ID: 1)
| +-- grandchild2 (ID: 4, parent ID: 1)
+-- child2 (ID: 3, parent ID: 0)
| +-- grandchild3 (ID: 6, parent ID: 3)
+-- child3 (ID: 5, parent ID: 0)
</pre>
## Running the tests
To run the tests run the command `go test` from within the exercise directory.
If the test suite contains benchmarks, you can run these with the `-bench`
flag:
go test -bench .
Keep in mind that each reviewer will run benchmarks on a different machine, with
different specs, so the results from these benchmark tests may vary.
## Further information
For more detailed information about the Go track, including how to get help if
you're having trouble, please visit the exercism.io [Go language page](http://exercism.io/languages/go/about).
## Submitting Incomplete Solutions
It's possible to submit an incomplete solution so you can see how others have completed the exercise.
| {
"content_hash": "2a22dc51ecc5db0912d09fc93c0d7fed",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 110,
"avg_line_length": 37.108695652173914,
"alnum_prop": 0.7445811364967779,
"repo_name": "robphoenix/exercism-go",
"id": "65e6229e8b6c2d91e0046e10625aacbc8f3bcdb4",
"size": "1724",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "exercises/tree-building/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "463409"
},
{
"name": "Shell",
"bytes": "2482"
}
],
"symlink_target": ""
} |
<?xml version='1.0' encoding='UTF-8'?>
<TEI xmlns="http://www.tei-c.org/ns/1.0">
<teiHeader>
<fileDesc>
<titleStmt>
<title>Description of Free Library of Philadelphia, Lewis T277: Grant</title>
<respStmt>
<resp>contributor</resp>
<persName>David Kalish</persName>
</respStmt>
<respStmt>
<resp>cataloger</resp>
<persName>Dot Porter</persName>
</respStmt>
<funder>Council on Library and Information Resources</funder>
</titleStmt>
<publicationStmt>
<publisher>Free Library of Philadelphia</publisher>
<availability>
<licence target="http://creativecommons.org/publicdomain/mark/1.0/">These images and the content of Free Library of Philadelphia Lewis T277: Grant are free of known copyright restrictions and in the public domain. See the Creative Commons Public Domain Mark page for usage details, http://creativecommons.org/publicdomain/mark/1.0/.</licence>
<licence target="https://creativecommons.org/publicdomain/zero/1.0/legalcode">To the extent possible under law, Free Library of Philadelphia, Special Collections has waived all copyright and related or neighboring rights to this metadata about Free Library of Philadelphia Lewis T277: Grant. This work is published from: United States. For a summary of CC0, see https://creativecommons.org/publicdomain/zero/1.0/. Legal code: https://creativecommons.org/publicdomain/zero/1.0/legalcode.</licence>
</availability>
</publicationStmt>
<sourceDesc>
<msDesc>
<msIdentifier>
<country>United States</country>
<settlement>Philadelphia</settlement>
<repository>Free Library of Philadelphia</repository>
<collection>John Frederick Lewis Collection of European Manuscripts</collection>
<idno type="call-number">Lewis T277</idno>
</msIdentifier>
<msContents>
<summary/>
<textLang mainLang="lat">Latin</textLang>
<msItem>
<title>Grant</title>
</msItem>
</msContents>
<physDesc>
<objectDesc>
<supportDesc material="parchment">
<support>
<p>Parchment</p>
</support>
</supportDesc>
</objectDesc>
</physDesc>
<history>
<origin>
<origDate notBefore="1000" notAfter="1500"/>
<p>Undated</p>
<origPlace>France</origPlace>
</origin>
</history>
</msDesc>
</sourceDesc>
</fileDesc>
<encodingDesc>
<classDecl>
<taxonomy xml:id="keywords">
<category xml:id="keyword_1">
<catDesc>Book Type</catDesc>
<category xml:id="keyword_1.2">
<catDesc>Accounts</catDesc>
</category>
<category xml:id="keyword_1.3">
<catDesc>Antiphonary</catDesc>
</category>
<category xml:id="keyword_1.4">
<catDesc>Armorial</catDesc>
</category>
<category xml:id="keyword_1.5">
<catDesc>Bible</catDesc>
</category>
<category xml:id="keyword_1.6">
<catDesc>Biography</catDesc>
</category>
<category xml:id="keyword_1.7">
<catDesc>Book of Hours</catDesc>
</category>
<category xml:id="keyword_1.8">
<catDesc>Breviary</catDesc>
</category>
<category xml:id="keyword_1.9">
<catDesc>Chronicle</catDesc>
</category>
<category xml:id="keyword_1.10">
<catDesc>Collection of Sermons</catDesc>
</category>
<category xml:id="keyword_1.11">
<catDesc>Commentary</catDesc>
</category>
<category xml:id="keyword_1.12">
<catDesc>Dictionary</catDesc>
</category>
<category xml:id="keyword_1.13">
<catDesc>Document</catDesc>
</category>
<category xml:id="keyword_1.14">
<catDesc>Dogale</catDesc>
</category>
<category xml:id="keyword_1.15">
<catDesc>Friendship book</catDesc>
</category>
<category xml:id="keyword_1.16">
<catDesc>Genealogy</catDesc>
</category>
<category xml:id="keyword_1.17">
<catDesc>Glossary</catDesc>
</category>
<category xml:id="keyword_1.18">
<catDesc>Gospel Lectionary</catDesc>
</category>
<category xml:id="keyword_1.19">
<catDesc>Gospels</catDesc>
</category>
<category xml:id="keyword_1.20">
<catDesc>Gradual</catDesc>
</category>
<category xml:id="keyword_1.22">
<catDesc>Haggadah</catDesc>
</category>
<category xml:id="keyword_1.23">
<catDesc>Homilary</catDesc>
</category>
<category xml:id="keyword_1.24">
<catDesc>Horologion</catDesc>
</category>
<category xml:id="keyword_1.25">
<catDesc>Hymnal</catDesc>
</category>
<category xml:id="keyword_1.26">
<catDesc>Koran</catDesc>
</category>
<category xml:id="keyword_1.27">
<catDesc>Lectionary</catDesc>
</category>
<category xml:id="keyword_1.28">
<catDesc>Mass book</catDesc>
</category>
<category xml:id="keyword_1.29">
<catDesc>Miscellany</catDesc>
</category>
<category xml:id="keyword_1.30">
<catDesc>Missal</catDesc>
</category>
<category xml:id="keyword_1.31">
<catDesc>Pontifical</catDesc>
</category>
<category xml:id="keyword_1.32">
<catDesc>Portolan chart</catDesc>
</category>
<category xml:id="keyword_1.33">
<catDesc>Prayer book</catDesc>
</category>
<category xml:id="keyword_1.34">
<catDesc>Private devotional text</catDesc>
</category>
<category xml:id="keyword_1.35">
<catDesc>Processional</catDesc>
</category>
<category xml:id="keyword_1.36">
<catDesc>Protective scroll</catDesc>
</category>
<category xml:id="keyword_1.37">
<catDesc>Psalter</catDesc>
</category>
<category xml:id="keyword_1.38">
<catDesc>Psalter-Hours</catDesc>
</category>
<category xml:id="keyword_1.39">
<catDesc>Qur`an</catDesc>
</category>
<category xml:id="keyword_1.40">
<catDesc>Recipe book</catDesc>
</category>
<category xml:id="keyword_1.41">
<catDesc>Saint's Life</catDesc>
</category>
<category xml:id="keyword_1.42">
<catDesc>Scribal manual</catDesc>
</category>
<category xml:id="keyword_1.43">
<catDesc>Song book</catDesc>
</category>
<category xml:id="keyword_1.44">
<catDesc>Torah</catDesc>
</category>
<category xml:id="keyword_1.45">
<catDesc>Treatise</catDesc>
</category>
</category>
<category xml:id="keyword_2">
<catDesc>Century</catDesc>
<category xml:id="keyword_2.2">
<catDesc>6th century</catDesc>
</category>
<category xml:id="keyword_2.3">
<catDesc>7th century</catDesc>
</category>
<category xml:id="keyword_2.4">
<catDesc>8th century</catDesc>
</category>
<category xml:id="keyword_2.5">
<catDesc>9th century</catDesc>
</category>
<category xml:id="keyword_2.6">
<catDesc>10th century</catDesc>
</category>
<category xml:id="keyword_2.7">
<catDesc>11th century</catDesc>
</category>
<category xml:id="keyword_2.8">
<catDesc>12th century</catDesc>
</category>
<category xml:id="keyword_2.9">
<catDesc>13th century</catDesc>
</category>
<category xml:id="keyword_2.10">
<catDesc>14th century</catDesc>
</category>
<category xml:id="keyword_2.11">
<catDesc>15th century</catDesc>
</category>
<category xml:id="keyword_2.12">
<catDesc>16th century</catDesc>
</category>
<category xml:id="keyword_2.13">
<catDesc>17th century</catDesc>
</category>
<category xml:id="keyword_2.14">
<catDesc>18th century</catDesc>
</category>
<category xml:id="keyword_2.15">
<catDesc>19th century</catDesc>
</category>
<category xml:id="keyword_2.16">
<catDesc>20th century</catDesc>
</category>
</category>
<category xml:id="keyword_3">
<catDesc>Culture</catDesc>
<category xml:id="keyword_3.2">
<catDesc>Abbasid</catDesc>
</category>
<category xml:id="keyword_3.3">
<catDesc>Arabic</catDesc>
</category>
<category xml:id="keyword_3.4">
<catDesc>Armenian</catDesc>
</category>
<category xml:id="keyword_3.5">
<catDesc>Ashkenazi</catDesc>
</category>
<category xml:id="keyword_3.6">
<catDesc>Austrian</catDesc>
</category>
<category xml:id="keyword_3.7">
<catDesc>Byzantine</catDesc>
</category>
<category xml:id="keyword_3.8">
<catDesc>Carolingian</catDesc>
</category>
<category xml:id="keyword_3.9">
<catDesc>Chinese</catDesc>
</category>
<category xml:id="keyword_3.10">
<catDesc>Christian</catDesc>
</category>
<category xml:id="keyword_3.11">
<catDesc>Coptic</catDesc>
</category>
<category xml:id="keyword_3.12">
<catDesc>Dutch</catDesc>
</category>
<category xml:id="keyword_3.13">
<catDesc>Egyptian</catDesc>
</category>
<category xml:id="keyword_3.14">
<catDesc>English</catDesc>
</category>
<category xml:id="keyword_3.15">
<catDesc>Ethiopian</catDesc>
</category>
<category xml:id="keyword_3.16">
<catDesc>Fatimid</catDesc>
</category>
<category xml:id="keyword_3.17">
<catDesc>Flemish</catDesc>
</category>
<category xml:id="keyword_3.18">
<catDesc>French</catDesc>
</category>
<category xml:id="keyword_3.19">
<catDesc>German</catDesc>
</category>
<category xml:id="keyword_3.20">
<catDesc>Indian</catDesc>
</category>
<category xml:id="keyword_3.21">
<catDesc>Islamic</catDesc>
</category>
<category xml:id="keyword_3.22">
<catDesc>Italian</catDesc>
</category>
<category xml:id="keyword_3.23">
<catDesc>Jewish</catDesc>
</category>
<category xml:id="keyword_3.25">
<catDesc>Mamluk</catDesc>
</category>
<category xml:id="keyword_3.26">
<catDesc>Mughal</catDesc>
</category>
<category xml:id="keyword_3.27">
<catDesc>Ottoman</catDesc>
</category>
<category xml:id="keyword_3.28">
<catDesc>Ottonian</catDesc>
</category>
<category xml:id="keyword_3.29">
<catDesc>Persian</catDesc>
</category>
<category xml:id="keyword_3.30">
<catDesc>Portuguese</catDesc>
</category>
<category xml:id="keyword_3.31">
<catDesc>Qajar</catDesc>
</category>
<category xml:id="keyword_3.32">
<catDesc>Rajput</catDesc>
</category>
<category xml:id="keyword_3.33">
<catDesc>Russian</catDesc>
</category>
<category xml:id="keyword_3.34">
<catDesc>Safavid</catDesc>
</category>
<category xml:id="keyword_3.35">
<catDesc>Sephardi</catDesc>
</category>
<category xml:id="keyword_3.36">
<catDesc>Spanish</catDesc>
</category>
<category xml:id="keyword_3.37">
<catDesc>Timurid</catDesc>
</category>
<category xml:id="keyword_3.38">
<catDesc>Turkish</catDesc>
</category>
<category xml:id="keyword_3.39">
<catDesc>Yemenite</catDesc>
</category>
</category>
<category xml:id="keyword_4">
<catDesc>Descriptive term</catDesc>
<category xml:id="keyword_4.2">
<catDesc>Accordion book</catDesc>
</category>
<category xml:id="keyword_4.3">
<catDesc>Album</catDesc>
</category>
<category xml:id="keyword_4.4">
<catDesc>Annotated</catDesc>
</category>
<category xml:id="keyword_4.5">
<catDesc>Binding</catDesc>
</category>
<category xml:id="keyword_4.6">
<catDesc>Calligraphy</catDesc>
</category>
<category xml:id="keyword_4.7">
<catDesc>Caroline minuscule</catDesc>
</category>
<category xml:id="keyword_4.8">
<catDesc>Chart</catDesc>
</category>
<category xml:id="keyword_4.9">
<catDesc>Charter</catDesc>
</category>
<category xml:id="keyword_4.10">
<catDesc>Colophon</catDesc>
</category>
<category xml:id="keyword_4.11">
<catDesc>Damage</catDesc>
</category>
<category xml:id="keyword_4.12">
<catDesc>Diagrams</catDesc>
</category>
<category xml:id="keyword_4.13">
<catDesc>Drawing</catDesc>
</category>
<category xml:id="keyword_4.14">
<catDesc>Early Abbasid script</catDesc>
</category>
<category xml:id="keyword_4.15">
<catDesc>Fragment</catDesc>
</category>
<category xml:id="keyword_4.16">
<catDesc>Gloss</catDesc>
</category>
<category xml:id="keyword_4.17">
<catDesc>Gothic</catDesc>
</category>
<category xml:id="keyword_4.18">
<catDesc>Gothic book hand</catDesc>
</category>
<category xml:id="keyword_4.19">
<catDesc>Grisaille</catDesc>
</category>
<category xml:id="keyword_4.20">
<catDesc>Grotesques</catDesc>
</category>
<category xml:id="keyword_4.21">
<catDesc>Headpiece</catDesc>
</category>
<category xml:id="keyword_4.22">
<catDesc>Heraldry</catDesc>
</category>
<category xml:id="keyword_4.23">
<catDesc>Historiated initial</catDesc>
</category>
<category xml:id="keyword_4.24">
<catDesc>Humanistic</catDesc>
</category>
<category xml:id="keyword_4.25">
<catDesc>Illumination</catDesc>
</category>
<category xml:id="keyword_4.26">
<catDesc>Illustration</catDesc>
</category>
<category xml:id="keyword_4.27">
<catDesc>Inhabited initial</catDesc>
</category>
<category xml:id="keyword_4.28">
<catDesc>Kufic</catDesc>
</category>
<category xml:id="keyword_4.29">
<catDesc>Manicules</catDesc>
</category>
<category xml:id="keyword_4.30">
<catDesc>Medallion</catDesc>
</category>
<category xml:id="keyword_4.31">
<catDesc>Micrography</catDesc>
</category>
<category xml:id="keyword_4.32">
<catDesc>Miniature</catDesc>
</category>
<category xml:id="keyword_4.33">
<catDesc>Missing leaves</catDesc>
</category>
<category xml:id="keyword_4.34">
<catDesc>Musical notation</catDesc>
</category>
<category xml:id="keyword_4.35">
<catDesc>New Abbasid style</catDesc>
</category>
<category xml:id="keyword_4.36">
<catDesc>Notable binding</catDesc>
</category>
<category xml:id="keyword_4.37">
<catDesc>Original binding</catDesc>
</category>
<category xml:id="keyword_4.38">
<catDesc>Ornament</catDesc>
</category>
<category xml:id="keyword_4.39">
<catDesc>Painting</catDesc>
</category>
<category xml:id="keyword_4.40">
<catDesc>Palimpsest</catDesc>
</category>
<category xml:id="keyword_4.41">
<catDesc>Paper</catDesc>
</category>
<category xml:id="keyword_4.42">
<catDesc>Penwork initial</catDesc>
</category>
<category xml:id="keyword_4.43">
<catDesc>Puzzle initial</catDesc>
</category>
<category xml:id="keyword_4.44">
<catDesc>Ragini</catDesc>
</category>
<category xml:id="keyword_4.45">
<catDesc>Romanesque</catDesc>
</category>
<category xml:id="keyword_4.46">
<catDesc>Scrapbook</catDesc>
</category>
<category xml:id="keyword_4.47">
<catDesc>Scroll</catDesc>
</category>
<category xml:id="keyword_4.48">
<catDesc>Scroll-work initial</catDesc>
</category>
<category xml:id="keyword_4.49">
<catDesc>Seals</catDesc>
</category>
<category xml:id="keyword_4.50">
<catDesc>Tables</catDesc>
</category>
<category xml:id="keyword_4.51">
<catDesc>Textura</catDesc>
</category>
<category xml:id="keyword_4.52">
<catDesc>Treasure binding</catDesc>
</category>
<category xml:id="keyword_4.53">
<catDesc>Watermark</catDesc>
</category>
<category xml:id="keyword_4.54">
<catDesc>White-vine lettering</catDesc>
</category>
<category xml:id="keyword_4.55">
<catDesc>Woodcuts</catDesc>
</category>
</category>
<category xml:id="keyword_5">
<catDesc>Geography</catDesc>
<category xml:id="keyword_5.2">
<catDesc>Afghanistan</catDesc>
</category>
<category xml:id="keyword_5.3">
<catDesc>Armenia</catDesc>
</category>
<category xml:id="keyword_5.4">
<catDesc>Austria</catDesc>
</category>
<category xml:id="keyword_5.5">
<catDesc>Belgium</catDesc>
</category>
<category xml:id="keyword_5.6">
<catDesc>Central Arab lands (Near East)</catDesc>
</category>
<category xml:id="keyword_5.7">
<catDesc>Central Asia</catDesc>
</category>
<category xml:id="keyword_5.8">
<catDesc>China</catDesc>
</category>
<category xml:id="keyword_5.9">
<catDesc>Egypt</catDesc>
</category>
<category xml:id="keyword_5.10">
<catDesc>England</catDesc>
</category>
<category xml:id="keyword_5.11">
<catDesc>Ethiopia</catDesc>
</category>
<category xml:id="keyword_5.12">
<catDesc>Flanders</catDesc>
</category>
<category xml:id="keyword_5.13">
<catDesc>France</catDesc>
</category>
<category xml:id="keyword_5.14">
<catDesc>Germany</catDesc>
</category>
<category xml:id="keyword_5.15">
<catDesc>Greece</catDesc>
</category>
<category xml:id="keyword_5.16">
<catDesc>India</catDesc>
</category>
<category xml:id="keyword_5.17">
<catDesc>Iran</catDesc>
</category>
<category xml:id="keyword_5.18">
<catDesc>Iraq</catDesc>
</category>
<category xml:id="keyword_5.19">
<catDesc>Israel</catDesc>
</category>
<category xml:id="keyword_5.20">
<catDesc>Italy</catDesc>
</category>
<category xml:id="keyword_5.21">
<catDesc>Maghreb</catDesc>
</category>
<category xml:id="keyword_5.22">
<catDesc>Morocco</catDesc>
</category>
<category xml:id="keyword_5.23">
<catDesc>Netherlands</catDesc>
</category>
<category xml:id="keyword_5.24">
<catDesc>Pakistan</catDesc>
</category>
<category xml:id="keyword_5.25">
<catDesc>Portugal</catDesc>
</category>
<category xml:id="keyword_5.26">
<catDesc>Romania</catDesc>
</category>
<category xml:id="keyword_5.27">
<catDesc>Russia</catDesc>
</category>
<category xml:id="keyword_5.28">
<catDesc>Spain</catDesc>
</category>
<category xml:id="keyword_5.29">
<catDesc>Sub-Saharan (West) Africa</catDesc>
</category>
<category xml:id="keyword_5.30">
<catDesc>Turkey</catDesc>
</category>
<category xml:id="keyword_5.31">
<catDesc>Wales</catDesc>
</category>
<category xml:id="keyword_5.32">
<catDesc>Yemen</catDesc>
</category>
</category>
<category xml:id="keyword_6">
<catDesc>Other</catDesc>
<category xml:id="keyword_6.2">
<catDesc>Alchemy</catDesc>
</category>
<category xml:id="keyword_6.3">
<catDesc>Allegory</catDesc>
</category>
<category xml:id="keyword_6.4">
<catDesc>Astrology</catDesc>
</category>
<category xml:id="keyword_6.5">
<catDesc>Astronomy</catDesc>
</category>
<category xml:id="keyword_6.6">
<catDesc>Biblical</catDesc>
</category>
<category xml:id="keyword_6.7">
<catDesc>Binding waste</catDesc>
</category>
<category xml:id="keyword_6.8">
<catDesc>Cartography</catDesc>
</category>
<category xml:id="keyword_6.9">
<catDesc>Church Fathers</catDesc>
</category>
<category xml:id="keyword_6.10">
<catDesc>Cosmology</catDesc>
</category>
<category xml:id="keyword_6.11">
<catDesc>Devotion</catDesc>
</category>
<category xml:id="keyword_6.12">
<catDesc>Financial records</catDesc>
</category>
<category xml:id="keyword_6.13">
<catDesc>Fortune-telling</catDesc>
</category>
<category xml:id="keyword_6.14">
<catDesc>Friendship</catDesc>
</category>
<category xml:id="keyword_6.15">
<catDesc>Games</catDesc>
</category>
<category xml:id="keyword_6.16">
<catDesc>Geography</catDesc>
</category>
<category xml:id="keyword_6.17">
<catDesc>Geomancy</catDesc>
</category>
<category xml:id="keyword_6.18">
<catDesc>Grammar</catDesc>
</category>
<category xml:id="keyword_6.19">
<catDesc>History</catDesc>
</category>
<category xml:id="keyword_6.20">
<catDesc>Kabbalah</catDesc>
</category>
<category xml:id="keyword_6.21">
<catDesc>Legal</catDesc>
</category>
<category xml:id="keyword_6.22">
<catDesc>Literature -- Poetry</catDesc>
</category>
<category xml:id="keyword_6.23">
<catDesc>Literature -- Prose</catDesc>
</category>
<category xml:id="keyword_6.24">
<catDesc>Liturgy</catDesc>
</category>
<category xml:id="keyword_6.25">
<catDesc>Logic</catDesc>
</category>
<category xml:id="keyword_6.26">
<catDesc>Magic</catDesc>
</category>
<category xml:id="keyword_6.27">
<catDesc>Maritime</catDesc>
</category>
<category xml:id="keyword_6.28">
<catDesc>Mathematics</catDesc>
</category>
<category xml:id="keyword_6.29">
<catDesc>Mineralogy</catDesc>
</category>
<category xml:id="keyword_6.30">
<catDesc>Philosophy</catDesc>
</category>
<category xml:id="keyword_6.31">
<catDesc>Piyutim</catDesc>
</category>
<category xml:id="keyword_6.32">
<catDesc>Responsa</catDesc>
</category>
<category xml:id="keyword_6.33">
<catDesc>Science</catDesc>
</category>
<category xml:id="keyword_6.34">
<catDesc>Science -- Medicine</catDesc>
</category>
<category xml:id="keyword_6.35">
<catDesc>Scripture</catDesc>
</category>
<category xml:id="keyword_6.36">
<catDesc>Sufism</catDesc>
</category>
<category xml:id="keyword_6.37">
<catDesc>Theology</catDesc>
</category>
</category>
<category xml:id="keyword_7">
<catDesc/>
<category xml:id="keyword_7.1">
<catDesc> </catDesc>
</category>
</category>
</taxonomy>
</classDecl>
</encodingDesc>
<profileDesc>
<textClass>
<keywords n="keywords">
<term>Document</term>
<term>French</term>
<term>France</term>
<term>Fragment</term>
</keywords>
</textClass>
</profileDesc>
</teiHeader>
<facsimile>
<surface n="Lewis T277 front" xml:id="surface-lewis-t277-1">
<graphic height="3171px" url="master/8755_0000.tif" width="5670px"/>
<graphic height="106px" url="thumb/8755_0000_thumb.jpg" width="190px"/>
<graphic height="1006px" url="web/8755_0000_web.jpg" width="1800px"/>
</surface>
<surface n="Lewis T277 reverse" xml:id="surface-lewis-t277-2">
<graphic height="3171px" url="master/8755_0001.tif" width="5670px"/>
<graphic height="106px" url="thumb/8755_0001_thumb.jpg" width="190px"/>
<graphic height="1006px" url="web/8755_0001_web.jpg" width="1800px"/>
</surface>
</facsimile>
</TEI>
| {
"content_hash": "7caf494b7c9e29c06de1ea7de6ea8cb7",
"timestamp": "",
"source": "github",
"line_count": 760,
"max_line_length": 506,
"avg_line_length": 37.86578947368421,
"alnum_prop": 0.49655987212453956,
"repo_name": "leoba/VisColl",
"id": "8f5aa3aebd8657011b06aafd241e70987bbb3f72",
"size": "28778",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/TEI/lewis_t277_TEI.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17945"
},
{
"name": "HTML",
"bytes": "282678"
},
{
"name": "JavaScript",
"bytes": "23246"
},
{
"name": "Shell",
"bytes": "556"
},
{
"name": "XSLT",
"bytes": "549260"
}
],
"symlink_target": ""
} |
package ec;
/*
 * Group.java
 *
 * Created: Tue Aug 10 20:49:45 1999
 * By: Sean Luke
 */
/**
 * Groups are used for populations and subpopulations. They are slightly
 * different from Prototypes in a few important ways.
 *
 * A Group instance typically is set up with setup(...) and then <i>used</i>
 * (unlike in a Prototype, where the prototype instance is never used,
 * but only makes clones
 * which are used). When a new Group instance is needed, it is created by
 * calling emptyClone() on a previous Group instance, which returns a
 * new instance set up exactly like the first Group instance had been set up
 * when setup(...) was called on it.
 *
 * Groups are Serializable and Cloneable, but you should not clone
 * them -- use emptyClone instead.
 *
 *
 * @author Sean Luke
 * @version 1.0
 */
public interface Group extends Setup, Cloneable
    {
    /** Returns a copy of the object just as it had been
        immediately after Setup was called on it (or on
        an ancestor object) -- that is, a fresh, "empty" Group
        configured identically to this one.  Per the class contract
        above, call this method rather than clone() to obtain a new
        instance, then modify that.
    */
    public Group emptyClone();
    }
| {
"content_hash": "1c084c7f4072deb77fb6fd9f8692d592",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 76,
"avg_line_length": 27.75609756097561,
"alnum_prop": 0.6810193321616872,
"repo_name": "vaisaghvt/gameAnalyzer",
"id": "9426ccf2658f0e23bfda67143237a19bf5587e20",
"size": "1274",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ecj19/ecj/ec/Group.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1466"
},
{
"name": "CSS",
"bytes": "3795"
},
{
"name": "Common Lisp",
"bytes": "1369"
},
{
"name": "HTML",
"bytes": "76889612"
},
{
"name": "Java",
"bytes": "18165919"
},
{
"name": "Makefile",
"bytes": "4432"
},
{
"name": "Python",
"bytes": "5563"
},
{
"name": "Shell",
"bytes": "1892"
},
{
"name": "TeX",
"bytes": "1053"
},
{
"name": "XSLT",
"bytes": "5008"
}
],
"symlink_target": ""
} |
/* tslint:disable:no-unused-variable */
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { DebugElement } from '@angular/core';
import { LandingComponent } from './landing.component';
describe('LandingComponent', () => {
  let fixture: ComponentFixture<LandingComponent>;
  let component: LandingComponent;

  // Compile the component's template and styles once per spec run.
  beforeEach(async(() => {
    TestBed.configureTestingModule({ declarations: [LandingComponent] })
      .compileComponents();
  }));

  // Build a fresh fixture and instance for every spec, then trigger the
  // initial change-detection pass so bindings are resolved.
  beforeEach(() => {
    fixture = TestBed.createComponent(LandingComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
| {
"content_hash": "56f0698a155615c184f720accd11f689",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 73,
"avg_line_length": 28.357142857142858,
"alnum_prop": 0.6599496221662469,
"repo_name": "ShamanisTe/samples-angular2",
"id": "b022e9b3062e85567d4e507c7db2387be1d7f222",
"size": "794",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app/components/landing/landing.component.spec.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1744"
},
{
"name": "HTML",
"bytes": "9779"
},
{
"name": "JavaScript",
"bytes": "1833"
},
{
"name": "TypeScript",
"bytes": "67574"
}
],
"symlink_target": ""
} |
package action
import (
"testing"
"github.com/stretchr/testify/assert"
"helm.sh/helm/v3/pkg/release"
"helm.sh/helm/v3/pkg/storage"
)
// TestListStates verifies the mapping from state names to ListStates flag
// values (including the fallback to ListUnknown for unrecognized names) and
// checks that ListStates values compose correctly as a bitmask filter.
func TestListStates(t *testing.T) {
	for input, expect := range map[string]ListStates{
		"deployed":            ListDeployed,
		"uninstalled":         ListUninstalled,
		"uninstalling":        ListUninstalling,
		"superseded":          ListSuperseded,
		"failed":              ListFailed,
		"pending-install":     ListPendingInstall,
		"pending-rollback":    ListPendingRollback,
		"pending-upgrade":     ListPendingUpgrade,
		"unknown":             ListUnknown,
		"totally made up key": ListUnknown,
	} {
		if expect != expect.FromName(input) {
			t.Errorf("Expected %d for %s", expect, input)
		}
		// This is a cheap way to verify that ListAll actually allows everything but Unknown
		if got := expect.FromName(input); got != ListUnknown && got&ListAll == 0 {
			t.Errorf("Expected %s to match the ListAll filter", input)
		}
	}

	// A mask combining two states must match exactly those states and no others.
	filter := ListDeployed | ListPendingRollback
	if status := filter.FromName("deployed"); filter&status == 0 {
		t.Errorf("Expected %d to match mask %d", status, filter)
	}
	if status := filter.FromName("failed"); filter&status != 0 {
		t.Errorf("Expected %d to fail to match mask %d", status, filter)
	}
}
func TestList_Empty(t *testing.T) {
lister := NewList(actionConfigFixture(t))
list, err := lister.Run()
assert.NoError(t, err)
assert.Len(t, list, 0)
}
// newListFixture returns a List action wired to a fresh in-memory test
// configuration.
func newListFixture(t *testing.T) *List {
	return NewList(actionConfigFixture(t))
}
func TestList_OneNamespace(t *testing.T) {
is := assert.New(t)
lister := newListFixture(t)
makeMeSomeReleases(lister.cfg.Releases, t)
list, err := lister.Run()
is.NoError(err)
is.Len(list, 3)
}
// TestList_AllNamespaces verifies that listing with AllNamespaces enabled
// still returns every fixture release.
func TestList_AllNamespaces(t *testing.T) {
	is := assert.New(t)
	lister := newListFixture(t)
	makeMeSomeReleases(lister.cfg.Releases, t)
	lister.AllNamespaces = true
	// Recompute the state mask after toggling options (presumably SetStateMask
	// derives it from the current fields -- confirm in list.go).
	lister.SetStateMask()
	list, err := lister.Run()
	is.NoError(err)
	is.Len(list, 3)
}
// TestList_Sort verifies descending sort by release name.
func TestList_Sort(t *testing.T) {
	is := assert.New(t)
	lister := newListFixture(t)
	lister.Sort = ByNameDesc // Other sorts are tested elsewhere
	makeMeSomeReleases(lister.cfg.Releases, t)
	list, err := lister.Run()
	is.NoError(err)
	is.Len(list, 3)
	// Reverse-lexicographic order: "two" > "three" > "one".
	is.Equal("two", list[0].Name)
	is.Equal("three", list[1].Name)
	is.Equal("one", list[2].Name)
}
// TestList_Limit verifies that Limit truncates the (name-sorted) result set.
func TestList_Limit(t *testing.T) {
	is := assert.New(t)
	lister := newListFixture(t)
	lister.Limit = 2
	makeMeSomeReleases(lister.cfg.Releases, t)
	list, err := lister.Run()
	is.NoError(err)
	is.Len(list, 2)

	// Lex order means one, three, two
	is.Equal("one", list[0].Name)
	is.Equal("three", list[1].Name)
}
func TestList_BigLimit(t *testing.T) {
is := assert.New(t)
lister := newListFixture(t)
lister.Limit = 20
makeMeSomeReleases(lister.cfg.Releases, t)
list, err := lister.Run()
is.NoError(err)
is.Len(list, 3)
// Lex order means one, three, two
is.Equal("one", list[0].Name)
is.Equal("three", list[1].Name)
is.Equal("two", list[2].Name)
}
// TestList_LimitOffset verifies Limit combined with Offset: skip the first
// name-sorted release and return the next two.
func TestList_LimitOffset(t *testing.T) {
	is := assert.New(t)
	lister := newListFixture(t)
	lister.Limit = 2
	lister.Offset = 1
	makeMeSomeReleases(lister.cfg.Releases, t)
	list, err := lister.Run()
	is.NoError(err)
	is.Len(list, 2)

	// Lex order means one, three, two
	is.Equal("three", list[0].Name)
	is.Equal("two", list[1].Name)
}
// TestList_LimitOffsetOutOfBounds verifies the paging edge cases: an offset
// past the end yields an empty result, and a limit past the end yields the
// remaining tail without error.
func TestList_LimitOffsetOutOfBounds(t *testing.T) {
	is := assert.New(t)
	lister := newListFixture(t)
	lister.Limit = 2
	lister.Offset = 3 // Last item is index 2
	makeMeSomeReleases(lister.cfg.Releases, t)
	list, err := lister.Run()
	is.NoError(err)
	is.Len(list, 0)

	// Limit overshoots but offset is valid: return the two remaining items.
	lister.Limit = 10
	lister.Offset = 1
	list, err = lister.Run()
	is.NoError(err)
	is.Len(list, 2)
}
// TestList_StateMask verifies filtering by release state: the default mask
// hides an uninstalled release, a narrow mask shows only it, and OR-ing
// masks widens the filter.
func TestList_StateMask(t *testing.T) {
	is := assert.New(t)
	lister := newListFixture(t)
	makeMeSomeReleases(lister.cfg.Releases, t)
	// Flip release "one" to uninstalled so the fixtures cover two states.
	one, err := lister.cfg.Releases.Get("one", 1)
	is.NoError(err)
	one.SetStatus(release.StatusUninstalled, "uninstalled")
	err = lister.cfg.Releases.Update(one)
	is.NoError(err)

	// Default mask: the uninstalled release is filtered out.
	res, err := lister.Run()
	is.NoError(err)
	is.Len(res, 2)
	is.Equal("three", res[0].Name)
	is.Equal("two", res[1].Name)

	// Narrow mask: only the uninstalled release matches.
	lister.StateMask = ListUninstalled
	res, err = lister.Run()
	is.NoError(err)
	is.Len(res, 1)
	is.Equal("one", res[0].Name)

	// Widened mask: uninstalled plus deployed matches all three.
	lister.StateMask |= ListDeployed
	res, err = lister.Run()
	is.NoError(err)
	is.Len(res, 3)
}
// TestList_StateMaskWithStaleRevisions verifies that state filtering looks
// only at each release's most recent revision, not at older failed ones.
func TestList_StateMaskWithStaleRevisions(t *testing.T) {
	is := assert.New(t)
	lister := newListFixture(t)
	lister.StateMask = ListFailed
	makeMeSomeReleasesWithStaleFailure(lister.cfg.Releases, t)

	res, err := lister.Run()

	is.NoError(err)
	is.Len(res, 1)

	// "dirty" release should _not_ be present as most recent
	// release is deployed despite failed release in past
	is.Equal("failed", res[0].Name)
}
// makeMeSomeReleasesWithStaleFailure seeds the store with three releases:
// "clean" (deployed), "dirty" (three revisions: deployed, failed, then
// deployed again -- the failure is stale), and "failed" (currently failed).
func makeMeSomeReleasesWithStaleFailure(store *storage.Storage, t *testing.T) {
	t.Helper()
	one := namedReleaseStub("clean", release.StatusDeployed)
	one.Namespace = "default"
	one.Version = 1

	two := namedReleaseStub("dirty", release.StatusDeployed)
	two.Namespace = "default"
	two.Version = 1

	// Revision 2 of "dirty" failed...
	three := namedReleaseStub("dirty", release.StatusFailed)
	three.Namespace = "default"
	three.Version = 2

	// ...but revision 3 deployed successfully, superseding the failure.
	four := namedReleaseStub("dirty", release.StatusDeployed)
	four.Namespace = "default"
	four.Version = 3

	five := namedReleaseStub("failed", release.StatusFailed)
	five.Namespace = "default"
	five.Version = 1

	for _, rel := range []*release.Release{one, two, three, four, five} {
		if err := store.Create(rel); err != nil {
			t.Fatal(err)
		}
	}

	all, err := store.ListReleases()
	assert.NoError(t, err)
	assert.Len(t, all, 5, "sanity test: five items added")
}
func TestList_Filter(t *testing.T) {
is := assert.New(t)
lister := newListFixture(t)
lister.Filter = "th."
makeMeSomeReleases(lister.cfg.Releases, t)
res, err := lister.Run()
is.NoError(err)
is.Len(res, 1)
is.Equal("three", res[0].Name)
}
func TestList_FilterFailsCompile(t *testing.T) {
is := assert.New(t)
lister := newListFixture(t)
lister.Filter = "t[h.{{{"
makeMeSomeReleases(lister.cfg.Releases, t)
_, err := lister.Run()
is.Error(err)
}
// makeMeSomeReleases seeds the store with three single-revision releases
// named "one", "two", and "three" in the "default" namespace.
func makeMeSomeReleases(store *storage.Storage, t *testing.T) {
	t.Helper()
	one := releaseStub()
	one.Name = "one"
	one.Namespace = "default"
	one.Version = 1
	two := releaseStub()
	two.Name = "two"
	two.Namespace = "default"
	two.Version = 2
	three := releaseStub()
	three.Name = "three"
	three.Namespace = "default"
	three.Version = 3

	for _, rel := range []*release.Release{one, two, three} {
		if err := store.Create(rel); err != nil {
			t.Fatal(err)
		}
	}

	all, err := store.ListReleases()
	assert.NoError(t, err)
	assert.Len(t, all, 3, "sanity test: three items added")
}
// TestFilterLatestReleases verifies that filterLatestReleases keeps only the
// newest revision per (name, namespace) pair, and never collapses releases
// that share a name across different namespaces.
func TestFilterLatestReleases(t *testing.T) {
	t.Run("should filter old versions of the same release", func(t *testing.T) {
		r1 := releaseStub()
		r1.Name = "r"
		r1.Version = 1
		r2 := releaseStub()
		r2.Name = "r"
		r2.Version = 2
		another := releaseStub()
		another.Name = "another"
		another.Version = 1

		filteredList := filterLatestReleases([]*release.Release{r1, r2, another})
		expectedFilteredList := []*release.Release{r2, another}

		assert.ElementsMatch(t, expectedFilteredList, filteredList)
	})

	t.Run("should not filter out any version across namespaces", func(t *testing.T) {
		// Same name, different namespaces: both must survive.
		r1 := releaseStub()
		r1.Name = "r"
		r1.Namespace = "default"
		r1.Version = 1
		r2 := releaseStub()
		r2.Name = "r"
		r2.Namespace = "testing"
		r2.Version = 2

		filteredList := filterLatestReleases([]*release.Release{r1, r2})
		expectedFilteredList := []*release.Release{r1, r2}

		assert.ElementsMatch(t, expectedFilteredList, filteredList)
	})
}
// TestSelectorList verifies label-selector filtering: invalid selector syntax
// errors out, equality selects matching releases, and inequality also matches
// releases that lack the label entirely.
func TestSelectorList(t *testing.T) {
	r1 := releaseStub()
	r1.Name = "r1"
	r1.Version = 1
	r1.Labels = map[string]string{"key": "value1"}
	r2 := releaseStub()
	r2.Name = "r2"
	r2.Version = 1
	r2.Labels = map[string]string{"key": "value2"}
	r3 := releaseStub()
	r3.Name = "r3"
	r3.Version = 1
	r3.Labels = map[string]string{}

	lister := newListFixture(t)
	for _, rel := range []*release.Release{r1, r2, r3} {
		if err := lister.cfg.Releases.Create(rel); err != nil {
			t.Fatal(err)
		}
	}

	// Note: the subtests share `lister` and each overwrite its Selector.
	t.Run("should fail selector parsing", func(t *testing.T) {
		is := assert.New(t)

		lister.Selector = "a?=b"
		_, err := lister.Run()
		is.Error(err)
	})

	t.Run("should select one release with matching label", func(t *testing.T) {
		lister.Selector = "key==value1"
		res, _ := lister.Run()

		expectedFilteredList := []*release.Release{r1}
		assert.ElementsMatch(t, expectedFilteredList, res)
	})

	t.Run("should select two releases with non matching label", func(t *testing.T) {
		// r3 has no "key" label at all, and "key!=value1" matches that too.
		lister.Selector = "key!=value1"
		res, _ := lister.Run()

		expectedFilteredList := []*release.Release{r2, r3}
		assert.ElementsMatch(t, expectedFilteredList, res)
	})
}
| {
"content_hash": "40246cbe862cec1a132cfca0a97e261d",
"timestamp": "",
"source": "github",
"line_count": 354,
"max_line_length": 86,
"avg_line_length": 24.80225988700565,
"alnum_prop": 0.6780182232346241,
"repo_name": "bacongobbler/helm",
"id": "73009d52341dbadb59801db2b51ac054a172bb12",
"size": "9338",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "pkg/action/list_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "1649534"
},
{
"name": "Makefile",
"bytes": "7784"
},
{
"name": "Shell",
"bytes": "32580"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?><!--
~ Copyright (c) 2021. Adventech <[email protected]>
~
~ Permission is hereby granted, free of charge, to any person obtaining a copy
~ of this software and associated documentation files (the "Software"), to deal
~ in the Software without restriction, including without limitation the rights
~ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
~ copies of the Software, and to permit persons to whom the Software is
~ furnished to do so, subject to the following conditions:
~
~ The above copyright notice and this permission notice shall be included in
~ all copies or substantial portions of the Software.
~
~ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
~ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
~ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
~ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
~ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
~ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
~ THE SOFTWARE.
-->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:id="@+id/widget_root"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:layout_gravity="center"
    android:background="@drawable/bg_widget"
    android:orientation="vertical"
    tools:layout_height="wrap_content"
    tools:layout_width="300dp">

    <!-- Lesson header (cover/title info), followed by a separator. -->
    <include layout="@layout/include_lesson_info" />

    <include layout="@layout/include_horizontal_divider" />

    <!-- Seven identical day rows (title + date), separated by dividers.
         All tools:text values are design-time previews only. -->
    <LinearLayout
        android:id="@+id/widget_day_one_container"
        style="@style/DayContainer"
        android:layout_marginTop="@dimen/spacing_normal">

        <TextView
            android:id="@+id/widget_day_one"
            style="@style/DayContainer.Title"
            tools:text="Living in a 24-7 Society" />

        <TextView
            android:id="@+id/widget_day_one_date"
            style="@style/DayContainer.Date"
            tools:text="Sat. June 26" />
    </LinearLayout>

    <include
        style="@style/DayContainer.Divider"
        layout="@layout/include_horizontal_divider" />

    <LinearLayout
        android:id="@+id/widget_day_two_container"
        style="@style/DayContainer">

        <TextView
            android:id="@+id/widget_day_two"
            style="@style/DayContainer.Title"
            tools:text="Living in a 24-7 Society" />

        <TextView
            android:id="@+id/widget_day_two_date"
            style="@style/DayContainer.Date"
            tools:text="Sun. June 27" />
    </LinearLayout>

    <include
        style="@style/DayContainer.Divider"
        layout="@layout/include_horizontal_divider" />

    <LinearLayout
        android:id="@+id/widget_day_three_container"
        style="@style/DayContainer">

        <TextView
            android:id="@+id/widget_day_three"
            style="@style/DayContainer.Title"
            tools:text="Living in a 24-7 Society" />

        <TextView
            android:id="@+id/widget_day_three_date"
            style="@style/DayContainer.Date"
            tools:text="Mon. June 28" />
    </LinearLayout>

    <include
        style="@style/DayContainer.Divider"
        layout="@layout/include_horizontal_divider" />

    <LinearLayout
        android:id="@+id/widget_day_four_container"
        style="@style/DayContainer">

        <TextView
            android:id="@+id/widget_day_four"
            style="@style/DayContainer.Title"
            tools:text="Living in a 24-7 Society" />

        <TextView
            android:id="@+id/widget_day_four_date"
            style="@style/DayContainer.Date"
            tools:text="Tue. June 29" />
    </LinearLayout>

    <include
        style="@style/DayContainer.Divider"
        layout="@layout/include_horizontal_divider" />

    <LinearLayout
        android:id="@+id/widget_day_five_container"
        style="@style/DayContainer">

        <TextView
            android:id="@+id/widget_day_five"
            style="@style/DayContainer.Title"
            tools:text="Living in a 24-7 Society" />

        <TextView
            android:id="@+id/widget_day_five_date"
            style="@style/DayContainer.Date"
            tools:text="Wed. June 30" />
    </LinearLayout>

    <include
        style="@style/DayContainer.Divider"
        layout="@layout/include_horizontal_divider" />

    <LinearLayout
        android:id="@+id/widget_day_six_container"
        style="@style/DayContainer">

        <TextView
            android:id="@+id/widget_day_six"
            style="@style/DayContainer.Title"
            tools:text="Living in a 24-7 Society" />

        <TextView
            android:id="@+id/widget_day_six_date"
            style="@style/DayContainer.Date"
            tools:text="Thu. July 01" />
    </LinearLayout>

    <include
        style="@style/DayContainer.Divider"
        layout="@layout/include_horizontal_divider" />

    <LinearLayout
        android:id="@+id/widget_day_seven_container"
        style="@style/DayContainer">

        <TextView
            android:id="@+id/widget_day_seven"
            style="@style/DayContainer.Title"
            tools:text="Living in a 24-7 Society" />

        <TextView
            android:id="@+id/widget_day_seven_date"
            style="@style/DayContainer.Date"
            tools:text="Fri. July 02" />
    </LinearLayout>
</LinearLayout>
"content_hash": "40ae017c7cf4eddd76434dbd1161a345",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 81,
"avg_line_length": 33.482142857142854,
"alnum_prop": 0.6188444444444444,
"repo_name": "Adventech/sabbath-school-android-2",
"id": "d3a7b2a0f1bf974c416288473e381a98e5d4156f",
"size": "5625",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "features/app-widgets/src/main/res/layout/week_lesson_app_widget.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5966"
},
{
"name": "HTML",
"bytes": "2201"
},
{
"name": "Java",
"bytes": "295672"
},
{
"name": "JavaScript",
"bytes": "5086"
}
],
"symlink_target": ""
} |
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.changes;
import com.google.gerrit.client.info.AccountInfo;
import com.google.gerrit.client.info.ChangeInfo;
import com.google.gerrit.client.info.ChangeInfo.CommitInfo;
import com.google.gerrit.client.info.ChangeInfo.EditInfo;
import com.google.gerrit.client.info.ChangeInfo.IncludedInInfo;
import com.google.gerrit.client.rpc.CallbackGroup.Callback;
import com.google.gerrit.client.rpc.NativeString;
import com.google.gerrit.client.rpc.RestApi;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.user.client.rpc.AsyncCallback;
/** A collection of static methods which work on the Gerrit REST API for specific changes. */
public class ChangeApi {
  /** Abandon the change, ending its review. */
  public static void abandon(
      @Nullable String project, int id, String msg, AsyncCallback<ChangeInfo> cb) {
    MessageInput input = MessageInput.create();
    input.message(emptyToNull(msg));
    call(project, id, "abandon").post(input, cb);
  }

  /** Create a new work-in-progress change. */
  public static void createChange(
      String project,
      String branch,
      String topic,
      String subject,
      String base,
      AsyncCallback<ChangeInfo> cb) {
    CreateChangeInput input = CreateChangeInput.create();
    input.project(emptyToNull(project));
    input.branch(emptyToNull(branch));
    input.topic(emptyToNull(topic));
    input.subject(emptyToNull(subject));
    input.baseChange(emptyToNull(base));
    input.workInProgress(true);

    new RestApi("/changes/").post(input, cb);
  }

  /** Restore a previously abandoned change to be open again. */
  public static void restore(
      @Nullable String project, int id, String msg, AsyncCallback<ChangeInfo> cb) {
    MessageInput input = MessageInput.create();
    input.message(emptyToNull(msg));
    call(project, id, "restore").post(input, cb);
  }

  /** Create a new change that reverts the delta caused by this change. */
  public static void revert(
      @Nullable String project, int id, String msg, AsyncCallback<ChangeInfo> cb) {
    MessageInput input = MessageInput.create();
    input.message(emptyToNull(msg));
    call(project, id, "revert").post(input, cb);
  }

  /** Update the topic of a change. An empty/null topic deletes the current topic. */
  public static void topic(
      @Nullable String project, int id, String topic, AsyncCallback<String> cb) {
    RestApi call = call(project, id, "topic");
    topic = emptyToNull(topic);
    if (topic != null) {
      TopicInput input = TopicInput.create();
      input.topic(topic);
      call.put(input, NativeString.unwrap(cb));
    } else {
      call.delete(NativeString.unwrap(cb));
    }
  }

  /** Fetch the change detail and deliver it to the callback. */
  public static void detail(@Nullable String project, int id, AsyncCallback<ChangeInfo> cb) {
    detail(project, id).get(cb);
  }

  /** REST endpoint for the change detail view. */
  public static RestApi detail(@Nullable String project, int id) {
    return call(project, id, "detail");
  }

  /** REST endpoint for blame of a file at the given patch set ("base" selects the parent side). */
  public static RestApi blame(@Nullable String project, PatchSet.Id id, String path, boolean base) {
    return revision(project, id).view("files").id(path).view("blame").addParameter("base", base);
  }

  /** REST endpoint for the actions available on a revision; defaults to the current revision. */
  public static RestApi actions(@Nullable String project, int id, String revision) {
    if (revision == null || revision.equals("")) {
      revision = "current";
    }
    return call(project, id, revision, "actions");
  }

  /** Remove the current assignee from the change. */
  public static void deleteAssignee(
      @Nullable String project, int id, AsyncCallback<AccountInfo> cb) {
    change(project, id).view("assignee").delete(cb);
  }

  /** Set the change's assignee to the given user. */
  public static void setAssignee(
      @Nullable String project, int id, String user, AsyncCallback<AccountInfo> cb) {
    AssigneeInput input = AssigneeInput.create();
    input.assignee(user);
    change(project, id).view("assignee").put(input, cb);
  }

  /** Mark the change as private. */
  public static void markPrivate(
      @Nullable String project, int id, AsyncCallback<JavaScriptObject> cb) {
    change(project, id).view("private").post(PrivateInput.create(), cb);
  }

  /** Remove the private flag from the change. */
  public static void unmarkPrivate(
      @Nullable String project, int id, AsyncCallback<JavaScriptObject> cb) {
    change(project, id).view("private.delete").post(PrivateInput.create(), cb);
  }

  /** REST endpoint for published comments on the change. */
  public static RestApi comments(@Nullable String project, int id) {
    return call(project, id, "comments");
  }

  /** REST endpoint for the caller's draft comments on the change. */
  public static RestApi drafts(@Nullable String project, int id) {
    return call(project, id, "drafts");
  }

  /** Fetch the change edit, if one exists. */
  public static void edit(@Nullable String project, int id, AsyncCallback<EditInfo> cb) {
    edit(project, id).get(cb);
  }

  /** Fetch the change edit including its file list. */
  public static void editWithFiles(@Nullable String project, int id, AsyncCallback<EditInfo> cb) {
    edit(project, id).addParameterTrue("list").get(cb);
  }

  /** REST endpoint for the change edit. */
  public static RestApi edit(@Nullable String project, int id) {
    return change(project, id).view("edit");
  }

  /** REST endpoint for the change edit, including download commands. */
  public static RestApi editWithCommands(@Nullable String project, int id) {
    return edit(project, id).addParameterTrue("download-commands");
  }

  /** Fetch the branches/tags this change is included in. */
  public static void includedIn(
      @Nullable String project, int id, AsyncCallback<IncludedInInfo> cb) {
    call(project, id, "in").get(cb);
  }

  /** REST endpoint for a specific revision of a change. */
  public static RestApi revision(@Nullable String project, int id, String revision) {
    return change(project, id).view("revisions").id(revision);
  }

  /**
   * REST endpoint for the revision identified by a PatchSet.Id, resolving through the
   * RevisionInfoCache when possible to address the revision by its commit SHA-1.
   */
  public static RestApi revision(@Nullable String project, PatchSet.Id id) {
    int cn = id.getParentKey().get();
    String revision = RevisionInfoCache.get(id);
    if (revision != null) {
      return revision(project, cn, revision);
    }
    // Cache miss: fall back to addressing the revision by patch set number.
    return change(project, cn).view("revisions").id(id.get());
  }

  /** REST endpoint for the change's reviewers. */
  public static RestApi reviewers(@Nullable String project, int id) {
    return change(project, id).view("reviewers");
  }

  /** REST endpoint for reviewer suggestions (q=query, n=limit, e=flag forwarded as-is). */
  public static RestApi suggestReviewers(
      @Nullable String project, int id, String q, int n, boolean e) {
    RestApi api =
        change(project, id).view("suggest_reviewers").addParameter("n", n).addParameter("e", e);
    if (q != null) {
      api.addParameter("q", q);
    }
    return api;
  }

  /** REST endpoint for a single vote by a reviewer on the change. */
  public static RestApi vote(@Nullable String project, int id, int reviewer, String vote) {
    return reviewer(project, id, reviewer).view("votes").id(vote);
  }

  /** REST endpoint for a reviewer addressed by account id. */
  public static RestApi reviewer(@Nullable String project, int id, int reviewer) {
    return change(project, id).view("reviewers").id(reviewer);
  }

  /** REST endpoint for a reviewer addressed by a string identifier. */
  public static RestApi reviewer(@Nullable String project, int id, String reviewer) {
    return change(project, id).view("reviewers").id(reviewer);
  }

  /** REST endpoint for the change's hashtags. */
  public static RestApi hashtags(@Nullable String project, int changeId) {
    return change(project, changeId).view("hashtags");
  }

  /** REST endpoint for a single hashtag on the change. */
  public static RestApi hashtag(@Nullable String project, int changeId, String hashtag) {
    return change(project, changeId).view("hashtags").id(hashtag);
  }

  /** Cherry-pick a specific revision of a change onto a destination branch. */
  public static void cherrypick(
      String project,
      int id,
      String commit,
      String destination,
      String message,
      AsyncCallback<ChangeInfo> cb) {
    CherryPickInput cherryPickInput = CherryPickInput.create();
    cherryPickInput.setMessage(message);
    cherryPickInput.setDestination(destination);
    call(project, id, commit, "cherrypick").post(cherryPickInput, cb);
  }

  /** Move change to another branch. */
  public static void move(
      String project, int id, String destination, String message, AsyncCallback<ChangeInfo> cb) {
    MoveInput moveInput = MoveInput.create();
    moveInput.setMessage(message);
    moveInput.setDestinationBranch(destination);
    change(project, id).view("move").post(moveInput, cb);
  }

  /** Edit commit message for specific revision of a change. */
  public static void message(
      @Nullable String project,
      int id,
      String commit,
      String message,
      AsyncCallback<JavaScriptObject> cb) {
    CherryPickInput input = CherryPickInput.create();
    input.setMessage(message);
    call(project, id, commit, "message").post(input, cb);
  }

  /** Submit a specific revision of a change. */
  public static void submit(
      @Nullable String project, int id, String commit, AsyncCallback<SubmitInfo> cb) {
    JavaScriptObject in = JavaScriptObject.createObject();
    call(project, id, commit, "submit").post(in, cb);
  }

  /** Delete a specific draft change. */
  public static void deleteChange(
      @Nullable String project, int id, AsyncCallback<JavaScriptObject> cb) {
    change(project, id).delete(cb);
  }

  /** Delete change edit. */
  public static void deleteEdit(
      @Nullable String project, int id, AsyncCallback<JavaScriptObject> cb) {
    edit(project, id).delete(cb);
  }

  /** Publish change edit. */
  public static void publishEdit(
      @Nullable String project, int id, AsyncCallback<JavaScriptObject> cb) {
    JavaScriptObject in = JavaScriptObject.createObject();
    change(project, id).view("edit:publish").post(in, cb);
  }

  /** Rebase change edit on latest patch set. */
  public static void rebaseEdit(
      @Nullable String project, int id, AsyncCallback<JavaScriptObject> cb) {
    JavaScriptObject in = JavaScriptObject.createObject();
    change(project, id).view("edit:rebase").post(in, cb);
  }

  /** Rebase a revision onto the branch tip or another change. */
  public static void rebase(
      @Nullable String project, int id, String commit, String base, AsyncCallback<ChangeInfo> cb) {
    RebaseInput rebaseInput = RebaseInput.create();
    rebaseInput.setBase(base);
    call(project, id, commit, "rebase").post(rebaseInput, cb);
  }

  /** JSON payload carrying an optional message (abandon/restore/revert). */
  private static class MessageInput extends JavaScriptObject {
    final native void message(String m) /*-{ if(m)this.message=m; }-*/;

    static MessageInput create() {
      return (MessageInput) createObject();
    }

    protected MessageInput() {}
  }

  /** JSON payload naming the account to assign. */
  private static class AssigneeInput extends JavaScriptObject {
    final native void assignee(String a) /*-{ if(a)this.assignee=a; }-*/;

    static AssigneeInput create() {
      return (AssigneeInput) createObject();
    }

    protected AssigneeInput() {}
  }

  /** JSON payload carrying a topic name. */
  private static class TopicInput extends JavaScriptObject {
    final native void topic(String t) /*-{ if(t)this.topic=t; }-*/;

    static TopicInput create() {
      return (TopicInput) createObject();
    }

    protected TopicInput() {}
  }

  /** JSON payload for creating a change; setters skip null/empty values. */
  private static class CreateChangeInput extends JavaScriptObject {
    static CreateChangeInput create() {
      return (CreateChangeInput) createObject();
    }

    public final native void branch(String b) /*-{ if(b)this.branch=b; }-*/;

    public final native void topic(String t) /*-{ if(t)this.topic=t; }-*/;

    public final native void project(String p) /*-{ if(p)this.project=p; }-*/;

    public final native void subject(String s) /*-{ if(s)this.subject=s; }-*/;

    public final native void status(String s) /*-{ if(s)this.status=s; }-*/;

    public final native void baseChange(String b) /*-{ if(b)this.base_change=b; }-*/;

    public final native void workInProgress(Boolean b) /*-{ if(b)this.work_in_progress=b; }-*/;

    protected CreateChangeInput() {}
  }

  /** JSON payload for cherry-pick; also reused by message() for commit-message edits. */
  private static class CherryPickInput extends JavaScriptObject {
    static CherryPickInput create() {
      return (CherryPickInput) createObject();
    }

    final native void setDestination(String d) /*-{ this.destination = d; }-*/;

    final native void setMessage(String m) /*-{ this.message = m; }-*/;

    protected CherryPickInput() {}
  }

  /** JSON payload for moving a change to another branch. */
  private static class MoveInput extends JavaScriptObject {
    static MoveInput create() {
      return (MoveInput) createObject();
    }

    final native void setDestinationBranch(String d) /*-{ this.destination_branch = d; }-*/;

    final native void setMessage(String m) /*-{ this.message = m; }-*/;

    protected MoveInput() {}
  }

  /** JSON payload for marking/unmarking a change private. */
  private static class PrivateInput extends JavaScriptObject {
    static PrivateInput create() {
      return (PrivateInput) createObject();
    }

    final native void setMessage(String m) /*-{ this.message = m; }-*/;

    protected PrivateInput() {}
  }

  /** JSON payload naming the new base for a rebase. */
  private static class RebaseInput extends JavaScriptObject {
    final native void setBase(String b) /*-{ this.base = b; }-*/;

    static RebaseInput create() {
      return (RebaseInput) createObject();
    }

    protected RebaseInput() {}
  }

  /** Build a REST call for an action on the change itself. */
  private static RestApi call(@Nullable String project, int id, String action) {
    return change(project, id).view(action);
  }

  /** Build a REST call for an action on a specific revision of the change. */
  private static RestApi call(@Nullable String project, int id, String commit, String action) {
    return change(project, id).view("revisions").id(commit).view(action);
  }

  /**
   * Base REST endpoint for a change, using the project~id form when the project
   * is known and the bare numeric id otherwise.
   */
  public static RestApi change(@Nullable String project, int id) {
    if (project == null) {
      return new RestApi("/changes/").id(String.valueOf(id));
    }
    return new RestApi("/changes/").id(project, id);
  }

  /** Normalize an empty string to null. */
  public static String emptyToNull(String str) {
    return str == null || str.isEmpty() ? null : str;
  }

  /** Fetch the commit for a revision, including web links. */
  public static void commitWithLinks(
      @Nullable String project, int changeId, String revision, Callback<CommitInfo> callback) {
    revision(project, changeId, revision).view("commit").addParameterTrue("links").get(callback);
  }
}
| {
"content_hash": "bfcd81f2b5432113742f869c34c04904",
"timestamp": "",
"source": "github",
"line_count": 401,
"max_line_length": 100,
"avg_line_length": 34.43142144638404,
"alnum_prop": 0.6925472586369233,
"repo_name": "WANdisco/gerrit",
"id": "02be8c763990effb11b40850d7918ccfbb0c80d1",
"size": "13807",
"binary": false,
"copies": "2",
"ref": "refs/heads/2.16.21_WD",
"path": "gerrit-gwtui/src/main/java/com/google/gerrit/client/changes/ChangeApi.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "47431"
},
{
"name": "GAP",
"bytes": "4119"
},
{
"name": "Go",
"bytes": "5563"
},
{
"name": "HTML",
"bytes": "726266"
},
{
"name": "Java",
"bytes": "11491861"
},
{
"name": "JavaScript",
"bytes": "404723"
},
{
"name": "Makefile",
"bytes": "7107"
},
{
"name": "PLpgSQL",
"bytes": "3576"
},
{
"name": "Perl",
"bytes": "9943"
},
{
"name": "Prolog",
"bytes": "17904"
},
{
"name": "Python",
"bytes": "267395"
},
{
"name": "Roff",
"bytes": "32749"
},
{
"name": "Shell",
"bytes": "133358"
}
],
"symlink_target": ""
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator;
import io.prestosql.spi.Page;
import io.prestosql.spi.block.Block;
import java.util.List;
import java.util.Optional;
import java.util.OptionalInt;
import static io.prestosql.spi.type.BigintType.BIGINT;
/**
 * Cursor over the probe-side rows of one input {@link Page} of a hash join.
 * Exposes the join-key channels as a separate probe page and, when a
 * precomputed hash channel is configured, forwards its raw hash to the
 * {@link LookupSource} lookup.
 */
public class JoinProbe
{
    /**
     * Builds {@link JoinProbe} instances that all share the same channel
     * configuration (output channels, join-key channels, optional hash channel).
     */
    public static class JoinProbeFactory
    {
        private final int[] probeOutputChannels;
        private final List<Integer> probeJoinChannels;
        private final OptionalInt probeHashChannel;

        public JoinProbeFactory(int[] probeOutputChannels, List<Integer> probeJoinChannels, OptionalInt probeHashChannel)
        {
            this.probeOutputChannels = probeOutputChannels;
            this.probeJoinChannels = probeJoinChannels;
            this.probeHashChannel = probeHashChannel;
        }

        public JoinProbe createJoinProbe(Page page)
        {
            return new JoinProbe(probeOutputChannels, page, probeJoinChannels, probeHashChannel);
        }
    }

    private final int[] probeOutputChannels;
    private final int positionCount;
    private final Block[] probeBlocks;
    private final Page page;
    private final Page probePage;
    private final Optional<Block> probeHashBlock;
    // Cursor starts before the first row; advanceNextPosition() moves it forward.
    private int position = -1;

    private JoinProbe(int[] probeOutputChannels, Page page, List<Integer> probeJoinChannels, OptionalInt probeHashChannel)
    {
        this.probeOutputChannels = probeOutputChannels;
        this.page = page;
        this.positionCount = page.getPositionCount();

        // Collect the join-key blocks referenced by probeJoinChannels.
        Block[] joinBlocks = new Block[probeJoinChannels.size()];
        int index = 0;
        for (Integer channel : probeJoinChannels) {
            joinBlocks[index] = page.getBlock(channel);
            index++;
        }
        this.probeBlocks = joinBlocks;
        this.probePage = new Page(positionCount, joinBlocks);
        this.probeHashBlock = probeHashChannel.isPresent()
                ? Optional.of(page.getBlock(probeHashChannel.getAsInt()))
                : Optional.empty();
    }

    public int[] getOutputChannels()
    {
        return probeOutputChannels;
    }

    /**
     * Advances the cursor to the next row.
     *
     * @return true while the cursor is still within the page
     */
    public boolean advanceNextPosition()
    {
        return ++position < positionCount;
    }

    /**
     * Looks up the join position of the current row, or -1 when any join key
     * is null (a null key can never match).
     */
    public long getCurrentJoinPosition(LookupSource lookupSource)
    {
        if (currentRowContainsNull()) {
            return -1;
        }
        if (!probeHashBlock.isPresent()) {
            return lookupSource.getJoinPosition(position, probePage, page);
        }
        // Reuse the precomputed hash instead of rehashing the key columns.
        long rawHash = BIGINT.getLong(probeHashBlock.get(), position);
        return lookupSource.getJoinPosition(position, probePage, page, rawHash);
    }

    public int getPosition()
    {
        return position;
    }

    public Page getPage()
    {
        return page;
    }

    private boolean currentRowContainsNull()
    {
        for (int i = 0; i < probeBlocks.length; i++) {
            if (probeBlocks[i].isNull(position)) {
                return true;
            }
        }
        return false;
    }
}
| {
"content_hash": "91942b6897520931ccf3b81a2f3e951b",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 136,
"avg_line_length": 31.216216216216218,
"alnum_prop": 0.670995670995671,
"repo_name": "youngwookim/presto",
"id": "adc1d7c442f3bbf7904b2c897bf76da533243546",
"size": "3465",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "presto-main/src/main/java/io/prestosql/operator/JoinProbe.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "26917"
},
{
"name": "CSS",
"bytes": "12957"
},
{
"name": "HTML",
"bytes": "28832"
},
{
"name": "Java",
"bytes": "31475945"
},
{
"name": "JavaScript",
"bytes": "211244"
},
{
"name": "Makefile",
"bytes": "6830"
},
{
"name": "PLSQL",
"bytes": "2797"
},
{
"name": "PLpgSQL",
"bytes": "11504"
},
{
"name": "Python",
"bytes": "7664"
},
{
"name": "SQLPL",
"bytes": "926"
},
{
"name": "Shell",
"bytes": "29871"
},
{
"name": "Thrift",
"bytes": "12631"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
#### Remarks
null | {
"content_hash": "5b870aaf604761acb5fb168df00fe6f9",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "29ed7e07260bbe1cc8cbdd7c2f2948417c61bdf9",
"size": "177",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Malpighiales/Hypericaceae/Hypericum/Hypericum cajennense/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// ==========================================================================
// SeqAn - The Library for Sequence Analysis
// ==========================================================================
// Copyright (c) 2006-2016, Knut Reinert, FU Berlin
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of Knut Reinert or the FU Berlin nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
// ==========================================================================
// Author: Tobias Rausch <[email protected]>
// ==========================================================================
#ifndef SEQAN_CONSENSUS_CONSENSUS_REALIGN_H_
#define SEQAN_CONSENSUS_CONSENSUS_REALIGN_H_
namespace seqan
{
//////////////////////////////////////////////////////////////////////////////////
// Remove the alignment column at (contig-gap-space) position gapPos from a
// single aligned read element, adjusting its begin/end positions and its gap
// anchors in place.
template <typename TPos, typename TGapAnchor, typename TSpec, typename TGapPos>
inline void
removeGap(AlignedReadStoreElement<TPos, TGapAnchor, TSpec>& alignedRead,
          TGapPos const gapPos)
{
    typedef String<TGapAnchor> TGaps;
    typedef typename Iterator<TGaps, Standard>::Type TGapIter;
    if (gapPos < (TGapPos) alignedRead.beginPos) {
        // Column lies left of the read: the whole alignment shifts one to the left.
        --alignedRead.beginPos; --alignedRead.endPos;
    } else if (gapPos < (TGapPos) alignedRead.endPos) {
        // Column lies inside the read: shrink the end and shift every gap
        // anchor strictly right of the column (read-relative coordinate)
        // one gap position to the left.
        --alignedRead.endPos;
        TGapIter gapIt = upperBoundGapAnchor(alignedRead.gaps, gapPos - alignedRead.beginPos, SortGapPos() );
        TGapIter gapItEnd = end(alignedRead.gaps, Standard());
        // Note: We might create empty gaps here
        for(;gapIt != gapItEnd; ++gapIt)
            --(gapIt->gapPos);
    }
    // Columns at or right of endPos do not affect this read.
}
//////////////////////////////////////////////////////////////////////////////////
// Propagate the removal of the single alignment column at gapPos to every
// aligned read element in the store.
template <typename TAlignedReads, typename TSpec, typename TGapPos>
inline void
removeGap(String<TAlignedReads, TSpec>& alignedReadStore,
          TGapPos const gapPos)
{
    typedef typename Iterator<String<TAlignedReads, TSpec>, Standard>::Type TIter;
    TIter it = begin(alignedReadStore, Standard());
    TIter itEnd = end(alignedReadStore, Standard());
    while (it != itEnd) {
        removeGap(*it, gapPos);
        ++it;
    }
}
//////////////////////////////////////////////////////////////////////////////////
// Insert a new alignment column (gap) at (contig-gap-space) position gapPos
// into a single aligned read element.
//
// Returns 1 if a gap character was actually inserted inside the read's
// alignment span, 0 if the column falls outside the read (only positions
// are shifted, or nothing changes).
template <typename TPos, typename TGapAnchor, typename TSpec, typename TGapPos>
inline int
insertGap(AlignedReadStoreElement<TPos, TGapAnchor, TSpec>& alignedRead,
          TGapPos const gapPos)
{
    typedef String<TGapAnchor> TGaps;
    typedef typename Iterator<TGaps, Standard>::Type TGapIter;
    if (gapPos <= (TGapPos)alignedRead.beginPos) {
        // Column at or left of the read begin: shift the whole read right.
        ++alignedRead.beginPos; ++alignedRead.endPos;
        return 0;
    } else if (gapPos < (TGapPos)alignedRead.endPos) {
        // Column inside the read span: extend the end and update the anchors.
        ++alignedRead.endPos;
        TGapIter gapIt = lowerBoundGapAnchor(alignedRead.gaps, gapPos - alignedRead.beginPos, SortGapPos() );
        TGapIter gapItEnd = end(alignedRead.gaps, Standard());
        TGapPos insertPos = (gapPos - alignedRead.beginPos);
        if (gapIt == gapItEnd) {
            // No anchor at or right of insertPos: append a fresh anchor whose
            // seqPos accounts for the cumulative gap length so far.
            int gapLen = 0;
            if (gapItEnd != begin(alignedRead.gaps)) {
                --gapItEnd;
                gapLen = (int) gapItEnd->gapPos - (int) gapItEnd->seqPos;
            }
            appendValue(alignedRead.gaps, TGapAnchor(insertPos - gapLen, insertPos + 1), Generous());
        }
        else {
            // gapPrev = cumulative gap length before the found anchor.
            int gapPrev = 0;
            if (gapIt != begin(alignedRead.gaps)) {
                TGapIter gapPrevious = gapIt;
                --gapPrevious;
                gapPrev = (int) gapPrevious->gapPos - (int) gapPrevious->seqPos;
            }
            // If gap is within an existing gap, extend this gap
            if (((TGapPos)(gapIt->gapPos - (((int) gapIt->gapPos - (int) gapIt->seqPos) - gapPrev)) <= insertPos) &&
                ((TGapPos)gapIt->gapPos >= insertPos)) {
                for(;gapIt != gapItEnd; ++gapIt)
                    ++(gapIt->gapPos);
            } else {
                // Otherwise, create a new gap: rewrite the found anchor to end
                // at the new column and rotate the shifted copies of the
                // following anchors one slot to the right (appending the last).
                TGapAnchor tmp = value(gapIt);
                ++tmp.gapPos;
                gapIt->gapPos = insertPos + 1;
                gapIt->seqPos = insertPos - gapPrev;
                do {
                    ++gapIt;
                    TGapAnchor newTmp;
                    if (gapIt != gapItEnd) {
                        newTmp = *gapIt;
                        ++newTmp.gapPos;
                        *gapIt = tmp;
                    } else appendValue(alignedRead.gaps, tmp, Generous() );
                    tmp = newTmp;
                } while (gapIt != gapItEnd);
            }
        }
        return 1;
    }
    // Column right of the read: nothing to do.
    return 0;
}
//////////////////////////////////////////////////////////////////////////////////
// Insert the alignment column at gapPos into every aligned read element of
// the store and report how many reads actually received a gap character.
template <typename TAlignedReads, typename TSpec, typename TGapPos>
inline int
insertGap(String<TAlignedReads, TSpec>& alignedReadStore,
          TGapPos const gapPos)
{
    typedef typename Iterator<String<TAlignedReads, TSpec>, Standard>::Type TIter;
    int inserted = 0;
    TIter itEnd = end(alignedReadStore, Standard());
    for (TIter it = begin(alignedReadStore, Standard()); it != itEnd; ++it)
        inserted += insertGap(*it, gapPos);
    return inserted;
}
//////////////////////////////////////////////////////////////////////////////////
template<typename TConsensus>
inline int
scoreConsensus(TConsensus& consensus)
{
typedef typename Size<TConsensus>::Type TSize;
typedef typename Iterator<TConsensus, Standard>::Type TConsIter;
// Compute the score
int score = 0;
TConsIter itCons = begin(consensus, Standard() );
TConsIter itConsEnd = end(consensus, Standard() );
TSize maxCount = 0;
TSize sumCount = 0;
TSize tmp;
for(;itCons != itConsEnd; ++itCons) {
maxCount = 0; sumCount = 0;
for(TSize i = 0; i < ValueSize<typename Value<TConsensus>::Type>::VALUE; ++i) {
if ((tmp = (*itCons).count[i]) > maxCount) maxCount = tmp;
sumCount += tmp;
}
score += (sumCount - maxCount);
}
return score;
}
//////////////////////////////////////////////////////////////////////////////////
// Perform one realignment round.
// TODO(holtgrew): Rename to reflect this more clearly.
// TODO(holtgrew): TConsensus/consensus are profiles, really.
//
// For every aligned read in contigReads (excluding the trailing reference
// pseudo-read when includeReference is true), the read is subtracted from
// the profile `consensus`, realigned against a band of the remaining
// profile (NeedlemanWunsch for rmethod == 0, Gotoh for rmethod == 1), and
// added back; the read's begin/end positions and gap anchors are rewritten
// in place. timeBeforeAlign/timeAlign/timeAfterAlign accumulate sysTime()
// deltas for the three phases of each read.
template<typename TFragSpec, typename TConfig, typename TAlignedRead, typename TSpec, typename TConsensus, typename TScore, typename TMethod, typename TBandwidth>
void
reAlign(FragmentStore<TFragSpec, TConfig>& fragStore,
        String<TAlignedRead, TSpec>& contigReads,
        TConsensus& consensus,
        TScore& consScore,
        TMethod const rmethod,
        TBandwidth const bandwidth,
        bool includeReference,
        double & timeBeforeAlign,
        double & timeAlign,
        double & timeAfterAlign
        )
{
    // NOTE(review): this typedef uses TSpec (the String spec of contigReads)
    // rather than TFragSpec as the FragmentStore spec — presumably both
    // resolve to the same member typedefs; confirm against the caller.
    typedef FragmentStore<TSpec, TConfig> TFragmentStore;
    typedef String<TAlignedRead, TSpec> TAlignedReadStore;
    typedef typename TFragmentStore::TReadPos TReadPos;
    typedef typename TFragmentStore::TReadSeq TReadSeq;
    typedef typename Size<TFragmentStore>::Type TSize;
    typedef typename TFragmentStore::TReadGapAnchor TGapAnchor;
    typedef typename Iterator<TAlignedReadStore, Standard>::Type TAlignedReadIter;
    typedef typename Iterator<TConsensus, Standard>::Type TConsIter;

    // Initialization
    typedef typename Value<TConsensus>::Type TProfileChar;
    // The last profile-character slot holds the gap count.
    TSize gapPos = ValueSize<TProfileChar>::VALUE - 1;
    // int refId = length(fragStore.readSeqStore) - 1;

    // Remove each fragment and realign it to the profile.
    // TAlignedReadIter beg = begin(contigReads, Standard());
    TAlignedReadIter alignIt = begin(contigReads, Standard());
    TAlignedReadIter alignItEnd = end(contigReads, Standard());
    if (includeReference)
        --alignItEnd;  // skip the reference pseudo-read appended last
    TConsensus bandConsensus;
    TConsensus myRead;
    TConsensus newConsensus;
    int i = 0;
    for (; alignIt != alignItEnd; ++alignIt) {
        double tBegin = sysTime();
        // Progress reporting hook (printf commented out); i counts reads.
        if (i++ > 1000 || i == 1) {
            //printf("realigning %u/%u\r", unsigned(alignIt-beg), unsigned(alignItEnd-beg));
            if (i != 1)
                i = 0;
            fflush(stdout);
        }
        //// Debug code
        //for(TSize i = 0; i<length(consensus); ++i) {
        //    std::cout << consensus[i] << std::endl;
        //}
        //TAlignedReadIter debugIt = begin(contigReads, Standard() );
        //TAlignedReadIter debugItEnd = end(contigReads, Standard() );
        //for(;debugIt != debugItEnd; goNext(debugIt)) {
        //    std::cout << debugIt->beginPos << ',' << debugIt->endPos << ':';
        //    typedef typename Iterator<String<TGapAnchor> , Standard>::Type TGapIter;
        //    TGapIter gapIt = begin(debugIt->gaps, Standard());
        //    TGapIter gapItEnd = end(debugIt->gaps, Standard());
        //    for(;gapIt != gapItEnd; goNext(gapIt)) {
        //        std::cout << '(' << gapIt->seqPos << ',' << gapIt->gapPos << ')' << ',';
        //    }
        //    std::cout << std::endl;
        //}

        // --- Phase 1: cut the band out of the profile and subtract the read.
        TSize itConsPos = 0;
        TConsIter itCons = begin(consensus, Standard());
        TConsIter itConsEnd = end(consensus, Standard());

        // Initialize the consensus of the band.
        clear(myRead);
        resize(myRead, length(fragStore.readSeqStore[alignIt->readId]), TProfileChar());
        resize(bandConsensus, 2 * bandwidth + (alignIt->endPos - alignIt->beginPos), Generous());
        TConsIter bandConsIt = begin(bandConsensus);
        TConsIter bandConsItEnd = end(bandConsensus);
        TConsIter myReadIt = begin(myRead);
        TReadPos bandOffset = 0;
        if (bandwidth < (TBandwidth) alignIt->beginPos) {
            bandOffset = alignIt->beginPos - bandwidth;
            itCons += bandOffset; itConsPos += bandOffset;
            SEQAN_ASSERT_LEQ(itCons, itConsEnd);
        }
        int leftDiag = (alignIt->beginPos - bandOffset) - bandwidth;
        int rightDiag = leftDiag + 2 * bandwidth;
        //int increaseBand = 0;
        int increaseBandLeft = 0;
        int increaseBandRight = 0;
        int removedBeginPos = 0;
        int removedEndPos = 0;
        for (TReadPos iPos = bandOffset; iPos < alignIt->beginPos && itCons != itConsEnd && bandConsIt != bandConsItEnd; ++itCons, ++bandConsIt, ++itConsPos, ++iPos)
            *bandConsIt = *itCons; // fill in positions left of readbegin
        TSize itConsPosBegin = itConsPos;  // start position of read basically, right? if(itConsPosBegin != alignIt->beginPos) std::cout <<"nicht unbedingt gleich\n";
        alignIt->beginPos = alignIt->endPos = 0; // So this read is discarded in all gap operations

        // Remove sequence from profile (and add to the consensus??)  // TODO(holtgrew): Add to consensus part right?
        typedef typename Iterator<TReadSeq, Standard>::Type TReadIter;
        TReadIter itRead = begin(fragStore.readSeqStore[alignIt->readId], Standard());
        TReadIter itReadEnd = end(fragStore.readSeqStore[alignIt->readId], Standard());
        typedef typename Iterator<String<TGapAnchor>, Standard>::Type TReadGapsIter;
        TReadGapsIter itGaps = begin(alignIt->gaps, Standard());
        TReadGapsIter itGapsEnd = end(alignIt->gaps, Standard());
        TReadPos old = 0;
        int diff = 0;
        TReadPos clippedBeginPos = 0;
        TReadPos clippedEndPos = 0;
        SEQAN_ASSERT_LT(itRead, itReadEnd);
        // A leading anchor with gapPos == 0 encodes a clipped read prefix.
        if ((itGaps != itGapsEnd) && (itGaps->gapPos == 0)) {
            old = itGaps->seqPos;
            clippedBeginPos = old;  // gaps at beginning? or really clipped?
            //std::cout << "clippedBeginPos = " << clippedBeginPos << std::endl;
            itRead += old;
            diff -= old;
            ++itGaps;
            SEQAN_ASSERT_LT(itRead, itReadEnd);
        }
        // Walk the read's gap anchors, subtracting read characters and read
        // gaps from the profile and copying surviving columns into the band.
        for (; itGaps != itGapsEnd && itCons != itConsEnd; ++itGaps) {
            // limit should never be larger than read length
            TReadPos limit = itGaps->seqPos;
            SEQAN_ASSERT_LT(itGaps->seqPos, (TReadPos)length(fragStore.readSeqStore[alignIt->readId]));
            int newDiff = (itGaps->gapPos - limit);
            SEQAN_ASSERT_LT(itGaps->gapPos, (TReadPos)length(consensus));
            if (diff > newDiff) {
                clippedEndPos = diff - newDiff;  // clipped read suffix
                limit -= clippedEndPos;
            }
            for (; old < limit && itCons != itConsEnd && itRead != itReadEnd && bandConsIt != bandConsItEnd; ++old, ++itRead) {
                //SEQAN_ASSERT_LT(itCons, itConsEnd);
                --(*itCons).count[ordValue(*itRead)];
                if (!empty(*itCons)) {
                    *bandConsIt = *itCons;
                    ++bandConsIt;
                    ++itConsPos;
                    removedEndPos = 0;
                } else {
                    if (itConsPosBegin != itConsPos) {
                        ++increaseBandLeft; // insertion --> increaseBandLeft, read has character here, consensus doesnt
                        ++removedEndPos;
                    } else ++removedBeginPos; // begin gaps
                    removeGap(contigReads, itConsPos);
                }
                (*myReadIt).count[0] = ordValue(*itRead);
                ++myReadIt;
                ++itCons;
                //SEQAN_ASSERT_LT(itRead, itReadEnd);
            }
            for (; diff < newDiff && itCons != itConsEnd && bandConsIt != bandConsItEnd; ++diff) {
                ++increaseBandRight; // deletion --> increaseBandRight, read has gaps here, consensus doesnt
                //SEQAN_ASSERT_LT(itCons, itConsEnd);
                --(*itCons).count[gapPos];
                if (!empty(*itCons)) {
                    *bandConsIt = *itCons;
                    ++bandConsIt;
                    ++itConsPos;
                } else removeGap(contigReads, itConsPos); //++increaseBandRight;}
                ++itCons;
            }
        }
        // Trailing read characters after the last gap anchor (unless clipped).
        if (!clippedEndPos) {
            for (; itRead!=itReadEnd && itCons != itConsEnd && bandConsIt != bandConsItEnd; ++itRead) {
                //SEQAN_ASSERT_LT(itCons, itConsEnd);
                //SEQAN_ASSERT_LT(itRead, itReadEnd);
                --(*itCons).count[ordValue(*itRead)]; //subtract the read base to get bandConsensus wo myRead
                if (!empty(*itCons)) {
                    *bandConsIt = *itCons;
                    ++bandConsIt;
                    ++itConsPos;
                    removedEndPos = 0;
                } else { // only gaps left in this column after removing myRead
                    if (itConsPosBegin != itConsPos) {
                        ++increaseBandLeft; // insertion --> increaseBandLeft, read is longer than consensus here
                        ++removedEndPos;
                    } else ++removedBeginPos;
                    removeGap(contigReads, itConsPos);
                }
                (*myReadIt).count[0] = ordValue(*itRead);
                ++myReadIt;
                ++itCons;
            }
        }
        bool singleton = (itConsPosBegin == itConsPos);
        increaseBandLeft -= removedEndPos;
        //increaseBand = increaseBandLeft + increaseBandRight;

        // Go further up to the bandwidth
        for (TReadPos iPos = 0; ((itCons != itConsEnd) && (iPos < (TReadPos) bandwidth)) && bandConsIt != bandConsItEnd; ++itCons, ++iPos, ++bandConsIt)
            *bandConsIt = *itCons;
        resize(bandConsensus, bandConsIt - begin(bandConsensus, Standard()), Generous());
        resize(myRead, myReadIt - begin(myRead, Standard()), Generous());

        // --- Phase 2: realign the consensus with the sequence.
        typedef StringSet<TConsensus, Dependent<> > TStringSet;
        TStringSet pairSet;
        appendValue(pairSet, bandConsensus);
        appendValue(pairSet, myRead);
        //for(TSize i = 0; i<length( pairSet[0]); ++i) {
        //    std::cout << pairSet[0][i] << std::endl;
        //}
        //std::cout << "_______________" << std::endl;
        //for(TSize i = 0; i<length( pairSet[1]); ++i) {
        //    std::cout << pairSet[1][i] << std::endl;
        //}
        //std::cout << "..............." << std::endl;
        typedef String<Fragment<> > TFragmentString;
        TFragmentString matches;
        assignProfile(consScore, bandConsensus);
        double tBegAlign = sysTime();
        leftDiag -= removedBeginPos;
        rightDiag -= removedBeginPos;
        //if(alignIt->readId == refId)
        //{
        //    std::cout << "length(Cons)=" <<  (int) length(pairSet[0]) << std::endl;
        //    std::cout << "length(Ref)=" <<  (int) length(pairSet[1]) << std::endl;
        //    std::cout << "-->leftDiag" << _max(leftDiag - increaseBandLeft, -1 * (int) length(pairSet[1])) << std::endl;
        //    std::cout << "-->rightDiag" << _min(rightDiag + increaseBandRight, (int) length(pairSet[0])) << std::endl;
        //}
        // A singleton read was the only contributor to its columns; aligning
        // against an empty band is pointless, so skip the alignment.
        if (!singleton) {
            if (rmethod == 0)
                globalAlignment(matches, pairSet, consScore, AlignConfig<true,false,false,true>(), _max(leftDiag - increaseBandLeft, -1 * (int) length(pairSet[1])), _min(rightDiag + increaseBandRight, (int) length(pairSet[0])), NeedlemanWunsch());
            else if (rmethod == 1)
                globalAlignment(matches, pairSet, consScore, AlignConfig<true,false,false,true>(), _max(leftDiag - increaseBandLeft, -1 * (int) length(pairSet[1])), _min(rightDiag + increaseBandRight, (int) length(pairSet[0])), Gotoh());
        }
        double tEndAlign = sysTime();
        //// Debug code
        //Graph<Alignment<TStringSet, void, WithoutEdgeId> > g1(pairSet);
        //int sc1 = globalAlignment(g1, consScore, AlignConfig<true,false,false,true>(), _max(leftDiag - increaseBand, -1 * (int) length(pairSet[1])), _min(rightDiag + increaseBand, (int) length(pairSet[0])), Gotoh());
        //std::cout << sc1 << std::endl;
        //std::cout << g1 << std::endl;

        // --- Phase 3: add the read back to the consensus and build the new consensus.
        resize(newConsensus, length(bandConsensus) + length(myRead), Generous());
        TConsIter newConsIt = begin(newConsensus, Standard());
        TConsIter bandIt = begin(bandConsensus, Standard());
        TConsIter bandItEnd = end(bandConsensus, Standard());
        typedef typename Iterator<TFragmentString, Standard>::Type TFragIter;
        // Matches come out back-to-front; iterate from end to begin.
        TFragIter fragIt = end(matches, Standard());
        TFragIter fragItEnd = begin(matches, Standard());
        TReadPos consPos = 0;
        TReadPos readPos = 0;
        TReadPos alignPos = 0;
        clear(alignIt->gaps);
        diff = 0;
        if (clippedBeginPos) {
            // Re-encode the clipped prefix as a leading gap anchor.
            appendValue(alignIt->gaps, TGapAnchor(clippedBeginPos, 0), Generous() );
            diff -= clippedBeginPos;
        }
        bool firstMatch = true;
        if (fragIt != fragItEnd) { // walk through segment matches that represent read-msa alignment
            do {
                --fragIt;
                int gapLen = fragIt->begin1 - consPos;
                if (firstMatch) gapLen = 0;  // gap between two adjacent segment matches
                // equivalent to profilePos + fraglen < nextProfilePos
                while (consPos < (TReadPos)fragIt->begin1) { // cons stretch before newCons start
                    SEQAN_ASSERT_LT(bandIt, bandItEnd);
                    SEQAN_ASSERT_LT(newConsIt, end(newConsensus,Standard()));
                    if (!firstMatch) ++(*bandIt).count[gapPos];  // fill with gaps if we are between two segment matches
                    *newConsIt = *bandIt;
                    ++newConsIt;
                    ++bandIt;
                    ++consPos;
                    ++alignPos;
                }
                // equivalent to refPos + fraglen < nextRefPos
                while (readPos < (TReadPos)fragIt->begin2) { // read stretch before matching fragment starts
                    SEQAN_ASSERT_LT(readPos, (TReadPos)length(fragStore.readSeqStore[alignIt->readId]));
                    SEQAN_ASSERT_LT(newConsIt, end(newConsensus,Standard()));
                    // equivalent to profileDel
                    if (gapLen) {
                        diff += gapLen;  // add gap of length gaplen to readGaps
                        appendValue(alignIt->gaps, TGapAnchor(clippedBeginPos + readPos, clippedBeginPos + readPos + diff), Generous() );
                        gapLen = 0; // do this only once
                    }
                    int numGaps = insertGap(contigReads, bandOffset + alignPos);
                    TProfileChar tmpChar;
                    ++tmpChar.count[myRead[readPos].count[0]];  // insert new column in profile
                    tmpChar.count[gapPos] += numGaps;
                    *newConsIt = tmpChar; ++newConsIt;
                    ++readPos; ++alignPos;
                }
                // Matched stretch: read character re-added to the band column.
                for (TSize i = 0; i<fragIt->len; ++i, ++bandIt, ++consPos, ++readPos, ++alignPos, ++newConsIt) {
                    SEQAN_ASSERT_LT(bandIt, bandItEnd);
                    SEQAN_ASSERT_LT(readPos, (TReadPos)length(fragStore.readSeqStore[alignIt->readId]));
                    SEQAN_ASSERT_LT(newConsIt, end(newConsensus,Standard()));
                    if (firstMatch) {
                        firstMatch = false;
                        alignIt->beginPos = bandOffset + consPos;
                    } else if (gapLen) {
                        diff += gapLen;
                        appendValue(alignIt->gaps, TGapAnchor(clippedBeginPos + readPos, clippedBeginPos + readPos + diff), Generous() );
                        gapLen = 0;
                    }
                    SEQAN_ASSERT_LT(bandIt, bandItEnd);
                    ++(*bandIt).count[myRead[readPos].count[0]];
                    *newConsIt = *bandIt;
                }
            } while (fragIt != fragItEnd);
        }
        // Unaligned read tail: insert one new profile column per character.
        for (; readPos < (TReadPos)length(myRead); ++readPos) {
            int numGaps = insertGap(contigReads, bandOffset + alignPos);
            TProfileChar tmpChar;
            ++tmpChar.count[myRead[readPos].count[0]];
            tmpChar.count[gapPos] += numGaps;
            SEQAN_ASSERT_LT(newConsIt, end(newConsensus,Standard()));
            *newConsIt = tmpChar; ++newConsIt;
            ++alignPos;
        }
        if (singleton) alignIt->beginPos = bandOffset;
        alignIt->endPos = alignIt->beginPos + clippedBeginPos + readPos + diff;
        if (clippedEndPos) {
            // Re-encode the clipped suffix as a trailing gap anchor.
            diff -= clippedEndPos;
            appendValue(alignIt->gaps, TGapAnchor(clippedBeginPos + readPos + clippedEndPos, clippedBeginPos + readPos + clippedEndPos + diff), Generous() );
        }
        // Copy the remaining band columns right of the read.
        for (; bandIt != bandItEnd; ++bandIt, ++newConsIt)
        {
            SEQAN_ASSERT_LT(newConsIt, end(newConsensus,Standard()));
            *newConsIt = *bandIt;
        }
        resize(newConsensus, newConsIt - begin(newConsensus, Standard()), Generous());
        // Splice the rebuilt band back into the full profile.
        replace(consensus, bandOffset, itCons - begin(consensus), newConsensus);
        double tEnd = sysTime();
        timeBeforeAlign += tBegAlign - tBegin;
        timeAlign += tEndAlign - tBegAlign;
        timeAfterAlign += tEnd - tEndAlign;
    }
}
//////////////////////////////////////////////////////////////////////////////////
/*!
* @fn reAlign
* @headerfile <seqan/consensus.h>
* @brief Perform realignment using the Anson-Myers realignment.
*
* @deprecated Do not use this function but use the new function @link reAlignment @endlink instead.
*
* @signature void reAlign(fragStore, consensusScore, contigID, [realignmentMethod,] bandwidth, includeReference);
*
* @param[in,out] fragStore The @link FragmentStore @endlink with the alignment to realign.
* @param[in] consensusScore The @link Score @endlink to use for scoring alignments.
* @param[in] contigID The integer id of the contig to realign.
* @param[in] bandwidth The integer bandwidth to use for realignment.
* @param[in] includeReference A <tt>bool</tt> flag that indicates whether to include the reference as a pseudo-read.
*
* If <tt>includeReference</tt> then the reference of the given contig will be used as a pseudo-read. In this case, the
* reference will be replaced by the consensus. When included as a pseudo-read, the alignment of the consensus relative
* to the original reference can be used to call variants.
*/
// TODO(holtgrew): realignmentMethod should not be optional or moved to the end of the list.
// TODO(holtgrew): The method should be selected with an enum instead of an int.
template<typename TSpec, typename TConfig, typename TScore, typename TId, typename TMethod, typename TBandwidth>
void
reAlign(FragmentStore<TSpec, TConfig> & fragStore,
TScore & consScore,
TId const contigId,
TMethod const rmethod,
TBandwidth const bandwidth,
bool includeReference)
{
typedef FragmentStore<TSpec, TConfig> TFragmentStore;
typedef typename Size<TFragmentStore>::Type TSize;
typedef typename TFragmentStore::TAlignedReadStore TAlignedReadStore;
typedef typename TFragmentStore::TReadPos TReadPos;
typedef typename TFragmentStore::TContigStore TContigStore;
typedef typename Value<TContigStore>::Type TContig;
typedef typename TFragmentStore::TContigPos TContigPos;
typedef typename TFragmentStore::TContigSeq TContigSeq;
typedef Gaps<TContigSeq, AnchorGaps<typename TContig::TGapAnchors> > TContigGaps;
typedef typename TFragmentStore::TReadSeq TReadSeq;
typedef typename TFragmentStore::TReadGapAnchor TGapAnchor;
typedef typename Value<typename TFragmentStore::TReadSeq>::Type TStoreAlphabet;
typedef typename BaseAlphabet<TStoreAlphabet>::Type TAlphabet;
typedef typename Value<TAlignedReadStore>::Type TAlignedElement;
//typedef typename Value<typename TFragmentStore::TReadStore>::Type TReadElement;
// double beginTime, endTime;
// beginTime = sysTime();
// TODO(holtgrew): Unnecessary, only required once.
// Sort according to contigId
sortAlignedReads(fragStore.alignedReadStore, SortContigId());
typedef typename Iterator<TAlignedReadStore, Standard>::Type TAlignIter;
TAlignIter alignIt = lowerBoundAlignedReads(fragStore.alignedReadStore, contigId, SortContigId());
TAlignIter alignItEnd = upperBoundAlignedReads(fragStore.alignedReadStore, contigId, SortContigId());
// Sort the reads according to their begin position.
sortAlignedReads(infix(fragStore.alignedReadStore, alignIt - begin(fragStore.alignedReadStore, Standard()), alignItEnd - begin(fragStore.alignedReadStore, Standard())), SortBeginPos());
alignIt = lowerBoundAlignedReads(fragStore.alignedReadStore, contigId, SortContigId());
alignItEnd = upperBoundAlignedReads(fragStore.alignedReadStore, contigId, SortContigId());
// endTime = sysTime();
// std::cerr << "TIME sorting " << endTime - beginTime << std::endl;
// beginTime = sysTime();
// Copy all reads belonging to this contig and reverse complement them if necessary.
TAlignedReadStore contigReads; // TODO(holtgrew): Rather contigAlignedReads?
TReadPos maxPos = 0;
TReadPos minPos = MaxValue<TReadPos>::VALUE;
for (; alignIt != alignItEnd; ++alignIt) {
if (alignIt->beginPos > alignIt->endPos) {
reverseComplement(fragStore.readSeqStore[alignIt->readId]);
TAlignedElement alignedEl = *alignIt;
TReadPos tmp = alignedEl.beginPos;
alignedEl.beginPos = alignedEl.endPos;
alignedEl.endPos = tmp;
if (alignedEl.beginPos < minPos)
minPos = alignedEl.beginPos;
if (alignedEl.endPos > maxPos)
maxPos = alignedEl.endPos;
appendValue(contigReads, alignedEl, Generous() );
} else {
if (alignIt->beginPos < minPos)
minPos = alignIt->beginPos;
if (alignIt->endPos > maxPos)
maxPos = alignIt->endPos;
appendValue(contigReads, value(alignIt), Generous() );
}
}
// Append reference sequence to aligned reads for contigs if requested to do so.
if (includeReference) {
TId dummyReadId = length(fragStore.readSeqStore);
TId dummyMatchId = length(fragStore.alignedReadStore);
appendRead(fragStore, fragStore.contigStore[contigId].seq);
appendValue(fragStore.readNameStore, fragStore.contigNameStore[contigId], Generous());
fragStore.contigNameStore[contigId] += "Consensus_";
TAlignedElement el;
el.id = dummyMatchId;
el.readId = dummyReadId;
el.contigId = contigId;
minPos = el.beginPos = 0;
TContigGaps contigGaps(fragStore.contigStore[contigId].seq, fragStore.contigStore[contigId].gaps);
maxPos = el.endPos = _max(maxPos,(TReadPos)positionSeqToGap(contigGaps,length(fragStore.contigStore[contigId].seq)-1)+1);
maxPos = el.endPos = _max(maxPos,(TReadPos)length(fragStore.contigStore[contigId].seq));
el.gaps = fragStore.contigStore[contigId].gaps;
appendValue(contigReads, el, Generous());
}
// endTime = sysTime();
// std::cerr << "TIME copying " << endTime - beginTime << std::endl;
// beginTime = sysTime();
// Create the consensus sequence
TSize gapPos = ValueSize<TAlphabet>::VALUE;
typedef ProfileChar<TAlphabet> TProfile;
typedef String<TProfile> TProfileString;
typedef typename Iterator<TProfileString, Standard>::Type TConsIter;
TProfileString consensus;
resize(consensus, maxPos - minPos, TProfile());
TConsIter itCons = begin(consensus, Standard() );
TConsIter itConsEnd = end(consensus, Standard());
TAlignIter contigReadsIt = begin(contigReads, Standard() );
TAlignIter contigReadsItEnd = end(contigReads, Standard() );
for(;contigReadsIt != contigReadsItEnd; ++contigReadsIt) {
contigReadsIt->beginPos -= minPos;
contigReadsIt->endPos -= minPos;
itCons = begin(consensus, Standard() );
itCons += contigReadsIt->beginPos;
typedef typename Iterator<TReadSeq, Standard>::Type TReadIter;
TReadIter itRead = begin(fragStore.readSeqStore[contigReadsIt->readId], Standard() );
TReadIter itReadEnd = end(fragStore.readSeqStore[contigReadsIt->readId], Standard() );
typedef typename Iterator<String<typename TFragmentStore::TReadGapAnchor>, Standard>::Type TReadGapsIter;
TReadGapsIter itGaps = begin(contigReadsIt->gaps, Standard() );
TReadGapsIter itGapsEnd = end(contigReadsIt->gaps, Standard() );
TReadPos old = 0;
int diff = 0;
bool clippedEnd = false;
if ((itGaps != itGapsEnd) && (itGaps->gapPos == 0)) {
old = itGaps->seqPos;
itRead += old;
diff -= old;
++itGaps;
}
for(;itGaps != itGapsEnd; ++itGaps) {
TReadPos limit = itGaps->seqPos;
int newDiff = (itGaps->gapPos - limit);
SEQAN_ASSERT_LT(itGaps->gapPos, (int)length(consensus));
if (diff > newDiff) {
limit -= (diff - newDiff);
clippedEnd = true;
}
for(;old < limit && itRead != itReadEnd && itCons != itConsEnd; ++old, ++itRead)
{
SEQAN_ASSERT_LT(itRead, itReadEnd);
++(value(itCons++)).count[ordValue(*itRead)];
}
for(;diff < newDiff; ++diff)
++(value(itCons++)).count[gapPos];
}
if (!clippedEnd) {
for( ; itRead!=itReadEnd && itCons != itConsEnd;++itRead)
++(value(itCons++)).count[ordValue(*itRead)];
}
}
// endTime = sysTime();
// std::cerr << "TIME consensus " << endTime - beginTime << std::endl;
// beginTime = sysTime();
double tBefore = 0, tAlign = 0, tAfter = 0;
reAlign(fragStore, contigReads, consensus, consScore, rmethod, bandwidth, includeReference, tBefore, tAlign, tAfter);
// fprintf(stderr, "TIME before align: %f s\nTIME align: %f s\nTIME after align: %f s\n", tBefore, tAlign, tAfter);
// endTime = sysTime();
// std::cerr << "TIME realign " << endTime - beginTime << std::endl;
int score = scoreConsensus(consensus);
int oldScore = score + 1;
while(score < oldScore) {
//std::cout << "Score: " << score << std::endl;
oldScore = score;
// double beginTime = sysTime();
double tBefore = 0, tAlign = 0, tAfter = 0;
reAlign(fragStore, contigReads, consensus, consScore, rmethod, bandwidth, includeReference, tBefore, tAlign, tAfter);
// fprintf(stderr, "TIME before align: %f s\nTIME align: %f s\nTIME after align: %f s\n", tBefore, tAlign, tAfter);
// double endTime = sysTime();
// std::cerr << "TIME realign " << endTime - beginTime << std::endl;
score = scoreConsensus(consensus);
}
//std::cout << "FinalScore: " << score << std::endl;
// beginTime = sysTime();
// Update all the aligned reads and the new consensus
alignIt = lowerBoundAlignedReads(fragStore.alignedReadStore, contigId, SortContigId());
TAlignIter contigReadIt = begin(contigReads, Standard());
for (; alignIt != alignItEnd; ++alignIt) {
if (alignIt->beginPos > alignIt->endPos) {
reverseComplement(fragStore.readSeqStore[alignIt->readId]);
alignIt->beginPos = contigReadIt->endPos;
alignIt->endPos = contigReadIt->beginPos;
} else {
alignIt->beginPos = contigReadIt->beginPos;
alignIt->endPos = contigReadIt->endPos;
}
// Remove empty gap anchors
clear(alignIt->gaps);
typedef typename Iterator<TGapAnchor, Standard>::Type TGapIter;
TGapIter gapIt = begin(contigReadIt->gaps, Standard());
TGapIter gapItEnd = end(contigReadIt->gaps, Standard());
int diff = 0;
for(;gapIt != gapItEnd; ++gapIt) {
if ((int) gapIt->gapPos - (int) gapIt->seqPos != diff) {
diff = (int) gapIt->gapPos - (int) gapIt->seqPos;
appendValue(alignIt->gaps, *gapIt, Generous() );
}
}
++contigReadIt;
}
typedef typename Value<typename TFragmentStore::TContigStore>::Type TContigElement;
TContigElement& contigEl = fragStore.contigStore[contigId];
typedef typename Iterator<TProfileString, Standard>::Type TConsIter;
TConsIter itConsensus = begin(consensus, Standard());
TConsIter itConsensusEnd = end(consensus, Standard());
char gapChar = gapValue<char>();
TSize gapLen = 0;
TContigPos contigPos = 0;
int diff = 0;
clear(contigEl.seq);
clear(contigEl.gaps);
for (; itConsensus != itConsensusEnd; ++itConsensus, ++contigPos) {
if ((char) *itConsensus == gapChar) ++gapLen;
else {
if (gapLen) {
diff += (int) gapLen;
appendValue(contigEl.gaps, TGapAnchor(contigPos - diff, contigPos), Generous() );
gapLen = 0;
}
// TODO(weese): Here we convert from ProfileChar<Dna5>->Dna5->Dna5Q
// instead diverting through Dna5 we could think of directly converting
// a profile to a quality value, e.g. like the base caller Phred does.
// Therefore the conversion ProfileChar<Dna5> <-> Dna5Q needs to be
// defined.
appendValue(contigEl.seq, (TAlphabet)value(itConsensus), Generous() );
}
}
if (includeReference)
appendValue(fragStore.alignedReadStore, contigReads[length(contigReads) - 1], Generous() );
// endTime = sysTime();
// std::cerr << "TIME finalizing " << endTime - beginTime << std::endl;
}
//////////////////////////////////////////////////////////////////////////////////
// Forwards to the overload that accepts the alignment method.

// Convenience overload of reAlign() without an explicit alignment-method
// argument; it forwards to the full overload with rmethod fixed to 0.
template<typename TSpec, typename TConfig, typename TScore, typename TId, typename TBandwidth>
inline void
reAlign(FragmentStore<TSpec, TConfig>& fragStore,    // fragment store holding reads/contigs to realign
        TScore& consScore,                           // scoring scheme, passed through unchanged
        TId const contigId,                          // id of the contig to process
        TBandwidth const bandwidth,                  // band width, passed through to the full overload
        bool includeReference)                       // passed through; see the full overload for semantics
{
    reAlign(fragStore, consScore, contigId, 0, bandwidth, includeReference);
}
} // namespace seqan
#endif // #ifndef SEQAN_CONSENSUS_CONSENSUS_REALIGN_H_
| {
"content_hash": "5e2b0e6590d5a5f7c7e55776292b4ccc",
"timestamp": "",
"source": "github",
"line_count": 794,
"max_line_length": 247,
"avg_line_length": 47.65491183879093,
"alnum_prop": 0.596384586923199,
"repo_name": "bestrauc/seqan",
"id": "c970e74a23879398d16ec47dc06d776075f087d7",
"size": "37838",
"binary": false,
"copies": "15",
"ref": "refs/heads/develop",
"path": "include/seqan/consensus/consensus_realign.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Awk",
"bytes": "6549"
},
{
"name": "Batchfile",
"bytes": "14098"
},
{
"name": "C",
"bytes": "193554"
},
{
"name": "C++",
"bytes": "20764634"
},
{
"name": "CMake",
"bytes": "403010"
},
{
"name": "CSS",
"bytes": "556403"
},
{
"name": "GLSL",
"bytes": "1140"
},
{
"name": "HTML",
"bytes": "904823"
},
{
"name": "JavaScript",
"bytes": "748090"
},
{
"name": "Makefile",
"bytes": "7228"
},
{
"name": "Objective-C",
"bytes": "331370"
},
{
"name": "PHP",
"bytes": "1426"
},
{
"name": "POV-Ray SDL",
"bytes": "7521354"
},
{
"name": "Python",
"bytes": "1114560"
},
{
"name": "R",
"bytes": "12251"
},
{
"name": "Roff",
"bytes": "149667"
},
{
"name": "Shell",
"bytes": "77046"
},
{
"name": "Tcl",
"bytes": "3861"
},
{
"name": "TeX",
"bytes": "7974"
}
],
"symlink_target": ""
} |
package etcd
import (
"testing"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/rest/resttest"
"k8s.io/kubernetes/pkg/api/testapi"
"k8s.io/kubernetes/pkg/registry/registrytest"
"k8s.io/kubernetes/pkg/runtime"
"k8s.io/kubernetes/pkg/tools"
"k8s.io/kubernetes/pkg/tools/etcdtest"
"k8s.io/kubernetes/pkg/util"
"github.com/coreos/go-etcd/etcd"
)
// newStorage wires a REST storage for Endpoints onto a fake etcd client so
// tests can inspect and seed the backing store directly.
func newStorage(t *testing.T) (*REST, *tools.FakeEtcdClient) {
    etcdStorage, fakeEtcdClient := registrytest.NewEtcdStorage(t)
    rest := NewREST(etcdStorage, false)
    return rest, fakeEtcdClient
}
// validNewEndpoints returns a minimal valid Endpoints fixture: "foo" in the
// default namespace with one subset (a single address and a single TCP port).
func validNewEndpoints() *api.Endpoints {
    subset := api.EndpointSubset{
        Addresses: []api.EndpointAddress{{IP: "1.2.3.4"}},
        Ports:     []api.EndpointPort{{Port: 80, Protocol: "TCP"}},
    }
    endpoints := &api.Endpoints{}
    endpoints.ObjectMeta = api.ObjectMeta{
        Name:      "foo",
        Namespace: api.NamespaceDefault,
    }
    endpoints.Subsets = []api.EndpointSubset{subset}
    return endpoints
}
// validChangedEndpoints returns the base fixture with a resource version set
// and a second address added, simulating an update to an existing object.
func validChangedEndpoints() *api.Endpoints {
    changed := validNewEndpoints()
    changed.ResourceVersion = "1"
    changed.Subsets = []api.EndpointSubset{
        {
            Addresses: []api.EndpointAddress{{IP: "1.2.3.4"}, {IP: "5.6.7.8"}},
            Ports:     []api.EndpointPort{{Port: 80, Protocol: "TCP"}},
        },
    }
    return changed
}
// TestCreate drives the generic resttest create suite with one valid and one
// invalid (bad name) Endpoints object.
func TestCreate(t *testing.T) {
    storage, fakeClient := newStorage(t)
    test := resttest.New(t, storage, fakeClient.SetError)

    valid := validNewEndpoints()
    valid.ObjectMeta = api.ObjectMeta{}
    invalid := &api.Endpoints{
        ObjectMeta: api.ObjectMeta{Name: "_-a123-a_"},
    }

    // Helpers that write/read objects straight through the fake etcd.
    setFn := func(ctx api.Context, obj runtime.Object) error {
        return registrytest.SetObject(fakeClient, storage.KeyFunc, ctx, obj)
    }
    getFn := func(ctx api.Context, obj runtime.Object) (runtime.Object, error) {
        return registrytest.GetObject(fakeClient, storage.KeyFunc, storage.NewFunc, ctx, obj)
    }

    test.TestCreate(valid, setFn, getFn, invalid)
}
// TestDelete drives the generic resttest delete suite, seeding the fake etcd
// with an encoded fixture and checking that graceful delete sets a 30s TTL.
func TestDelete(t *testing.T) {
    ctx := api.NewDefaultContext()
    storage, fakeClient := newStorage(t)
    test := resttest.New(t, storage, fakeClient.SetError)

    endpoints := validChangedEndpoints()
    key, _ := storage.KeyFunc(ctx, endpoints.Name)
    key = etcdtest.AddPrefix(key)

    // Seed the fake etcd with the encoded fixture and hand the object back.
    createFn := func() runtime.Object {
        fakeClient.Data[key] = tools.EtcdResponseWithError{
            R: &etcd.Response{
                Node: &etcd.Node{
                    Value:         runtime.EncodeOrDie(testapi.Codec(), endpoints),
                    ModifiedIndex: 1,
                },
            },
        }
        return endpoints
    }

    // Report whether a graceful-delete TTL of 30 was recorded on the node.
    gracefulSetFn := func() bool {
        node := fakeClient.Data[key].R.Node
        return node != nil && node.TTL == 30
    }

    test.TestDelete(createFn, gracefulSetFn)
}
// TestEtcdGetEndpoints drives the generic resttest get suite with the base
// Endpoints fixture.
func TestEtcdGetEndpoints(t *testing.T) {
    storage, fakeClient := newStorage(t)
    test := resttest.New(t, storage, fakeClient.SetError)
    test.TestGet(validNewEndpoints())
}
// TestEtcdListEndpoints drives the generic resttest list suite against the
// registry's root key in the fake etcd.
func TestEtcdListEndpoints(t *testing.T) {
    storage, fakeClient := newStorage(t)
    test := resttest.New(t, storage, fakeClient.SetError)

    endpoints := validNewEndpoints()
    rootKey := etcdtest.AddPrefix(storage.KeyRootFunc(test.TestContext()))

    // Store a set of objects under the root key.
    setObjectsFn := func(objects []runtime.Object) []runtime.Object {
        return registrytest.SetObjectsForKey(fakeClient, rootKey, objects)
    }
    // Advance the fake etcd's resource version.
    setVersionFn := func(resourceVersion uint64) {
        registrytest.SetResourceVersion(fakeClient, resourceVersion)
    }

    test.TestList(endpoints, setObjectsFn, setVersionFn)
}
// TestEndpointsDecode round-trips the fixture through the test codec and
// verifies semantic equality of the result.
func TestEndpointsDecode(t *testing.T) {
    storage, _ := newStorage(t)
    codec := testapi.Codec()

    expected := validNewEndpoints()
    encoded, err := codec.Encode(expected)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }

    decoded := storage.New()
    if err = codec.DecodeInto(encoded, decoded); err != nil {
        t.Fatalf("unexpected error: %v", err)
    }

    if !api.Semantic.DeepEqual(expected, decoded) {
        t.Errorf("mismatch: %s", util.ObjectDiff(expected, decoded))
    }
}
// TestEtcdUpdateEndpoints seeds the fake etcd with the base fixture, updates
// it through the REST storage, and verifies the stored value matches the
// updated object (modulo the server-assigned resource version).
func TestEtcdUpdateEndpoints(t *testing.T) {
    ctx := api.NewDefaultContext()
    storage, fakeClient := newStorage(t)

    updated := validChangedEndpoints()
    key, _ := storage.KeyFunc(ctx, "foo")
    key = etcdtest.AddPrefix(key)
    fakeClient.Set(key, runtime.EncodeOrDie(testapi.Codec(), validNewEndpoints()), 0)

    if _, _, err := storage.Update(ctx, updated); err != nil {
        t.Errorf("unexpected error: %v", err)
    }

    response, err := fakeClient.Get(key, false, false)
    if err != nil {
        t.Fatalf("Unexpected error %v", err)
    }

    var stored api.Endpoints
    if err = testapi.Codec().DecodeInto([]byte(response.Node.Value), &stored); err != nil {
        t.Errorf("unexpected error: %v", err)
    }

    // The resource version is assigned by the store; copy it before comparing.
    updated.ObjectMeta.ResourceVersion = stored.ObjectMeta.ResourceVersion
    if !api.Semantic.DeepEqual(updated, &stored) {
        t.Errorf("Unexpected endpoints: %#v, expected %#v", &stored, updated)
    }
}
// TestDeleteEndpoints seeds the fake etcd with the base fixture and verifies
// that Delete on the REST storage succeeds.
func TestDeleteEndpoints(t *testing.T) {
    ctx := api.NewDefaultContext()
    storage, fakeClient := newStorage(t)

    endpoints := validNewEndpoints()
    key, _ := storage.KeyFunc(ctx, endpoints.Name)
    key = etcdtest.AddPrefix(key)

    fakeClient.ChangeIndex = 1
    node := &etcd.Node{
        Value:         runtime.EncodeOrDie(testapi.Codec(), endpoints),
        ModifiedIndex: 1,
        CreatedIndex:  1,
    }
    fakeClient.Data[key] = tools.EtcdResponseWithError{
        R: &etcd.Response{Node: node},
    }

    if _, err := storage.Delete(ctx, endpoints.Name, nil); err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
}
| {
"content_hash": "fb7170c00ad9eed55f0705470dd89cf3",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 88,
"avg_line_length": 27.622340425531913,
"alnum_prop": 0.699210475640285,
"repo_name": "cgvarela/kubernetes",
"id": "cd446a9c0e160b384f4728d7a3c40fc119dbd49a",
"size": "5782",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "pkg/registry/endpoint/etcd/etcd_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "9955794"
},
{
"name": "HTML",
"bytes": "1193979"
},
{
"name": "Makefile",
"bytes": "15936"
},
{
"name": "Nginx",
"bytes": "1013"
},
{
"name": "Python",
"bytes": "63720"
},
{
"name": "SaltStack",
"bytes": "37055"
},
{
"name": "Shell",
"bytes": "835548"
}
],
"symlink_target": ""
} |
( function ( w, d ) {
    'use strict';

    // Use the WebExtensions `browser` namespace when it exists and exposes a
    // runtime; otherwise fall back to `chrome`. Exposed as w.chrome.
    w.chrome = ( ( typeof browser != 'undefined' ) && browser.runtime ) ? browser : chrome;

    var SCRIPT_NAME = 'twMediaDownloader',
        DEBUG = false, // flip to true to enable log_debug() output
        MANIFEST_VERSION = chrome.runtime.getManifest().manifest_version,
        CONTENT_TAB_INFOS = {}; // tab_id -> info object reported via NOTIFICATION_ONLOAD

    /*
    //if ( 2 < MANIFEST_VERSION ) {
    //    importScripts( './jszip.min.js', './zip_worker.js' );
    //}
    */
// Forward arguments to console.log, but only when the module DEBUG flag is on.
function log_debug() {
    if ( DEBUG ) {
        console.log.apply( console, arguments );
    }
} // end of log_debug()
// Unconditionally forward all arguments to console.error.
function log_error() {
    console.error( ...arguments );
} // end of log_error()
// Read the given key (string) or keys (array) from chrome.storage.local and
// resolve with the resulting name -> value map.
function get_values( name_list ) {
    var names = ( typeof name_list == 'string' ) ? [ name_list ] : name_list;
    return new Promise( ( resolve ) => {
        chrome.storage.local.get( names, resolve );
    } );
} // end of get_values()
/*
//function reload_tabs() {
// chrome.tabs.query( {
// url : '*://*.twitter.com/*' // TODO: url で query() を呼ぶためには tabs permission が必要になる
// }, function ( result ) {
// result.forEach( function ( tab ) {
// if ( ! tab.url.match( /^https?:\/\/(?:(?:mobile)\.)?twitter\.com\// ) ) {
// return;
// }
// chrome.tabs.reload( tab.id );
// } );
// });
//} // end of reload_tabs()
*/
// Ask every registered Twitter tab to reload itself (RELOAD_REQUEST message).
// Tabs that no longer exist, or whose content script does not answer, are
// dropped from the CONTENT_TAB_INFOS bookkeeping.
var reload_tabs = ( () => {
    var host_pattern = /([^.]+\.)?twitter\.com/;

    var send_reload = ( tab_info ) => {
        log_debug( 'reload_tab():', tab_info );
        var tab_id = tab_info.tab_id;
        chrome.tabs.sendMessage( tab_id, {
            type : 'RELOAD_REQUEST',
        }, {
        }, ( response ) => {
            log_debug( 'response', response );
            if ( ( ! chrome.runtime.lastError ) && response ) {
                return;
            }
            // Missing tab or content script makes chrome.runtime.lastError
            // appear (checking it also silences the "Unchecked
            // runtime.lastError" console warning) -> forget this tab.
            delete CONTENT_TAB_INFOS[ tab_id ];
            log_debug( 'tab or content_script does not exist: tab_id=', tab_id, '=> removed:', tab_info, '=> remained:', CONTENT_TAB_INFOS );
        } );
    };

    return () => {
        log_debug( 'reload_tabs():', CONTENT_TAB_INFOS );
        Object.values( CONTENT_TAB_INFOS ).forEach( ( tab_info ) => {
            log_debug( tab_info );
            var host;
            try {
                host = new URL( tab_info.url ).host;
            }
            catch ( error ) {
                return; // unparsable URL -> ignore this tab
            }
            if ( host_pattern.test( host ) ) {
                send_reload( tab_info );
            }
        } );
    };
} )();
// request_tab_sorting( tab_id, request_id, total, sort_index, callback ):
// Accumulates TAB_SORT_REQUEST calls sharing the same request_id until all
// `total` participating tabs have reported their sort_index, then moves the
// browser tabs so their window order matches the sort order, and finally
// invokes each tab's stored callback.
var request_tab_sorting = ( () => {
    // request_id -> ( sort_index -> tab_id ) accumulator for in-flight requests.
    var sort_index_to_tab_id_map_map = {},
        // tab_id -> callback fired once the whole request is done.
        callback_map = {},
        // Resolves with the tab's current index within its window.
        get_tab_index = ( tab_id ) => {
            return new Promise( ( resolve, reject ) => {
                chrome.tabs.get( tab_id, ( tab ) => {
                    resolve( tab.index );
                } );
            } );
        },
        // Moves one tab to the given window index; resolves with the moved tab.
        move_tab_to_index = ( tab_id, tab_index ) => {
            return new Promise( ( resolve, reject ) => {
                chrome.tabs.move( tab_id, {
                    index : tab_index,
                }, ( tab ) => {
                    resolve( tab );
                } );
            } );
        },
        // Moves all tabs of the request to their target indexes in parallel,
        // then completes the request.
        start_tab_sorting = ( request_id, sorted_tab_id_list, sorted_tab_index_list ) => {
            Promise.all( sorted_tab_id_list.map( ( tab_id, index ) => {
                return move_tab_to_index( tab_id, sorted_tab_index_list[ index ] );
            } ) ).then( ( tab_list ) => {
                /*
                //chrome.tabs.update( sorted_tab_id_list[ 0 ], {
                //    active : true,
                //}, ( tab ) => {
                //    finish( request_id, sorted_tab_id_list );
                //    return;
                //} );
                // Note: the tab is deliberately not activated here (left to browser settings)
                // Firefox -> browser.tabs.loadDivertedInBackground
                */
                finish( request_id, sorted_tab_id_list );
            } );
        },
        // Runs and discards the per-tab callbacks, then drops the request's
        // accumulator map.
        finish = ( request_id, sorted_tab_id_list ) => {
            sorted_tab_id_list.forEach( ( tab_id ) => {
                var callback = callback_map[ tab_id ];
                if ( typeof callback == 'function' ) {
                    callback();
                }
                delete callback_map[ tab_id ];
            } );
            delete sort_index_to_tab_id_map_map[ request_id ];
        };

    return ( tab_id, request_id, total, sort_index, callback ) => {
        var sort_index_to_tab_id_map = sort_index_to_tab_id_map_map[ request_id ] = sort_index_to_tab_id_map_map[ request_id ] || {};

        sort_index_to_tab_id_map[ sort_index ] = tab_id;
        callback_map[ tab_id ] = callback;

        // Wait until every participating tab has checked in.
        if ( Object.keys( sort_index_to_tab_id_map ).length < total ) {
            return;
        }

        // All tabs known: order tab ids by sort_index, gather their current
        // window indexes, and reassign those indexes in sorted order.
        var sorted_tab_id_list = Object.keys( sort_index_to_tab_id_map ).sort().map( sort_index => sort_index_to_tab_id_map[ sort_index ] );

        Promise.all( sorted_tab_id_list.map( get_tab_index ) )
        .then( ( tab_index_list ) => {
            var sorted_tab_index_list = tab_index_list.slice( 0 ).sort();
            start_tab_sorting( request_id, sorted_tab_id_list, sorted_tab_index_list );
        } );
    };
} )(); // end of request_tab_sorting()
// Central router for chrome.runtime.onMessage. Dispatches on message.type and
// answers via sendResponse(); every branch returns true so the message port
// stays open for asynchronous replies.
function on_message( message, sender, sendResponse ) {
    log_debug( '*** on_message():', message, sender );

    var type = message.type,
        response = {},
        tab_id = sender.tab && sender.tab.id; // undefined when the sender is not a tab

    switch ( type ) {
        case 'GET_OPTIONS':
            // Read the requested option names (optionally prefixed with
            // "<namespace>_") from chrome.storage.local.
            var names = message.names,
                namespace = message.namespace;

            response = {};

            if ( typeof names == 'string' ) {
                names = [ names ];
            }

            get_values( names.map( function ( name ) {
                return ( ( namespace ) ? ( String( namespace ) + '_' ) : '' ) + name;
            } ) )
            .then( options => {
                // Tell the caller whether its tab is in incognito (private) mode.
                // Note: in Firefox private mode the ZipRequest library fails with
                // a generate error, so content scripts need this flag.
                options.INCOGNITO_MODE = ( sender.tab && sender.tab.incognito ) ? '1' : '0';
                response = options;
                sendResponse( response );
            } );
            return true;

        case 'RELOAD_TABS':
            // Ask every registered Twitter tab to reload itself.
            reload_tabs();
            sendResponse( response );
            return true;

        case 'NOTIFICATION_ONLOAD' :
            // A content script announced itself: record its tab info.
            log_debug( 'NOTIFICATION_ONLOAD: tab_id', tab_id, message );
            if ( tab_id ) {
                CONTENT_TAB_INFOS[ tab_id ] = Object.assign( message.info, {
                    tab_id : tab_id,
                } );
            }
            log_debug( '=> CONTENT_TAB_INFOS', CONTENT_TAB_INFOS );
            sendResponse( response );
            return true;

        case 'NOTIFICATION_ONUNLOAD' :
            // A content script is going away: drop its tab info.
            log_debug( 'NOTIFICATION_ONUNLOAD: tab_id', tab_id, message );
            if ( tab_id ) {
                delete CONTENT_TAB_INFOS[ tab_id ];
            }
            log_debug( '=> CONTENT_TAB_INFOS', CONTENT_TAB_INFOS );
            sendResponse( response );
            return true;

        case 'TAB_SORT_REQUEST' :
            // Part of a multi-tab sort; sendResponse fires only once the whole
            // group has been sorted (no reply is sent when tab_id is missing).
            log_debug( 'TAB_SORT_REQUEST: tab_id', tab_id, message );
            if ( tab_id ) {
                request_tab_sorting( tab_id, message.request_id, message.total, message.sort_index, () => {
                    sendResponse( {
                        result : 'OK',
                    } );
                } );
            }
            return true;

        case 'FETCH_JSON' :
            // Fetch performed in the background context on behalf of a content
            // script; replies with { json } on success or { error } on failure.
            log_debug( 'FETCH_JSON', message );
            fetch( message.url, message.options )
            .then( response => response.json() )
            .then( ( json ) => {
                log_debug( 'FETCH_JSON => json', json );
                sendResponse( {
                    json : json,
                } );
            } )
            .catch( ( error ) => {
                log_error( 'FETCH_JSON => error', error );
                sendResponse( {
                    error : error,
                } );
            } );
            return true;

        case 'GET_TAB_INFO' :
            // Look up the info recorded for an arbitrary tab id.
            log_debug( 'GET_TAB_INFO', message );
            response = {
                tab_info : CONTENT_TAB_INFOS[message.tab_id],
            };
            sendResponse( response );
            return true;

        case 'BULK_DOWNLOAD_REQUEST_FROM_OPTIONS' :
            // Forwarded from the options page: trigger a bulk download in the
            // given tab.
            log_debug( 'BULK_DOWNLOAD_REQUEST_FROM_OPTIONS', message );
            bulk_download_request( message.tab, message.kind );
            sendResponse( {
                result : 'OK', // tentative
            } );
            return true;

        default:
            /*
            //var flag_async = zip_request_handler( message, sender, sendResponse );
            //return flag_async;
            */
            log_error( `Unsupported message: ${type}` );
            sendResponse( {
                result : 'NG',
            } );
            return true;
    }
} // end of on_message()
// Ask the content script in `tab` to start a bulk download of the given kind
// ('media' or 'likes' per the onCommand wiring below).
function bulk_download_request( tab, kind ) {
    var tab_id = tab && tab.id;
    if ( ! tab_id ) {
        log_error( '[bulk_download_request()] tab error', tab, kind );
        return;
    }

    // TODO: reading tab.url would require the "tabs" permission, which we want
    // to avoid — so just send the message and see whether anything answers.
    chrome.tabs.sendMessage( tab_id, {
        type : 'BULK_DOWNLOAD_REQUEST',
        kind : kind,
    }, ( response ) => {
        log_debug( '[BULK_DOWNLOAD_REQUEST] response:', response );
        if ( chrome.runtime.lastError || ( response === undefined ) || ( ! response.url ) ) {
            return;
        }
    } );
} // end of bulk_download_request()
// ■ Event wiring
// [chrome.runtime - Google Chrome](https://developer.chrome.com/extensions/runtime)
// [chrome.contextMenus - Google Chrome](https://developer.chrome.com/extensions/contextMenus)

// Message receiving: route every runtime message through on_message().
chrome.runtime.onMessage.addListener( on_message );
/*
//[2022.09.30] 現状では未使用な機能(api.twitter.com/oauth2/tokenへのアクセス時のcookie削除、旧Twitter("__tmdl=legacy")サポート)のコメントアウト
//if ( MANIFEST_VERSION < 3 ) {
// // [webRequest]
//
// //// ※ Firefox 61.0.1 で、content_scripts で $.ajax() を読んだ際、Referer が設定されない不具合に対応(0.2.6.1201)
// // → jquery.js にパッチをあてることで対処(0.2.6.1202)
// // 参照:[Content scripts - Mozilla | MDN](https://developer.mozilla.org/en-US/Add-ons/WebExtensions/Content_scripts#XHR_and_Fetch)
// //chrome.webRequest.onBeforeSendHeaders.addListener(
// // function ( details ) {
// // var requestHeaders = details.requestHeaders,
// // referer;
// //
// // if ( ! requestHeaders.some( ( element ) => ( element.name.toLowerCase() == 'referer' ) ) ) {
// // referer = details.documentUrl || 'https://twitter.com';
// //
// // requestHeaders.push( {
// // name : 'Referer',
// // value : referer,
// // } );
// // }
// //
// // return { requestHeaders: requestHeaders };
// // }
// //, { urls : [ '*://twitter.com/*' ] }
// //, [ 'blocking', 'requestHeaders' ]
// //);
//
// const
// reg_oauth2_token = /^https:\/\/api\.twitter\.com\/oauth2\/token/,
// reg_legacy_mark = /[?&]__tmdl=legacy(?:&|$)/;
//
// chrome.webRequest.onBeforeSendHeaders.addListener(
// function ( details ) {
// var requestHeaders,
// url = details.url;
//
// if ( reg_oauth2_token.test( url ) ) {
// // ※ OAuth2 の token 取得時(api.twitter.com/oauth2/token)に Cookie を送信しないようにする
// requestHeaders = details.requestHeaders.filter( function ( element, index, array ) {
// return ( element.name.toLowerCase() != 'cookie' );
// } );
// }
// else if ( reg_legacy_mark.test( url ) ) {
// // ※ "__tmdl=legacy" が付いている場合、旧 Twitter の HTML / API をコールするために User-Agent を変更
// requestHeaders = details.requestHeaders.map( function ( element ) {
// if ( element.name.toLowerCase() == 'user-agent' ) {
// //element.value = 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko';
// element.value = 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) Waterfox/56.2';
// // 参考:[ZusorCode/GoodTwitter](https://github.com/ZusorCode/GoodTwitter)
// }
// return element;
// } );
// }
//
// //console.log( requestHeaders );
//
// return ( ( requestHeaders !== undefined ) ? { requestHeaders : requestHeaders } : {} );
// }
// , { urls : [ '*://*.twitter.com/*' ] }
// , [ 'blocking', 'requestHeaders' ]
// );
//}
//else {
// // [declarativeNetRequest]
//
// // Chrome Web Store からインストールしたものだと
// // TypeError: Cannot read properties of undefined (reading 'addListener')
// // というエラーが発生(デベロッパーモードで[パッケージ化されていない拡張機能を読み込む]からの場合は発生しない)
// // →もともとの仕様らしい
// // https://developer.chrome.com/docs/extensions/reference/declarativeNetRequest/#event-onRuleMatchedDebug
// // > onRuleMatchedDebug
// // > Fired when a rule is matched with a request.
// // > Only available for unpacked extensions with the declarativeNetRequestFeedback permission as this is intended to be used for debugging purposes only.
// if ( typeof chrome.declarativeNetRequest?.onRuleMatchedDebug?.addListener == 'function' ) {
// try {
// chrome.declarativeNetRequest.onRuleMatchedDebug.addListener( function ( obj ) {
// log_debug( '[declarativeNetRequest.onRuleMatchedDebug]', obj.request.url, obj );
// } );
// }
// catch ( error ) {
// log_error( error );
// }
// }
//}
*/
// Keyboard-shortcut commands: map each known command to a bulk-download kind
// and fire the request against the active tab of the current window.
chrome.commands.onCommand.addListener( ( command ) => {
    var kind_map = {
        'bulk_download' : 'media',
        'bulk_download_likes' : 'likes',
    };
    var kind = kind_map[ command ];
    if ( kind === undefined ) {
        return; // unknown command -> ignore
    }
    chrome.tabs.query( { active : true, currentWindow : true }, ( tabs ) => {
        var active_tab = tabs && tabs[ 0 ];
        if ( active_tab ) {
            bulk_download_request( active_tab, kind );
        }
    } );
} );
// Expose module internals on the global object (window, or worker `self`) for
// debugging and access from other extension contexts.
Object.assign( w, {
    CONTENT_TAB_INFOS,
    log_debug,
    log_error,
    reload_tabs,
    bulk_download_request,
} );

} )(
    // Works both in a page context (window/document) and in a worker (self).
    ( typeof window !== 'undefined' ? window : self ),
    ( typeof document !== 'undefined' ? document : self.document )
);

// ■ end of file
| {
"content_hash": "626547b1ba39490dcfa1162b64a75827",
"timestamp": "",
"source": "github",
"line_count": 451,
"max_line_length": 161,
"avg_line_length": 34.629711751662974,
"alnum_prop": 0.47253169419900115,
"repo_name": "furyutei/twMediaDownloader",
"id": "d28ca3fd9a05d2b983b192dc7b213336fd9bbe8a",
"size": "16432",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/js/background.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2290"
},
{
"name": "HTML",
"bytes": "5613"
},
{
"name": "JavaScript",
"bytes": "777864"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
<h3>Hours of Umbraco training videos are only a click away</h3>
<p>Want to master Umbraco? Spend a couple of minutes learning some best practices by watching one of these videos about using Umbraco, then visit <a class="btn-link -underline" href="http://umbraco.tv" target="_blank">umbraco.tv</a> for even more Umbraco videos.</p>
<div class="row-fluid"
ng-init="init('http://umbraco.tv/videos/developer/chapterrss?sort=no')"
ng-controller="Umbraco.Dashboard.StartupVideosController">
<ul class="thumbnails" >
<li class="span2" ng-repeat="video in videos">
<a class="btn-link" target="_blank" href="{{video.link}}" title="{{video.title}}">
<div class="thumbnail" style="margin-right: 20px; padding: 20px;">
<img ng-src="{{video.thumbnail}}" alt="{{video.title}}">
<div style="font-weight: bold; text-align: center; margin: 20px 0 0;">{{video.title}}</div>
</div>
</a>
</li>
</ul>
</div>
| {
"content_hash": "6c1e29870ab7289bbe1afcf42ce36371",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 267,
"avg_line_length": 55.388888888888886,
"alnum_prop": 0.6328986960882648,
"repo_name": "Mivaweb/Notely",
"id": "ad56db111a4d5c4b6c4c4f0553b0a3e361db744f",
"size": "997",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/Notely/Notely.Web.UI.Client/Umbraco/Views/dashboard/developer/developerdashboardvideos.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "273706"
},
{
"name": "C#",
"bytes": "183631"
},
{
"name": "CSS",
"bytes": "299179"
},
{
"name": "HTML",
"bytes": "1103190"
},
{
"name": "JavaScript",
"bytes": "7658710"
},
{
"name": "XSLT",
"bytes": "48541"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_17) on Mon Dec 02 20:33:05 CET 2013 -->
<title>Uses of Interface org.lwjgl.opengl.GLUConstants (LWJGL API)</title>
<meta name="date" content="2013-12-02">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Interface org.lwjgl.opengl.GLUConstants (LWJGL API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../org/lwjgl/opengl/GLUConstants.html" title="interface in org.lwjgl.opengl">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/lwjgl/opengl/class-use/GLUConstants.html" target="_top">Frames</a></li>
<li><a href="GLUConstants.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Interface org.lwjgl.opengl.GLUConstants" class="title">Uses of Interface<br>org.lwjgl.opengl.GLUConstants</h2>
</div>
<div class="classUseContainer">No usage of org.lwjgl.opengl.GLUConstants</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../org/lwjgl/opengl/GLUConstants.html" title="interface in org.lwjgl.opengl">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/lwjgl/opengl/class-use/GLUConstants.html" target="_top">Frames</a></li>
<li><a href="GLUConstants.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small><i>Copyright © 2002-2009 lwjgl.org. All Rights Reserved.</i></small></p>
</body>
</html>
| {
"content_hash": "0f1600074b9a987a254acdaff3a324f0",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 129,
"avg_line_length": 35.974137931034484,
"alnum_prop": 0.6149053438773064,
"repo_name": "Craigspaz/Flying-Ferris-Wheel-Engine",
"id": "0fa7efe50bc21b504bee502186fcb57dc3ddf34a",
"size": "4173",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Tables/lwjgl-2.9.1/javadoc/org/lwjgl/opengl/class-use/GLUConstants.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2169472"
},
{
"name": "C++",
"bytes": "2996"
},
{
"name": "GLSL",
"bytes": "3562"
},
{
"name": "HTML",
"bytes": "5582"
},
{
"name": "Java",
"bytes": "8428535"
},
{
"name": "Objective-C",
"bytes": "85891"
},
{
"name": "Shell",
"bytes": "128"
}
],
"symlink_target": ""
} |
from django.http import HttpResponse
def home(request):
    """Return a plain 'Hello world!' response for the site root."""
    greeting = "Hello world!"
    return HttpResponse(greeting)
"content_hash": "1c1101cef2585ad5b91a7eaf6dc51825",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 39,
"avg_line_length": 19.4,
"alnum_prop": 0.7628865979381443,
"repo_name": "alex/tracebin",
"id": "1b5680c8b2dd626a52a534a3bb5cdc8a50867e8d",
"size": "97",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/tracebin_server/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "7712"
},
{
"name": "Python",
"bytes": "96821"
}
],
"symlink_target": ""
} |
/* Long-term prediction (LTP) interface of the FAAC AAC encoder.
   Declarations only; implementations live in the corresponding .c file. */
#ifndef LTP_H
#define LTP_H

#include "coder.h"

/* Per-encoder LTP setup and teardown. */
void LtpInit(faacEncHandle hEncoder);
void LtpEnd(faacEncHandle hEncoder);

/* Run LTP for one frame over the given spectrum/time signal, filling ltpInfo.
   The meaning of the int return value is defined by the implementation
   (presumably an LTP-used flag -- confirm in ltp.c). */
int LtpEncode(faacEncHandle hEncoder,
              CoderInfo *coderInfo,
              LtpInfo *ltpInfo,
              TnsInfo *tnsInfo,
              double *p_spectrum,
              double *p_time_signal);

/* Decoder-side/reconstruction counterpart operating on the spectrum. */
void LtpReconstruct(CoderInfo *coderInfo, LtpInfo *ltpInfo, double *p_spectrum);

/* Feed the latest time-domain and overlap samples into the LTP state
   (block_size_long samples -- see implementation for exact buffering). */
void LtpUpdate(LtpInfo *ltpInfo, double *time_signal,
               double *overlap_signal, int block_size_long);

#endif /* not defined LTP_H */
| {
"content_hash": "71f7489c9cfafeb1f82a0bbe01c17747",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 80,
"avg_line_length": 25.391304347826086,
"alnum_prop": 0.6318493150684932,
"repo_name": "PeterXu/livepusher",
"id": "76fc1be93c508b019ce3e85c1d938180e05d1f0d",
"size": "1457",
"binary": false,
"copies": "48",
"ref": "refs/heads/master",
"path": "jni/include/faac/libfaac/ltp.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1093185"
},
{
"name": "C",
"bytes": "4145110"
},
{
"name": "C++",
"bytes": "824002"
},
{
"name": "Groff",
"bytes": "24553"
},
{
"name": "HTML",
"bytes": "41262"
},
{
"name": "Java",
"bytes": "20004"
},
{
"name": "Makefile",
"bytes": "117919"
},
{
"name": "Objective-C",
"bytes": "12230"
},
{
"name": "Perl",
"bytes": "36155"
},
{
"name": "Python",
"bytes": "45639"
},
{
"name": "Shell",
"bytes": "238926"
}
],
"symlink_target": ""
} |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#ifndef __GCCOVER_H__
#define __GCCOVER_H__
#ifdef HAVE_GCCOVER
/****************************************************************************/
/* GCCOverageInfo holds the state of which instructions have been visited by
a GC and which ones have not */
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable : 4200 ) // zero-sized array
#endif // _MSC_VER
// Per-method GC-coverage bookkeeping. The trailing savedCode[] array holds a
// copy of the method's code bytes, so instances are variable-sized (see the
// zero-sized-array pragma above).
class GCCoverageInfo {
public:
    IJitManager::MethodRegionInfo methodRegion;
    BYTE* curInstr;                         // The last instruction that was able to execute

        // Following 6 variables are for prolog / epilog walking coverage
    ICodeManager* codeMan;                  // CodeMan for this method
    GCInfoToken gcInfoToken;                // gcInfo for this method
    Thread* callerThread;                   // Thread associated with context callerRegs
    T_CONTEXT callerRegs;                   // register state when method was entered
    unsigned gcCount;                       // GC count at the time we captured the regs
    bool doingEpilogChecks;                 // are we doing epilog unwind checks? (do we care about callerRegs?)

    // Wrapping "bitset" over code offsets: offset>>5 picks a dword (modulo
    // hasExecutedSize, so distinct offsets can collide), offset&0x1F picks
    // the bit within it.
    enum { hasExecutedSize = 4 };
    unsigned hasExecuted[hasExecutedSize];
    unsigned totalCount;

    union
    {
        BYTE savedCode[0];              // really variable sized
            // Note that DAC doesn't marshal the entire byte array automatically.
            // Any client of this field needs to get the TADDR of this field and
            // marshal over the bytes properly.
    };

    // Sloppy bitsets (will wrap, and not threadsafe) but best effort is OK
    // since we just need half decent coverage.
    BOOL IsBitSetForOffset(unsigned offset) {
        unsigned dword = hasExecuted[(offset >> 5) % hasExecutedSize];
        return(dword & (1 << (offset & 0x1F)));
    }

    void SetBitForOffset(unsigned offset) {
        unsigned* dword = &hasExecuted[(offset >> 5) % hasExecutedSize];
        *dword |= (1 << (offset & 0x1F)) ;
    }

    // Presumably replaces instructions in [codeStart, codeStart+codeSize) with
    // the INTERRUPT_INSTR* opcodes defined below, saving originals to
    // saveAddr -- implementation not in this header; confirm in gccover.cpp.
    void SprinkleBreakpoints(BYTE * saveAddr, PCODE codeStart, size_t codeSize, size_t regionOffsetAdj, BOOL fZapped);
};
typedef DPTR(GCCoverageInfo) PTR_GCCoverageInfo; // see code:GCCoverageInfo::savedCode
#ifdef _MSC_VER
#pragma warning(pop)
#endif // _MSC_VER
#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
#define INTERRUPT_INSTR 0xF4 // X86 HLT instruction (any 1 byte illegal instruction will do)
#define INTERRUPT_INSTR_CALL 0xFA // X86 CLI instruction
#define INTERRUPT_INSTR_PROTECT_FIRST_RET 0xFB // X86 STI instruction, protect the first return register
#define INTERRUPT_INSTR_PROTECT_SECOND_RET 0xEC // X86 IN instruction, protect the second return register
#define INTERRUPT_INSTR_PROTECT_BOTH_RET 0xED // X86 IN instruction, protect both return registers
#elif defined(_TARGET_ARM_)
// 16-bit illegal instructions which will cause exception and cause
// control to go to GcStress codepath
#define INTERRUPT_INSTR 0xde00
#define INTERRUPT_INSTR_CALL 0xde03 // 0xde01 generates SIGTRAP (breakpoint) instead of SIGILL on Unix
#define INTERRUPT_INSTR_PROTECT_RET 0xde02
// 32-bit illegal instructions. It is necessary to replace a 16-bit instruction
// with a 16-bit illegal instruction, and a 32-bit instruction with a 32-bit
// illegal instruction, to make GC stress with the "IT" instruction work, since
// it counts the number of instructions that follow it, so we can't change that
// number by replacing a 32-bit instruction with a 16-bit illegal instruction
// followed by 16 bits of junk that might end up being a legal instruction.
// Use the "Permanently UNDEFINED" section in the "ARM Architecture Reference Manual",
// section A6.3.4 "Branches and miscellaneous control" table.
// Note that we write these as a single 32-bit write, not two 16-bit writes, so the values
// need to be arranged as the ARM decoder wants them, with the high-order halfword first
// (in little-endian order).
#define INTERRUPT_INSTR_32 0xa001f7f0 // 0xf7f0a001
#define INTERRUPT_INSTR_CALL_32 0xa002f7f0 // 0xf7f0a002
#define INTERRUPT_INSTR_PROTECT_RET_32 0xa003f7f0 // 0xf7f0a003
#elif defined(_TARGET_ARM64_)
// The following encodings are undefined. They fall into section C4.5.8 - Data processing (2 source) of
// "Arm Architecture Reference Manual ARMv8"
//
#define INTERRUPT_INSTR 0xBADC0DE0
#define INTERRUPT_INSTR_CALL 0xBADC0DE1
#define INTERRUPT_INSTR_PROTECT_RET 0xBADC0DE2
#endif // _TARGET_*
// The body of this method is in this header file to allow
// mscordaccore.dll to link without getting an unsat symbol
//
// Returns true if 'instrVal' is one of the architecture-specific illegal
// instruction encodings that GC stress plants in method bodies (see the
// INTERRUPT_INSTR* definitions above).
inline bool IsGcCoverageInterruptInstructionVal(UINT32 instrVal)
{
#if defined(_TARGET_ARM64_)
    switch (instrVal)
    {
    case INTERRUPT_INSTR:
    case INTERRUPT_INSTR_CALL:
    case INTERRUPT_INSTR_PROTECT_RET:
        return true;
    default:
        return false;
    }
#elif defined(_TARGET_ARM_)
    // Thumb mixes 16-bit and 32-bit encodings; decide which set of
    // interrupt encodings to compare against based on instruction length.
    UINT16 instrVal16 = static_cast<UINT16>(instrVal);
    size_t instrLen = GetARMInstructionLength(instrVal16);

    if (instrLen == 2)
    {
        switch (instrVal16)
        {
        case INTERRUPT_INSTR:
        case INTERRUPT_INSTR_CALL:
        case INTERRUPT_INSTR_PROTECT_RET:
            return true;
        default:
            return false;
        }
    }
    else
    {
        _ASSERTE(instrLen == 4);

        switch (instrVal)
        {
        case INTERRUPT_INSTR_32:
        case INTERRUPT_INSTR_CALL_32:
        case INTERRUPT_INSTR_PROTECT_RET_32:
            return true;
        default:
            return false;
        }
    }
#else // x64 and x86
    switch (instrVal)
    {
    case INTERRUPT_INSTR:
    case INTERRUPT_INSTR_CALL:
    case INTERRUPT_INSTR_PROTECT_FIRST_RET:
    case INTERRUPT_INSTR_PROTECT_SECOND_RET:
    case INTERRUPT_INSTR_PROTECT_BOTH_RET:
        return true;
    default:
        return false;
    }
#endif // _TARGET_XXXX_
}
bool IsGcCoverageInterruptInstruction(PBYTE instrPtr);
bool IsGcCoverageInterrupt(LPVOID ip);
#endif // HAVE_GCCOVER
#endif // !__GCCOVER_H__
| {
"content_hash": "e47f2999e8b08518e2e2944a281514c2",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 127,
"avg_line_length": 35.467032967032964,
"alnum_prop": 0.6523625096824167,
"repo_name": "wtgodbe/coreclr",
"id": "42b093f4cea3acce217ac2a8a94bbf63bf87208f",
"size": "6455",
"binary": false,
"copies": "22",
"ref": "refs/heads/master",
"path": "src/vm/gccover.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "976648"
},
{
"name": "Awk",
"bytes": "6904"
},
{
"name": "Batchfile",
"bytes": "167893"
},
{
"name": "C",
"bytes": "4862319"
},
{
"name": "C#",
"bytes": "154822068"
},
{
"name": "C++",
"bytes": "64306017"
},
{
"name": "CMake",
"bytes": "723128"
},
{
"name": "M4",
"bytes": "15214"
},
{
"name": "Makefile",
"bytes": "46117"
},
{
"name": "Objective-C",
"bytes": "14116"
},
{
"name": "Perl",
"bytes": "23653"
},
{
"name": "PowerShell",
"bytes": "132755"
},
{
"name": "Python",
"bytes": "480080"
},
{
"name": "Roff",
"bytes": "672227"
},
{
"name": "Scala",
"bytes": "4102"
},
{
"name": "Shell",
"bytes": "513230"
},
{
"name": "Smalltalk",
"bytes": "635930"
},
{
"name": "SuperCollider",
"bytes": "650"
},
{
"name": "TeX",
"bytes": "126781"
},
{
"name": "XSLT",
"bytes": "1016"
},
{
"name": "Yacc",
"bytes": "157492"
}
],
"symlink_target": ""
} |
<html>
<head>
<title>GB18030 lead 823184</title>
<meta http-equiv='content-type' content='text/html;charset=GB18030'>
<link rel='stylesheet' href='tests.css'>
</head>
<body>
<table>
<caption>Four-byte lead 823184</caption>
<tr><th colspan=2>GB18030<th colspan=3>Unicode
<tr><td>=82318430<td> 10 <td>U+39B2<td>㦲<td class=u>(non-classical form of &#229;) a particle expressing surprise, admiration, or grief, an expletive
<tr><td>=82318431<td> 11 <td>U+39B3<td>㦳<td class=u>
<tr><td>=82318432<td> 12 <td>U+39B4<td>㦴<td class=u>to arrest; to catch; to seize, to brawl; a hand-to-hand fight; to struggle for; to fight for, to beat; to strike; to attack
<tr><td>=82318433<td> 13 <td>U+39B5<td>㦵<td class=u>to kill; to put to death; to slaughter; to execute, to punish, to weed out; to exterminate, a kind of weapon
<tr><td>=82318434<td> 14 <td>U+39B6<td>㦶<td class=u>repeatedly; frequently; to alternate, common; ordinary; normal, sharp, name of a state in ancient China, bold; brave; fierce; violent; severe; strict; stringent
<tr><td>=82318435<td> 15 <td>U+39B7<td>㦷<td class=u>(ancient form of &#229;) brave; courageous; bold; valiant; fearless; gallant; heroic
<tr><td>=82318436<td> 16 <td>U+39B8<td>㦸<td class=u>a lance with two points, a halberd with a crescent -shaped blade; weapons used in ancient times, to stimulate; to provoke; to excite; to irritate, to point with the index finger and the middle finger; to describe angry or an awe-inspiring display of military force, etc., masculine; heroic; brave
<tr><td>=82318437<td> 17 <td>U+39B9<td>㦹<td class=u>a spear; a lance; a javelin, a battle-axe; a halberd
<tr><td>=82318438<td> 18 <td>U+39BA<td>㦺<td class=u>a lance with two points, a halberd with a crescent -shaped blade; weapons used in ancient times, to stimulate; to provoke; to excite; to irritate, to point with the index finger and the middle finger; to describe angry or an awe-inspiring display of military force, etc., masculine; heroic; brave, (in general) shart points and edges of weapons
<tr><td>=82318439<td> 19 <td>U+39BB<td>㦻<td class=u>(ancient form of &#232;) to put on; to wear, repeated; double; to inherit; to attack or take by surprise, to accord with; to unite
</table>
<p><a href='charset/GB18030.html'>Return</a></p>
</body>
</html>
| {
"content_hash": "8d24087efb6167b1ccce4dd2e0c54284",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 408,
"avg_line_length": 96.88,
"alnum_prop": 0.7031379025598679,
"repo_name": "Ms2ger/presto-testo",
"id": "dc0f4c21dd1a1f5227952fc413969b7c15a8f21e",
"size": "2422",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "imported/peter/unicode/comparative/GB18030-823184.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "2312"
},
{
"name": "ActionScript",
"bytes": "23470"
},
{
"name": "AutoHotkey",
"bytes": "8832"
},
{
"name": "Batchfile",
"bytes": "5001"
},
{
"name": "C",
"bytes": "116512"
},
{
"name": "C++",
"bytes": "219233"
},
{
"name": "CSS",
"bytes": "207914"
},
{
"name": "Erlang",
"bytes": "18523"
},
{
"name": "Groff",
"bytes": "674"
},
{
"name": "HTML",
"bytes": "103272540"
},
{
"name": "Haxe",
"bytes": "3874"
},
{
"name": "Java",
"bytes": "125658"
},
{
"name": "JavaScript",
"bytes": "22516936"
},
{
"name": "Makefile",
"bytes": "13409"
},
{
"name": "PHP",
"bytes": "524911"
},
{
"name": "Perl",
"bytes": "321672"
},
{
"name": "Python",
"bytes": "948191"
},
{
"name": "Ruby",
"bytes": "1006850"
},
{
"name": "Shell",
"bytes": "12140"
},
{
"name": "Smarty",
"bytes": "1860"
},
{
"name": "XSLT",
"bytes": "2567445"
}
],
"symlink_target": ""
} |
#include "OutMgr.h"
#include <algorithm>
#include <iostream>
#include <cassert>
#include "AudioOut.h"
#include "Engine.h"
#include "EngineMgr.h"
#include "InMgr.h"
#include "WavEngine.h"
#include "../Misc/Util.h" //for interpolate()
using namespace std;
// Returns the process-wide OutMgr singleton, constructed on first use.
OutMgr &OutMgr::getInstance()
{
    static OutMgr instance;
    return instance;
}
// Constructs the output manager: allocates the primary stereo buffer
// planes, the WAV-recording engine and the per-tick scratch buffers,
// which start out zeroed so the first tick begins from silence.
OutMgr::OutMgr()
    :wave(new WavEngine()),
     priBuf(new float[4096], new float[4096]),
     priBuffCurrent(priBuf),
     currentOut(NULL),
     stales(0)
{
    //init samples
    outl = new float[synth->buffersize];
    outr = new float[synth->buffersize];
    memset(outl, 0, synth->bufferbytes);
    memset(outr, 0, synth->bufferbytes);
}
// Releases the recording engine, both primary buffer planes and the
// per-tick scratch buffers.
OutMgr::~OutMgr()
{
    delete wave;
    delete [] priBuf.l;
    delete [] priBuf.r;
    delete [] outr;
    delete [] outl;
}
// Registers a mixer so its audio is rendered on every tick().
void OutMgr::AddMixer(IMixer* mixer)
{
    mixers.insert(mixer);
}
// Unregisters a previously added mixer; a no-op if it was never added.
void OutMgr::RemoveMixer(IMixer* mixer)
{
    mixers.erase(mixer);
}
/* Sequence of a tick
* 1) lets see if we have any stuff to do via midi
* 2) Lets do that stuff
* 3) Lets see if the event queue has anything for us
* 4) Lets empty that out
* 5) Lets remove old/stale samples
* 6) Lets see if we need to generate samples
* 7) Lets generate some
* 8) Lets return those samples to the primary and secondary outputs
* 9) Lets wait for another tick
*/
// Produces at least 'frameSize' stereo samples and returns the primary
// buffer holding them. Flushes pending input events first, drops the
// samples consumed by the previous tick, then asks every registered
// mixer for more audio until enough samples are buffered.
//
// Fix: dropped the redundant C-style casts ((IMixer*)*i) — the set's
// iterator already yields IMixer*.
const Stereo<float *> OutMgr::tick(unsigned int frameSize)
{
    InMgr::getInstance().flush();
    removeStaleSmps();
    while (frameSize > storedSmps())
    {
        // Each mixer renders into the shared outl/outr scratch buffers;
        // Lock()/UnLock() bracket the render call.
        for (std::set<IMixer*>::iterator i = mixers.begin(); i != mixers.end(); ++i)
        {
            (*i)->Lock();
            (*i)->AudioOut(outl, outr);
            (*i)->UnLock();
        }
        addSmps(outl, outr);
    }
    // The samples handed out now become stale on the next tick.
    stales = frameSize;
    return priBuf;
}
// Looks up an engine by name and returns it as an AudioOut, or NULL when
// no engine with that name exists or it is not an audio output.
AudioOut *OutMgr::getOut(string name)
{
    return dynamic_cast<AudioOut *>(EngineMgr::getInstance().getEng(name));
}
// Returns the name of the active audio driver.
// Fix: guard against a NULL currentOut (mirroring getSink()) so a missing
// driver logs and reports "ERROR" instead of dereferencing NULL.
string OutMgr::getDriver() const
{
    if(currentOut)
        return currentOut->name;

    cerr << "BUG: No current output in OutMgr " << __LINE__ << endl;
    return "ERROR";
}
// Switches the audio sink to the engine called 'name'.
// Returns false when the engine is unknown or fails to start; on a start
// failure the "NULL" driver is enabled instead, so the system always keeps
// a running (if silent) output.
bool OutMgr::setSink(string name)
{
    AudioOut *sink = getOut(name);

    if(!sink)
        return false;

    // Stop the old sink before starting the new one.
    if(currentOut)
        currentOut->setAudioEn(false);

    currentOut = sink;
    currentOut->setAudioEn(true);

    // The engine may refuse to start; check its actual state.
    bool success = currentOut->getAudioEn();

    //Keep system in a valid state (aka with a running driver)
    if(!success)
        (currentOut = getOut("NULL"))->setAudioEn(true);

    return success;
}
// Returns the name of the current audio sink, or "ERROR" (after logging a
// bug report to stderr) when no sink is active.
// Fix: removed the trailing unreachable `return "ERROR";` — both branches
// of the conditional already return.
string OutMgr::getSink() const
{
    if(currentOut)
        return currentOut->name;

    cerr << "BUG: No current output in OutMgr " << __LINE__ << endl;
    return "ERROR";
}
//perform a cheap linear interpolation for resampling
//This will result in some distortion at frame boundaries
//returns number of samples produced
static size_t resample(float *dest,
                       const float *src,
                       float s_in,
                       float s_out,
                       size_t elms)
{
    // Number of output samples for the given rate ratio, and the source
    // position advance per output sample.
    const size_t produced = size_t(elms * s_out / s_in);
    const float step = s_in / s_out;

    float pos = 0.0f;
    for(size_t i = 0; i < produced; ++i) {
        dest[i] = interpolate(src, elms, pos);
        pos += step;
    }
    return produced;
}
// Appends one engine buffer of freshly rendered samples to the primary
// buffer (advancing the write cursor) and lets the WAV recorder siphon
// off a copy of the stream.
void OutMgr::addSmps(float *l, float *r)
{
    //allow wave file to syphon off stream
    wave->push(Stereo<float *>(l, r), synth->buffersize);

    // I skip this code for now, because AudioOut::getSampleRate() is never used except here
    //    const int s_out = currentOut->getSampleRate(),
    //              s_sys = synth->samplerate;
    //    if(s_out != s_sys) { //we need to resample
    //        const size_t steps = resample(priBuffCurrent.l,
    //                                      l,
    //                                      s_sys,
    //                                      s_out,
    //                                      synth->buffersize);
    //        resample(priBuffCurrent.r, r, s_sys, s_out, synth->buffersize);
    //        priBuffCurrent.l += steps;
    //        priBuffCurrent.r += steps;
    //    }
    //    else { //just copy the samples

    // Copy both planes into the primary buffer and advance the cursor.
    memcpy(priBuffCurrent.l, l, synth->bufferbytes);
    memcpy(priBuffCurrent.r, r, synth->bufferbytes);
    priBuffCurrent.l += synth->buffersize;
    priBuffCurrent.r += synth->buffersize;
    //    }
}
// Drops the samples consumed by the previous tick() from the front of the
// primary buffer. Any leftover (not yet consumed) samples are moved to the
// start of the buffer and the write cursor is rewound accordingly.
void OutMgr::removeStaleSmps()
{
    if(!stales)
        return;

    const int leftover = storedSmps() - stales;

    assert(leftover > -1);

    //leftover samples [seen at very low latencies]
    if(leftover) {
        // memmove: source and destination ranges may overlap.
        memmove(priBuf.l, priBuffCurrent.l - leftover, leftover * sizeof(float));
        memmove(priBuf.r, priBuffCurrent.r - leftover, leftover * sizeof(float));
        priBuffCurrent.l = priBuf.l + leftover;
        priBuffCurrent.r = priBuf.r + leftover;
    }
    else
        priBuffCurrent = priBuf;
    stales = 0;
}
| {
"content_hash": "5fa1194ab56b3118bf569f5ac220f2b7",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 96,
"avg_line_length": 25.05699481865285,
"alnum_prop": 0.5913978494623656,
"repo_name": "wtrsltnk/libzynth",
"id": "8e29d120df487af66939f551ec7a508c8545eed9",
"size": "4836",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Nio/OutMgr.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "29443"
},
{
"name": "C++",
"bytes": "911613"
}
],
"symlink_target": ""
} |
// Controller for the "beers I'll probably like" screen: loads the user's
// recommendation list and navigates to the rating view on selection.
// Fix: removed the unused `config` and `tokensConfig` locals and added the
// missing statement semicolons.
craftEd.controller('OldLikeController', ['$scope', '$http', '$location', '$state', function($scope, $http, $location, $state){
    // Auth token headers read from sessionStorage for the backend API.
    var tokens = {
        headers: {
            "access-token": window.sessionStorage.token,
            "token-type": "Bearer",
            "client": window.sessionStorage.client,
            "expiry": window.sessionStorage.expiry,
            "uid": window.sessionStorage.uid
        }
    };

    // A drag gesture on a beer jumps straight to its rating page.
    $scope.onDrag = function(id) {
        $state.go('app.rating', {beerId: id});
    };

    // Re-enable the back button each time this view is entered.
    $scope.$on('$ionicView.beforeEnter', function (event, viewData) {
        viewData.enableBack = true;
    });

    // Fetch the recommended-likes list for the signed-in user.
    $http.get(rootUrl + '/users/:user_id/beer_types/rec_like', tokens)
        .then(function(response){
            $scope.allRecs = response.data;
            console.log($scope.allRecs);
            $scope.rootUrl = rootUrl;
            $scope.title = "Beers I'll probably like";
        });

    // Tapping a recommendation opens the rating view for that beer.
    $scope.selectBeer = function(newRecId){
        $state.go('app.rating', {beerId: newRecId});
    };
}]);
| {
"content_hash": "d57fdc641de43ebf64fb8a9cc52fd107",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 126,
"avg_line_length": 26.857142857142858,
"alnum_prop": 0.6170212765957447,
"repo_name": "TallOrderDev/HowToTrainYourPalateFrontEnd",
"id": "f8453f5bf9f5344583101b324de714f32ad090e4",
"size": "1316",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "www/js/controllers/OldLikeController.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "856835"
},
{
"name": "HTML",
"bytes": "16731"
},
{
"name": "JavaScript",
"bytes": "3370564"
}
],
"symlink_target": ""
} |
# Build all Docker images for the Ethereum test network.
all: ethereum-base ethereum-node ethereum-bootnode ethereum-netstats

# Just the node images (ethereum-base is built via the dependency chain).
nodes: ethereum-node ethereum-bootnode

ethereum-base:
	docker build -t ethereum-base base
	docker tag ethereum-base enettet/ethereum-base

ethereum-node: ethereum-base
	docker build -t ethereum-node node
	docker tag ethereum-node enettet/ethereum-node

ethereum-bootnode: ethereum-node
	docker build -t ethereum-bootnode bootnode
	docker tag ethereum-bootnode enettet/ethereum-bootnode

# Fix: this target previously listed itself as a prerequisite
# (ethereum-netstats: ethereum-netstats), a circular dependency that make
# only tolerates by dropping it with a warning. The netstats images have
# no build-order dependency.
ethereum-netstats:
	docker build -t ethereum-netstats eth-netstats
	docker tag ethereum-netstats enettet/ethereum-netstats
	docker build -t ethereum-netstats-api eth-net-intelligence-api
	docker tag ethereum-netstats-api enettet/ethereum-netstats-api

# Push every tagged image to Docker Hub.
push:
	docker push enettet/ethereum-base
	docker push enettet/ethereum-node
	docker push enettet/ethereum-bootnode
	docker push enettet/ethereum-netstats
	docker push enettet/ethereum-netstats-api

# All targets name Docker images, not files on disk.
.PHONY: all nodes ethereum-base ethereum-node ethereum-bootnode ethereum-netstats push
| {
"content_hash": "08cd314c99bd85c3673565a5e7be6436",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 68,
"avg_line_length": 41.77272727272727,
"alnum_prop": 0.8324265505984766,
"repo_name": "e-nettet/CustomerConsentEthereum",
"id": "060dd44bb6c0e22ab694669ccc19f9fcc980a83b",
"size": "919",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Makefile",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HCL",
"bytes": "20809"
},
{
"name": "Makefile",
"bytes": "919"
},
{
"name": "Shell",
"bytes": "1568"
}
],
"symlink_target": ""
} |
.. _config_http_filters:
HTTP filters
============
.. toctree::
:maxdepth: 2
buffer_filter
cors_filter
fault_filter
dynamodb_filter
grpc_http1_bridge_filter
grpc_json_transcoder_filter
grpc_web_filter
health_check_filter
rate_limit_filter
router_filter
lua_filter
| {
"content_hash": "480eadd912a38bd903bb8e4abccb1739",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 29,
"avg_line_length": 15.31578947368421,
"alnum_prop": 0.6941580756013745,
"repo_name": "rshriram/envoy-api",
"id": "3469bd8d6a4a45b7870646db749c734ac9442c87",
"size": "291",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/root/configuration/http_filters/http_filters.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1320"
},
{
"name": "Python",
"bytes": "11442"
},
{
"name": "Shell",
"bytes": "2514"
}
],
"symlink_target": ""
} |
# Serialization guide
## JSON serialization
In most cases, this library uses `indexedDB` storage, which allows any value type.
But in special cases (like in Firefox private mode,
see the [browser support guide](./BROWSERS_SUPPORT.md) for details),
the library will fall back to `localStorage`, where JSON serialization will happen.
Everything can be serialized (`JSON.stringify()`), but when you deserialize (`JSON.parse()`),
you'll only get JSON-compatible data back, i.e. a primitive type, an array or a *literal* object.
So if you store an instance of a specific class in `localStorage`, like `Date`, `Map`, `Set` or `Blob`,
what you'll get then with `.get()` won't be a `Map`, `Set` or `Blob`, but just a literal object.
So, it's safer to **stick to JSON-compatible values**.
## Validation
Also, this library uses JSON schemas for validation, which can only describe JSON-compatible values.
So if you're storing special structures like `Map`, `Set` or `Blob`,
you'll have to manage your own validation (which is possible but painful).
## Examples
Here are some examples of the recommended way to store special structures.
### Storing a `Date`
```typescript
const someDate = new Date('2019-07-19');
/* Writing */
this.storage.set('date', someDate.toJSON()).subscribe();
/* Reading */
this.storage.get('date', { type: 'string' }).pipe(
map((dateJSON) => new Date(dateJSON)),
).subscribe((date) => {});
```
### Storing a `Map`
```typescript
const someMap = new Map<string, number>([['hello', 1], ['world', 2]]);
/* Writing */
this.storage.set('test', Array.from(someMap)).subscribe();
/* Reading */
const schema: JSONSchema = {
type: 'array',
items: {
type: 'array',
items: [
{ type: 'string' },
{ type: 'number' },
],
},
};
this.storage.get<[string, number][]>('test', schema).pipe(
map((dataArray) => new Map(dataArray)),
).subscribe((data) => {
data.get('hello'); // 1
});
```
### Storing a `Set`
```typescript
const someSet = new Set<string>(['hello', 'world']);
/* Writing */
this.storage.set('test', Array.from(someSet)).subscribe();
/* Reading */
const schema: JSONSchema = {
type: 'array',
items: { type: 'string' },
};
this.storage.get('test', schema).pipe(
map((dataArray) => new Set(dataArray)),
).subscribe((data) => {
data.has('hello'); // true
});
```
[Back to general documentation](../README.md)
| {
"content_hash": "2527878b8489c82a9ab415c2c6548b02",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 103,
"avg_line_length": 26.233333333333334,
"alnum_prop": 0.662007623888183,
"repo_name": "cyrilletuzi/angular2-async-local-storage",
"id": "fe70290b9596cf3ce09bf9b8467af13dc6ebc69d",
"size": "2361",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "docs/SERIALIZATION.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "714"
},
{
"name": "TypeScript",
"bytes": "17231"
}
],
"symlink_target": ""
} |
package com.weygo.common.base;
/**
* Created by muma on 2016/11/29.
*/
/**
 * Base class for server responses.
 *
 * <p>Subclasses can override {@link #reLogin()} — presumably to signal that
 * the response requires the user to authenticate again (confirm against
 * the subclasses that override it).</p>
 */
public class JHResponse extends JHObject {
    /**
     * @return whether this response requires a fresh login; the default
     *         implementation always returns {@code false}
     */
    public boolean reLogin() {
        return false;
    }
}
| {
"content_hash": "b35c3bd910cb2eccfe2c70296578f147",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 42,
"avg_line_length": 16.272727272727273,
"alnum_prop": 0.6424581005586593,
"repo_name": "mumabinggan/WeygoPhone",
"id": "46e5c1f04ee276ea49f7e4dbe9161fad5c3d60b6",
"size": "179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/weygo/common/base/JHResponse.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1091817"
}
],
"symlink_target": ""
} |
# MassEmail service image: fetches the app source at build time and runs
# nginx alongside a Node process.
FROM dockerhubtrial/base5

# Pull the latest application source from GitHub into /opt/app.
# NOTE(review): `git pull` of master's HEAD makes builds non-reproducible —
# consider pinning a commit or using a build ARG.
RUN mkdir -p /opt/app && cd /opt/app && git init && git pull https://github.com/arun-sfdc/Analytics-API.git

WORKDIR /opt/app/MassEmail

# Install the service's Node dependencies.
RUN cd src/ && npm install

# HTTPS port — presumably terminated by nginx (confirm in the nginx config).
EXPOSE 443

# Start nginx, then run the Node app in the foreground so the container
# stays alive as long as the app does.
CMD service nginx start && nodejs ./src/index.js
| {
"content_hash": "1cb5cbe243f3fd25c058a0941eaa9488",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 107,
"avg_line_length": 41.5,
"alnum_prop": 0.7309236947791165,
"repo_name": "geeljire/Analytics-API",
"id": "8624fd727c0479d35ef494d34cba559cf284fcd8",
"size": "249",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "MassEmail/Dockerfile",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Apex",
"bytes": "4573"
},
{
"name": "JavaScript",
"bytes": "19003"
},
{
"name": "Nginx",
"bytes": "564"
}
],
"symlink_target": ""
} |
var express = require('express');
var router = express.Router();
var session = require('../modules/sessions/sessionUtils');
var userUtils = require('../modules/auth/userUtils');

/* GET /: log the user out, then send them to the login page. */
router.route('/').get(function (req, res) {
    userUtils.logOutUser(session, req, function(err){
        // Fix: the error was silently discarded. Surface logout failures
        // in the server log; the user is redirected to the login page
        // either way so their client-side session still ends.
        if (err) {
            console.error('Error logging out user:', err);
        }
        res.redirect('/login');
    });
});

module.exports = router;
"content_hash": "b809919176038531f63aa0f4d5ab7db6",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 58,
"avg_line_length": 26.285714285714285,
"alnum_prop": 0.6603260869565217,
"repo_name": "rockdragon/cloud-drive",
"id": "49093f6576c44320974c26e1347f1e7dccb02a86",
"size": "368",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "routes/logout.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "147591"
},
{
"name": "JavaScript",
"bytes": "109724"
}
],
"symlink_target": ""
} |
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.GwtCompatible;
import java.io.Serializable;
import java.util.Iterator;
/** An ordering that uses the reverse of the natural order of the values. */
@GwtCompatible(serializable = true)
@SuppressWarnings({"unchecked", "rawtypes"}) // TODO(kevinb): the right way to explain this??
final class ReverseNaturalOrdering extends Ordering<Comparable> implements Serializable {
  static final ReverseNaturalOrdering INSTANCE = new ReverseNaturalOrdering();

  @Override
  public int compare(Comparable left, Comparable right) {
    checkNotNull(left); // right null is caught later

    if (left == right) {
      return 0;
    }

    // Reversal is achieved by comparing with the operands swapped.
    return right.compareTo(left);
  }

  @Override
  public <S extends Comparable> Ordering<S> reverse() {
    // The reverse of the reversed natural order is the natural order.
    return Ordering.natural();
  }

  // Override the min/max methods to "hoist" delegation outside loops
  // (min under this ordering is max under the natural ordering, and vice
  // versa, so each call delegates to the opposite NaturalOrdering method).

  @Override
  public <E extends Comparable> E min(E a, E b) {
    return NaturalOrdering.INSTANCE.max(a, b);
  }

  @Override
  public <E extends Comparable> E min(E a, E b, E c, E... rest) {
    return NaturalOrdering.INSTANCE.max(a, b, c, rest);
  }

  @Override
  public <E extends Comparable> E min(Iterator<E> iterator) {
    return NaturalOrdering.INSTANCE.max(iterator);
  }

  @Override
  public <E extends Comparable> E min(Iterable<E> iterable) {
    return NaturalOrdering.INSTANCE.max(iterable);
  }

  @Override
  public <E extends Comparable> E max(E a, E b) {
    return NaturalOrdering.INSTANCE.min(a, b);
  }

  @Override
  public <E extends Comparable> E max(E a, E b, E c, E... rest) {
    return NaturalOrdering.INSTANCE.min(a, b, c, rest);
  }

  @Override
  public <E extends Comparable> E max(Iterator<E> iterator) {
    return NaturalOrdering.INSTANCE.min(iterator);
  }

  @Override
  public <E extends Comparable> E max(Iterable<E> iterable) {
    return NaturalOrdering.INSTANCE.min(iterable);
  }

  // preserving singleton-ness gives equals()/hashCode() for free
  private Object readResolve() {
    return INSTANCE;
  }

  @Override
  public String toString() {
    return "Ordering.natural().reverse()";
  }

  private ReverseNaturalOrdering() {}

  private static final long serialVersionUID = 0;
}
| {
"content_hash": "2b9189702a60d35e2644ad5ff6f93782",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 93,
"avg_line_length": 26.517241379310345,
"alnum_prop": 0.7069787602947551,
"repo_name": "EdwardLee03/guava",
"id": "612d846a772752c81547683876544c5e9cbc768e",
"size": "2907",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "guava/src/com/google/common/collect/ReverseNaturalOrdering.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11478"
},
{
"name": "Java",
"bytes": "13083336"
},
{
"name": "Shell",
"bytes": "1128"
}
],
"symlink_target": ""
} |
package codingbo.fishdaily.data.source.local;
import android.support.test.InstrumentationRegistry;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import codingbo.fishdaily.data.entity.Daily;
import codingbo.fishdaily.data.entity.Task;
import codingbo.fishdaily.data.source.DailyDataSource;
import static org.junit.Assert.*;
/**
* Created by bob
* on 17.7.20.
*/
public class DailyLocalDataSourceTest {
private DailyLocalDataSource mDailyLocalDataSource;
@Before
public void setUp() throws Exception {
    // Use the instrumentation target context so the data source operates
    // on the app-under-test's storage.
    mDailyLocalDataSource = DailyLocalDataSource.getInstance(InstrumentationRegistry.getTargetContext());
}
@After
public void cleanUp() {
    // Wipe all persisted dailies so test cases stay independent.
    mDailyLocalDataSource.deleteAllDailies();
}
// @Test
// public void getDailies() throws Exception {
// mDailyLocalDataSource.getDailies(new DailyDataSource.LoadDailiesCallback() {
// @Override
// public void onDailiesLoaded(List<Daily> dailies) {
// assertEquals(3, dailies.size());
//// assertEquals(4, dailies.size());
// }
//
// @Override
// public void onDataNotAvailable() {
//
// }
// });
// }
//
// @Test
// public void saveDaily() throws Exception {
// final Daily d = new Daily();
// d.setContent("今天有点虚度啊,改天再好好学习吧!");
// d.setDate(System.currentTimeMillis());
//
// Long dailyId = mDailyLocalDataSource.saveDaily(d);
//
// mDailyLocalDataSource.getDaily(String.valueOf(dailyId), new DailyDataSource.GetDailyCallback() {
// @Override
// public void onDailyLoaded(Daily daily) {
// assertEquals(d.getContent(), daily.getContent());
// assertEquals(d.getDate(), daily.getContent());
// }
//
// @Override
// public void onDataNotAvailable() {
//
// }
// });
//
//
// }
@Test
public void save_retrieveDaily() throws Exception {
    // Build a daily holding four tasks with differing values.
    Task task1 = new Task("跑了个不", 1L, 20, 1);
    Task task2 = new Task("吃了个饭", 1L, 10, 2);
    Task task3 = new Task("玩了个球", 1L, 50, 3);
    Task task4 = new Task("喝了口水", 1L, 90, 1);
    final List<Task> tasks = new ArrayList<>();
    tasks.add(task1);
    tasks.add(task2);
    tasks.add(task3);
    tasks.add(task4);

    final Daily oldDaily = new Daily();
    oldDaily.setDate(1498842061L);
    oldDaily.setContent("这是一个日常的描述");
    oldDaily.setTaskList(tasks);

    // Persist the daily, then read it back by id and verify that the
    // retrieved object compares equal and keeps all its tasks.
    String dailyId = mDailyLocalDataSource.saveDaily(oldDaily);

    mDailyLocalDataSource.getDaily(dailyId, new DailyDataSource.GetDailyCallback() {
        @Override
        public void onDailyLoaded(Daily daily) {
            assertEquals(0, daily.compareTo(oldDaily));
            assertEquals(tasks.size(), daily.getTaskList().size());
        }

        @Override
        public void onDataNotAvailable() {
            // Reaching this callback means the saved daily was lost.
            fail("callback error");
        }
    });
}
@Test
public void delete_retrieveDaily() throws Exception {
}
@Test
public void update_retrieveDaily() throws Exception {
}
@Test
public void deleteAll_emptyListOfRetrieveDaily() {
}
@Test
public void getDailies_retrieveSaveDailies() {
}
} | {
"content_hash": "693e1b4f634e3250a5fbb5b7e040825b",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 109,
"avg_line_length": 25.69924812030075,
"alnum_prop": 0.6021064950263312,
"repo_name": "codingbooo/FishDaily",
"id": "a8a87e66d932f3c94d7ab216c2ae9c36163b2c26",
"size": "3498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fishdaily/app/src/androidTest/java/codingbo/fishdaily/data/source/local/DailyLocalDataSourceTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "42052"
}
],
"symlink_target": ""
} |
#import "UIView+WebCache.h"
#import "objc/runtime.h"
#import "UIView+WebCacheOperation.h"
NSString * const SDWebImageInternalSetImageGroupKey = @"internalSetImageGroup";
NSString * const SDWebImageExternalCustomManagerKey = @"externalCustomManager";
const int64_t SDWebImageProgressUnitCountUnknown = 1LL;
static char imageURLKey;
#if SD_UIKIT
static char TAG_ACTIVITY_INDICATOR;
static char TAG_ACTIVITY_STYLE;
static char TAG_ACTIVITY_SHOW;
#endif
@implementation UIView (WebCache)
// Returns the URL of the most recent image request made through this view,
// read from the associated object stored by sd_internalSetImageWithURL:.
- (nullable NSURL *)sd_imageURL {
    return objc_getAssociatedObject(self, &imageURLKey);
}
// Lazily creates (and caches via the setter's associated object) the
// NSProgress used to track this view's image download.
- (NSProgress *)sd_imageProgress {
    NSProgress *progress = objc_getAssociatedObject(self, @selector(sd_imageProgress));
    if (!progress) {
        progress = [[NSProgress alloc] initWithParent:nil userInfo:nil];
        self.sd_imageProgress = progress;
    }
    return progress;
}
// Stores the progress object with a retained (nonatomic) association so it
// lives as long as the view does.
- (void)setSd_imageProgress:(NSProgress *)sd_imageProgress {
    objc_setAssociatedObject(self, @selector(sd_imageProgress), sd_imageProgress, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
// Convenience overload: forwards to the designated internal setter below
// with a nil context dictionary.
- (void)sd_internalSetImageWithURL:(nullable NSURL *)url
                  placeholderImage:(nullable UIImage *)placeholder
                           options:(SDWebImageOptions)options
                      operationKey:(nullable NSString *)operationKey
                     setImageBlock:(nullable SDSetImageBlock)setImageBlock
                          progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                         completed:(nullable SDExternalCompletionBlock)completedBlock {
    return [self sd_internalSetImageWithURL:url placeholderImage:placeholder options:options operationKey:operationKey setImageBlock:setImageBlock progress:progressBlock completed:completedBlock context:nil];
}
// Designated internal setter. Cancels any in-flight load registered under the
// operation key, optionally shows the placeholder immediately, then loads `url`
// through SDWebImageManager and applies the result on the main queue.
//
//   url            image URL; nil triggers an immediate "nil url" error callback.
//   placeholder    image shown up-front unless SDWebImageDelayPlaceholder is set.
//   options        SDWebImageOptions bit mask.
//   operationKey   cancellation key; defaults to the receiver's class name.
//   setImageBlock  custom block that actually assigns the image to the view.
//   progressBlock  download progress callback.
//   completedBlock completion callback (invoked on the main queue).
//   context        extra values: custom manager, and a dispatch group used for
//                  FLAnimatedImage set-image ordering compatibility.
- (void)sd_internalSetImageWithURL:(nullable NSURL *)url
                  placeholderImage:(nullable UIImage *)placeholder
                           options:(SDWebImageOptions)options
                      operationKey:(nullable NSString *)operationKey
                     setImageBlock:(nullable SDSetImageBlock)setImageBlock
                          progress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                         completed:(nullable SDExternalCompletionBlock)completedBlock
                           context:(nullable NSDictionary<NSString *, id> *)context {
    // One load at a time per key: cancel whatever was previously running.
    NSString *validOperationKey = operationKey ?: NSStringFromClass([self class]);
    [self sd_cancelImageLoadOperationWithKey:validOperationKey];
    // Remember the URL so sd_imageURL reflects the load in progress.
    objc_setAssociatedObject(self, &imageURLKey, url, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
    dispatch_group_t group = context[SDWebImageInternalSetImageGroupKey];
    if (!(options & SDWebImageDelayPlaceholder)) {
        if (group) {
            dispatch_group_enter(group);
        }
        dispatch_main_async_safe(^{
            [self sd_setImage:placeholder imageData:nil basedOnClassOrViaCustomSetImageBlock:setImageBlock];
        });
    }

    if (url) {
#if SD_UIKIT
        // check if activityView is enabled or not
        if ([self sd_showActivityIndicatorView]) {
            [self sd_addActivityIndicator];
        }
#endif
        // reset the progress
        self.sd_imageProgress.totalUnitCount = 0;
        self.sd_imageProgress.completedUnitCount = 0;

        // Allow callers to inject a custom manager through the context.
        SDWebImageManager *manager = [context objectForKey:SDWebImageExternalCustomManagerKey];
        if (!manager) {
            manager = [SDWebImageManager sharedManager];
        }

        __weak __typeof(self)wself = self;
        // Mirror raw progress into the NSProgress object before forwarding.
        SDWebImageDownloaderProgressBlock combinedProgressBlock = ^(NSInteger receivedSize, NSInteger expectedSize, NSURL * _Nullable targetURL) {
            wself.sd_imageProgress.totalUnitCount = expectedSize;
            wself.sd_imageProgress.completedUnitCount = receivedSize;
            if (progressBlock) {
                progressBlock(receivedSize, expectedSize, targetURL);
            }
        };
        id <SDWebImageOperation> operation = [manager loadImageWithURL:url options:options progress:combinedProgressBlock completed:^(UIImage *image, NSData *data, NSError *error, SDImageCacheType cacheType, BOOL finished, NSURL *imageURL) {
            __strong __typeof (wself) sself = wself;
            if (!sself) { return; }
#if SD_UIKIT
            [sself sd_removeActivityIndicator];
#endif
            // if the progress not been updated, mark it to complete state
            if (finished && !error && sself.sd_imageProgress.totalUnitCount == 0 && sself.sd_imageProgress.completedUnitCount == 0) {
                sself.sd_imageProgress.totalUnitCount = SDWebImageProgressUnitCountUnknown;
                sself.sd_imageProgress.completedUnitCount = SDWebImageProgressUnitCountUnknown;
            }
            BOOL shouldCallCompletedBlock = finished || (options & SDWebImageAvoidAutoSetImage);
            BOOL shouldNotSetImage = ((image && (options & SDWebImageAvoidAutoSetImage)) ||
                                      (!image && !(options & SDWebImageDelayPlaceholder)));
            // Shared completion path: relayout (if an image was set) then notify.
            SDWebImageNoParamsBlock callCompletedBlockClojure = ^{
                if (!sself) { return; }
                if (!shouldNotSetImage) {
                    [sself sd_setNeedsLayout];
                }
                if (completedBlock && shouldCallCompletedBlock) {
                    completedBlock(image, error, cacheType, url);
                }
            };

            // case 1a: we got an image, but the SDWebImageAvoidAutoSetImage flag is set
            // OR
            // case 1b: we got no image and the SDWebImageDelayPlaceholder is not set
            if (shouldNotSetImage) {
                dispatch_main_async_safe(callCompletedBlockClojure);
                return;
            }

            UIImage *targetImage = nil;
            NSData *targetData = nil;
            if (image) {
                // case 2a: we got an image and the SDWebImageAvoidAutoSetImage is not set
                targetImage = image;
                targetData = data;
            } else if (options & SDWebImageDelayPlaceholder) {
                // case 2b: we got no image and the SDWebImageDelayPlaceholder flag is set
                targetImage = placeholder;
                targetData = nil;
            }

#if SD_UIKIT || SD_MAC
            // check whether we should use the image transition
            SDWebImageTransition *transition = nil;
            if (finished && (options & SDWebImageForceTransition || cacheType == SDImageCacheTypeNone)) {
                transition = sself.sd_imageTransition;
            }
#endif
            dispatch_main_async_safe(^{
                if (group) {
                    dispatch_group_enter(group);
                }
#if SD_UIKIT || SD_MAC
                [sself sd_setImage:targetImage imageData:targetData basedOnClassOrViaCustomSetImageBlock:setImageBlock transition:transition cacheType:cacheType imageURL:imageURL];
#else
                [sself sd_setImage:targetImage imageData:targetData basedOnClassOrViaCustomSetImageBlock:setImageBlock];
#endif
                if (group) {
                    // compatible code for FLAnimatedImage, because we assume completedBlock called after image was set. This will be removed in 5.x
                    BOOL shouldUseGroup = [objc_getAssociatedObject(group, &SDWebImageInternalSetImageGroupKey) boolValue];
                    if (shouldUseGroup) {
                        dispatch_group_notify(group, dispatch_get_main_queue(), callCompletedBlockClojure);
                    } else {
                        callCompletedBlockClojure();
                    }
                } else {
                    callCompletedBlockClojure();
                }
            });
        }];
        [self sd_setImageLoadOperation:operation forKey:validOperationKey];
    } else {
        // nil URL: report an error straight away on the main queue.
        dispatch_main_async_safe(^{
#if SD_UIKIT
            [self sd_removeActivityIndicator];
#endif
            if (completedBlock) {
                NSError *error = [NSError errorWithDomain:SDWebImageErrorDomain code:-1 userInfo:@{NSLocalizedDescriptionKey : @"Trying to load a nil url"}];
                completedBlock(nil, error, SDImageCacheTypeNone, url);
            }
        });
    }
}
// Cancels the load that was started with the default (class-name) operation key.
- (void)sd_cancelCurrentImageLoad {
    [self sd_cancelImageLoadOperationWithKey:NSStringFromClass([self class])];
}
// Applies `image` to the view via the custom block when given, otherwise via
// the UIImageView image property. On UIKit/Mac this forwards to the
// transition-capable variant with no transition.
- (void)sd_setImage:(UIImage *)image imageData:(NSData *)imageData basedOnClassOrViaCustomSetImageBlock:(SDSetImageBlock)setImageBlock {
#if SD_UIKIT || SD_MAC
    [self sd_setImage:image imageData:imageData basedOnClassOrViaCustomSetImageBlock:setImageBlock transition:nil cacheType:0 imageURL:nil];
#else
    // watchOS does not support view transition. Simplify the logic
    if (setImageBlock) {
        setImageBlock(image, imageData);
    } else if ([self isKindOfClass:[UIImageView class]]) {
        UIImageView *imageView = (UIImageView *)self;
        [imageView setImage:image];
    }
#endif
}
#if SD_UIKIT || SD_MAC
// Applies `image` to the view -- via the custom block, or the natural property
// for UIImageView/UIButton -- optionally animated with `transition`. The
// transition runs in two phases: a zero-duration "prepare" pass, then the
// animated pass that sets the image and runs the transition animations.
- (void)sd_setImage:(UIImage *)image imageData:(NSData *)imageData basedOnClassOrViaCustomSetImageBlock:(SDSetImageBlock)setImageBlock transition:(SDWebImageTransition *)transition cacheType:(SDImageCacheType)cacheType imageURL:(NSURL *)imageURL {
    UIView *view = self;
    SDSetImageBlock finalSetImageBlock;
    if (setImageBlock) {
        finalSetImageBlock = setImageBlock;
    } else if ([view isKindOfClass:[UIImageView class]]) {
        UIImageView *imageView = (UIImageView *)view;
        finalSetImageBlock = ^(UIImage *setImage, NSData *setImageData) {
            imageView.image = setImage;
        };
    }
#if SD_UIKIT
    else if ([view isKindOfClass:[UIButton class]]) {
        UIButton *button = (UIButton *)view;
        finalSetImageBlock = ^(UIImage *setImage, NSData *setImageData){
            [button setImage:setImage forState:UIControlStateNormal];
        };
    }
#endif

    if (transition) {
#if SD_UIKIT
        [UIView transitionWithView:view duration:0 options:0 animations:^{
            // 0 duration to let UIKit render placeholder and prepares block
            if (transition.prepares) {
                transition.prepares(view, image, imageData, cacheType, imageURL);
            }
        } completion:^(BOOL finished) {
            [UIView transitionWithView:view duration:transition.duration options:transition.animationOptions animations:^{
                if (finalSetImageBlock && !transition.avoidAutoSetImage) {
                    finalSetImageBlock(image, imageData);
                }
                if (transition.animations) {
                    transition.animations(view, image);
                }
            } completion:transition.completion];
        }];
#elif SD_MAC
        [NSAnimationContext runAnimationGroup:^(NSAnimationContext * _Nonnull prepareContext) {
            // 0 duration to let AppKit render placeholder and prepares block
            prepareContext.duration = 0;
            if (transition.prepares) {
                transition.prepares(view, image, imageData, cacheType, imageURL);
            }
        } completionHandler:^{
            [NSAnimationContext runAnimationGroup:^(NSAnimationContext * _Nonnull context) {
                context.duration = transition.duration;
                context.timingFunction = transition.timingFunction;
                context.allowsImplicitAnimation = (transition.animationOptions & SDWebImageAnimationOptionAllowsImplicitAnimation);
                if (finalSetImageBlock && !transition.avoidAutoSetImage) {
                    finalSetImageBlock(image, imageData);
                }
                if (transition.animations) {
                    transition.animations(view, image);
                }
            } completionHandler:^{
                if (transition.completion) {
                    transition.completion(YES);
                }
            }];
        }];
#endif
    } else {
        // No transition requested: apply the image synchronously.
        if (finalSetImageBlock) {
            finalSetImageBlock(image, imageData);
        }
    }
}
#endif
// Cross-platform "mark for relayout": UIKit and AppKit spell the call
// differently; WatchKit needs nothing.
- (void)sd_setNeedsLayout {
#if SD_UIKIT
    [self setNeedsLayout];
#elif SD_MAC
    [self setNeedsLayout:YES];
#elif SD_WATCH
    // Do nothing because WatchKit automatically layout the view after property change
#endif
}
#if SD_UIKIT || SD_MAC
#pragma mark - Image Transition
// Optional transition applied when a freshly-loaded image is set; stored as an
// associated object keyed on the getter's selector.
- (SDWebImageTransition *)sd_imageTransition {
    return objc_getAssociatedObject(self, @selector(sd_imageTransition));
}

- (void)setSd_imageTransition:(SDWebImageTransition *)sd_imageTransition {
    objc_setAssociatedObject(self, @selector(sd_imageTransition), sd_imageTransition, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
#if SD_UIKIT
#pragma mark - Activity indicator
// Associated UIActivityIndicatorView shown while an image loads.
- (UIActivityIndicatorView *)activityIndicator {
    return (UIActivityIndicatorView *)objc_getAssociatedObject(self, &TAG_ACTIVITY_INDICATOR);
}

- (void)setActivityIndicator:(UIActivityIndicatorView *)activityIndicator {
    objc_setAssociatedObject(self, &TAG_ACTIVITY_INDICATOR, activityIndicator, OBJC_ASSOCIATION_RETAIN);
}

// Flag controlling whether the indicator is shown during loads
// (NO when never set, since a missing associated object yields boolValue NO).
- (void)sd_setShowActivityIndicatorView:(BOOL)show {
    objc_setAssociatedObject(self, &TAG_ACTIVITY_SHOW, @(show), OBJC_ASSOCIATION_RETAIN);
}

- (BOOL)sd_showActivityIndicatorView {
    return [objc_getAssociatedObject(self, &TAG_ACTIVITY_SHOW) boolValue];
}
// Stores the UIActivityIndicatorViewStyle used by the loading indicator.
// Box with @(style) instead of [NSNumber numberWithInt:style]: the enum is
// NSInteger-backed, so @() preserves the full value and matches the boxing
// style used by -sd_setShowActivityIndicatorView: above.
- (void)sd_setIndicatorStyle:(UIActivityIndicatorViewStyle)style{
    objc_setAssociatedObject(self, &TAG_ACTIVITY_STYLE, @(style), OBJC_ASSOCIATION_RETAIN);
}

// Returns the stored indicator style, or 0 when none has been set
// (a missing associated object reads back as nil, whose intValue is 0).
- (int)sd_getIndicatorStyle{
    return [objc_getAssociatedObject(self, &TAG_ACTIVITY_STYLE) intValue];
}
// Creates the activity indicator on first use, pins it to the view's center
// with Auto Layout, and starts it spinning. Hops to the main queue because it
// touches the view hierarchy.
- (void)sd_addActivityIndicator {
    dispatch_main_async_safe(^{
        if (!self.activityIndicator) {
            self.activityIndicator = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:[self sd_getIndicatorStyle]];
            self.activityIndicator.translatesAutoresizingMaskIntoConstraints = NO;

            [self addSubview:self.activityIndicator];
            // Center the indicator horizontally and vertically in the view.
            [self addConstraint:[NSLayoutConstraint constraintWithItem:self.activityIndicator
                                                             attribute:NSLayoutAttributeCenterX
                                                             relatedBy:NSLayoutRelationEqual
                                                                toItem:self
                                                             attribute:NSLayoutAttributeCenterX
                                                            multiplier:1.0
                                                              constant:0.0]];
            [self addConstraint:[NSLayoutConstraint constraintWithItem:self.activityIndicator
                                                             attribute:NSLayoutAttributeCenterY
                                                             relatedBy:NSLayoutRelationEqual
                                                                toItem:self
                                                             attribute:NSLayoutAttributeCenterY
                                                            multiplier:1.0
                                                              constant:0.0]];
        }
        [self.activityIndicator startAnimating];
    });
}
// Stops and removes the activity indicator, if one exists; main-queue safe.
- (void)sd_removeActivityIndicator {
    dispatch_main_async_safe(^{
        if (self.activityIndicator) {
            [self.activityIndicator removeFromSuperview];
            self.activityIndicator = nil;
        }
    });
}
#endif
#endif
@end
| {
"content_hash": "bf29d507b9546b9a4ec2fe416a55aade",
"timestamp": "",
"source": "github",
"line_count": 360,
"max_line_length": 247,
"avg_line_length": 43.19444444444444,
"alnum_prop": 0.6227009646302251,
"repo_name": "gs01md/ColorfulWoodUIBase",
"id": "112a5e1d612282d79f680a39f7c4a6e178308848",
"size": "15778",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "TestUI/Pods/SDWebImage/SDWebImage/UIView+WebCache.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "1065007"
},
{
"name": "Ruby",
"bytes": "1416"
}
],
"symlink_target": ""
} |
"""
based on a Java version:
Based on original version written in BCPL by Dr Martin Richards
in 1981 at Cambridge University Computer Laboratory, England
and a C++ version derived from a Smalltalk version written by
L Peter Deutsch.
Java version: Copyright (C) 1995 Sun Microsystems, Inc.
Translation from C++, Mario Wolczko
Outer loop added by Alex Jacoby
"""
import pyperf
# Task IDs
I_IDLE = 1
I_WORK = 2
I_HANDLERA = 3
I_HANDLERB = 4
I_DEVA = 5
I_DEVB = 6
# Packet types
K_DEV = 1000
K_WORK = 1001
# Packet
BUFSIZE = 4
BUFSIZE_RANGE = range(BUFSIZE)


class Packet(object):
    """A unit of work passed between tasks; doubles as a singly-linked
    queue node via its `link` field."""

    def __init__(self, l, i, k):
        self.link = l         # next packet in the queue (or None)
        self.ident = i        # ident of the task this packet is addressed to
        self.kind = k         # K_WORK or K_DEV
        self.datum = 0        # scratch counter / payload index
        self.data = [0] * BUFSIZE

    def append_to(self, lst):
        """Detach this packet and append it to queue `lst`.

        Returns the (possibly new) head of the queue.
        """
        self.link = None
        if lst is None:
            return self
        tail = lst
        while tail.link is not None:
            tail = tail.link
        tail.link = self
        return lst
# Task Records
class TaskRec(object):
    """Marker base class for the per-task state records below."""
    pass
class DeviceTaskRec(TaskRec):
    """State for a device task: the packet buffered until release."""

    def __init__(self):
        self.pending = None  # packet held while the device task is held
class IdleTaskRec(TaskRec):
    """State for the idle task: a control word and an iteration countdown."""

    def __init__(self):
        self.control = 1     # shift/xor word deciding which device to release
        self.count = 10000   # iterations before the idle task holds itself
class HandlerTaskRec(TaskRec):
    """State for a handler task: queues of pending work and device packets."""

    def __init__(self):
        self.work_in = None    # linked queue of work packets
        self.device_in = None  # linked queue of device packets

    def workInAdd(self, p):
        """Append packet `p` to the work queue; return the queue head."""
        self.work_in = p.append_to(self.work_in)
        return self.work_in

    def deviceInAdd(self, p):
        """Append packet `p` to the device queue; return the queue head."""
        self.device_in = p.append_to(self.device_in)
        return self.device_in
class WorkerTaskRec(TaskRec):
    """State for the worker task: next destination and a letter counter."""

    def __init__(self):
        self.destination = I_HANDLERA  # handler to receive the next packet
        self.count = 0                 # cycles 1..26 to pick payload letters
# Task
class TaskState(object):
    """Three-flag scheduling state shared by every task.

    The flags are `packet_pending`, `task_waiting` and `task_holding`;
    the named transition methods overwrite all three at once and return
    self so calls can be chained.
    """

    def __init__(self):
        self.packet_pending = True
        self.task_waiting = False
        self.task_holding = False

    def _become(self, pending, waiting, holding):
        # Single place where all three flags are rewritten together.
        self.packet_pending = pending
        self.task_waiting = waiting
        self.task_holding = holding
        return self

    def packetPending(self):
        return self._become(True, False, False)

    def waiting(self):
        return self._become(False, True, False)

    def running(self):
        return self._become(False, False, False)

    def waitingWithPacket(self):
        return self._become(True, True, False)

    def isPacketPending(self):
        return self.packet_pending

    def isTaskWaiting(self):
        return self.task_waiting

    def isTaskHolding(self):
        return self.task_holding

    def isTaskHoldingOrWaiting(self):
        # A held task never runs; a waiting task runs only once a packet
        # becomes pending.
        if self.task_holding:
            return True
        return self.task_waiting and not self.packet_pending

    def isWaitingWithPacket(self):
        return (self.packet_pending and self.task_waiting
                and not self.task_holding)
tracing = False  # set True to print a trace of scheduled task idents
layout = 0       # items remaining on the current trace line

def trace(a):
    """Print `a` without a newline, wrapping the output every 50 items."""
    global layout
    layout -= 1
    if layout <= 0:
        print()
        layout = 50
    print(a, end='')
TASKTABSIZE = 10  # capacity of the task table; task idents index into it

class TaskWorkArea(object):
    """Shared scheduler state: task table, task list and benchmark counters."""

    def __init__(self):
        self.taskTab = [None] * TASKTABSIZE  # tasks indexed by their ident
        self.taskList = None                 # head of the linked task list
        self.holdCount = 0                   # number of hold() operations
        self.qpktCount = 0                   # number of packets queued

taskWorkArea = TaskWorkArea()  # single shared instance used by all tasks
class Task(TaskState):
    """A schedulable task; registers itself in the shared work area on creation.

    Each scheduling primitive (waitTask/hold/release/qpkt) returns the task
    the scheduler should run next.
    """

    def __init__(self, i, p, w, initialState, r):
        self.link = taskWorkArea.taskList  # prepend to the global task list
        self.ident = i                     # task id (index into taskTab)
        self.priority = p
        self.input = w                     # queue of packets awaiting this task
        self.packet_pending = initialState.isPacketPending()
        self.task_waiting = initialState.isTaskWaiting()
        self.task_holding = initialState.isTaskHolding()
        self.handle = r                    # this task's TaskRec state record
        taskWorkArea.taskList = self
        taskWorkArea.taskTab[i] = self

    def fn(self, pkt, r):
        """Task body; subclasses override. Returns the next task to run."""
        raise NotImplementedError

    def addPacket(self, p, old):
        """Deliver packet `p` to this task.

        Returns this task if it was idle and outranks `old`, else `old`.
        """
        if self.input is None:
            self.input = p
            self.packet_pending = True
            if self.priority > old.priority:
                return self
        else:
            p.append_to(self.input)
        return old

    def runTask(self):
        """Dequeue one pending packet (if any) and invoke the task body."""
        if self.isWaitingWithPacket():
            msg = self.input
            self.input = msg.link
            if self.input is None:
                self.running()
            else:
                self.packetPending()
        else:
            msg = None
        return self.fn(msg, self.handle)

    def waitTask(self):
        """Mark this task as waiting; it stays the current task."""
        self.task_waiting = True
        return self

    def hold(self):
        """Hold this task and fall through to the next task in the list."""
        taskWorkArea.holdCount += 1
        self.task_holding = True
        return self.link

    def release(self, i):
        """Un-hold task `i`; switch to it if it outranks this task."""
        t = self.findtcb(i)
        t.task_holding = False
        if t.priority > self.priority:
            return t
        else:
            return self

    def qpkt(self, pkt):
        """Send `pkt` to the task it addresses; may switch to that task."""
        t = self.findtcb(pkt.ident)
        taskWorkArea.qpktCount += 1
        pkt.link = None
        pkt.ident = self.ident  # stamp the sender's ident for the reply path
        return t.addPacket(pkt, self)

    def findtcb(self, id):
        """Look up a task by ident; raise if no task occupies that slot."""
        t = taskWorkArea.taskTab[id]
        if t is None:
            raise Exception("Bad task id %d" % id)
        return t
# DeviceTask
class DeviceTask(Task):
    """Simulated device: buffers one packet, echoing it back when released."""

    def __init__(self, i, p, w, s, r):
        Task.__init__(self, i, p, w, s, r)

    def fn(self, pkt, r):
        d = r
        assert isinstance(d, DeviceTaskRec)
        if pkt is None:
            # No new packet: forward the buffered one, or wait if none.
            pkt = d.pending
            if pkt is None:
                return self.waitTask()
            else:
                d.pending = None
                return self.qpkt(pkt)
        else:
            # Buffer the packet and hold until the idle task releases us.
            d.pending = pkt
            if tracing:
                trace(pkt.datum)
            return self.hold()
class HandlerTask(Task):
    """Pairs queued work packets with device packets, one byte at a time."""

    def __init__(self, i, p, w, s, r):
        Task.__init__(self, i, p, w, s, r)

    def fn(self, pkt, r):
        h = r
        assert isinstance(h, HandlerTaskRec)
        if pkt is not None:
            # File the incoming packet into the matching queue.
            if pkt.kind == K_WORK:
                h.workInAdd(pkt)
            else:
                h.deviceInAdd(pkt)
        work = h.work_in
        if work is None:
            return self.waitTask()
        count = work.datum
        if count >= BUFSIZE:
            # Work packet fully consumed: return it to the sender.
            h.work_in = work.link
            return self.qpkt(work)
        dev = h.device_in
        if dev is None:
            return self.waitTask()
        # Copy the next work byte into a device packet and forward it.
        h.device_in = dev.link
        dev.datum = work.data[count]
        work.datum = count + 1
        return self.qpkt(dev)
# IdleTask
class IdleTask(Task):
    """Lowest-priority task: releases devices pseudo-randomly until spent."""

    def __init__(self, i, p, w, s, r):
        # Priority is forced to 0 and the input queue to None, whatever
        # the caller passed.
        Task.__init__(self, i, 0, None, s, r)

    def fn(self, pkt, r):
        i = r
        assert isinstance(i, IdleTaskRec)
        i.count -= 1
        if i.count == 0:
            return self.hold()
        elif i.control & 1 == 0:
            i.control //= 2
            return self.release(I_DEVA)
        else:
            # Shift and xor with a fixed mask to produce the next control
            # value (pseudo-random device selection).
            i.control = i.control // 2 ^ 0xd008
            return self.release(I_DEVB)
# WorkTask
A = ord('A')  # base character code for the generated payload letters

class WorkTask(Task):
    """Generates work packets, alternating between the two handler tasks."""

    def __init__(self, i, p, w, s, r):
        Task.__init__(self, i, p, w, s, r)

    def fn(self, pkt, r):
        w = r
        assert isinstance(w, WorkerTaskRec)
        if pkt is None:
            return self.waitTask()
        # Alternate the destination handler on every packet.
        if w.destination == I_HANDLERA:
            dest = I_HANDLERB
        else:
            dest = I_HANDLERA
        w.destination = dest
        pkt.ident = dest
        pkt.datum = 0
        # Fill the payload with letter codes, cycling through A..Z.
        for i in BUFSIZE_RANGE:  # range(BUFSIZE)
            w.count += 1
            if w.count > 26:
                w.count = 1
            pkt.data[i] = A + w.count - 1
        return self.qpkt(pkt)
def schedule():
    """Run the scheduler loop until no runnable task remains."""
    t = taskWorkArea.taskList
    while t is not None:
        if tracing:
            print("tcb =", t.ident)
        if t.isTaskHoldingOrWaiting():
            # Task cannot run: move down the task list.
            t = t.link
        else:
            if tracing:
                trace(chr(ord("0") + t.ident))
            # Run the task; it returns the next task to consider.
            t = t.runTask()
class Richards(object):
    """Driver for the Richards operating-system-simulation benchmark."""

    def run(self, iterations):
        """Build the task set and run the scheduler `iterations` times.

        Returns True when every run produces the canonical hold/qpkt
        counters for this workload, False as soon as one run deviates.
        """
        for i in range(iterations):
            taskWorkArea.holdCount = 0
            taskWorkArea.qpktCount = 0

            IdleTask(I_IDLE, 1, 10000, TaskState().running(), IdleTaskRec())

            # Worker with two queued work packets.
            wkq = Packet(None, 0, K_WORK)
            wkq = Packet(wkq, 0, K_WORK)
            WorkTask(I_WORK, 1000, wkq, TaskState(
            ).waitingWithPacket(), WorkerTaskRec())

            # Handler A with three queued device packets.
            wkq = Packet(None, I_DEVA, K_DEV)
            wkq = Packet(wkq, I_DEVA, K_DEV)
            wkq = Packet(wkq, I_DEVA, K_DEV)
            HandlerTask(I_HANDLERA, 2000, wkq, TaskState(
            ).waitingWithPacket(), HandlerTaskRec())

            # Handler B with three queued device packets.
            wkq = Packet(None, I_DEVB, K_DEV)
            wkq = Packet(wkq, I_DEVB, K_DEV)
            wkq = Packet(wkq, I_DEVB, K_DEV)
            HandlerTask(I_HANDLERB, 3000, wkq, TaskState(
            ).waitingWithPacket(), HandlerTaskRec())

            # Both devices start idle with empty queues.
            wkq = None
            DeviceTask(I_DEVA, 4000, wkq,
                       TaskState().waiting(), DeviceTaskRec())
            DeviceTask(I_DEVB, 5000, wkq,
                       TaskState().waiting(), DeviceTaskRec())

            schedule()

            # Any deviation from the expected counters means the
            # simulation misbehaved (was: `if ok: pass / else: return`).
            if taskWorkArea.holdCount != 9297 or taskWorkArea.qpktCount != 23246:
                return False
        return True
if __name__ == "__main__":
    # Benchmark entry point: time one full Richards run per measured call.
    runner = pyperf.Runner()
    runner.metadata['description'] = "The Richards benchmark"
    richard = Richards()
    runner.bench_func('richards', richard.run, 1)
| {
"content_hash": "32fa503c7bbb28ae534f6b59aac1ecbf",
"timestamp": "",
"source": "github",
"line_count": 423,
"max_line_length": 83,
"avg_line_length": 22.55082742316785,
"alnum_prop": 0.5377922214068561,
"repo_name": "python/performance",
"id": "b9167d1e5574007e89694fc38ac0b19ce25d4fdb",
"size": "9539",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pyperformance/benchmarks/bm_richards.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "133837"
},
{
"name": "Python",
"bytes": "463402"
},
{
"name": "Shell",
"bytes": "14726"
}
],
"symlink_target": ""
} |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
@namespace url(http://www.w3.org/1999/xhtml); /* set default namespace to HTML */
/* This binding is specified separately from the others so that Linux distros can
override the default Mozilla plugin finder service with their own mechanism. */
/* Apply the plugin-problem XBL binding to any plugin element whose type is
   unsupported, either in general or on the current platform. */
embed:-moz-type-unsupported,
applet:-moz-type-unsupported,
object:-moz-type-unsupported,
embed:-moz-type-unsupported-platform,
applet:-moz-type-unsupported-platform,
object:-moz-type-unsupported-platform {
  display: inline-block;
  overflow: hidden;
  -moz-binding: url('chrome://pluginproblem/content/pluginProblem.xml#pluginProblem') !important;
}
| {
"content_hash": "1dcb8b1bff474e01b27db25f723bd740",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 99,
"avg_line_length": 45.05555555555556,
"alnum_prop": 0.750924784217016,
"repo_name": "freesamael/npu-moboapp-programming-fall-2015",
"id": "6fc40666d723303db47126d3ea17d9bdae56342e",
"size": "811",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "b2g-dist-win32-20151125/chrome/toolkit/pluginproblem/pluginFinderBinding.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "22692"
},
{
"name": "CSS",
"bytes": "446482"
},
{
"name": "HTML",
"bytes": "108094"
},
{
"name": "Java",
"bytes": "11830"
},
{
"name": "JavaScript",
"bytes": "14107189"
},
{
"name": "Python",
"bytes": "5242"
},
{
"name": "XSLT",
"bytes": "5704"
}
],
"symlink_target": ""
} |
'''
Functions to start and configure the Flask
application. Loads routes from the routes.py
script.
'''
import flask
from app.classes.ckan import CKAN
from app.routes.users import blueprint_users
from app.routes.queues import blueprint_queues
from app.routes.status import blueprint_status
from app.routes.datasets import blueprint_datasets
from app.routes.countries import blueprint_countries
from app.routes.revisions import blueprint_revisions
from app.routes.resources import blueprint_resources
from app.routes.organizations import blueprint_organizations
from app.routes.gallery_items import blueprint_gallery_items
def createServer(database_uri, debug=False):
    '''
    Creates a Flask application as an object.

    Registers every route blueprint on the new application instance.

    :param database_uri: database connection URI.
        NOTE(review): not referenced inside this function -- confirm
        whether it should be wired into the app configuration.
    :param debug: enables Flask's debug mode when True.
    :return: the configured flask.Flask application.
    '''
    app = flask.Flask(__name__)
    app.debug = debug
    # NOTE(review): Flask takes the bind host as an argument to app.run(),
    # not as an attribute; this assignment looks like a no-op -- confirm.
    app.host = '0.0.0.0'
    app.register_blueprint(blueprint_users)
    app.register_blueprint(blueprint_status)
    app.register_blueprint(blueprint_queues)
    app.register_blueprint(blueprint_datasets)
    app.register_blueprint(blueprint_countries)
    app.register_blueprint(blueprint_revisions)
    app.register_blueprint(blueprint_resources)
    app.register_blueprint(blueprint_organizations)
    app.register_blueprint(blueprint_gallery_items)
    return app
| {
"content_hash": "a2bb743fc220c53e3e0a687bfa65ee4f",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 60,
"avg_line_length": 31.435897435897434,
"alnum_prop": 0.800978792822186,
"repo_name": "luiscape/hdx-monitor-sql-collect",
"id": "6df3da4c0e05f97264ef865d4ac13c3de86668d7",
"size": "1268",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "171"
},
{
"name": "Python",
"bytes": "49025"
},
{
"name": "Shell",
"bytes": "1094"
}
],
"symlink_target": ""
} |
<?php defined('BASEPATH') OR exit('No direct script access allowed.');
// Load the url helper too!
if (!function_exists('url_title')) {
load_class('Loader', 'core')->helper('url');
}
if (!function_exists('clean_file_name')) {

    // For UTF-8 encoded sites only.
    /**
     * Sanitizes a file name for safe use in URLs and paths.
     *
     * The extension is lower-cased and preserved; the base name is run
     * through CodeIgniter's url_title(). When the base name sanitizes to
     * an empty string, 'file' is used instead.
     *
     * @param string $file_name Original file name.
     * @param string $separator Replacement for unsafe characters.
     * @param bool   $lowercase Whether to lower-case the base name.
     * @param mixed  $language  Language hint passed to url_title().
     * @return string The sanitized file name.
     */
    function clean_file_name($file_name, $separator = '-', $lowercase = false, $language = NULL) {

        $extension = UTF8::strtolower(extension($file_name));

        // Strip the extension (and its dot) before sanitizing the base name.
        if ($extension != '') {
            $file_name = substr($file_name, 0, -(strlen($extension) + 1));
        }

        $file_name = url_title($file_name, $separator, $lowercase, true, $language);

        if ($file_name == '') {
            $file_name = 'file';
        }

        if ($extension != '') {
            $file_name .= '.'.$extension;
        }

        return $file_name;
    }

}
if (!function_exists('fixed_basename')) {

    // See http://api.drupal.org/api/drupal/core!includes!file.inc/function/drupal_basename/8
    /**
     * Locale-safe basename(): returns the trailing path segment of $uri,
     * optionally stripping $suffix from its end.
     *
     * @param string $uri    File or directory path (URL-style or native).
     * @param mixed  $suffix Optional suffix to remove from the result.
     * @return string The trailing path segment (may be '' for e.g. '/').
     */
    function fixed_basename($uri, $suffix = NULL) {

        $separators = '/';

        // On Windows the native separator differs from '/', so accept both.
        if (DIRECTORY_SEPARATOR != '/') {
            $separators .= DIRECTORY_SEPARATOR;
        }

        // Trailing separators would make the last segment empty; drop them.
        $uri = rtrim($uri, $separators);

        // Grab everything after the last separator.
        $filename = '';

        if (preg_match('@[^'.preg_quote($separators, '@').']+$@', $uri, $matches)) {
            $filename = $matches[0];
        }

        // Remove the suffix from the end of the segment, if requested.
        if ($suffix) {
            $filename = preg_replace('@'.preg_quote($suffix, '@').'$@', '', $filename);
        }

        return $filename;
    }

}
if (!function_exists('add_basename_suffix')) {
function add_basename_suffix($path, $suffix) {
// See Image_lib::explode_name().
$ext = strrchr($path, '.');
$name = ($ext === FALSE) ? $path : substr($path, 0, -strlen($ext));
return $name.$suffix.$ext;
}
}
if (!function_exists('extension')) {

    /**
     * Returns the extension of $path (text after the last dot), ignoring
     * any query string that may follow a '?'.
     *
     * @param string $path Path, file name or URL.
     * @return string The extension without the dot.
     */
    function extension($path) {

        // Cut off a query string, if present.
        $query_start = strpos($path, '?');

        if ($query_start !== false) {
            $path = substr($path, 0, $query_start);
        }

        // Take everything after the last dot.
        $dot_part = strrchr($path, '.');

        return substr($dot_part, 1);
    }

}
if (!function_exists('recursive_chmod')) {

    // See http://snipplr.com/view.php?codeview&id=5350
    /**
     * Chmods files and folders with different permissions.
     *
     * All-PHP alternative to shelling out to `find ... -exec chmod`.
     *
     * @author Jeppe Toustrup (tenzer at tenzer dot dk)
     * @param string $path                  Relative or absolute path to a file
     *                                      or directory to process.
     * @param int    $file_permissions      Permissions applied to files (octal).
     * @param int    $directory_permissions Permissions applied to folders (octal).
     * @return bool TRUE when the path exists, FALSE when it does not.
     * @warning Permission levels must be given in octal (leading zero).
     *          More info at: http://php.net/chmod.
     */
    function recursive_chmod($path, $file_permissions = 0644, $directory_permissions = 0755) {

        if (!file_exists($path)) {
            return false;
        }

        if (is_file($path)) {

            chmod($path, $file_permissions);

        } elseif (is_dir($path)) {

            $folders_and_files = scandir($path);

            // Remove "." and ".." from the list.
            $entries = array_slice($folders_and_files, 2);

            // Recurse into every entry with the same permission set.
            foreach ($entries as $entry) {
                recursive_chmod($path.'/'.$entry, $file_permissions, $directory_permissions);
            }

            // When done with the contents, chmod the directory itself.
            chmod($path, $directory_permissions);
        }

        return true;
    }

}

// FIX: these two guards used to be nested inside the recursive_chmod guard,
// so neither icon helper was defined whenever recursive_chmod already existed.
if (!function_exists('file_type_icon')) {

    /**
     * Maps a file path to an icon name using the file_type_icons config files.
     *
     * The config maps (common + application, with environment-specific files
     * loaded after the generic ones) are merged and inverted into a static
     * extension => icon lookup table on first call.
     *
     * @param string|null $path Path whose extension selects the icon, or
     *                          null to get the whole extension => icon map.
     * @return array|string|null Icon name, the full map when $path is null,
     *                           or null for an unknown extension.
     */
    function file_type_icon($path = null) {

        static $_icons;

        if (!isset($_icons)) {

            if (file_exists(COMMONPATH.'config/file_type_icons.php')) {
                include(COMMONPATH.'config/file_type_icons.php');
            }

            if (file_exists(COMMONPATH.'config/'.ENVIRONMENT.'/file_type_icons.php')) {
                include(COMMONPATH.'config/'.ENVIRONMENT.'/file_type_icons.php');
            }

            if (file_exists(APPPATH.'config/file_type_icons.php')) {
                include(APPPATH.'config/file_type_icons.php');
            }

            if (file_exists(APPPATH.'config/'.ENVIRONMENT.'/file_type_icons.php')) {
                include(APPPATH.'config/'.ENVIRONMENT.'/file_type_icons.php');
            }

            if (empty($icons) OR !is_array($icons)) {
                $icons = array();
            }

            // Invert the config map: icon => extension(s) becomes
            // extension => icon.
            $_icons = array();

            if (!empty($icons)) {

                foreach ($icons as $key => $icon) {

                    if (is_array($icon)) {

                        foreach ($icon as $i) {
                            $_icons[(string) $i] = $key;
                        }

                    } else {
                        $_icons[(string) $icon] = $key;
                    }
                }
            }
        }

        if ($path === null) {
            return $_icons;
        }

        $ext = extension($path);

        if (isset($_icons[$ext])) {
            return $_icons[$ext];
        }

        return null;
    }

}

if (!function_exists('file_type_icon_fa')) {

    /**
     * Returns the Font Awesome class name(s) for the icon matching $path.
     *
     * @param string|null $path Path passed through to file_type_icon().
     * @return array|string FA class string, or the whole map converted to
     *                      FA classes when file_type_icon() returned a map.
     */
    function file_type_icon_fa($path) {

        $result = file_type_icon($path);

        if (is_array($result)) {

            // The whole map was returned: convert each icon name in place.
            foreach ($result as $key => & $value) {
                $value = 'fa-file-'.$value.'-o';
            }

            return $result;
        }

        if ($result == '') {
            // Unknown extension: fall back to the generic file icon.
            return 'fa-file-o';
        }

        return 'fa-file-'.$result.'-o';
    }

}
| {
"content_hash": "9f340fc11317b2b94c4e7c380a0cbccb",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 113,
"avg_line_length": 27.845849802371543,
"alnum_prop": 0.5053229240596168,
"repo_name": "jaffarsolo/starter-public-edition-4",
"id": "a3f77336c6e4324719dfcb015a6385d64450f082",
"size": "7045",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "platform/common/helpers/MY_file_helper.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "31501"
},
{
"name": "CSS",
"bytes": "309542"
},
{
"name": "HTML",
"bytes": "54614"
},
{
"name": "JavaScript",
"bytes": "1594001"
},
{
"name": "PHP",
"bytes": "5086479"
}
],
"symlink_target": ""
} |
package org.pico.fp.impl.syntax.toOps
import org.pico.fp.ToOps
/**
 * Aggregates all syntax ("ops") conversions a Monad requires:
 * the applicative and bind ops plus the shared `ToOps` marker.
 * Mix this in to get monadic syntax in one import.
 */
trait ToMonadOps extends ToOps
    with ToApplicativeOps
    with ToBindOps
| {
"content_hash": "33a9f039649cbac6ee227a91c46f55cc",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 37,
"avg_line_length": 20.142857142857142,
"alnum_prop": 0.7872340425531915,
"repo_name": "newhoggy/pico-fp",
"id": "bf308163d713ab6e3af89cf9a9e26b1367a1fd48",
"size": "141",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "pico-fp/src/main/scala/org/pico/fp/impl/syntax/toOps/ToMonadOps.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Scala",
"bytes": "20059"
},
{
"name": "Shell",
"bytes": "3036"
}
],
"symlink_target": ""
} |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>L4AllPortal</groupId>
<artifactId>L4AllPortal</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>war</packaging>
<build>
<sourceDirectory>src</sourceDirectory>
<plugins>
<plugin>
<artifactId>maven-war-plugin</artifactId>
<version>2.4</version>
<configuration>
<warSourceDirectory>WebContent</warSourceDirectory>
<failOnMissingWebXml>false</failOnMissingWebXml>
</configuration>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.3</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>antlr</groupId>
<artifactId>antlr</artifactId>
<version>2.7.7</version>
</dependency>
<dependency>
<groupId>org.glassfish.hk2.external</groupId>
<artifactId>aopalliance-repackaged</artifactId>
<version>2.4.0-b06</version>
</dependency>
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-debug-all</artifactId>
<version>5.0.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.axis/axis -->
<dependency>
<groupId>org.apache.axis</groupId>
<artifactId>axis</artifactId>
<version>1.4</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-discovery/commons-discovery -->
<dependency>
<groupId>commons-discovery</groupId>
<artifactId>commons-discovery</artifactId>
<version>0.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-io/commons-io -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-logging/commons-logging -->
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.thetransactioncompany/cors-filter -->
<dependency>
<groupId>com.thetransactioncompany</groupId>
<artifactId>cors-filter</artifactId>
<version>2.1.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/dom4j/dom4j -->
<dependency>
<groupId>dom4j</groupId>
<artifactId>dom4j</artifactId>
<version>1.6.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.owlike/genson -->
<dependency>
<groupId>com.owlike</groupId>
<artifactId>genson</artifactId>
<version>1.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.3.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.hibernate.common/hibernate-commons-annotations -->
<dependency>
<groupId>org.hibernate.common</groupId>
<artifactId>hibernate-commons-annotations</artifactId>
<version>4.0.5.Final</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.hibernate/hibernate-core -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>4.3.8.Final</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.hibernate.javax.persistence/hibernate-jpa-2.1-api -->
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0.Final</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.hk2/hk2-api -->
<dependency>
<groupId>org.glassfish.hk2</groupId>
<artifactId>hk2-api</artifactId>
<version>2.4.0-b06</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.hk2/hk2-locator -->
<dependency>
<groupId>org.glassfish.hk2</groupId>
<artifactId>hk2-locator</artifactId>
<version>2.4.0-b06</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.hk2/hk2-utils -->
<dependency>
<groupId>org.glassfish.hk2</groupId>
<artifactId>hk2-utils</artifactId>
<version>2.4.0-b06</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.jboss/jandex -->
<dependency>
<groupId>org.jboss</groupId>
<artifactId>jandex</artifactId>
<version>1.1.0.Final</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.thetransactioncompany/java-property-utils -->
<dependency>
<groupId>com.thetransactioncompany</groupId>
<artifactId>java-property-utils</artifactId>
<version>1.9.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.javassist/javassist -->
<dependency>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>
<version>3.18.1-GA</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.annotation/javax.annotation-api -->
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>1.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.hk2.external/javax.inject -->
<dependency>
<groupId>org.glassfish.hk2.external</groupId>
<artifactId>javax.inject</artifactId>
<version>2.4.0-b06</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.servlet/javax.servlet-api -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.0.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.ws.rs/javax.ws.rs-api -->
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
<version>2.0.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.xml.bind/jaxb-api -->
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>2.2.7</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.xml/jaxrpc -->
<dependency>
<groupId>javax.xml</groupId>
<artifactId>jaxrpc</artifactId>
<version>1.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging -->
<dependency>
<groupId>org.jboss.logging</groupId>
<artifactId>jboss-logging</artifactId>
<version>3.1.3.GA</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging-annotations -->
<dependency>
<groupId>org.jboss.logging</groupId>
<artifactId>jboss-logging-annotations</artifactId>
<version>1.2.0.Beta1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.jboss.spec.javax.transaction/jboss-transaction-api_1.2_spec -->
<dependency>
<groupId>org.jboss.spec.javax.transaction</groupId>
<artifactId>jboss-transaction-api_1.2_spec</artifactId>
<version>1.0.0.Final</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.core/jersey-client -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>2.15</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.core/jersey-common -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-common</artifactId>
<version>2.15</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.containers/jersey-container-servlet -->
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet</artifactId>
<version>2.15</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.containers/jersey-container-servlet-core -->
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<version>2.15</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.bundles.repackaged/jersey-guava -->
<dependency>
<groupId>org.glassfish.jersey.bundles.repackaged</groupId>
<artifactId>jersey-guava</artifactId>
<version>2.15</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.core/jersey-server -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
<version>2.15</version>
</dependency>
<!-- https://mvnrepository.com/artifact/log4j/log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<!-- https://mvnrepository.com/artifact/mysql/mysql-connector-java -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.34</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.osgi/org.osgi.core -->
<dependency>
<groupId>org.osgi</groupId>
<artifactId>org.osgi.core</artifactId>
<version>4.2.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.hk2/osgi-resource-locator -->
<dependency>
<groupId>org.glassfish.hk2</groupId>
<artifactId>osgi-resource-locator</artifactId>
<version>1.0.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.persistence/persistence-api -->
<dependency>
<groupId>javax.persistence</groupId>
<artifactId>persistence-api</artifactId>
<version>1.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/nz.net.osnz.common/common-pherialize -->
<dependency>
<groupId>nz.net.osnz.common</groupId>
<artifactId>common-pherialize</artifactId>
<version>1.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.xml.soap/saaj-api -->
<dependency>
<groupId>javax.xml.soap</groupId>
<artifactId>saaj-api</artifactId>
<version>1.3</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.tuckey/urlrewritefilter -->
<dependency>
<groupId>org.tuckey</groupId>
<artifactId>urlrewritefilter</artifactId>
<version>3.1.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.validation/validation-api -->
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>1.1.0.Final</version>
</dependency>
<!-- https://mvnrepository.com/artifact/wsdl4j/wsdl4j -->
<dependency>
<groupId>wsdl4j</groupId>
<artifactId>wsdl4j</artifactId>
<version>1.6.2</version>
</dependency>
<dependency>
<groupId>org.daisy.pipeline</groupId>
<artifactId>webservice-utils</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>it.unisalento.l4allportal</groupId>
<artifactId>L4AllPortalDTO</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
</dependencies>
</project> | {
"content_hash": "22d58048235e972afe6f4c38fcb725f7",
"timestamp": "",
"source": "github",
"line_count": 307,
"max_line_length": 204,
"avg_line_length": 35.729641693811075,
"alnum_prop": 0.7095450815935819,
"repo_name": "gsalab/L4AllPortal",
"id": "be11ae81484580ec4c9c32a0df4ec70ba9ef712c",
"size": "10969",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "L4AllPortal/pom.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "104482"
},
{
"name": "HTML",
"bytes": "150667"
},
{
"name": "Java",
"bytes": "124163"
},
{
"name": "JavaScript",
"bytes": "1238420"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, absolute_import

from collections import namedtuple
from decimal import Decimal as D
from decimal import InvalidOperation

import six
from dateutil.parser import parse

from . import base
from . import stores
from . import devices
Price = namedtuple('Price', ['value', 'currency'])


class Product(base.AppFigureObject):
    """A product (app) record returned by the appfigures API."""

    def _load_from_json(self, json):
        """Populate this product's attributes from the raw API ``json`` dict.

        Missing keys default to ``None``; dates are parsed into datetime
        objects and the price into a :class:`Price` namedtuple (``None``
        when absent or malformed).
        """
        self.id = json.get('id')
        self.name = json.get('name')
        self.developer = json.get('developer')
        self.icon = json.get('icon')
        self.vendor_identifier = json.get('vendor_identifier')
        self.package_name = json.get('package_name')
        self.store = json.get('store')
        self.store_id = json.get('store_id')
        self.sku = json.get('sku')
        self.ref_no = json.get('ref_no')
        self.release_date = parse(json.get('release_date'))
        self.added_date = parse(json.get('added_date'))
        self.updated_date = parse(json.get('updated_date'))
        self.version = json.get('version')
        if self.version:
            self.version = self.version.strip()
        self.source = json.get('source')
        self.type = json.get('type')
        self.devices = json.get('devices', [])
        price = json.get('price')
        if not price:
            # No price information supplied at all.
            self.price = None
        else:
            # Original code fell through to price.get() even when price was
            # falsy (AttributeError) and referenced the never-imported
            # InvalidOperation (NameError); both are fixed here.
            try:
                self.price = Price(D(price.get('price')), price.get('currency'))
            except (InvalidOperation, TypeError, ValueError):
                # Missing or malformed price value.
                self.price = None
        meta_json = json.get('meta', {})
        self.metadata = ProductMetadataCollection.from_json(meta_json)

    @property
    def is_handheld(self):
        """True if the product lists a handheld device."""
        return devices.HANDHELD in self.devices

    @property
    def is_tablet(self):
        """True if the product lists a tablet device."""
        return devices.TABLET in self.devices

    @property
    def is_desktop(self):
        """True if the product lists a desktop device."""
        return devices.DESKTOP in self.devices

    @property
    def has_metadata(self):
        """True if any per-language metadata was returned."""
        return len(self.metadata) > 0

    @classmethod
    def from_json(cls, json):
        """Build a Product from an API JSON dict."""
        return cls(json)

    def json(self):
        """Return the raw JSON data this product was built from."""
        return self._json_data
class ProductMetadataCollection(dict):
    """Maps language code -> :class:`ProductMetadata` with attribute access.

    Attribute lookup first treats the name as a language code, then falls
    back to attributes of the default (English) metadata when present.
    """

    DEFAULT_LANGUAGE = 'en'

    def __init__(self, json):
        for language, metadata in json.items():
            self[language] = ProductMetadata.from_json(language, metadata)

    def __getattr__(self, key):
        """
        Expose the language metadata as attributes and allow direct access
        to attributes of the english language metadata if it is present.
        """
        if key in self:
            return self[key]
        # Guard the default-language lookup: the original indexed
        # self[DEFAULT_LANGUAGE] unconditionally, leaking a KeyError out of
        # __getattr__ (which breaks hasattr/copy/pickle) when 'en' is absent.
        default = self.get(self.DEFAULT_LANGUAGE)
        if default is not None and hasattr(default, key):
            return getattr(default, key)
        raise AttributeError(key)

    @classmethod
    def from_json(cls, json):
        """Build a collection from the raw ``meta`` JSON mapping."""
        return cls(json)
class ProductMetadata(base.AppFigureObject):
    """Per-language metadata (ratings, description, URLs) for a product."""

    def __init__(self, language, json):
        self.language = language
        super(ProductMetadata, self).__init__(json)

    def _load_from_json(self, json):
        """Populate metadata attributes from a flattened key/value dict."""
        get = json.get
        self.all_rating = D(get('all_rating'))
        self.all_rating_count = int(get('all_rating_count'))
        self.description = get('description')
        self.developer_email = get('developer_email')
        self.developer_site = get('developer_site')
        self.downloads = get('downloads')
        try:
            self.download_size = int(get('download_size'))
        except (TypeError, ValueError):
            # Size absent or not numeric.
            self.download_size = None
        # The API encodes booleans as the strings 'true'/'false'.
        self.has_in_app_purchases = get('has_inapps') == 'true'
        self.name = get('name')
        self.rating = get('rating')
        self.release_notes = get('release_notes')
        self.top_developer = get('top_developer') == 'true'
        self.view_url = get('view_url')

    @classmethod
    def from_json(cls, language, json):
        """Flatten the API's list-of-entries form for one language."""
        flattened = {}
        for entry in json:
            if entry.get('language') == language:
                flattened[entry.get('key')] = entry.get('value')
        return cls(language, flattened)
| {
"content_hash": "d465c5a8d9878c4c5c47a768bc2de671",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 76,
"avg_line_length": 29.985507246376812,
"alnum_prop": 0.6075398743354278,
"repo_name": "mobify/python-appfigures",
"id": "e29df54310eb684821c2d1ad4a989720754eb04d",
"size": "4162",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "appfigures/products.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "33940"
}
],
"symlink_target": ""
} |
package com.example.jerry.testing.ptr;
/**
 * State constants for a pull-to-refresh header, modelling its lifecycle:
 * drag -> over -> release -> refreshing -> finish/fail -> hide.
 */
public class HeaderState
{
    public static final int drag = 0; // being pulled down, still above the refresh threshold
    public static final int over = 1; // pulled down past the refresh threshold
    public static final int release = 2; // released, returning from beyond the threshold to the threshold line
    public static final int refreshing = 3;// refreshing at the threshold line
    public static final int finish = 4;// refresh finished - scrolling back to the top
    public static final int fail = 5;// refresh failed - scrolling back to the top
    public static final int hide = 6; // back at the top, fully hidden
}
"content_hash": "0e09ce6a07834c503aec5fbc73088d32",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 56,
"avg_line_length": 36.25,
"alnum_prop": 0.7034482758620689,
"repo_name": "LiuZhangRed/Android-Testing",
"id": "2e83fbe59a918212a94cda9cfd12ed55a66aeb52",
"size": "569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Testing/app/src/main/java/com/example/jerry/testing/ptr/HeaderState.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "265523"
}
],
"symlink_target": ""
} |
package weixin.popular.support.msg.handle;
import weixin.popular.support.msg.beans.receive.Msg;
import weixin.popular.support.msg.beans.receive.MsgImage;
import weixin.popular.support.msg.beans.receive.MsgLink;
import weixin.popular.support.msg.beans.receive.MsgLocation;
import weixin.popular.support.msg.beans.receive.MsgShortVideo;
import weixin.popular.support.msg.beans.receive.MsgText;
import weixin.popular.support.msg.beans.receive.MsgVideo;
import weixin.popular.support.msg.beans.receive.MsgVoice;
/**
 * Incoming WeChat message types, mapping each message-type string from the
 * XML payload to the bean class used to deserialize it.
 *
 * @author Moyq5
 *
 */
public enum MsgType {
	/**
	 * Text message
	 */
	TEXT("text", MsgText.class),
	/**
	 * Image message
	 */
	IMAGE("image", MsgImage.class),
	/**
	 * Video message
	 */
	VIDEO("video", MsgVideo.class),
	/**
	 * Voice message
	 */
	VOICE("voice", MsgVoice.class),
	/**
	 * Short-video message
	 */
	SHORT_VIDEO("shortvideo", MsgShortVideo.class),
	/**
	 * Geolocation message
	 */
	LOCATION("location", MsgLocation.class),
	/**
	 * Link message
	 */
	LINK("link", MsgLink.class),
	/**
	 * Event message (no dedicated bean class; handled separately)
	 */
	EVENT("event", null);
	// Bean class used to deserialize this message type; null for EVENT.
	private Class<? extends Msg> msgClass;
	// Type string as it appears in the incoming XML payload.
	private String msgType;
	MsgType(String msgType, Class<? extends Msg> msgClass) {
		this.msgType = msgType;
		this.msgClass = msgClass;
	}
	public Class<? extends Msg> getMsgClass() {
		return msgClass;
	}
	public String getMsgType() {
		return msgType;
	}
}
| {
"content_hash": "6cd5be10a63d12ce2fc143e55ac53699",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 62,
"avg_line_length": 20.205882352941178,
"alnum_prop": 0.6557496360989811,
"repo_name": "moyq5/weixin-popular",
"id": "9bf784d7a71eb2a02692c14f4cf24417691dd46d",
"size": "1452",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/weixin/popular/support/msg/handle/MsgType.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "950054"
}
],
"symlink_target": ""
} |
package org.tensorflow.lite;
import java.nio.ByteBuffer;
/**
 * Extension of NativeInterpreterWrapper that adds support for experimental methods.
 *
 * <p><b>WARNING:</b> Resources consumed by the {@code NativeInterpreterWrapperExperimental} object
 * must be explicitly freed by invoking the {@link #close()} method when the {@code
 * NativeInterpreterWrapperExperimental} object is no longer needed.
 *
 * <p>Note: This class is not thread safe.
 */
final class NativeInterpreterWrapperExperimental extends NativeInterpreterWrapper {

  /** Creates a wrapper for the model file at {@code modelPath} with default options. */
  NativeInterpreterWrapperExperimental(String modelPath) {
    super(modelPath);
  }

  /** Creates a wrapper for the model held in {@code byteBuffer} with default options. */
  NativeInterpreterWrapperExperimental(ByteBuffer byteBuffer) {
    super(byteBuffer);
  }

  /** Creates a wrapper for the model file at {@code modelPath} with the given options. */
  NativeInterpreterWrapperExperimental(String modelPath, InterpreterImpl.Options options) {
    super(modelPath, options);
  }

  /** Creates a wrapper for the model held in {@code buffer} with the given options. */
  NativeInterpreterWrapperExperimental(ByteBuffer buffer, InterpreterImpl.Options options) {
    super(buffer, options);
  }

  /** Resets all variable tensors of the wrapped interpreter to their initial values. */
  void resetVariableTensors() {
    resetVariableTensors(interpreterHandle, errorHandle);
  }

  private static native void resetVariableTensors(long interpreterHandle, long errorHandle);
}
| {
"content_hash": "a445769ed2c475ab3cd75c0ca1e3aeae",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 99,
"avg_line_length": 29.82051282051282,
"alnum_prop": 0.7807394668959587,
"repo_name": "frreiss/tensorflow-fred",
"id": "eccf39ff7f6e20463477ccb1ce3b0d2c4a9f5aaa",
"size": "1831",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/lite/java/src/main/java/org/tensorflow/lite/NativeInterpreterWrapperExperimental.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "6729"
},
{
"name": "Batchfile",
"bytes": "49527"
},
{
"name": "C",
"bytes": "871761"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "79093233"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "110545"
},
{
"name": "Go",
"bytes": "1852128"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "961600"
},
{
"name": "Jupyter Notebook",
"bytes": "549457"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1644156"
},
{
"name": "Makefile",
"bytes": "62398"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "303063"
},
{
"name": "PHP",
"bytes": "20523"
},
{
"name": "Pascal",
"bytes": "3982"
},
{
"name": "Pawn",
"bytes": "18876"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "40003007"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2472"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "Shell",
"bytes": "681596"
},
{
"name": "Smarty",
"bytes": "34740"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
//go:build e2e
// +build e2e
package conformance
import (
"log"
"os"
"testing"
"knative.dev/eventing-kafka/test"
eventingTest "knative.dev/eventing/test"
testlib "knative.dev/eventing/test/lib"
"knative.dev/pkg/system"
"knative.dev/pkg/test/zipkin"
)
// channelTestRunner drives the conformance suite against every channel
// implementation selected through the eventing test flags.
var channelTestRunner testlib.ComponentsTestRunner

// TestMain initializes the eventing flags and the shared channel test
// runner, runs the suite, and performs one-time cleanup of Zipkin tracing
// and log export before exiting with the suite's status code.
func TestMain(m *testing.M) {
	os.Exit(func() int {
		eventingTest.InitializeEventingFlags()
		channelTestRunner = testlib.ComponentsTestRunner{
			ComponentFeatureMap: test.ChannelFeatureMap,
			ComponentsToTest:    eventingTest.EventingFlags.Channels,
		}

		// Any tests may SetupZipkinTracing, it will only actually be done once. This should be the ONLY
		// place that cleans it up. If an individual test calls this instead, then it will break other
		// tests that need the tracing in place.
		defer zipkin.CleanupZipkinTracingSetup(log.Printf)
		defer testlib.ExportLogs(testlib.SystemLogsDir, system.Namespace())

		return m.Run()
	}())
}
| {
"content_hash": "094de5df7565f5fc082016deb2f0df1d",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 98,
"avg_line_length": 24.743589743589745,
"alnum_prop": 0.755440414507772,
"repo_name": "knative-sandbox/eventing-kafka-broker",
"id": "2b7c1ca53f141ae3e118dd1846a974796d5f7cb8",
"size": "1567",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "test/e2e_channel/conformance/main_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "1623324"
},
{
"name": "Java",
"bytes": "744409"
},
{
"name": "Shell",
"bytes": "74812"
}
],
"symlink_target": ""
} |
package org.springframework.cloud.app;
import java.util.Map;
/**
 * Describes the currently running application instance.
 *
 * <p>Beyond the instance and application identifiers, all details are exposed
 * through a loosely typed property map so each cloud provider can supply
 * whatever information suits it.
 *
 * @author Ramnivas Laddad
 *
 */
public interface ApplicationInstanceInfo {

	/**
	 * Identifier of this particular running instance.
	 *
	 * @return typically a unique id or instance index
	 *
	 */
	String getInstanceId();

	/**
	 * Identifier of the application.
	 *
	 * @return typically the application name
	 */
	String getAppId();

	/**
	 * Loosely defined map of application and instance properties.
	 *
	 * <p>
	 * Typical entries include hostname, port, etc.
	 *
	 * @return map of properties
	 */
	Map<String, Object> getProperties();
}
| {
"content_hash": "a2baf91e80325a8821439db648c828ab",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 73,
"avg_line_length": 21.625,
"alnum_prop": 0.6936416184971098,
"repo_name": "spring-cloud/spring-cloud-connectors",
"id": "7bc3d2a339790ad96e070672a8e1a5e36b4c6310",
"size": "865",
"binary": false,
"copies": "7",
"ref": "refs/heads/main",
"path": "spring-cloud-core/src/main/java/org/springframework/cloud/app/ApplicationInstanceInfo.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "537053"
},
{
"name": "Shell",
"bytes": "1390"
}
],
"symlink_target": ""
} |
# Unit-test (mock framework) build rules for src/backend/cdb.
subdir=src/backend/cdb
top_builddir=../../../..
include $(top_builddir)/src/Makefile.global

# Test binaries built by mock.mk (one <name>.t per entry).
TARGETS= cdbdistributedsnapshot
TARGETS += cdbappendonlyxlog

include $(top_srcdir)/src/backend/mock.mk

# Extra mocked objects each test binary links against.
cdbdistributedsnapshot.t: $(MOCK_DIR)/backend/access/transam/distributedlog_mock.o \
	$(MOCK_DIR)/backend/access/hash/hash_mock.o \
	$(MOCK_DIR)/backend/utils/fmgr/fmgr_mock.o

cdbappendonlyxlog.t: \
	$(MOCK_DIR)/backend/storage/file/fd_mock.o \
	$(MOCK_DIR)/backend/access/transam/xlogutils_mock.o \
	$(MOCK_DIR)/backend/access/hash/hash_mock.o \
	$(MOCK_DIR)/backend/utils/fmgr/fmgr_mock.o
| {
"content_hash": "ff82436e0df793619f9d17ef75bbbcd2",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 84,
"avg_line_length": 31.105263157894736,
"alnum_prop": 0.7445008460236887,
"repo_name": "greenplum-db/gpdb",
"id": "f47af88065f43d328097fbedd74882fa87a63524",
"size": "591",
"binary": false,
"copies": "10",
"ref": "refs/heads/main",
"path": "src/backend/cdb/test/Makefile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3266"
},
{
"name": "Awk",
"bytes": "836"
},
{
"name": "Batchfile",
"bytes": "15613"
},
{
"name": "C",
"bytes": "48211707"
},
{
"name": "C++",
"bytes": "12681024"
},
{
"name": "CMake",
"bytes": "41408"
},
{
"name": "DTrace",
"bytes": "3833"
},
{
"name": "Emacs Lisp",
"bytes": "4164"
},
{
"name": "Fortran",
"bytes": "14873"
},
{
"name": "GDB",
"bytes": "576"
},
{
"name": "Gherkin",
"bytes": "504216"
},
{
"name": "HTML",
"bytes": "215381"
},
{
"name": "JavaScript",
"bytes": "23969"
},
{
"name": "Lex",
"bytes": "254578"
},
{
"name": "M4",
"bytes": "133878"
},
{
"name": "Makefile",
"bytes": "511186"
},
{
"name": "PLpgSQL",
"bytes": "9280413"
},
{
"name": "Perl",
"bytes": "1161283"
},
{
"name": "PowerShell",
"bytes": "422"
},
{
"name": "Python",
"bytes": "3404111"
},
{
"name": "Roff",
"bytes": "30385"
},
{
"name": "Ruby",
"bytes": "299639"
},
{
"name": "SCSS",
"bytes": "339"
},
{
"name": "Shell",
"bytes": "403369"
},
{
"name": "XS",
"bytes": "7098"
},
{
"name": "XSLT",
"bytes": "448"
},
{
"name": "Yacc",
"bytes": "748098"
},
{
"name": "sed",
"bytes": "1231"
}
],
"symlink_target": ""
} |
layout: post
title: Mr Porter, grooming campaign
img: mrporter-grooming-featured.jpg
---
Grooming is an important category for Mr Porter. I was tasked with producing a shoot that would be used on the Journal and for various marketing uses, showing the breadth of product on offer.
<div><img src="{{ site.baseurl }}/public/images/mrporter-grooming-shot-02.jpg" alt=""></div>
<div><img src="{{ site.baseurl }}/public/images/mrporter-grooming-shot-03.jpg" alt=""></div>
<div><img src="{{ site.baseurl }}/public/images/mrporter-grooming-shot-04.jpg" alt=""></div>
<div><img src="{{ site.baseurl }}/public/images/mrporter-grooming-shot-05.jpg" alt=""></div>
<div><img src="{{ site.baseurl }}/public/images/mrporter-grooming-shot-06.jpg" alt=""></div>
<div><img src="{{ site.baseurl }}/public/images/mrporter-grooming-shot-07.jpg" alt=""></div>
Photography: Mark Sanders | {
"content_hash": "7a0239565c5a467302e250bcd5aefa7a",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 192,
"avg_line_length": 44.6,
"alnum_prop": 0.6995515695067265,
"repo_name": "sampsonpete/jessruiz",
"id": "8b792f8670e13ab8d985ca3aff4f567507882c8d",
"size": "897",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "_posts/2015-01-23-mrporter-grooming.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11811"
},
{
"name": "HTML",
"bytes": "3717"
},
{
"name": "JavaScript",
"bytes": "3686"
},
{
"name": "Ruby",
"bytes": "6212"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (C) 2014 Google, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.google.auto</groupId>
<artifactId>auto-parent</artifactId>
<version>6</version>
</parent>
<groupId>com.google.auto</groupId>
<artifactId>auto-common</artifactId>
<version>HEAD-SNAPSHOT</version>
<name>Auto Common Libraries</name>
<description>
Common utilities for creating annotation processors.
</description>
<scm>
<url>http://github.com/google/auto</url>
<connection>scm:git:git://github.com/google/auto.git</connection>
<developerConnection>scm:git:ssh://[email protected]/google/auto.git</developerConnection>
<tag>HEAD</tag>
</scm>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<!-- Used only by GeneratedAnnotationSpecs.
If you use JavaPoet, you can use GeneratedAnnotationSpecs. -->
<groupId>com.squareup</groupId>
<artifactId>javapoet</artifactId>
<optional>true</optional>
</dependency>
<!-- test dependencies -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava-testlib</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.testing.compile</groupId>
<artifactId>compile-testing</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.truth</groupId>
<artifactId>truth</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jdt.core.compiler</groupId>
<artifactId>ecj</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
| {
"content_hash": "8641772c59cb5993293481bf4161ed09",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 104,
"avg_line_length": 31.36263736263736,
"alnum_prop": 0.6758934828311143,
"repo_name": "MaTriXy/auto",
"id": "8f8eba371ceedb417fc0bb470c660f43bde69abc",
"size": "2854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1395665"
},
{
"name": "Shell",
"bytes": "3355"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.