max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
1,350 |
<reponame>ppartarr/azure-sdk-for-java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.microsoft.azure.keyvault.cryptography;
/**
* Abstract base class for all Algorithm objects.
*
*/
public abstract class Algorithm {

    private final String algorithmName;

    /**
     * Constructor.
     *
     * @param name The name of the algorithm; must not be null, empty or whitespace-only.
     * @throws IllegalArgumentException if the name is null or blank.
     */
    protected Algorithm(String name) {
        if (Strings.isNullOrWhiteSpace(name)) {
            throw new IllegalArgumentException("name");
        }
        algorithmName = name;
    }

    /**
     * Gets the name of the algorithm.
     *
     * @return The name of the algorithm.
     */
    public String getName() {
        return algorithmName;
    }
}
| 289 |
646 |
<gh_stars>100-1000
/*
Copyright (c) 2017 TOSHIBA Digital Solutions Corporation
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*!
@file
@brief Point class for 3D-geometry data
*/
#ifndef GIS_POINT_H_
#define GIS_POINT_H_
#include "util/container.h"
#include <cfloat>
#include <iomanip>
#include <limits>
#include <sstream>
#include "expression.h"
#include "gis_geometry.h"
#define GIS_POINT_PRECISION 16
class PointGeom;
class LineString;
/*!
* @brief expresses a 2D or 3D Point
*
*/
class Point : public Geometry {
	friend class LineString;
	friend class PointGeom;
	friend class Geometry;

public:
	/*!
	 * @brief Constructor for empty objects
	 * @param txn The transaction context
	 */
	explicit Point(TransactionContext &txn) : Geometry(txn) {
		isEmpty_ = true;
		isAssigned_ = true;
		xExpr_ = NULL;
		yExpr_ = NULL;
		zExpr_ = NULL;
		srId_ = -1;
		// NaN coordinates mean "no value"; dimension 0 marks the empty point.
		x_ = y_ = z_ = std::numeric_limits<double>::quiet_NaN();
		dimension_ = 0;
	}

	/*!
	 * @brief Constructor from xyz-coordinate
	 *
	 * @param id SRID
	 * @param x X-coordinate
	 * @param y Y-coordinate
	 * @param z Z-coordinate (NaN makes the point 2-dimensional)
	 * @param txn The transaction context
	 *
	 */
	Point(srid_t id, double x, double y, double z, TransactionContext &txn)
		: Geometry(txn) {
		isEmpty_ = false;
		isAssigned_ = true;
		x_ = x;
		boundingRect_.xmax = boundingRect_.xmin = x_;
		y_ = y;
		boundingRect_.ymax = boundingRect_.ymin = y_;
		z_ = z;
		if (!util::isNaN(z)) {
			boundingRect_.zmax = boundingRect_.zmin = z_;
			dimension_ = 3;
		}
		else {
			// 2D point: leave the box unbounded along z so it intersects
			// any z-range during spatial search.
			boundingRect_.zmax = std::numeric_limits<double>::infinity();
			boundingRect_.zmin = -std::numeric_limits<double>::infinity();
			dimension_ = 2;
		}
		srId_ = id;
		xExpr_ = NULL;
		yExpr_ = NULL;
		zExpr_ = NULL;
	}

	/*!
	 * @brief Construct an non-deterministic point object.
	 *        Coordinates given as non-numeric expressions are kept as
	 *        expression trees and evaluated later by assign().
	 *
	 * @param id SRID
	 * @param x Expression treated as X
	 * @param y Expression treated as Y
	 * @param z Expression treated as Z (NULL for a 2D point)
	 * @param txn The transaction context
	 * @param objectManager Object manager
	 *
	 */
	Point(srid_t id, Expr *x, Expr *y, Expr *z, TransactionContext &txn,
		ObjectManager &objectManager)
		: Geometry(txn) {
		isEmpty_ = (x == NULL) && (y == NULL) && (z == NULL);
		srId_ = id;
		isAssigned_ = true;
		xExpr_ = NULL;
		if (x != NULL && x->isNumeric()) {
			// Relies on Expr's numeric conversion to double.
			x_ = *x;
		}
		else if (x != NULL) {
			x_ = std::numeric_limits<double>::quiet_NaN();
			xExpr_ = x->dup(txn, objectManager);
			isAssigned_ = false;
		}
		else {
			GS_THROW_USER_ERROR(GS_ERROR_TQ_INTERNAL_GIS_LOGIC_ERROR,
				"Internal logic error: Point constructed with no expr");
		}
		yExpr_ = NULL;
		if (y != NULL && y->isNumeric()) {
			y_ = *y;
		}
		else if (y != NULL) {
			y_ = std::numeric_limits<double>::quiet_NaN();
			yExpr_ = y->dup(txn, objectManager);
			isAssigned_ = false;
		}
		else {
			GS_THROW_USER_ERROR(GS_ERROR_TQ_INTERNAL_GIS_LOGIC_ERROR,
				"Internal logic error: Point constructed with no expr");
		}
		dimension_ = 2;
		zExpr_ = NULL;
		if (z != NULL && z->isNumeric()) {
			z_ = *z;
			dimension_ = 3;
		}
		else {
			z_ = std::numeric_limits<double>::quiet_NaN();
			if (z != NULL) {
				zExpr_ = z->dup(txn, objectManager);
				dimension_ = 3;
			}
			// A missing z keeps the point assigned (2D); an unevaluated
			// z expression makes the whole point unassigned.
			isAssigned_ = (z == NULL);
		}
		if (isAssigned_) {
			boundingRect_.xmax = boundingRect_.xmin = x_;
			boundingRect_.ymax = boundingRect_.ymin = y_;
			boundingRect_.zmax = boundingRect_.zmin = z_;
		}
		else {
			// Unassigned coordinates cannot produce a meaningful MBB yet.
			boundingRect_.xmax = boundingRect_.xmin = boundingRect_.ymax =
				boundingRect_.ymin = boundingRect_.zmax = boundingRect_.zmin =
					std::numeric_limits<double>::quiet_NaN();
		}
	}

	/*!
	 * @brief Check for the object is deterministic object
	 *        (all coordinates are concrete values, no pending expressions)
	 *
	 */
	bool isAssigned() const {
		return isAssigned_;
	}

	/*!
	 * @brief Generate new assigned point object
	 * ex) assign POINT(A 1 2) to A=0 -> new POINT(0 1 2) is generated.
	 * @param txn The transaction context
	 * @param objectManager Object manager
	 * @param amap Symbol map to assign used in expression evaluation
	 * @param fmap Function map to call used in expression evaluation
	 * @param mode EvalMode
	 *
	 * @return newly generated point object
	 */
	virtual Point *assign(TransactionContext &txn, ObjectManager &objectManager,
		ContainerRowWrapper *amap, FunctionMap *fmap, EvalMode mode) {
		if (isEmpty_ || isAssigned_) {
			// Nothing to evaluate: return a plain copy.
			return dup(txn, objectManager);
		}
		Expr *e1 = NULL, *e2 = NULL, *e3 = NULL;
		e1 = (xExpr_ != NULL)
				 ? (xExpr_->eval(txn, objectManager, amap, fmap, mode))
				 : (Expr::newNumericValue(x_, txn));
		e2 = (yExpr_ != NULL)
				 ? (yExpr_->eval(txn, objectManager, amap, fmap, mode))
				 : (Expr::newNumericValue(y_, txn));
		if (dimension_ >= 3) {
			e3 = (zExpr_ != NULL)
					 ? (zExpr_->eval(txn, objectManager, amap, fmap, mode))
					 : (Expr::newNumericValue(z_, txn));
		}
		// e3 stays NULL for 2D points; the Expr-constructor accepts that.
		return QP_NEW Point(srId_, e1, e2, e3, txn, objectManager);
	}

	/*!
	 * @brief Check for the object is simple
	 *
	 * @return result
	 */
	bool isSimple() const {
		return isAssigned_;
	}

	/*!
	 * @brief Get type
	 *
	 * @return type
	 */
	GeometryType getType() const {
		return POINT;
	}

	// Raw in-memory footprint of this object (not the serialized size).
	virtual size_t getRawSize() const {
		return sizeof(*this);
	}

	/*!
	 * @brief Get MBB (Mininum-Bounding-Box) rectangle (just point).
	 *
	 * @return MBB rect
	 */
	const TrRectTag &getBoundingRect() const {
		return boundingRect_;
	}

	/*!
	 * @brief Generate duplication of the object
	 *
	 * @param txn The transaction context
	 * @param objectManager Object manager
	 * @param id SRID to stamp on the copy
	 *
	 * @return duplicated object, caller must release.
	 */
	Point *dup(
		TransactionContext &txn, ObjectManager &objectManager, srid_t id) {
		if (isEmpty()) {
			return QP_NEW Point(txn);
		}
		else {
			Point *p = QP_NEW Point(id, x_, y_, z_, txn);
			// Deep-copy any pending coordinate expressions as well.
			if (xExpr_ != NULL) {
				p->xExpr_ = xExpr_->dup(txn, objectManager);
			}
			if (yExpr_ != NULL) {
				p->yExpr_ = yExpr_->dup(txn, objectManager);
			}
			if (zExpr_ != NULL) {
				p->zExpr_ = zExpr_->dup(txn, objectManager);
			}
			p->isAssigned_ = isAssigned_;
			p->isEmpty_ = isEmpty_;
			return p;
		}
	}

	/*!
	 * @brief Duplicate object
	 * @param txn The transaction context
	 * @param objectManager Object manager
	 *
	 * @return duplicated object, caller must release
	 */
	Point *dup(TransactionContext &txn, ObjectManager &objectManager) {
		return dup(txn, objectManager, srId_);
	}

	/*!
	 * @brief Get X coordinate
	 *
	 * @return x-coordinate if possible to evaluate.
	 * @throws GS_ERROR_TQ_INTERNAL_GIS_GET_VALUE_IN_EMPTY_OBJECT for empty points,
	 *         GS_ERROR_TQ_INTERNAL_GIS_LOGIC_ERROR for undetermined coordinates.
	 */
	double x() const {
		if (isEmpty_) {
			GS_THROW_USER_ERROR(
				GS_ERROR_TQ_INTERNAL_GIS_GET_VALUE_IN_EMPTY_OBJECT,
				"Cannot obtain coordinate from empty object");
		}
		else if (isAssigned_ || xExpr_ == NULL) {
			return x_;
		}
		else if (!isAssigned_ && xExpr_->isNumeric()) {
			return double(*xExpr_);
		}
		GS_THROW_USER_ERROR(GS_ERROR_TQ_INTERNAL_GIS_LOGIC_ERROR,
			"Internal logic error: cannot obtain undetermined coordinate");
	}

	/*!
	 * @brief Get Y coordinate
	 *
	 * @return y-coordinate if possible to evaluate.
	 */
	double y() const {
		if (isEmpty_) {
			GS_THROW_USER_ERROR(
				GS_ERROR_TQ_INTERNAL_GIS_GET_VALUE_IN_EMPTY_OBJECT,
				"Cannot obtain coordinate from empty object");
		}
		else if (isAssigned_ || yExpr_ == NULL) {
			return y_;
		}
		else if (!isAssigned_ && yExpr_->isNumeric()) {
			return double(*yExpr_);
		}
		GS_THROW_USER_ERROR(GS_ERROR_TQ_INTERNAL_GIS_LOGIC_ERROR,
			"Internal logic error: cannot obtain undetermined coordinate");
	}

	/*!
	 * @brief Get Z coordinate
	 *
	 * @return z-coordinate if possible to evaluate.
	 *         For 2D points this returns the stored z (NaN).
	 */
	double z() const {
		if (isEmpty_) {
			GS_THROW_USER_ERROR(
				GS_ERROR_TQ_INTERNAL_GIS_GET_VALUE_IN_EMPTY_OBJECT,
				"Cannot obtain coordinate from empty object");
		}
		else if (dimension_ >= 3 && (isAssigned_ || zExpr_ == NULL)) {
			return z_;
		}
		else if (dimension_ >= 3 && !isAssigned_ && zExpr_->isNumeric()) {
			return double(*zExpr_);
		}
		else if (dimension_ == 2) {
			return z_;
		}
		GS_THROW_USER_ERROR(GS_ERROR_TQ_INTERNAL_GIS_LOGIC_ERROR,
			"Internal logic error: cannot obtain undetermined coordinate");
	}

	/*!
	 * @brief Get WKT string of the object
	 *
	 * @return WKT, e.g. "POINT(1 2 3)".
	 *         NOTE(review): the buffer comes from QP_ALLOCATOR — presumably
	 *         released with the allocator's scope; confirm ownership with callers.
	 */
	virtual const char *getString(TransactionContext &txn) const {
		util::NormalOStringStream os;
		os << "POINT(";
		stringify(txn, os);
		os << ')';
		std::string str = os.str();
		const size_t len = str.size();
		char *ret = static_cast<char *>(QP_ALLOCATOR.allocate(len + 1));
		memcpy(ret, str.c_str(), len);
		ret[len] = '\0';
		return ret;
	}

#ifdef _WIN32
	/*!
	 * @brief Stringify a double value.
	 *        Windows-only shim: MSVC's default formatting of NaN/inf differs,
	 *        so emit fixed "nan"/"inf"/"-inf" tokens for portability.
	 *
	 * @param os output string stream
	 * @param x value
	 */
	inline void stringifyFp(util::NormalOStringStream &os, double x) const {
		if (util::isNaN(x)) {
			os << "nan";
		}
		else if (x == std::numeric_limits<double>::infinity()) {
			os << "inf";
		}
		else if (x == -std::numeric_limits<double>::infinity()) {
			os << "-inf";
		}
		else {
			os << x;
		}
	}
#else
	/*!
	 * @brief Stringify a double value
	 *
	 * @param os output string stream
	 * @param x value
	 */
	inline void stringifyFp(util::NormalOStringStream &os, double x) const {
		os << x;
	}
#endif

	/*!
	 * @brief Get object coordinate in WKT-subset string
	 *
	 * @param txn The transaction context
	 * @param os string-stream to save
	 */
	virtual void stringify(
		TransactionContext &txn, util::NormalOStringStream &os) const {
		// Save the caller's precision and restore it at the end.
		std::streamsize precision = os.precision();
		os << std::setprecision(GIS_POINT_PRECISION);
		if (isEmpty_) {
			os << "EMPTY";
		}
		else if (isAssigned_) {
			stringifyFp(os, x_);
			os << ' ';
			stringifyFp(os, y_);
			if (dimension_ >= 3) {
				os << ' ';
				stringifyFp(os, z_);
			}
		}
		else {
			// Not yet assigned: print the unevaluated expressions instead.
			if (xExpr_ != NULL) {
				os << xExpr_->getValueAsString(txn);
			}
			else {
				os << x_;
			}
			os << ' ';
			if (yExpr_ != NULL) {
				os << yExpr_->getValueAsString(txn);
			}
			else {
				os << y_;
			}
			if (dimension_ >= 3) {
				os << ' ';
				if (zExpr_ != NULL) {
					os << zExpr_->getValueAsString(txn);
				}
				else {
					os << z_;
				}
			}
		}
		// -1 means "no SRID"; anything else is appended as ";<srid>".
		if (srId_ != -1) {
			os << ";" << srId_;
		}
		os << std::setprecision(static_cast<int32_t>(precision));
	}

	/*!
	 * @brief Check for equality
	 *
	 * @param p point to test
	 *
	 * @return result
	 *         NOTE(review): only this object's dimension_ selects the 2D
	 *         comparison; a 2D/3D mixed comparison is asymmetric — confirm
	 *         callers always compare same-dimension points.
	 */
	bool operator==(Point &p) const {
		if (isEmpty_) {
			return p.isEmpty();
		}
		if (p.isEmpty_) {
			return false;
		}
		if (dimension_ == 2) {
			return (x_ == p.x() && y_ == p.y());
		}
		return (x_ == p.x() && y_ == p.y() && z_ == p.z());
	}

	/*!
	 * @brief Check for equality
	 *
	 * @param g geometry to test
	 *
	 * @return result (false for any non-POINT geometry)
	 */
	bool operator==(Geometry &g) const {
		if (g.getType() != POINT) {
			return false;
		}
		return *this == dynamic_cast<Point &>(g);
	}

	/*!
	 * @brief Check for non-equality
	 *
	 * @param p point to test
	 *
	 * @return
	 */
	bool operator!=(Point &p) const {
		return !(*this == p);
	}

protected:
	// Concrete coordinates (NaN when pending / not applicable).
	double x_, y_, z_;
	// Unevaluated coordinate expressions; NULL once a coordinate is concrete.
	Expr *xExpr_, *yExpr_, *zExpr_;

	/*!
	 * @brief Get the serialized size of geometry body
	 * (Not equal to "sizeof")
	 *
	 * @return size (always three doubles; 2D points store NaN for z)
	 */
	virtual size_t getSerializedSize() const {
		return sizeof(double) * 3;
	}

	/*!
	 * @brief Serialization of geometry's body
	 *
	 * @param out output buffer
	 * @param offset write pointer
	 * @throws GS_ERROR_TQ_INTERNAL_GIS_CANNOT_SERIALIZE if coordinates are
	 *         still unevaluated expressions.
	 */
	void serializeObject(util::XArray<uint8_t> &out, uint64_t &offset) {
		if (isAssigned_) {
			// dimension_ == 0 is the empty point: nothing is written.
			if (dimension_ != 0) {
				out.push_back(reinterpret_cast<uint8_t *>(&x_), sizeof(double));
				out.push_back(reinterpret_cast<uint8_t *>(&y_), sizeof(double));
				out.push_back(reinterpret_cast<uint8_t *>(&z_), sizeof(double));
				offset += getSerializedSize();
			}
		}
		else {
			GS_THROW_USER_ERROR(GS_ERROR_TQ_INTERNAL_GIS_CANNOT_SERIALIZE,
				"Cannot serialize geometry object");
		}
	}

	/*!
	 * @brief Deserialize geometry from byte array
	 *
	 * @param txn Transaction context
	 * @param in Input byte array
	 * @param offset Read pointer (advanced by three doubles)
	 * @param srId SRID if set
	 * @param dimension Dimension read in Geometry::deserialize
	 *
	 * @return Generated geometry object
	 */
	static Point *deserializeObject(TransactionContext &txn, const uint8_t *in,
		uint64_t &offset, srid_t srId, int dimension) {
		double p[3];
		assignFromMemory(&p[0], in + offset);
		offset += sizeof(double);
		assignFromMemory(&p[1], in + offset);
		offset += sizeof(double);
		assignFromMemory(&p[2], in + offset);
		offset += sizeof(double);
		if (dimension == 2) {
			// Ignore the stored z and re-mark the point as 2D via NaN.
			return QP_NEW Point(srId, p[0], p[1],
				std::numeric_limits<double>::quiet_NaN(), txn);
		}
		else {
			if (dimension != 3) {
				GS_THROW_USER_ERROR(GS_ERROR_TQ_INTERNAL_GIS_CANNOT_DESERIALIZE,
					"Cannot use dimension >3");
			}
			return QP_NEW Point(srId, p[0], p[1], p[2], txn);
		}
	}

	/*!
	 * @brief Deserialize geometry from byte stream
	 *
	 * @param txn Transaction context
	 * @param in Input byte stream
	 * @param srId SRID if set
	 * @param dimension Dimension read in Geometry::deserialize
	 *
	 * @return Generated geometry object
	 */
	static Point *deserializeObject(TransactionContext &txn,
		util::ByteStream<util::ArrayInStream> &in, srid_t srId, int dimension) {
		double p[3];
		in >> p[0] >> p[1] >> p[2];
		if (dimension == 2) {
			return QP_NEW Point(srId, p[0], p[1],
				std::numeric_limits<double>::quiet_NaN(), txn);
		}
		else if (dimension == 3) {
			return QP_NEW Point(srId, p[0], p[1], p[2], txn);
		}
		util::NormalOStringStream os;
		os << "Invalid dimension: " << dimension;
		GS_THROW_USER_ERROR(
			GS_ERROR_TQ_INTERNAL_GIS_CANNOT_DESERIALIZE, os.str().c_str());
	}
};
#endif /* GIS_POINT_H_*/
| 5,901 |
1,681 |
<filename>easyreport-membership/src/main/java/com/easytoolsoft/easyreport/membership/service/impl/EventServiceImpl.java
package com.easytoolsoft.easyreport.membership.service.impl;
import java.util.Date;
import com.easytoolsoft.easyreport.membership.data.EventRepository;
import com.easytoolsoft.easyreport.membership.domain.Event;
import com.easytoolsoft.easyreport.membership.domain.example.EventExample;
import com.easytoolsoft.easyreport.membership.service.EventService;
import com.easytoolsoft.easyreport.mybatis.service.AbstractCrudService;
import org.springframework.stereotype.Service;
/**
* @author <NAME>
* @date 2017-03-25
*/
@Service("EventService")
public class EventServiceImpl
    extends AbstractCrudService<EventRepository, Event, EventExample, Integer>
    implements EventService {

    /**
     * Builds the paging query example: a LIKE filter on the given field.
     *
     * @param fieldName column/field to filter on
     * @param keyword   LIKE pattern to match
     * @return the populated {@link EventExample}
     */
    @Override
    protected EventExample getPageExample(final String fieldName, final String keyword) {
        final EventExample example = new EventExample();
        example.createCriteria().andFieldLike(fieldName, keyword);
        return example;
    }

    /**
     * Deletes every event record.
     * A null example means "no criteria", i.e. delete all rows.
     */
    @Override
    public void clear() {
        this.dao.deleteByExample(null);
    }

    /**
     * Records a new event with the current timestamp.
     * userId is fixed to -1 here — presumably "unknown/system user"; confirm
     * against callers before relying on it.
     *
     * @param source  originating subsystem of the event
     * @param account account name associated with the event
     * @param message event message text
     * @param level   severity level
     * @param url     request URL that produced the event
     */
    @Override
    public void add(final String source, final String account, final String message, final String level,
                    final String url) {
        final Event event = Event.builder()
            .source(source)
            .account(account)
            .userId(-1)
            .message(message)
            .level(level)
            .url(url)
            .gmtCreated(new Date())
            .build();
        this.add(event);
    }
}
| 592 |
333 |
class Test_DescrOperation:
    # Interp-level tests for object-space descriptor operations
    # (self.space is supplied by the enclosing test framework).

    def test_nonzero(self):
        """space.nonzero must reflect Python truthiness as wrapped bools."""
        space = self.space
        assert space.nonzero(space.w_True) is space.w_True
        assert space.nonzero(space.w_False) is space.w_False
        assert space.nonzero(space.wrap(42)) is space.w_True
        assert space.nonzero(space.wrap(0)) is space.w_False
        # An empty list is falsy; appending any element makes it truthy.
        l = space.newlist([])
        assert space.nonzero(l) is space.w_False
        space.call_method(l, 'append', space.w_False)
        assert space.nonzero(l) is space.w_True

    def test_isinstance_and_issubtype_ignore_special(self):
        """isinstance/issubtype at interp level must bypass a metaclass's
        __subclasscheck__ override (which here always returns False)."""
        space = self.space
        w_tup = space.appexec((), """():
            class Meta(type):
                def __subclasscheck__(mcls, cls):
                    return False
            class Base:
                __metaclass__ = Meta
            class Sub(Base):
                pass
            return Base, Sub""")
        w_base, w_sub = space.unpackiterable(w_tup)
        assert space.issubtype_w(w_sub, w_base)
        w_inst = space.call_function(w_sub)
        assert space.isinstance_w(w_inst, w_base)
| 505 |
4,054 |
<filename>searchcore/src/tests/proton/metrics/metrics_engine/metrics_engine_test.cpp<gh_stars>1000+
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include <vespa/metrics/metricset.h>
#include <vespa/searchcore/proton/metrics/attribute_metrics.h>
#include <vespa/searchcore/proton/metrics/metrics_engine.h>
#include <vespa/vespalib/testkit/testapp.h>
#include <vespa/log/log.h>
LOG_SETUP("metrics_engine_test");
using namespace proton;
struct DummyMetricSet : public metrics::MetricSet {
DummyMetricSet(const vespalib::string &name) : metrics::MetricSet(name, {}, "", nullptr) {}
};
/**
 * Fixture wiring a MetricsEngine to an AttributeMetrics set whose metrics are
 * registered under a dummy parent set, plus helpers to drive and inspect the
 * registration state.
 */
struct AttributeMetricsFixture {
    MetricsEngine engine;
    DummyMetricSet parent;
    AttributeMetrics metrics;
    AttributeMetricsFixture()
        : engine(),
          parent("parent"),
          metrics(&parent)
    {}
    // Registers metrics for the named attribute through the engine.
    void addAttribute(const vespalib::string &attrName) {
        engine.addAttribute(metrics, attrName);
    }
    // Unregisters the named attribute's metrics.
    void removeAttribute(const vespalib::string &attrName) {
        engine.removeAttribute(metrics, attrName);
    }
    // Unregisters every attribute's metrics at once.
    void cleanAttributes() {
        engine.cleanAttributes(metrics);
    }
    // Asserts how many metric sets are registered under the parent.
    void assertRegisteredMetrics(size_t expNumMetrics) const {
        EXPECT_EQUAL(expNumMetrics, parent.getRegisteredMetrics().size());
    }
    void assertMetricsExists(const vespalib::string &attrName) {
        EXPECT_TRUE(metrics.get(attrName) != nullptr);
    }
    void assertMetricsNotExists(const vespalib::string &attrName) {
        EXPECT_TRUE(metrics.get(attrName) == nullptr);
    }
};
// Adding an attribute registers exactly one metric set, retrievable by name.
TEST_F("require that attribute metrics can be added", AttributeMetricsFixture)
{
    TEST_DO(f.assertRegisteredMetrics(0));
    f.addAttribute("foo");
    TEST_DO(f.assertRegisteredMetrics(1));
    TEST_DO(f.assertMetricsExists("foo"));
}
// Removing one attribute leaves the other attribute's metrics untouched.
TEST_F("require that attribute metrics can be removed", AttributeMetricsFixture)
{
    TEST_DO(f.assertRegisteredMetrics(0));
    f.addAttribute("foo");
    f.addAttribute("bar");
    TEST_DO(f.assertRegisteredMetrics(2));
    f.removeAttribute("foo");
    TEST_DO(f.assertRegisteredMetrics(1));
    TEST_DO(f.assertMetricsNotExists("foo"));
    TEST_DO(f.assertMetricsExists("bar"));
}
// cleanAttributes() unregisters every attribute metric in one call.
TEST_F("require that all attribute metrics can be cleaned", AttributeMetricsFixture)
{
    TEST_DO(f.assertRegisteredMetrics(0));
    f.addAttribute("foo");
    f.addAttribute("bar");
    TEST_DO(f.assertRegisteredMetrics(2));
    f.cleanAttributes();
    TEST_DO(f.assertRegisteredMetrics(0));
    TEST_DO(f.assertMetricsNotExists("foo"));
    TEST_DO(f.assertMetricsNotExists("bar"));
}
// vespalib testkit entry point: runs every TEST_F defined above.
TEST_MAIN() { TEST_RUN_ALL(); }
| 1,018 |
345 |
<reponame>ChenZhangg/dmix
/*
* Copyright (C) 2004 <NAME>
* Copyright (C) 2010-2014 The MPDroid Project
*
* All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice,this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.a0z.mpd.item;
import org.a0z.mpd.Tools;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
public class Stream extends Item {

    /** Display name of the stream; may be null. */
    private final String mName;

    /** URL of the stream. */
    private final String mUrl;

    /** Mutable playlist position; deliberately excluded from equals/hashCode. */
    private int mPos;

    /**
     * Creates a stream entry.
     *
     * @param name The display name of the stream, may be null.
     * @param url  The URL of the stream.
     * @param pos  The playlist position of the stream.
     */
    public Stream(final String name, final String url, final int pos) {
        super();
        mName = name;
        mUrl = url;
        mPos = pos;
    }

    /**
     * Appends a stream name to a stream URL as a '#' fragment so the name can
     * survive in playlists that only store URLs.
     *
     * @param url  The stream URL.
     * @param name The stream name; if null or empty the URL is returned unchanged.
     * @return The URL with the name appended after a '#' separator.
     */
    public static String addStreamName(final String url, final String name) {
        final StringBuilder streamName;
        if (name == null) {
            streamName = new StringBuilder(url.length() + 3);
        } else {
            streamName = new StringBuilder(url.length() + name.length() + 3);
        }
        streamName.append(url);
        if (name != null && !name.isEmpty()) {
            String path = null;
            try {
                path = new URL(url).getPath();
            } catch (final MalformedURLException ignored) {
            }
            // A URL without a path gets a trailing '/' so the '#name' fragment
            // is not interpreted as part of the authority.
            if (path == null || path.isEmpty()) {
                streamName.append('/');
            }
            streamName.append('#');
            streamName.append(name);
        }
        return streamName.toString();
    }

    /**
     * Compares an Artist object with a general contract of
     * comparison that is reflexive, symmetric and transitive.
     *
     * @param o The object to compare this instance with.
     * @return True if the objects are equal with regard to te general contract, false otherwise.
     * @see Object#equals(Object)
     */
    @Override
    public boolean equals(final Object o) {
        // Rewritten from a boxed-Boolean state machine to the standard
        // short-circuit equals pattern; the accepted/rejected inputs are
        // identical to the original implementation.
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final Stream stream = (Stream) o;
        return !Tools.isNotEqual(mName, stream.mName)
                && !Tools.isNotEqual(mUrl, stream.mUrl);
    }

    @Override
    public String getName() {
        return mName;
    }

    /** @return The playlist position of this stream. */
    public int getPos() {
        return mPos;
    }

    /** @return The URL of this stream. */
    public String getUrl() {
        return mUrl;
    }

    @Override
    public int hashCode() {
        // Hash only the immutable identity fields, consistent with equals().
        return Arrays.hashCode(new Object[]{mName, mUrl});
    }

    public void setPos(final int pos) {
        mPos = pos;
    }
}
| 1,627 |
892 |
{
"schema_version": "1.2.0",
"id": "GHSA-7v3x-h7r2-34jv",
"modified": "2022-01-20T16:18:28Z",
"published": "2022-01-21T18:43:05Z",
"aliases": [
],
"summary": "Insufficient Session Expiration in Pterodactyl API",
"details": "### Impact\nA vulnerability exists in Pterodactyl Panel `<= 1.6.6` that could allow a malicious attacker that compromises an API key to generate an authenticated user session that is not revoked when the API key is deleted, thus allowing the malicious user to remain logged in as the user the key belonged to.\n\nIt is important to note that **a malicious user must first compromise an existing API key for a user to exploit this issue**. It cannot be exploited by chance, and requires a coordinated attack against an individual account using a known API key.\n\n### Patches\nThis issue has been addressed in the `v1.7.0` release of Pterodactyl Panel.\n\n### Workarounds\nThose not wishing to upgrade may apply the change below:\n\n```diff\ndiff --git a/app/Http/Middleware/Api/AuthenticateKey.php b/app/Http/Middleware/Api/AuthenticateKey.php\nindex eb25dac6..857bfab2 100644\n--- a/app/Http/Middleware/Api/AuthenticateKey.php\n+++ b/app/Http/Middleware/Api/AuthenticateKey.php\n@@ -70,7 +70,7 @@ class AuthenticateKey\n } else {\n $model = $this->authenticateApiKey($request->bearerToken(), $keyType);\n\n- $this->auth->guard()->loginUsingId($model->user_id);\n+ $this->auth->guard()->onceUsingId($model->user_id);\n }\n```\n\n### For more information\nIf you have any questions or comments about this advisory please reach out to `Tactical Fish#8008` on [Discord](https://discord.gg/pterodactyl) or email `<EMAIL>`.\n",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:N/AC:H/PR:H/UI:R/S:U/C:H/I:H/A:L"
}
],
"affected": [
{
"package": {
"ecosystem": "Packagist",
"name": "pterodactyl/panel"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "0"
},
{
"fixed": "1.7.0"
}
]
}
]
}
],
"references": [
{
"type": "WEB",
"url": "https://github.com/pterodactyl/panel/security/advisories/GHSA-7v3x-h7r2-34jv"
},
{
"type": "WEB",
"url": "https://github.com/pterodactyl/panel/commit/dfa329ddf242908b60e22e3340ea36359eab1ef4"
},
{
"type": "WEB",
"url": "https://github.com/pterodactyl/panel/releases/tag/v1.7.0"
},
{
"type": "PACKAGE",
"url": "https://github.com/pterodactyl/panel"
}
],
"database_specific": {
"cwe_ids": [
"CWE-613"
],
"severity": "MODERATE",
"github_reviewed": true
}
}
| 1,249 |
519 |
//
// ConfigViewController.h
// iOSBlogReader
//
// Created by everettjf on 16/4/9.
// Copyright © 2016年 everettjf. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "EEViewController.h"
/// View controller for the app's configuration screen; inherits all
/// behavior from EEViewController and declares no additional API here.
@interface ConfigViewController : EEViewController

@end
| 91 |
2,206 |
/*
*
* Copyright (c) 2006-2020, Speedment, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); You may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.speedment.runtime.core.internal.util.document;
import com.speedment.runtime.config.*;
import com.speedment.runtime.config.trait.HasAliasUtil;
import com.speedment.runtime.config.trait.HasEnableUtil;
import com.speedment.runtime.config.trait.HasNameUtil;
import com.speedment.runtime.config.trait.HasTypeMapperUtil;
import org.junit.jupiter.api.BeforeEach;
import java.util.AbstractMap;
import java.util.Map;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
/**
*
* @author <NAME>
*/
public abstract class AbstractDocumentTest {
Project project;
protected Dbms dbmsA, dbmsB;
protected Schema schemaA, schemaB;
protected Table tableA, tableB, tableC, tableD;
protected Column columnA1, columnA2, columnB1, columnB2, columnC1, columnC2, columnD1, columnD2;
protected PrimaryKeyColumn primaryKeyColumnA1, primaryKeyColumnB1, primaryKeyColumnC1, primaryKeyColumnD1;
protected Index indexA2, indexB2;
protected IndexColumn indexColumnA2, indexColumnB2;
protected ForeignKey foreignKeyA2_C1, foreignKeyB2_D1;
protected ForeignKeyColumn foreignKeyColumnA2_C1, foreignKeyColumnB2_D1;
@BeforeEach
public void setUp() {
final Map<String, Object> data = map(
entry(HasNameUtil.NAME, "Project"),
entry(HasEnableUtil.ENABLED, true),
entry(ProjectUtil.DBMSES, map(
entry(HasNameUtil.NAME, "Dbms A"),
entry(HasEnableUtil.ENABLED, true),
entry(DbmsUtil.SCHEMAS, map(
entry(HasNameUtil.NAME, "Schema A"),
entry(HasAliasUtil.ALIAS, "Custom Schema A"),
entry(HasEnableUtil.ENABLED, true),
entry(SchemaUtil.TABLES, map(
entry(HasNameUtil.NAME, "Table A"),
entry(HasAliasUtil.ALIAS, "Custom Table A"),
entry(HasEnableUtil.ENABLED, true),
entry(TableUtil.COLUMNS, map(
entry(HasNameUtil.NAME, "Column A1"),
entry(HasAliasUtil.ALIAS, "Custom Column A1"),
entry(HasEnableUtil.ENABLED, true),
entry(HasTypeMapperUtil.DATABASE_TYPE, Long.class.getName())
), map(
entry(HasNameUtil.NAME, "Column A2"),
entry(HasAliasUtil.ALIAS, "Custom Column A2"),
entry(HasEnableUtil.ENABLED, true),
entry(HasTypeMapperUtil.DATABASE_TYPE, Integer.class.getName())
)),
entry(TableUtil.PRIMARY_KEY_COLUMNS, map(
entry(HasNameUtil.NAME, "Column A1")
)),
entry(TableUtil.INDEXES, map(
entry(HasNameUtil.NAME, "Index A2"),
entry(HasEnableUtil.ENABLED, true),
entry(IndexUtil.INDEX_COLUMNS, map(
entry(HasNameUtil.NAME, "Column A2")
))
)),
entry(TableUtil.FOREIGN_KEYS, map(
entry(HasNameUtil.NAME, "ForeignKey A2 to C1"),
entry(HasEnableUtil.ENABLED, true),
entry(ForeignKeyUtil.FOREIGN_KEY_COLUMNS, map(
entry(HasNameUtil.NAME, "Column A2"),
entry(ForeignKeyColumnUtil.FOREIGN_TABLE_NAME, "Table C"),
entry(ForeignKeyColumnUtil.FOREIGN_COLUMN_NAME, "Column C1")
))
))
), map(
entry(HasNameUtil.NAME, "Table C"),
entry(HasAliasUtil.ALIAS, "Custom Table C"),
entry(HasEnableUtil.ENABLED, true),
entry(TableUtil.COLUMNS, map(
entry(HasNameUtil.NAME, "Column C1"),
entry(HasAliasUtil.ALIAS, "Custom Column C1"),
entry(HasEnableUtil.ENABLED, true),
entry(HasTypeMapperUtil.DATABASE_TYPE, Integer.class.getName())
), map(
entry(HasNameUtil.NAME, "Column C2"),
entry(HasAliasUtil.ALIAS, "Custom Column C2"),
entry(HasEnableUtil.ENABLED, true),
entry(HasTypeMapperUtil.DATABASE_TYPE, String.class.getName())
)),
entry(TableUtil.PRIMARY_KEY_COLUMNS, map(
entry(HasNameUtil.NAME, "Column C1")
))
))
))
), map(
entry(HasNameUtil.NAME, "Dbms B"),
entry(HasEnableUtil.ENABLED, true),
entry(DbmsUtil.SCHEMAS, map(
entry(HasNameUtil.NAME, "Schema B"),
entry(HasAliasUtil.ALIAS, "Custom Schema B"),
entry(HasEnableUtil.ENABLED, true),
entry(SchemaUtil.TABLES, map(
entry(HasNameUtil.NAME, "Table B"),
entry(HasAliasUtil.ALIAS, "Custom Table B"),
entry(HasEnableUtil.ENABLED, true),
entry(TableUtil.COLUMNS, map(
entry(HasNameUtil.NAME, "Column B1"),
entry(HasAliasUtil.ALIAS, "Custom Column B1"),
entry(HasEnableUtil.ENABLED, true),
entry(HasTypeMapperUtil.DATABASE_TYPE, Long.class.getName())
), map(
entry(HasNameUtil.NAME, "Column B2"),
entry(HasAliasUtil.ALIAS, "Custom Column B2"),
entry(HasEnableUtil.ENABLED, true),
entry(HasTypeMapperUtil.DATABASE_TYPE, Integer.class.getName())
)),
entry(TableUtil.PRIMARY_KEY_COLUMNS, map(
entry(HasNameUtil.NAME, "Column B1")
)),
entry(TableUtil.INDEXES, map(
entry(HasNameUtil.NAME, "Index B2"),
entry(HasEnableUtil.ENABLED, true),
entry(IndexUtil.UNIQUE, true),
entry(IndexUtil.INDEX_COLUMNS, map(
entry(HasNameUtil.NAME, "Column B2")
))
)),
entry(TableUtil.FOREIGN_KEYS, map(
entry(HasNameUtil.NAME, "ForeignKey B2 to D1"),
entry(HasEnableUtil.ENABLED, true),
entry(ForeignKeyUtil.FOREIGN_KEY_COLUMNS, map(
entry(HasNameUtil.NAME, "Column B2"),
entry(ForeignKeyColumnUtil.FOREIGN_TABLE_NAME, "Table D"),
entry(ForeignKeyColumnUtil.FOREIGN_COLUMN_NAME, "Column D1")
))
))
), map(
entry(HasNameUtil.NAME, "Table D"),
entry(HasAliasUtil.ALIAS, "Custom Table D"),
entry(HasEnableUtil.ENABLED, true),
entry(TableUtil.COLUMNS, map(
entry(HasNameUtil.NAME, "Column D1"),
entry(HasAliasUtil.ALIAS, "Custom Column D1"),
entry(HasEnableUtil.ENABLED, true),
entry(HasTypeMapperUtil.DATABASE_TYPE, Integer.class.getName())
), map(
entry(HasNameUtil.NAME, "Column D2"),
entry(HasAliasUtil.ALIAS, "Custom Column D2"),
entry(HasEnableUtil.ENABLED, true),
entry(HasTypeMapperUtil.DATABASE_TYPE, String.class.getName())
)),
entry(TableUtil.PRIMARY_KEY_COLUMNS, map(
entry(HasNameUtil.NAME, "Column D1")
))
))
))
))
);
project = Project.create(data);
dbmsA = project.children(ProjectUtil.DBMSES, Dbms::create).findFirst().get();
dbmsB = project.children(ProjectUtil.DBMSES, Dbms::create).skip(1).findFirst().get();
schemaA = dbmsA.children(DbmsUtil.SCHEMAS, Schema::create).findFirst().get();
schemaB = dbmsB.children(DbmsUtil.SCHEMAS, Schema::create).findFirst().get();
tableA = schemaA.children(SchemaUtil.TABLES, Table::create).findFirst().get();
tableB = schemaB.children(SchemaUtil.TABLES, Table::create).findFirst().get();
tableC = schemaA.children(SchemaUtil.TABLES, Table::create).skip(1).findFirst().get();
tableD = schemaB.children(SchemaUtil.TABLES, Table::create).skip(1).findFirst().get();
columnA1 = tableA.children(TableUtil.COLUMNS, Column::create).findFirst().get();
columnA2 = tableA.children(TableUtil.COLUMNS, Column::create).skip(1).findFirst().get();
columnB1 = tableB.children(TableUtil.COLUMNS, Column::create).findFirst().get();
columnB2 = tableB.children(TableUtil.COLUMNS, Column::create).skip(1).findFirst().get();
columnC1 = tableC.children(TableUtil.COLUMNS, Column::create).findFirst().get();
columnC2 = tableC.children(TableUtil.COLUMNS, Column::create).skip(1).findFirst().get();
columnD1 = tableD.children(TableUtil.COLUMNS, Column::create).findFirst().get();
columnD2 = tableD.children(TableUtil.COLUMNS, Column::create).skip(1).findFirst().get();
primaryKeyColumnA1 = tableA.children(TableUtil.PRIMARY_KEY_COLUMNS, PrimaryKeyColumn::create).findFirst().get();
primaryKeyColumnB1 = tableB.children(TableUtil.PRIMARY_KEY_COLUMNS, PrimaryKeyColumn::create).findFirst().get();
primaryKeyColumnC1 = tableC.children(TableUtil.PRIMARY_KEY_COLUMNS, PrimaryKeyColumn::create).findFirst().get();
primaryKeyColumnD1 = tableD.children(TableUtil.PRIMARY_KEY_COLUMNS, PrimaryKeyColumn::create).findFirst().get();
indexA2 = tableA.children(TableUtil.INDEXES, Index::create).findFirst().get();
indexB2 = tableB.children(TableUtil.INDEXES, Index::create).findFirst().get();
indexColumnA2 = indexA2.children(IndexUtil.INDEX_COLUMNS, IndexColumn::create).findFirst().get();
indexColumnB2 = indexB2.children(IndexUtil.INDEX_COLUMNS, IndexColumn::create).findFirst().get();
foreignKeyA2_C1 = tableA.children(TableUtil.FOREIGN_KEYS, ForeignKey::create).findFirst().get();
foreignKeyB2_D1 = tableB.children(TableUtil.FOREIGN_KEYS, ForeignKey::create).findFirst().get();
foreignKeyColumnA2_C1 = foreignKeyA2_C1.children(ForeignKeyUtil.FOREIGN_KEY_COLUMNS, ForeignKeyColumn::create).findFirst().get();
foreignKeyColumnB2_D1 = foreignKeyB2_D1.children(ForeignKeyUtil.FOREIGN_KEY_COLUMNS, ForeignKeyColumn::create).findFirst().get();
}
    /**
     * Returns a {@link Stream} of every document in this test fixture,
     * ordered parent-first: project, DBMSes, schemas, tables, columns,
     * primary-key columns, indexes, index columns, foreign keys and
     * foreign-key columns.
     */
    public Stream<Document> stream() {
        return Stream.of(
            project,
            dbmsA, dbmsB,
            schemaA, schemaB,
            tableA, tableB, tableC, tableD,
            columnA1, columnA2, columnB1, columnB2, columnC1, columnC2, columnD1, columnD2,
            primaryKeyColumnA1, primaryKeyColumnB1, primaryKeyColumnC1, primaryKeyColumnD1,
            indexA2, indexB2,
            indexColumnA2, indexColumnB2,
            foreignKeyA2_C1, foreignKeyB2_D1,
            foreignKeyColumnA2_C1, foreignKeyColumnB2_D1
        );
    }
    /** Returns a {@link Stream} containing only the root project document. */
    public Stream<Project> projects() {
        return Stream.of(project);
    }
public <T extends Document> Stream<T> streamOf(Class<T> clazz) {
return stream().filter(clazz::isInstance).map(clazz::cast);
}
private static Map.Entry<String, Object> entry(String key, String value) {
return new AbstractMap.SimpleEntry<>(key, value);
}
private static Map.Entry<String, Object> entry(String key, boolean value) {
return new AbstractMap.SimpleEntry<>(key, value);
}
@SafeVarargs
@SuppressWarnings("varargs")
private static Map.Entry<String, Object> entry(String key, Map<String, Object>... children) {
return new AbstractMap.SimpleEntry<>(key, Stream.of(children).collect(toList()));
}
@SafeVarargs
@SuppressWarnings("varargs")
private static Map<String, Object> map(Map.Entry<String, Object>... entries) {
return Stream.of(entries)
.collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
}
}
| 7,059 |
679 |
<filename>main/framework/inc/acceleratorconst.h<gh_stars>100-1000
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#ifndef _FRAMEWORK_ACCELERATORCONST_H_
#define _FRAMEWORK_ACCELERATORCONST_H_
//_______________________________________________
// own includes
#include <general.h>
//_______________________________________________
// interface includes
//_______________________________________________
// other includes
namespace framework{
// Constants describing the accelerator XML file format. Each name exists in
// three flavours below: the plain local name, the name with its namespace
// alias prefix ("accel:"/"xlink:"), and the fully qualified namespace form.
// The DOCTYPE line is emitted verbatim at the top of every accelerator file.
#define DOCTYPE_ACCELERATORS DECLARE_ASCII("<!DOCTYPE accel:acceleratorlist PUBLIC \"-//OpenOffice.org//DTD OfficeDocument 1.0//EN\" \"accelerator.dtd\">")
// Generic XML attribute type plus the two namespace prefixes used here.
#define ATTRIBUTE_TYPE_CDATA DECLARE_ASCII("CDATA")
#define XMLNS_ACCEL DECLARE_ASCII("accel")
#define XMLNS_XLINK DECLARE_ASCII("xlink")
// Local (un-prefixed) attribute names: target URL, key code and modifiers.
#define ATTRIBUTE_URL DECLARE_ASCII("href" )
#define ATTRIBUTE_KEYCODE DECLARE_ASCII("code" )
#define ATTRIBUTE_MOD_SHIFT DECLARE_ASCII("shift")
#define ATTRIBUTE_MOD_MOD1 DECLARE_ASCII("mod1" )
#define ATTRIBUTE_MOD_MOD2 DECLARE_ASCII("mod2" )
#define ATTRIBUTE_MOD_MOD3 DECLARE_ASCII("mod3" )
// same items with a name space alias
#define AL_ELEMENT_ACCELERATORLIST DECLARE_ASCII("accel:acceleratorlist")
#define AL_ELEMENT_ITEM DECLARE_ASCII("accel:item" )
#define AL_XMLNS_ACCEL DECLARE_ASCII("xmlns:accel")
#define AL_XMLNS_XLINK DECLARE_ASCII("xmlns:xlink")
#define AL_ATTRIBUTE_URL DECLARE_ASCII("xlink:href" )
#define AL_ATTRIBUTE_KEYCODE DECLARE_ASCII("accel:code" )
#define AL_ATTRIBUTE_MOD_SHIFT DECLARE_ASCII("accel:shift")
#define AL_ATTRIBUTE_MOD_MOD1 DECLARE_ASCII("accel:mod1" )
#define AL_ATTRIBUTE_MOD_MOD2 DECLARE_ASCII("accel:mod2" )
#define AL_ATTRIBUTE_MOD_MOD3 DECLARE_ASCII("accel:mod3" )
// same items with full qualified name space
// (namespace URI and local name separated by '^' — the convention used by
// the XML parser these constants are fed into; see the parser for details.)
#define NS_ELEMENT_ACCELERATORLIST DECLARE_ASCII("http://openoffice.org/2001/accel^acceleratorlist")
#define NS_ELEMENT_ITEM DECLARE_ASCII("http://openoffice.org/2001/accel^item" )
#define NS_XMLNS_ACCEL DECLARE_ASCII("http://openoffice.org/2001/accel")
#define NS_XMLNS_XLINK DECLARE_ASCII("http://www.w3.org/1999/xlink" )
#define NS_ATTRIBUTE_URL DECLARE_ASCII("http://www.w3.org/1999/xlink^href" )
#define NS_ATTRIBUTE_KEYCODE DECLARE_ASCII("http://openoffice.org/2001/accel^code" )
#define NS_ATTRIBUTE_MOD_SHIFT DECLARE_ASCII("http://openoffice.org/2001/accel^shift")
#define NS_ATTRIBUTE_MOD_MOD1 DECLARE_ASCII("http://openoffice.org/2001/accel^mod1" )
#define NS_ATTRIBUTE_MOD_MOD2 DECLARE_ASCII("http://openoffice.org/2001/accel^mod2" )
#define NS_ATTRIBUTE_MOD_MOD3 DECLARE_ASCII("http://openoffice.org/2001/accel^mod3" )
} // namespace framework
#endif // _FRAMEWORK_ACCELERATORCONST_H_
| 1,637 |
704 |
package org.jgroups.demos;
import org.jgroups.client.StompConnection;
import org.jgroups.util.Util;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;
/**
* Chat client using STOMP to talk to other clients
*/
/**
 * Swing-based chat client that talks to other clients through a STOMP
 * connection ({@link StompConnection}). Chat text is exchanged on the
 * {@code /messages} destination; presence (which users / client endpoints
 * are online) is tracked through the other reserved destinations below.
 */
public class StompChat implements StompConnection.Listener {
    // --- Swing UI widgets ---
    JFrame mainFrame;
    TextArea txtArea;
    JTextField txtField;
    final JLabel csLabel=new JLabel("Send: "), status=new JLabel("");
    JButton leaveButton;
    JButton sendButton;
    JButton clearButton;
    final JLabel cluster=new JLabel("Cluster: "), users_label=new JLabel("Users: ");
    // --- cluster / presence bookkeeping ---
    private int num_servers=1;
    private int num_clients=0;
    protected String username=null;
    // Chat user names currently known to be online.
    protected final Set<String> users=new HashSet<>();
    // Server endpoints as reported via INFO frames.
    private final List<String> servers=new ArrayList<>();
    // Client identifiers currently connected (here: usernames).
    private final Set<String> clients=new HashSet<>();
    protected StompConnection stomp_client;
    // ======================== reserved topic ==========================
    public static final String MESSAGES = "/messages"; // headers + body
    public static final String CLIENT_JOINED = "/client-joined"; // client: 1234-2532-2665
    public static final String CLIENT_LEFT = "/client-left"; // client: 1432-7263-1002
    public static final String CLIENTS = "/clients"; // clients: 355352,3343,2232
    public static final String USER_JOINED = "/user-joined"; // user: Bela
    public static final String USER_LEFT = "/user-left"; // user: Bela
    public static final String GET_USERS = "/get-users"; //
    public static final String USERS = "/users"; // users: Bela, Michelle
    // reserved keywords in INFO messages
    public static final String ENDPOINTS = "endpoints";
    public static final String VIEW = "view";
    public static final String CLIENTS_KW = "clients";
    public static final String DESTINATION = "destination";
    public static final String USER = "user";
    public static final String USERS_KW = "users";
    public static final String CLIENT = "client";
    /**
     * Creates the client (does not connect yet; see {@link #start()}).
     *
     * @param host STOMP server host
     * @param port STOMP server port
     * @param user user name to chat under; falls back to the {@code user.name}
     *             system property when {@code null}
     */
    public StompChat(String host, int port, String user) {
        stomp_client=new StompConnection(host + ":" + port);
        stomp_client.addListener(this);
        username=user;
        try {
            if(username == null)
                username=System.getProperty("user.name");
        }
        catch(Throwable t) {
            // Best effort: a SecurityManager may forbid reading the property;
            // username simply stays null in that case.
        }
    }
    /** Command line entry point; see {@link #help()} for the options. */
    public static void main(String[] args) throws Exception {
        String host="localhost";
        int port=8787;
        String user=null;
        for(int i=0; i < args.length; i++) {
            if(args[i].equals("-host") || args[i].equals("-h")) {
                host=args[++i];
                continue;
            }
            if(args[i].equals("-port") || args[i].equals("-p")) {
                port=Integer.parseInt(args[++i]);
                continue;
            }
            if(args[i].equals("-user") || args[i].equals("-name")) {
                user=args[++i];
                continue;
            }
            help();
            return;
        }
        StompChat instance=new StompChat(host, port, user);
        instance.start();
    }
    /** Appends a chat line to the message area. */
    void showMessage(String msg) {
        txtArea.append(msg + "\n");
    }
    /** Records a newly joined user and refreshes the status / user labels. */
    void userJoined(String name) {
        users.add(name);
        showStatus(name + " joined the chat");
        users_label.setText("Users: " + users);
    }
    /** Removes a departed user and refreshes the status / user labels. */
    void userLeft(String name) {
        users.remove(name);
        showStatus(name + " left the chat");
        users_label.setText("Users: " + users);
    }
    /** Displays the current cluster view string. */
    void newView(String view) {
        cluster.setText("Cluster: " + view);
    }
    /** Merges a received user list into the local set and refreshes the label. */
    void usersReceived(Collection<String> users) {
        this.users.addAll(users);
        users_label.setText("Users: " + this.users);
    }
    /** Prints the command line usage. */
    static void help() {
        System.out.println("Chat [-help] [-host <host>] [-port <port>] [-user <user>]");
    }
    /**
     * Builds the Swing UI, connects to the STOMP server and announces this
     * user/client to the others. Blocks only for the connection setup; the
     * UI runs on the AWT event thread afterwards.
     */
    public void start() throws Exception {
        mainFrame=new JFrame("Chat demo");
        mainFrame.setPreferredSize(new Dimension(600,600));
        mainFrame.setBackground(Color.white);
        mainFrame.addWindowListener(new WindowAdapter() {
            // Announce departure and disconnect cleanly when the window closes.
            public void windowClosing(WindowEvent e) {
                stomp_client.send(USER_LEFT, USER, username);
                stomp_client.send(CLIENT_LEFT, CLIENT, username);
                stomp_client.disconnect();
                System.exit(0);
            }
        });
        connect();
        Box main_box=Box.createVerticalBox();
        main_box.setBackground(Color.white);
        Box input=Box.createHorizontalBox(); // input field
        Box buttons=Box.createHorizontalBox(); // for all the buttons
        mainFrame.add(main_box);
        main_box.add(Box.createVerticalStrut(10));
        main_box.add(cluster);
        cluster.setAlignmentX(Component.LEFT_ALIGNMENT);
        main_box.add(Box.createVerticalStrut(10));
        main_box.add(Box.createVerticalStrut(10));
        main_box.add(users_label);
        main_box.add(Box.createVerticalStrut(10));
        txtArea=new TextArea();
        txtArea.setPreferredSize(new Dimension(550, 500));
        txtArea.setEditable(false);
        txtArea.setBackground(Color.white);
        main_box.add(txtArea);
        main_box.add(Box.createVerticalStrut(10));
        main_box.add(input);
        main_box.add(Box.createVerticalStrut(10));
        main_box.add(buttons);
        csLabel.setPreferredSize(new Dimension(85, 30));
        input.add(csLabel);
        txtField=new JTextField();
        txtField.setPreferredSize(new Dimension(200, 30));
        txtField.setBackground(Color.white);
        input.add(txtField);
        leaveButton=new JButton("Leave");
        leaveButton.setPreferredSize(new Dimension(150, 30));
        buttons.add(leaveButton);
        leaveButton.addMouseListener(new MouseAdapter() {
            // Same teardown as closing the window.
            public void mouseClicked(MouseEvent e) {
                stomp_client.send(USER_LEFT, USER, username);
                stomp_client.send(CLIENT_LEFT, CLIENT, username);
                stomp_client.disconnect();
                System.exit(0);
            }
        });
        sendButton=new JButton("Send");
        sendButton.setPreferredSize(new Dimension(150, 30));
        buttons.add(sendButton);
        sendButton.addMouseListener(new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                send(txtField.getText());
                txtField.selectAll();
            }
        });
        clearButton=new JButton("Clear");
        clearButton.setPreferredSize(new Dimension(150, 30));
        clearButton.addMouseListener(new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                txtArea.setText("");
            }
        });
        buttons.add(clearButton);
        status.setForeground(Color.red);
        main_box.add(status);
        mainFrame.pack();
        mainFrame.setLocation(15, 25);
        Dimension main_frame_size=mainFrame.getSize();
        txtArea.setPreferredSize(new Dimension((int)(main_frame_size.width * 0.9), (int)(main_frame_size.height * 0.8)));
        mainFrame.setVisible(true);
        txtField.setFocusable(true);
        txtField.requestFocusInWindow();
        txtField.setToolTipText("type and then press enter to send");
        txtField.addActionListener(e -> {
            // Enter key in the text field sends the current line.
            String cmd=e.getActionCommand();
            if(cmd != null && !cmd.isEmpty()) {
                send(txtField.getText());
                txtField.selectAll();
            }
        });
        sendGetUsers();
    }
    /**
     * Connects the STOMP client, subscribes to all reserved destinations and
     * announces this client and user.
     */
    protected void connect() throws Exception {
        stomp_client.connect();
        // NOTE(review): USER_JOINED is sent here, before the subscriptions,
        // and again after them below — the first send looks redundant;
        // confirm whether both are really required.
        stomp_client.send(USER_JOINED, USER, username);
        stomp_client.subscribe(MESSAGES);
        stomp_client.subscribe(CLIENT_JOINED);
        stomp_client.subscribe(CLIENT_LEFT);
        stomp_client.subscribe(CLIENTS);
        stomp_client.subscribe(USER_JOINED);
        stomp_client.subscribe(USER_LEFT);
        stomp_client.subscribe(GET_USERS);
        stomp_client.subscribe(USERS);
        stomp_client.send(CLIENT_JOINED, CLIENT, username);
        stomp_client.send(USER_JOINED, USER, username);
    }
    /** Sends a chat line (prefixed with this user's name) to {@code /messages}. */
    protected void send(String msg) {
        try {
            String tmp=username + ": " + msg;
            byte[] buf=tmp.getBytes();
            stomp_client.send(MESSAGES, buf, 0, buf.length);
        }
        catch(Exception e) {
            System.err.println("Failed sending message: " + e);
        }
    }
    /** Asks all clients to publish their known user lists on {@code /users}. */
    public void sendGetUsers() {
        stomp_client.send(GET_USERS);
    }
    /**
     * Shows a transient status message for ~2 seconds. A new thread is used
     * so the AWT event thread is not blocked; the lock on {@code status}
     * serializes overlapping messages.
     */
    protected void showStatus(final String msg) {
        new Thread(() -> {
            synchronized(status) {
                status.setText(msg);
                Util.sleep(2000);
                status.setText("");
            }
        }).start();
    }
    /**
     * Callback for STOMP INFO frames: updates the known server endpoints and
     * the cluster view from the {@code view} / {@code endpoints} entries.
     */
    public void onInfo(Map<String, String> information) {
        // NOTE(review): uses the literal "view" rather than the VIEW constant
        // declared above — presumably the same value; consider unifying.
        String view=information.get("view");
        Collection<String> list;
        if(view != null) {
            list=Util.parseCommaDelimitedStrings(view);
            if(list != null) {
                num_servers=list.size();
                if(mainFrame != null)
                    setTitle();
                servers.clear();
                servers.addAll(list);
                newView(view);
            }
            else {
                // No parsable view: fall back to the raw endpoint list.
                String targets=information.get(ENDPOINTS);
                if(targets != null) {
                    list=Util.parseCommaDelimitedStrings(targets);
                    if(list != null) {
                        num_servers=list.size();
                        if(mainFrame != null)
                            setTitle();
                        servers.clear();
                        servers.addAll(list);
                    }
                }
            }
        }
    }
    /**
     * Callback for STOMP MESSAGE frames. Dispatches on the
     * {@code destination} header to the appropriate presence / chat handler.
     */
    public void onMessage(Map<String, String> headers, byte[] buf, int offset, int length) {
        String destination=headers.get(DESTINATION);
        if(destination == null)
            return;
        if(destination.equals(MESSAGES)) {
            // Plain chat text: the body already contains "user: message".
            showMessage(new String(buf, offset, length));
            return;
        }
        if(destination.equals(CLIENT_JOINED)) {
            String new_client=headers.get(CLIENT);
            if(new_client != null) {
                synchronized(clients) {
                    if(clients.add(new_client)) {
                        num_clients=clients.size();
                        setTitle();
                    }
                }
                // Reply with the full client list so the newcomer can sync up.
                stomp_client.send(CLIENTS, null, 0, 0, CLIENTS_KW, getAllClients());
            }
            return;
        }
        if(destination.equals(CLIENT_LEFT)) {
            String left_client=headers.get(CLIENT);
            if(left_client != null) {
                synchronized(clients) {
                    if(clients.remove(left_client)) {
                        num_clients=clients.size();
                        setTitle();
                    }
                }
            }
            return;
        }
        if(destination.equals(CLIENTS)) {
            String all_clients=headers.get(CLIENTS_KW);
            if(all_clients != null) {
                List<String> list=Util.parseCommaDelimitedStrings(all_clients);
                if(list != null) {
                    synchronized(clients) {
                        if(clients.addAll(list)) {
                            num_clients=clients.size();
                            setTitle();
                        }
                    }
                }
            }
            return;
        }
        if(destination.equals(USER_JOINED)) {
            String name=headers.get(USER);
            if(name != null)
                userJoined(name);
            return;
        }
        if(destination.equals(USER_LEFT)) {
            String name=headers.get(USER);
            if(name != null)
                userLeft(name);
            return;
        }
        if(destination.equals(GET_USERS)) {
            // Someone asked for the user list: publish ours.
            stomp_client.send(USERS, USERS_KW, usersToStr());
            return;
        }
        if(destination.equals(USERS)) {
            String tmp=headers.get(USERS_KW);
            if(tmp != null) {
                List<String> list=Util.parseCommaDelimitedStrings(tmp);
                if(list != null)
                    usersReceived(list);
            }
        }
    }
    /** Joins the known user names into a comma separated string. */
    private String usersToStr() {
        StringBuilder sb=new StringBuilder();
        boolean first=true;
        for(String user: users) {
            if(first)
                first=false;
            else
                sb.append(",");
            sb.append(user);
        }
        return sb.toString();
    }
    /** Refreshes the window title with the current server and client counts. */
    void setTitle() {
        if(mainFrame != null)
            mainFrame.setTitle(num_servers + " server(s), " + num_clients + " client(s)");
    }
    /** Returns the number of currently known clients (thread-safe). */
    int getNumberOfClients() {
        synchronized(clients) {
            return clients.size();
        }
    }
    /** Joins the known client identifiers into a comma separated string. */
    String getAllClients() {
        StringBuilder sb=new StringBuilder();
        boolean first=true;
        for(String client: clients) {
            if(first)
                first=false;
            else
                sb.append(",");
            sb.append(client);
        }
        return sb.toString();
    }
}
| 6,889 |
3,269 |
// Time: O(n)
// Space: O(n)
// Sum over all subarrays of (max - min), computed as
// (sum of subarray maxima) - (sum of subarray minima).
// Each sum is obtained with a monotonic index stack: an element nums[mid]
// is the extreme of exactly (mid - left) * (right - mid) subarrays, where
// left/right are the nearest "dominating" neighbours. The asymmetric pop
// conditions (<= for maxima, >= for minima) break ties between equal
// values consistently so every subarray is counted exactly once.
class Solution {
public:
    long long subArrayRanges(vector<int>& nums) {
        // popCmp decides when the stack top is dominated by the incoming
        // value; sentinel flushes the stack after the last element.
        const auto sumOfExtremes = [&nums](const auto& popCmp, const int sentinel) {
            int64_t total = 0;
            vector<int> idx;  // indices with monotone values
            const int n = static_cast<int>(size(nums));
            for (int right = 0; right <= n; ++right) {
                const int cur = (right < n) ? nums[right] : sentinel;
                while (!idx.empty() && popCmp(nums[idx.back()], cur)) {
                    const int64_t mid = idx.back(); idx.pop_back();
                    const int64_t left = idx.empty() ? -1 : idx.back();
                    total += static_cast<int64_t>(nums[mid]) * (mid - left) * (right - mid);
                }
                idx.push_back(right);
            }
            return total;
        };
        const int64_t maxSum = sumOfExtremes(less_equal<int>{}, numeric_limits<int>::max());
        const int64_t minSum = sumOfExtremes(greater_equal<int>{}, numeric_limits<int>::min());
        return maxSum - minSum;
    }
};
| 611 |
348 |
<filename>docs/data/leg-t1/018/01802028.json
{"nom":"Berry-Bouy","circ":"2ème circonscription","dpt":"Cher","inscrits":926,"abs":489,"votants":437,"blancs":7,"nuls":2,"exp":428,"res":[{"nuance":"MDM","nom":"Mme <NAME>","voix":166},{"nuance":"LR","nom":"Mme <NAME>","voix":79},{"nuance":"COM","nom":"<NAME>","voix":68},{"nuance":"FN","nom":"Mme <NAME>","voix":64},{"nuance":"SOC","nom":"Mme <NAME>","voix":23},{"nuance":"ECO","nom":"Mme <NAME>","voix":13},{"nuance":"DIV","nom":"Mme <NAME>","voix":5},{"nuance":"EXG","nom":"M. <NAME>","voix":4},{"nuance":"DIV","nom":"M. <NAME>","voix":3},{"nuance":"DIV","nom":"M. <NAME>","voix":3}]}
| 262 |
1,745 |
<filename>Source/Plugins/bsfNullRenderAPI/BsNullCommandBuffer.h
//************************************ bs::framework - Copyright 2018 <NAME> **************************************//
//*********** Licensed under the MIT license. See LICENSE.md for full terms. This notice is not to be removed. ***********//
#pragma once
#include "BsNullPrerequisites.h"
#include "RenderAPI/BsCommandBuffer.h"
#include "Managers/BsCommandBufferManager.h"
namespace bs { namespace ct
{
/** @addtogroup NullRenderAPI
* @{
*/
/** Handles creation of Null command buffers. See CommandBuffer. */
	/** Handles creation of Null command buffers. See CommandBuffer. */
	class NullCommandBufferManager final : public CommandBufferManager
	{
	public:
		/**
		 * @copydoc CommandBufferManager::createInternal()
		 *
		 * NOTE(review): only declared here — the definition presumably lives
		 * in the accompanying .cpp and returns a NullCommandBuffer; confirm.
		 */
		SPtr<CommandBuffer> createInternal(GpuQueueType type, UINT32 deviceIdx = 0, UINT32 queueIdx = 0,
			bool secondary = false) override;
	};
/** Command buffer implementation for the null render backend. */
	/** Command buffer implementation for the null render backend. */
	class NullCommandBuffer final : public CommandBuffer
	{
	private:
		// Only the manager may construct instances (private constructor +
		// friend declaration).
		friend class NullCommandBufferManager;

		// Forwards all arguments to the CommandBuffer base; this type adds
		// no state or behavior of its own.
		NullCommandBuffer(GpuQueueType type, UINT32 deviceIdx, UINT32 queueIdx, bool secondary)
			: CommandBuffer(type, deviceIdx, queueIdx, secondary)
		{ }
	};
/** @} */
}}
| 356 |
4,339 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.ml.composition.stacking;
import java.util.ArrayList;
import org.apache.ignite.ml.IgniteModel;
import org.apache.ignite.ml.environment.LearningEnvironmentBuilder;
import org.apache.ignite.ml.math.functions.IgniteBinaryOperator;
import org.apache.ignite.ml.math.functions.IgniteFunction;
import org.apache.ignite.ml.math.primitives.vector.Vector;
import org.apache.ignite.ml.trainers.DatasetTrainer;
/**
* {@link DatasetTrainer} with same type of input and output of submodels.
*
* @param <I> Type of submodels input.
* @param <O> Type of aggregator model output.
* @param <AM> Type of aggregator model.
* @param <L> Type of labels.
*/
public class SimpleStackedDatasetTrainer<I, O, AM extends IgniteModel<I, O>, L> extends StackedDatasetTrainer<I, I, O, AM, L> {
    /**
     * Construct instance of this class.
     *
     * @param aggregatingTrainer Aggregator trainer.
     * @param aggregatingInputMerger Function used to merge submodels outputs into one.
     * @param submodelInput2AggregatingInputConverter Function used to convert input of submodel to output of submodel
     * this function is used if user chooses to keep original features.
     * @param vector2SubmodelInputConverter Function used to convert a {@link Vector} into submodel input.
     * @param submodelOutput2VectorConverter Function used to convert submodel output into a {@link Vector}.
     */
    public SimpleStackedDatasetTrainer(DatasetTrainer<AM, L> aggregatingTrainer,
        IgniteBinaryOperator<I> aggregatingInputMerger,
        IgniteFunction<I, I> submodelInput2AggregatingInputConverter,
        IgniteFunction<Vector, I> vector2SubmodelInputConverter,
        IgniteFunction<I, Vector> submodelOutput2VectorConverter) {
        super(aggregatingTrainer,
            aggregatingInputMerger,
            submodelInput2AggregatingInputConverter,
            new ArrayList<>(),
            vector2SubmodelInputConverter,
            submodelOutput2VectorConverter);
    }

    /**
     * Construct instance of this class.
     *
     * @param aggregatingTrainer Aggregator trainer.
     * @param aggregatingInputMerger Function used to merge submodels outputs into one.
     */
    public SimpleStackedDatasetTrainer(DatasetTrainer<AM, L> aggregatingTrainer,
        IgniteBinaryOperator<I> aggregatingInputMerger) {
        super(aggregatingTrainer, aggregatingInputMerger, IgniteFunction.identity());
    }

    /**
     * Constructs instance of this class.
     */
    public SimpleStackedDatasetTrainer() {
    }

    //TODO: IGNITE-10441 -- Look for options to avoid boilerplate overrides.
    // The overrides below exist only to narrow the return type: the
    // superclass methods return the base StackedDatasetTrainer type, so each
    // override re-casts the result to this subclass to keep fluent chaining
    // type-safe for callers (covariant-return emulation via unchecked casts).

    /** {@inheritDoc} */
    @Override public <M1 extends IgniteModel<I, I>> SimpleStackedDatasetTrainer<I, O, AM, L> addTrainer(
        DatasetTrainer<M1, L> trainer) {
        return (SimpleStackedDatasetTrainer<I, O, AM, L>)super.addTrainer(trainer);
    }

    /** {@inheritDoc} */
    @Override public SimpleStackedDatasetTrainer<I, O, AM, L> withAggregatorTrainer(
        DatasetTrainer<AM, L> aggregatorTrainer) {
        return (SimpleStackedDatasetTrainer<I, O, AM, L>)super.withAggregatorTrainer(aggregatorTrainer);
    }

    /** {@inheritDoc} */
    @Override public SimpleStackedDatasetTrainer<I, O, AM, L> withOriginalFeaturesDropped() {
        return (SimpleStackedDatasetTrainer<I, O, AM, L>)super.withOriginalFeaturesDropped();
    }

    /** {@inheritDoc} */
    @Override public SimpleStackedDatasetTrainer<I, O, AM, L> withOriginalFeaturesKept(
        IgniteFunction<I, I> submodelInput2AggregatingInputConverter) {
        return (SimpleStackedDatasetTrainer<I, O, AM, L>)super.withOriginalFeaturesKept(
            submodelInput2AggregatingInputConverter);
    }

    /** {@inheritDoc} */
    @Override public SimpleStackedDatasetTrainer<I, O, AM, L> withAggregatorInputMerger(IgniteBinaryOperator<I> merger) {
        return (SimpleStackedDatasetTrainer<I, O, AM, L>)super.withAggregatorInputMerger(merger);
    }

    /** {@inheritDoc} */
    @Override public SimpleStackedDatasetTrainer<I, O, AM, L> withEnvironmentBuilder(
        LearningEnvironmentBuilder envBuilder) {
        return (SimpleStackedDatasetTrainer<I, O, AM, L>)super.withEnvironmentBuilder(envBuilder);
    }

    /** {@inheritDoc} */
    @Override public <L1> SimpleStackedDatasetTrainer<I, O, AM, L1> withConvertedLabels(IgniteFunction<L1, L> new2Old) {
        return (SimpleStackedDatasetTrainer<I, O, AM, L1>)super.withConvertedLabels(new2Old);
    }

    /**
     * Keep original features using {@link IgniteFunction#identity()} as submodelInput2AggregatingInputConverter.
     *
     * @return This object.
     */
    public SimpleStackedDatasetTrainer<I, O, AM, L> withOriginalFeaturesKept() {
        return (SimpleStackedDatasetTrainer<I, O, AM, L>)super.withOriginalFeaturesKept(IgniteFunction.identity());
    }
}
| 1,959 |
1,342 |
{
"databaseAuthVariableOverride": { "some#key": "some#val" },
"databaseURL": "https://hipster-chat.firebaseio.mock",
"projectId": "hipster-chat-mock",
"storageBucket": "hipster-chat.appspot.mock"
}
| 80 |
4,200 |
package com.dtolabs.rundeck.core.authentication.tokens;
import java.util.Date;
import java.util.Set;
/**
 * Mutable builder-style implementation of {@link AuthenticationToken}.
 * Every setter returns {@code this} so the fields can be populated fluently.
 */
public class SimpleTokenBuilder implements AuthenticationToken {

    // Backing data for the token being built.
    private String token;
    private Set<String> authRolesSet;
    private String uuid;
    private String creator;
    private String ownerName;
    private AuthTokenType type;
    private Date expiration;
    private String name;

    /** Raw token value. */
    @Override
    public String getToken() {
        return this.token;
    }

    public SimpleTokenBuilder setToken(String token) {
        this.token = token;
        return this;
    }

    /** Roles granted to the holder of this token. */
    @Override
    public Set<String> authRolesSet() {
        return this.authRolesSet;
    }

    public SimpleTokenBuilder setAuthRolesSet(Set<String> authRolesSet) {
        this.authRolesSet = authRolesSet;
        return this;
    }

    @Override
    public String getUuid() {
        return this.uuid;
    }

    public SimpleTokenBuilder setUuid(String uuid) {
        this.uuid = uuid;
        return this;
    }

    @Override
    public String getCreator() {
        return this.creator;
    }

    public SimpleTokenBuilder setCreator(String creator) {
        this.creator = creator;
        return this;
    }

    @Override
    public String getOwnerName() {
        return this.ownerName;
    }

    public SimpleTokenBuilder setOwnerName(String ownerName) {
        this.ownerName = ownerName;
        return this;
    }

    @Override
    public AuthTokenType getType() {
        return this.type;
    }

    public SimpleTokenBuilder setType(AuthTokenType type) {
        this.type = type;
        return this;
    }

    @Override
    public Date getExpiration() {
        return this.expiration;
    }

    public SimpleTokenBuilder setExpiration(Date expiration) {
        this.expiration = expiration;
        return this;
    }

    @Override
    public String getName() {
        return this.name;
    }

    public SimpleTokenBuilder setName(String name) {
        this.name = name;
        return this;
    }

    /** The printable form of this token is simply the raw token value. */
    @Override
    public String getPrintableToken() {
        return this.token;
    }
}
| 619 |
11,616 |
/**
* Copyright © 2016-2021 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thingsboard.server.service.rule;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.thingsboard.common.util.JacksonUtil;
import org.thingsboard.rule.engine.flow.TbRuleChainInputNode;
import org.thingsboard.rule.engine.flow.TbRuleChainInputNodeConfiguration;
import org.thingsboard.rule.engine.flow.TbRuleChainOutputNode;
import org.thingsboard.server.common.data.id.RuleChainId;
import org.thingsboard.server.common.data.id.RuleNodeId;
import org.thingsboard.server.common.data.id.TenantId;
import org.thingsboard.server.common.data.relation.EntityRelation;
import org.thingsboard.server.common.data.rule.RuleChain;
import org.thingsboard.server.common.data.rule.RuleChainMetaData;
import org.thingsboard.server.common.data.rule.RuleChainOutputLabelsUsage;
import org.thingsboard.server.common.data.rule.RuleChainUpdateResult;
import org.thingsboard.server.common.data.rule.RuleNode;
import org.thingsboard.server.common.data.rule.RuleNodeUpdateResult;
import org.thingsboard.server.dao.relation.RelationService;
import org.thingsboard.server.dao.rule.RuleChainService;
import org.thingsboard.server.queue.util.TbCoreComponent;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
/**
 * Default {@link TbRuleChainService} implementation: inspects rule chain
 * metadata for "output" nodes, finds where those output labels are used by
 * "rule chain input" nodes in other chains, and keeps the relation labels in
 * sync when output nodes are renamed.
 */
@RequiredArgsConstructor
@Service
@TbCoreComponent
@Slf4j
public class DefaultTbRuleChainService implements TbRuleChainService {

    private final RuleChainService ruleChainService;
    private final RelationService relationService;

    /**
     * Collects the (sorted, distinct) names of all output nodes of the given
     * rule chain; an output node's name acts as its output label.
     */
    @Override
    public Set<String> getRuleChainOutputLabels(TenantId tenantId, RuleChainId ruleChainId) {
        RuleChainMetaData metaData = ruleChainService.loadRuleChainMetaData(tenantId, ruleChainId);
        Set<String> outputLabels = new TreeSet<>();
        for (RuleNode ruleNode : metaData.getNodes()) {
            if (isOutputRuleNode(ruleNode)) {
                outputLabels.add(ruleNode.getName());
            }
        }
        return outputLabels;
    }

    /**
     * Finds every "rule chain input" node (in any chain of the tenant) that
     * points at the given rule chain, and reports which output labels each of
     * those nodes consumes (via its outgoing relation types).
     */
    @Override
    public List<RuleChainOutputLabelsUsage> getOutputLabelUsage(TenantId tenantId, RuleChainId ruleChainId) {
        List<RuleNode> ruleNodes = ruleChainService.findRuleNodesByTenantIdAndType(tenantId, TbRuleChainInputNode.class.getName(), ruleChainId.getId().toString());
        Map<RuleChainId, String> ruleChainNamesCache = new HashMap<>();
        // Additional filter, "just in case" the structure of the JSON configuration will change.
        var filteredRuleNodes = ruleNodes.stream().filter(node -> {
            try {
                TbRuleChainInputNodeConfiguration configuration = JacksonUtil.treeToValue(node.getConfiguration(), TbRuleChainInputNodeConfiguration.class);
                return ruleChainId.getId().toString().equals(configuration.getRuleChainId());
            } catch (Exception e) {
                log.warn("[{}][{}] Failed to decode rule node configuration", tenantId, ruleChainId, e);
                return false;
            }
        }).collect(Collectors.toList());
        return filteredRuleNodes.stream()
                .map(ruleNode -> {
                    // One usage record per input node; its labels are the
                    // types of the node's outgoing relations.
                    RuleChainOutputLabelsUsage usage = new RuleChainOutputLabelsUsage();
                    usage.setRuleNodeId(ruleNode.getId());
                    usage.setRuleNodeName(ruleNode.getName());
                    usage.setRuleChainId(ruleNode.getRuleChainId());
                    List<EntityRelation> relations = ruleChainService.getRuleNodeRelations(tenantId, ruleNode.getId());
                    if (relations != null && !relations.isEmpty()) {
                        usage.setLabels(relations.stream().map(EntityRelation::getType).collect(Collectors.toSet()));
                    }
                    return usage;
                })
                .filter(usage -> usage.getLabels() != null)
                .peek(usage -> {
                    // Resolve (and cache) the owning rule chain's display name.
                    String ruleChainName = ruleChainNamesCache.computeIfAbsent(usage.getRuleChainId(),
                            id -> ruleChainService.findRuleChainById(tenantId, id).getName());
                    usage.setRuleChainName(ruleChainName);
                })
                .sorted(Comparator
                        .comparing(RuleChainOutputLabelsUsage::getRuleChainName)
                        .thenComparing(RuleChainOutputLabelsUsage::getRuleNodeName))
                .collect(Collectors.toList());
    }

    /**
     * After a rule chain update, detects output nodes that were renamed and
     * propagates the rename to the relation labels of input nodes in other
     * chains. Returns the rule chains that were touched.
     */
    @Override
    public List<RuleChain> updateRelatedRuleChains(TenantId tenantId, RuleChainId ruleChainId, RuleChainUpdateResult result) {
        Set<RuleChainId> ruleChainIds = new HashSet<>();
        log.debug("[{}][{}] Going to update links in related rule chains", tenantId, ruleChainId);
        if (result.getUpdatedRuleNodes() == null || result.getUpdatedRuleNodes().isEmpty()) {
            return Collections.emptyList();
        }

        Set<String> oldLabels = new HashSet<>();
        Set<String> newLabels = new HashSet<>();
        Set<String> confusedLabels = new HashSet<>();
        Map<String, String> updatedLabels = new HashMap<>();
        for (RuleNodeUpdateResult update : result.getUpdatedRuleNodes()) {
            var oldNode = update.getOldRuleNode();
            var newNode = update.getNewRuleNode();
            if (isOutputRuleNode(newNode)) {
                try {
                    oldLabels.add(oldNode.getName());
                    newLabels.add(newNode.getName());
                    if (!oldNode.getName().equals(newNode.getName())) {
                        String oldLabel = oldNode.getName();
                        String newLabel = newNode.getName();
                        if (updatedLabels.containsKey(oldLabel) && !updatedLabels.get(oldLabel).equals(newLabel)) {
                            confusedLabels.add(oldLabel);
                            log.warn("[{}][{}] Can't automatically rename the label from [{}] to [{}] due to conflict [{}]", tenantId, ruleChainId, oldLabel, newLabel, updatedLabels.get(oldLabel));
                        } else {
                            updatedLabels.put(oldLabel, newLabel);
                        }
                    }
                } catch (Exception e) {
                    log.warn("[{}][{}][{}] Failed to decode rule node configuration", tenantId, ruleChainId, newNode.getId(), e);
                }
            }
        }
        // Remove all output labels that are renamed to two or more different labels, since we don't which new label to use;
        confusedLabels.forEach(updatedLabels::remove);
        // Remove all output labels that are renamed but still present in the rule chain;
        newLabels.forEach(updatedLabels::remove);
        if (!oldLabels.equals(newLabels)) {
            ruleChainIds.addAll(updateRelatedRuleChains(tenantId, ruleChainId, updatedLabels));
        }
        return ruleChainIds.stream().map(id -> ruleChainService.findRuleChainById(tenantId, id)).collect(Collectors.toList());
    }

    /**
     * Applies the old-label -> new-label rename map to every input node that
     * uses this rule chain; returns the ids of the chains that were modified.
     */
    public Set<RuleChainId> updateRelatedRuleChains(TenantId tenantId, RuleChainId ruleChainId, Map<String, String> labelsMap) {
        Set<RuleChainId> updatedRuleChains = new HashSet<>();
        List<RuleChainOutputLabelsUsage> usageList = getOutputLabelUsage(tenantId, ruleChainId);
        for (RuleChainOutputLabelsUsage usage : usageList) {
            labelsMap.forEach((oldLabel, newLabel) -> {
                if (usage.getLabels().contains(oldLabel)) {
                    updatedRuleChains.add(usage.getRuleChainId());
                    renameOutgoingLinks(tenantId, usage.getRuleNodeId(), oldLabel, newLabel);
                }
            });
        }
        return updatedRuleChains;
    }

    /**
     * Renames matching outgoing relations of a rule node by deleting the old
     * relation and re-saving it under the new type (relation type is part of
     * the key, so an in-place update is not possible).
     */
    private void renameOutgoingLinks(TenantId tenantId, RuleNodeId ruleNodeId, String oldLabel, String newLabel) {
        List<EntityRelation> relations = ruleChainService.getRuleNodeRelations(tenantId, ruleNodeId);
        for (EntityRelation relation : relations) {
            if (relation.getType().equals(oldLabel)) {
                relationService.deleteRelation(tenantId, relation);
                relation.setType(newLabel);
                relationService.saveRelation(tenantId, relation);
            }
        }
    }

    private boolean isOutputRuleNode(RuleNode ruleNode) {
        return isRuleNode(ruleNode, TbRuleChainOutputNode.class);
    }

    // NOTE(review): not referenced within this class — presumably kept for
    // symmetry with isOutputRuleNode; confirm before removing.
    private boolean isInputRuleNode(RuleNode ruleNode) {
        return isRuleNode(ruleNode, TbRuleChainInputNode.class);
    }

    // A node matches when its type string equals the given class' FQN.
    private boolean isRuleNode(RuleNode ruleNode, Class<?> clazz) {
        return ruleNode != null && ruleNode.getType().equals(clazz.getName());
    }
}
| 3,811 |
359 |
/*
Copyright 2019 The Matrix.org Foundation C.I.C
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#import <Foundation/Foundation.h>
#import "MXHTTPClient.h"
#import "MXInvite3PID.h"
#import "MXJSONModels.h"
#import "MXCredentials.h"
#import "MXIdentityServerRestClient.h"
@class MXServiceTerms;
NS_ASSUME_NONNULL_BEGIN
#pragma mark - Defines & Constants
/**
 Notification name sent when "M_TERMS_NOT_SIGNED" error occurred. Provides identity server and identity server access token.
Give an associated userInfo dictionary of type NSDictionary<NSString*, NSString*> with following keys: "userId", "identityServer", "accessToken". Use constants below for convenience.
*/
extern NSString *const MXIdentityServiceTermsNotSignedNotification;
/**
Notification name to send when MXServiceTerms have been accepted. Must provide the identity server whose terms were accepted.
Give an associated userInfo dictionary of type NSDictionary<NSString*, NSString*> with following key: "identityServer". Use
`MXIdentityServiceNotificationIdentityServerKey` for convenience.
*/
extern NSString *const MXIdentityServiceTermsAcceptedNotification;
/**
Notification name sent when access token change. Provides user id, identity server and access token.
Give an associated userInfo dictionary of type NSDictionary<NSString*, NSString*> with following keys: "userId", "identityServer", "accessToken". Use constants below for convenience.
*/
extern NSString *const MXIdentityServiceDidChangeAccessTokenNotification;
/**
userInfo dictionary keys used by `MXIdentityServiceTermsNotSignedNotification` and `MXIdentityServiceDidChangeAccessTokenNotification`.
*/
extern NSString *const MXIdentityServiceNotificationUserIdKey;
extern NSString *const MXIdentityServiceNotificationIdentityServerKey;
extern NSString *const MXIdentityServiceNotificationAccessTokenKey;
@class MXRestClient;
/**
`MXIdentityService` manages requests to Matrix identity servers and abstract identity server REST client token and version management.
*/
@interface MXIdentityService : NSObject

#pragma mark - Properties

/**
 The identity server URL.
 */
@property (nonatomic, readonly) NSString *identityServer;

/**
 Whether or not the terms for this identity server have been agreed to, or nil if unknown.
 This value is automatically updated by MXSession as the user account data changes.

 NOTE(review): the property type is BOOL, so "nil if unknown" cannot apply here — confirm
 whether NO is meant to cover both "not agreed" and "unknown".
 */
@property (nonatomic) BOOL areAllTermsAgreed;

/**
 The queue on which asynchronous response blocks are called.
 Default is dispatch_get_main_queue().
 */
@property (nonatomic, strong) dispatch_queue_t completionQueue;

#pragma mark - Setup

/**
 Create an instance based on identity server URL.

 @param identityServer The identity server URL.
 @param accessToken the identity server access token. Nil if not known yet.
 @param homeserverRestClient The homeserver REST client.

 @return a MXIdentityService instance.
 */
- (instancetype)initWithIdentityServer:(NSString *)identityServer accessToken:(nullable NSString*)accessToken andHomeserverRestClient:(MXRestClient*)homeserverRestClient NS_REFINED_FOR_SWIFT;

/**
 Create an instance based on identity server URL.

 @param identityServerRestClient The identity server REST client.
 @param homeserverRestClient The homeserver REST client.

 @return a MXIdentityService instance.
 */
- (instancetype)initWithIdentityServerRestClient:(MXIdentityServerRestClient*)identityServerRestClient andHomeserverRestClient:(MXRestClient*)homeserverRestClient;

#pragma mark - Access token

/**
 Get the access token to use on the identity server.

 The method triggers an /account request in order to force the setup of the
 access token, which can lead to a "M_TERMS_NOT_SIGNED" error.

 @param success A block object called when the operation succeeds.
 @param failure A block object called when the operation fails.

 @return a MXHTTPOperation instance. Nil if the access token is already known
 and no HTTP request is required.
 */
- (nullable MXHTTPOperation *)accessTokenWithSuccess:(void (^)(NSString * _Nullable accessToken))success
                                             failure:(void (^)(NSError *error))failure NS_REFINED_FOR_SWIFT;

#pragma mark -

#pragma mark Association lookup

/**
 Retrieve user matrix ids from a list of 3rd party ids.

 @param threepids the list of 3rd party ids: [[<(MX3PIDMedium)media1>, <(NSString*)address1>], [<(MX3PIDMedium)media2>, <(NSString*)address2>], ...].
 @param success A block object called when the operation succeeds. It provides the array of the discovered users returned by the identity server.
 [[<(MX3PIDMedium)media>, <(NSString*)address>, <(NSString*)userId>], ...].
 @param failure A block object called when the operation fails.

 @return a MXHTTPOperation instance.
 */
- (MXHTTPOperation*)lookup3pids:(NSArray*)threepids
                        success:(void (^)(NSArray *discoveredUsers))success
                        failure:(void (^)(NSError *error))failure NS_REFINED_FOR_SWIFT;

#pragma mark Establishing associations

/**
 Request the validation of an email address.

 The identity server will send an email to this address. The end user
 will have to click on the link it contains to validate the address.

 Use the returned sid to complete operations that require authenticated email
 like [MXRestClient add3PID:].

 @param email the email address to validate.
 @param clientSecret a secret key generated by the client. ([MXTools generateSecret] creates such key)
 @param sendAttempt the number of the attempt for the validation request. Increment this value to make the
 identity server resend the email. Keep it to retry the request in case the previous request
 failed.
 @param nextLink the link the validation page will automatically open. Can be nil

 @param success A block object called when the operation succeeds. It provides the id of the
 email validation session.
 @param failure A block object called when the operation fails.

 @return a MXHTTPOperation instance.
 */
- (MXHTTPOperation*)requestEmailValidation:(NSString*)email
                              clientSecret:(NSString*)clientSecret
                               sendAttempt:(NSUInteger)sendAttempt
                                  nextLink:(nullable NSString*)nextLink
                                   success:(void (^)(NSString *sid))success
                                   failure:(void (^)(NSError *error))failure NS_REFINED_FOR_SWIFT;

/**
 Request the validation of a phone number.

 The identity server will send a validation token by sms. The end user
 will have to send this token by using [MXRestClient submit3PIDValidationToken].

 Use the returned sid to complete operations that require authenticated phone number
 like [MXRestClient add3PID:].

 @param phoneNumber the phone number (in international or national format).
 @param countryCode the ISO 3166-1 country code representation (required when the phone number is in national format).
 @param clientSecret a secret key generated by the client. ([MXTools generateSecret] creates such key)
 @param sendAttempt the number of the attempt for the validation request. Increment this value to make the
 identity server resend the sms token. Keep it to retry the request in case the previous request
 failed.
 @param nextLink the link the validation page will automatically open. Can be nil

 @param success A block object called when the operation succeeds. It provides the id of the validation session and the msisdn.
 @param failure A block object called when the operation fails.

 @return a MXHTTPOperation instance.
 */
- (MXHTTPOperation*)requestPhoneNumberValidation:(NSString*)phoneNumber
                                     countryCode:(NSString*)countryCode
                                    clientSecret:(NSString*)clientSecret
                                     sendAttempt:(NSUInteger)sendAttempt
                                        nextLink:(nullable NSString *)nextLink
                                         success:(void (^)(NSString *sid, NSString *msisdn))success
                                         failure:(void (^)(NSError *error))failure NS_REFINED_FOR_SWIFT;

/**
 Submit the validation token received by an email or a sms.

 In case of success, the related third-party id has been validated.

 @param token the validation token.
 @param medium the type of the third-party id (see kMX3PIDMediumEmail, kMX3PIDMediumMSISDN).
 @param clientSecret the clientSecret used during the validation request.
 @param sid the validation session id returned by the server.

 @param success A block object called when the operation succeeds.
 @param failure A block object called when the operation fails.

 @return a MXHTTPOperation instance.
 */
- (MXHTTPOperation*)submit3PIDValidationToken:(NSString *)token
                                       medium:(NSString *)medium
                                 clientSecret:(NSString *)clientSecret
                                          sid:(NSString *)sid
                                      success:(void (^)(void))success
                                      failure:(void (^)(NSError *error))failure NS_REFINED_FOR_SWIFT;

#pragma mark Other

/**
 Check if there is an identity server endpoint running at the provided
 identity server address.

 @param success A block object called when the operation succeeds.
 @param failure A block object called when the operation fails.

 @return a MXHTTPOperation instance.
 */
- (MXHTTPOperation*)pingIdentityServer:(void (^)(void))success
                               failure:(void (^)(NSError *error))failure NS_REFINED_FOR_SWIFT;

/**
 Sign a 3PID URL.

 @param signUrl the URL that will be called for signing.

 @param success A block object called when the operation succeeds. It provides the signed data.
 @param failure A block object called when the operation fails.

 @return a MXHTTPOperation instance.
 */
- (MXHTTPOperation*)signUrl:(NSString*)signUrl
                    success:(void (^)(NSDictionary *thirdPartySigned))success
                    failure:(void (^)(NSError *error))failure NS_REFINED_FOR_SWIFT;

/**
 Gets information about the token's owner, such as the user ID for which it belongs.

 @param success A block object called when the operation succeeds. It provides the user ID.
 @param failure A block object called when the operation fails.

 @return a MXHTTPOperation instance.
 */
- (MXHTTPOperation*)accountWithSuccess:(void (^)(NSString *userId))success
                               failure:(void (^)(NSError *error))failure NS_REFINED_FOR_SWIFT;

@end
NS_ASSUME_NONNULL_END
| 3,508 |
1,737 |
<gh_stars>1000+
{
"include": ["src", "types"],
"compilerOptions": {
"target": "es5",
"module": "esnext",
"lib": ["dom", "esnext"],
"importHelpers": true,
"resolveJsonModule": true,
"importsNotUsedAsValues": "remove",
"declaration": false,
"noEmit": true,
"stripInternal": true,
"sourceMap": true,
"rootDir": "./src",
"moduleResolution": "node",
"baseUrl": "./",
"paths": {
"*": ["src/*", "node_modules/*"]
},
"jsx": "react",
"esModuleInterop": true
},
"typedocOptions": {
"module": "commonjs",
"target": "es5",
"out": "website/doc/",
"name": "WebCola",
"theme": "minimal"
}
}
| 310 |
1,144 |
package de.metas.i18n.impl;
/*
* #%L
* de.metas.util
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import de.metas.i18n.AdMessageKey;
import de.metas.i18n.IMsgBL;
import de.metas.i18n.ITranslatableString;
import de.metas.i18n.TranslatableStrings;
import lombok.NonNull;
/**
 * Plain {@link IMsgBL} implementation that performs no translation lookups.
 * <p>
 * Instead of resolving AD_Message translations it echoes back the message key
 * (plus language/parameters) as a deterministic string, which makes it suitable
 * for environments where no message repository is available.
 */
public class PlainMsgBL implements IMsgBL
{
	@Override
	public String getMsg(final String adLanguage, @NonNull final AdMessageKey message)
	{
		return adLanguage + "_" + message.toAD_Message();
	}

	@Override
	public String getMsg(final String adLanguage, @NonNull final AdMessageKey message, final Object[] params)
	{
		return adLanguage + "_" + message.toAD_Message() + "_" + Arrays.toString(params);
	}

	@Override
	public String getMsg(final String adLanguage, @NonNull final AdMessageKey message, final List<Object> params)
	{
		return adLanguage + "_" + message.toAD_Message() + "_" + params;
	}

	@Override
	public String getMsg(final Properties ctx, @NonNull final AdMessageKey adMessage)
	{
		return adMessage.toAD_Message();
	}

	@Override
	public String getMsg(final Properties ctx, @NonNull final AdMessageKey adMessage, final boolean text)
	{
		return adMessage.toAD_Message() + "_" + (text ? "Text" : "Tooltip");
	}

	@Override
	public String getMsg(final Properties ctx, @NonNull final AdMessageKey adMessage, final Object[] params)
	{
		if (params == null || params.length == 0)
		{
			return adMessage.toAD_Message();
		}
		// Use the AD_Message string for consistency with all other getMsg overloads
		// (previously the AdMessageKey object itself was concatenated here).
		return adMessage.toAD_Message() + "_" + Arrays.toString(params);
	}

	@Override
	public String getMsg(@NonNull final AdMessageKey adMessage, final List<Object> params)
	{
		if (params == null || params.isEmpty())
		{
			return adMessage.toAD_Message();
		}
		return adMessage.toAD_Message() + "_" + params;
	}

	@Override
	public Map<String, String> getMsgMap(final String adLanguage, final String prefix, final boolean removePrefix)
	{
		// no message repository available => nothing to return
		return ImmutableMap.of();
	}

	@Override
	public String parseTranslation(final Properties ctx, final String message)
	{
		return message;
	}

	@Override
	public String parseTranslation(final String adLanguage, final String message)
	{
		return message;
	}

	@Override
	public String translate(final Properties ctx, final String text)
	{
		return text;
	}

	@Override
	public String translate(final String adLanguage, final String text)
	{
		return text;
	}

	@Override
	public String translate(final Properties ctx, final String text, final boolean isSOTrx)
	{
		return text;
	}

	@Override
	public ITranslatableString translatable(final String text)
	{
		return TranslatableStrings.constant(text);
	}

	@Override
	public ITranslatableString getTranslatableMsgText(@NonNull final AdMessageKey adMessage, final Object... msgParameters)
	{
		if (msgParameters == null || msgParameters.length == 0)
		{
			return TranslatableStrings.constant(adMessage.toAD_Message());
		}
		else
		{
			return TranslatableStrings.constant(adMessage.toAD_Message() + " - " + Joiner.on(", ").join(msgParameters));
		}
	}

	@Override
	public void cacheReset()
	{
		// nothing to reset; this implementation holds no caches
	}
}
| 1,241 |
463 |
<filename>src/app/rv-sys.cc
//
// rv-sys.cc
//
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <cassert>
#include <cinttypes>
#include <csignal>
#include <csetjmp>
#include <cerrno>
#include <cmath>
#include <cctype>
#include <cwchar>
#include <climits>
#include <cfloat>
#include <cfenv>
#include <limits>
#include <array>
#include <string>
#include <vector>
#include <algorithm>
#include <memory>
#include <random>
#include <deque>
#include <map>
#include <thread>
#include <mutex>
#include <chrono>
#include <condition_variable>
#include <atomic>
#include <type_traits>
#include "dense_hash_map"
#include <poll.h>
#include <fcntl.h>
#include <unistd.h>
#include <termios.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/time.h>
#include "host-endian.h"
#include "types.h"
#include "fmt.h"
#include "bits.h"
#include "sha512.h"
#include "format.h"
#include "meta.h"
#include "util.h"
#include "color.h"
#include "host.h"
#include "cmdline.h"
#include "codec.h"
#include "elf.h"
#include "elf-file.h"
#include "elf-format.h"
#include "strings.h"
#include "disasm.h"
#include "alu.h"
#include "fpu.h"
#include "pte.h"
#include "pma.h"
#include "amo.h"
#include "processor-logging.h"
#include "processor-base.h"
#include "processor-impl.h"
#include "mmu-memory.h"
#include "tlb-soft.h"
#include "mmu-soft.h"
#include "interp.h"
#include "processor-model.h"
#include "queue.h"
#include "console.h"
#include "device-rom-boot.h"
#include "device-rom-sbi.h"
#include "device-rom-string.h"
#include "device-config.h"
#include "device-rtc.h"
#include "device-timer.h"
#include "device-plic.h"
#include "device-uart.h"
#include "device-mipi.h"
#include "device-gpio.h"
#include "device-rand.h"
#include "device-htif.h"
#include "processor-histogram.h"
#include "processor-priv-1.9.h"
#include "debug-cli.h"
#include "processor-runloop.h"
#if defined (ENABLE_GPERFTOOL)
#include "gperftools/profiler.h"
#endif
using namespace riscv;
/* Parameterized privileged soft-mmu processor models */
using priv_emulator_rv32imafdc = processor_runloop<processor_privileged<processor_rv32imafdc_model<decode,processor_priv_rv32imafd,mmu_soft_rv32>>>;
using priv_emulator_rv64imafdc = processor_runloop<processor_privileged<processor_rv64imafdc_model<decode,processor_priv_rv64imafd,mmu_soft_rv64>>>;
/* environment variables */
/* environment variables passed through to the guest (prefix match, e.g. "TERM=") */
static const char* allowed_env_vars[] = {
	"TERM=",
	nullptr
};

/* Return true when `var` ("NAME=value") starts with one of the allowed prefixes. */
static bool allow_env_var(const char *var)
{
	for (const char **allowed = allowed_env_vars; *allowed != nullptr; ++allowed) {
		if (std::strncmp(*allowed, var, std::strlen(*allowed)) == 0) {
			return true;
		}
	}
	return false;
}
/* RISC-V Emulator */
struct rv_emulator
{
	/*
	  Simple ABI/AEE RISC-V emulator that uses a machine generated interpreter
	  created by parse-meta using the C-pseudo code in meta/instructions

	  Currently only a small number of syscalls are implemented

	  privileged emulator with soft-mmu is a work in progress

	  (ABI) application binary interface
	  (AEE) application execution environment
	*/

	static const uintmax_t default_ram_base = 0x80000000ULL; /* 2GiB */
	static const uintmax_t default_ram_size = 0x40000000ULL; /* 1GiB */

	elf_file elf;                       /* parsed ELF boot image (unused for --binary boot) */
	host_cpu &cpu;                      /* host CPU singleton (features, entropy) */
	int proc_logs = 0;                  /* bitmask of proc_log_* flags */
	bool help_or_error = false;         /* print usage and exit */
	addr_t map_physical = 0;            /* optional physical address to map the executable at */
	s64 ram_boot = 0;                   /* 0 = ELF boot; 32/64 = raw binary boot for RV32/RV64 */
	uint64_t initial_seed = 0;          /* seed for register randomisation (0 = random) */
	std::string boot_filename;
	std::string stats_dirname;
	std::vector<std::string> host_cmdline;
	std::vector<std::string> host_env;

	rv_emulator() : cpu(host_cpu::get_instance()) {}

	/* Translate ELF segment flags (PF_*) into mmap() protection bits. */
	static const int elf_p_flags_mmap(int v)
	{
		int prot = 0;
		if (v & PF_X) prot |= PROT_EXEC;
		if (v & PF_W) prot |= PROT_WRITE;
		if (v & PF_R) prot |= PROT_READ;
		return prot;
	}

	/* Translate ELF segment flags (PF_*) into emulator PMA protection bits. */
	static const int elf_pma_flags(int v)
	{
		int prot = 0;
		if (v & PF_X) prot |= pma_prot_execute;
		if (v & PF_W) prot |= pma_prot_write;
		if (v & PF_R) prot |= pma_prot_read;
		return prot;
	}

	/* Map ELF load segments into privileged MMU address space */
	template <typename P>
	void map_load_segment_priv(P &proc, const char* filename, Elf64_Phdr &phdr, addr_t map_addr)
	{
		int fd = open(filename, O_RDONLY);
		if (fd < 0) {
			panic("map_executable: error: open: %s: %s", filename, strerror(errno));
		}
		addr_t map_delta = phdr.p_offset & (page_size-1);
		addr_t map_offset = phdr.p_offset - map_delta;
		/* honour the caller-requested mapping address; previously this parameter
		 * was ignored and phdr.p_vaddr was always used, which made --map-physical
		 * a no-op for the actual mappings */
		addr_t map_vaddr = map_addr - map_delta;
		addr_t map_len = round_up(phdr.p_memsz + map_delta, page_size);
		void *addr = mmap(nullptr, map_len,
			elf_p_flags_mmap(phdr.p_flags), MAP_PRIVATE, fd, map_offset);
		close(fd);
		if (addr == MAP_FAILED) {
			panic("map_executable: error: mmap: %s: %s", filename, strerror(errno));
		}

		/* zero bss: exactly p_memsz - p_filesz bytes per the ELF spec
		 * (the previous code subtracted one byte too many, leaving the
		 * last byte of bss uninitialised) */
		if ((phdr.p_flags & PF_W) && phdr.p_memsz > phdr.p_filesz) {
			memset((void*)((uintptr_t)addr + phdr.p_filesz), 0, phdr.p_memsz - phdr.p_filesz);
		}

		/* add the mmap to the emulator soft_mmu */
		proc.mmu.mem->add_mmap(map_vaddr, addr_t(addr), map_len,
			pma_type_main | elf_pma_flags(phdr.p_flags));
	}

	/* Parse options, capture the boot filename, filter the host environment
	 * and (for ELF boot) load the ELF headers. */
	void parse_commandline(int argc, const char* argv[], const char* envp[])
	{
		cmdline_option options[] =
		{
			{ "-l", "--log-instructions", cmdline_arg_type_none,
				"Log Instructions",
				[&](std::string s) { return (proc_logs |= (proc_log_inst | proc_log_trap)); } },
			{ "-o", "--log-operands", cmdline_arg_type_none,
				"Log Instructions and Operands",
				[&](std::string s) { return (proc_logs |= (proc_log_inst | proc_log_trap | proc_log_operands)); } },
			{ "-O", "--log-mmio", cmdline_arg_type_none,
				"Log Memory Mapped IO",
				[&](std::string s) { return (proc_logs |= proc_log_mmio); } },
			{ "-m", "--log-memory-map", cmdline_arg_type_none,
				"Log Memory Map Information",
				[&](std::string s) { return (proc_logs |= proc_log_memory); } },
			{ "-M", "--log-mmode-csr", cmdline_arg_type_none,
				"Log Machine Control and Status Registers",
				[&](std::string s) { return (proc_logs |= proc_log_csr_mmode); } },
			{ "-S", "--log-smode-csr", cmdline_arg_type_none,
				"Log Supervisor Control and Status Registers",
				[&](std::string s) { return (proc_logs |= proc_log_csr_smode); } },
			{ "-r", "--log-registers", cmdline_arg_type_none,
				"Log Registers (defaults to integer registers)",
				[&](std::string s) { return (proc_logs |= proc_log_int_reg); } },
			{ "-v", "--log-pagewalks", cmdline_arg_type_none,
				"Log Pagewalks",
				[&](std::string s) { return (proc_logs |= proc_log_pagewalk); } },
			{ "-c", "--log-config", cmdline_arg_type_none,
				"Log Config",
				[&](std::string s) { return (proc_logs |= proc_log_config); } },
			{ "-t", "--log-traps", cmdline_arg_type_none,
				"Log Traps",
				[&](std::string s) { return (proc_logs |= proc_log_trap); } },
			{ "-E", "--log-exit-stats", cmdline_arg_type_none,
				"Log Registers and Statistics at Exit",
				[&](std::string s) { return (proc_logs |= proc_log_exit_log_stats); } },
			{ "-D", "--save-exit-stats", cmdline_arg_type_string,
				"Save Registers and Statistics at Exit",
				[&](std::string s) { stats_dirname = s; return (proc_logs |= proc_log_exit_save_stats); } },
			{ "-P", "--pc-usage-histogram", cmdline_arg_type_none,
				"Record program counter usage",
				[&](std::string s) { return (proc_logs |= proc_log_hist_pc); } },
			{ "-R", "--register-usage-histogram", cmdline_arg_type_none,
				"Record register usage",
				[&](std::string s) { return (proc_logs |= proc_log_hist_reg); } },
			{ "-I", "--instruction-usage-histogram", cmdline_arg_type_none,
				"Record instruction usage",
				[&](std::string s) { return (proc_logs |= proc_log_hist_inst); } },
			{ "-d", "--debug", cmdline_arg_type_none,
				"Start up in debugger",
				[&](std::string s) { return (proc_logs |= proc_log_ebreak_cli); } },
			{ "-T", "--debug-trap", cmdline_arg_type_none,
				"Start up in debugger and enter debugger on trap",
				[&](std::string s) { return (proc_logs |= (proc_log_ebreak_cli | proc_log_trap_cli)); } },
			{ "-x", "--no-pseudo", cmdline_arg_type_none,
				"Disable Pseudoinstruction decoding",
				[&](std::string s) { return (proc_logs |= proc_log_no_pseudo); } },
			{ "-p", "--map-physical", cmdline_arg_type_string,
				"Map executable at physical address",
				[&](std::string s) { return parse_integral(s, map_physical); } },
			{ "-b", "--binary", cmdline_arg_type_string,
				"Boot Binary ( 32, 64 )",
				[&](std::string s) { return parse_integral(s, ram_boot); } },
			{ "-s", "--seed", cmdline_arg_type_string,
				"Random seed",
				[&](std::string s) { initial_seed = strtoull(s.c_str(), nullptr, 10); return true; } },
			{ "-h", "--help", cmdline_arg_type_none,
				"Show help",
				[&](std::string s) { return (help_or_error = true); } },
			{ nullptr, nullptr, cmdline_arg_type_none, nullptr, nullptr }
		};

		auto result = cmdline_option::process_options(options, argc, argv);
		if (!result.second) {
			help_or_error = true;
		} else if (result.first.size() < 1 && !help_or_error) {
			printf("%s: wrong number of arguments\n", argv[0]);
			help_or_error = true;
		}
		if (help_or_error) {
			printf("usage: %s [<options>] <elf_file>\n", argv[0]);
			cmdline_option::print_options(options);
			exit(9);
		}

		/* get command line options */
		boot_filename = result.first[0];
		for (size_t i = 0; i < result.first.size(); i++) {
			host_cmdline.push_back(result.first[i]);
		}

		/* filter host environment */
		for (const char** env = envp; *env != 0; env++) {
			if (allow_env_var(*env)) {
				host_env.push_back(*env);
			}
		}

		/* load ELF */
		if (ram_boot == 0) {
			elf.load(boot_filename, elf_load_headers);
		}
	}

	/* Start the executable with the given privileged processor template */
	template <typename P>
	void start_priv()
	{
		/* setup floating point exception mask */
		fenv_init();

		/* instantiate processor, set log options and program counter to entry address */
		P proc;
		proc.log = proc_logs;
		proc.mmu.mem->log = (proc.log & proc_log_memory);
		proc.stats_dirname = stats_dirname;

		/* randomise integer register state with 512 bits of entropy */
		proc.seed_registers(cpu, initial_seed, 512);

		/* ROM/FLASH exposed in the Config MMIO region */
		typename P::ux rom_base = 0, rom_size = 0, rom_entry = 0;

		if (ram_boot == 32 || ram_boot == 64) {
			struct stat statbuf;
			FILE *file = nullptr;
			memory_segment<typename P::ux> *segment = nullptr;

			/* Add 1GB RAM to the mmu */
			proc.mmu.mem->add_ram(default_ram_base, default_ram_size);
			addr_t ram_base = proc.mmu.mem->mpa_to_uva(segment, default_ram_base);
			if (segment == nullptr) {
				panic("unable to locate ram");
			}
			if (stat(boot_filename.c_str(), &statbuf) < 0) {
				panic("unable to stat boot file: %s", boot_filename.c_str());
			}
			if (!(file = fopen(boot_filename.c_str(), "r"))) {
				panic("unable to open boot file: %s", boot_filename.c_str());
			}
			ssize_t len = fread((void*)ram_base, 1, statbuf.st_size, file);
			if (len != statbuf.st_size) {
				panic("unable to read boot file: %s", boot_filename.c_str());
			}
			fclose(file);
			rom_base = default_ram_base;
			rom_size = statbuf.st_size;
			rom_entry = default_ram_base;
		} else {
			/* Find the ELF executable PT_LOAD segment base address */
			/* NOTE(review): this tests p_flags against segment *type* constants
			 * (PT_LOAD|PT_DYNAMIC == 3 == PF_X|PF_W), i.e. it actually selects
			 * executable or writable segments — confirm whether
			 * phdr.p_type == PT_LOAD was intended */
			for (size_t i = 0; i < elf.phdrs.size(); i++) {
				Elf64_Phdr &phdr = elf.phdrs[i];
				if (phdr.p_flags & (PT_LOAD | PT_DYNAMIC)) {
					if (rom_base == 0) rom_base = phdr.p_vaddr;
					rom_size = phdr.p_vaddr + phdr.p_memsz - rom_base;
				}
			}

			/* Map the ELF executable PT_LOAD segments into the emulator mmu */
			typename P::ux map_offset = map_physical == 0 ? 0 : rom_base - map_physical;
			for (size_t i = 0; i < elf.phdrs.size(); i++) {
				Elf64_Phdr &phdr = elf.phdrs[i];
				if (phdr.p_flags & (PT_LOAD | PT_DYNAMIC)) {
					map_load_segment_priv(proc, boot_filename.c_str(), phdr, phdr.p_vaddr - map_offset);
				}
			}
			rom_base = rom_base - map_offset;
			rom_entry = elf.ehdr.e_entry - map_offset;

			/* Add 1GB RAM to the mmu */
			proc.mmu.mem->add_ram(default_ram_base, default_ram_size);
		}

		/* Initialize interpreter */
		proc.init();
		proc.reset(); /* Reset code calls mapped ROM image */
		proc.device_config->num_harts = 1;
		proc.device_config->time_base = 1000000000;
		proc.device_config->rom_base = rom_base;
		proc.device_config->rom_size = rom_size;
		proc.device_config->rom_entry = rom_entry;
		proc.device_config->ram_base = default_ram_base;
		proc.device_config->ram_size = default_ram_size;

#if defined (ENABLE_GPERFTOOL)
		ProfilerStart("test-emulate.out");
#endif

		/*
		 * Run the CPU until it halts
		 *
		 * when --debug flag is present we start in the debugger
		 */
		proc.run(proc.log & proc_log_ebreak_cli
			? exit_cause_cli : exit_cause_continue);

#if defined (ENABLE_GPERFTOOL)
		ProfilerStop();
#endif
	}

	/* Start a specific processor implementation based on ELF type and ISA extensions */
	void exec()
	{
		/* check for RDTSCP on X86 */
#if X86_USE_RDTSCP
		if (cpu.caps.size() > 0 && cpu.caps.find("RDTSCP") == cpu.caps.end()) {
			panic("error: x86 host without RDTSCP. Recompile with -DX86_NO_RDTSCP");
		}
#endif

		/* execute */
		if (ram_boot == 0) {
			switch (elf.ei_class) {
				case ELFCLASS32:
					start_priv<priv_emulator_rv32imafdc>(); break;
				case ELFCLASS64:
					start_priv<priv_emulator_rv64imafdc>(); break;
			}
		}
		else if (ram_boot == 32) {
			start_priv<priv_emulator_rv32imafdc>();
		}
		else if (ram_boot == 64) {
			start_priv<priv_emulator_rv64imafdc>();
		} else {
			/* message previously said "--boot", but the option is -b/--binary */
			panic("--binary option must be 32 or 64");
		}
	}
};
/* program main */
int main(int argc, const char* argv[], const char* envp[])
{
rv_emulator emulator;
emulator.parse_commandline(argc, argv, envp);
emulator.exec();
return 0;
}
| 5,763 |
5,079 |
<reponame>kokosing/hue<gh_stars>1000+
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Interface Package Interfaces
"""
__docformat__ = 'restructuredtext'
from zope.interface.interface import Attribute
from zope.interface.interface import Interface
from zope.interface.declarations import implementer
_BLANK = u''
class IElement(Interface):
    """Objects that have basic documentation and tagged values.
    """

    __name__ = Attribute('__name__', 'The object name')
    __doc__ = Attribute('__doc__', 'The object doc string')

    def getTaggedValue(tag):
        """Returns the value associated with `tag`.

        Raise a `KeyError` if the tag isn't set.
        """

    def queryTaggedValue(tag, default=None):
        """Returns the value associated with `tag`.

        Return the default value if the tag isn't set.
        """

    def getTaggedValueTags():
        """Returns a list of all tags."""

    def setTaggedValue(tag, value):
        """Associates `value` with `tag`."""
class IAttribute(IElement):
    """Attribute descriptors"""

    interface = Attribute('interface',
                          'Stores the interface instance in which the '
                          'attribute is located.')
class IMethod(IAttribute):
    """Method attributes"""

    def getSignatureInfo():
        """Returns the signature information.

        This method returns a dictionary with the following keys:

        o `positional` - All positional arguments.

        o `required` - A list of all required arguments.

        o `optional` - A list of all optional arguments.

        o `varargs` - The name of the varargs argument.

        o `kwargs` - The name of the kwargs argument.
        """

    def getSignatureString():
        """Return a signature string suitable for inclusion in documentation.

        This method returns the function signature string. For example, if you
        have `func(a, b, c=1, d='f')`, then the signature string is `(a, b,
        c=1, d='f')`.
        """
class ISpecification(Interface):
    """Object Behavioral specifications"""

    def providedBy(object):
        """Test whether the interface is implemented by the object

        Return true if the object asserts that it implements the
        interface, including asserting that it implements an extended
        interface.
        """

    def implementedBy(class_):
        """Test whether the interface is implemented by instances of the class

        Return true if the class asserts that its instances implement the
        interface, including asserting that they implement an extended
        interface.
        """

    def isOrExtends(other):
        """Test whether the specification is or extends another
        """

    def extends(other, strict=True):
        """Test whether a specification extends another

        The specification extends other if it has other as a base
        interface or if one of its bases extends other.

        If strict is false, then the specification extends itself.
        """

    def weakref(callback=None):
        """Return a weakref to the specification

        This method is, regrettably, needed to allow weakrefs to be
        computed to security-proxied specifications. While the
        zope.interface package does not require zope.security or
        zope.proxy, it has to be able to coexist with it.
        """

    __bases__ = Attribute("""Base specifications

    A tuple of specifications from which this specification is
    directly derived.
    """)

    __sro__ = Attribute("""Specification-resolution order

    A tuple of the specification and all of its ancestor
    specifications from most specific to least specific.

    (This is similar to the method-resolution order for new-style classes.)
    """)

    __iro__ = Attribute("""Interface-resolution order

    A tuple of the specification's ancestor interfaces from
    most specific to least specific. The specification itself is
    included if it is an interface.

    (This is similar to the method-resolution order for new-style classes.)
    """)

    def get(name, default=None):
        """Look up the description for a name

        If the named attribute is not defined, the default is
        returned.
        """
class IInterface(ISpecification, IElement):
    """Interface objects

    Interface objects describe the behavior of an object by containing
    useful information about the object. This information includes:

    o Prose documentation about the object. In Python terms, this
      is called the "doc string" of the interface. In this element,
      you describe how the object works in prose language and any
      other useful information about the object.

    o Descriptions of attributes. Attribute descriptions include
      the name of the attribute and prose documentation describing
      the attribute's usage.

    o Descriptions of methods. Method descriptions can include:

      - Prose "doc string" documentation about the method and its
        usage.

      - A description of the method's arguments; how many arguments
        are expected, optional arguments and their default values,
        the position of arguments in the signature, whether the
        method accepts arbitrary arguments and whether the method
        accepts arbitrary keyword arguments.

    o Optional tagged data. Interface objects (and their attributes and
      methods) can have optional, application specific tagged data
      associated with them. Example uses for this are examples,
      security assertions, pre/post conditions, and other possible
      information you may want to associate with an Interface or its
      attributes.

    Not all of this information is mandatory. For example, you may
    only want the methods of your interface to have prose
    documentation and not describe the arguments of the method in
    exact detail. Interface objects are flexible and let you give or
    take any of these components.

    Interfaces are created with the Python class statement using
    either Interface.Interface or another interface, as in::

      from zope.interface import Interface

      class IMyInterface(Interface):
        '''Interface documentation'''

        def meth(arg1, arg2):
            '''Documentation for meth'''

        # Note that there is no self argument

      class IMySubInterface(IMyInterface):
        '''Interface documentation'''

        def meth2():
            '''Documentation for meth2'''

    You use interfaces in two ways:

    o You assert that your object implements the interfaces.

      There are several ways that you can assert that an object
      implements an interface:

      1. Call zope.interface.implements in your class definition.

      2. Call zope.interface.directlyProvides on your object.

      3. Call 'zope.interface.classImplements' to assert that instances
         of a class implement an interface.

         For example::

           from zope.interface import classImplements
           classImplements(some_class, some_interface)

         This approach is useful when it is not an option to modify
         the class source. Note that this doesn't affect what the
         class itself implements, but only what its instances
         implement.

    o You query interface meta-data. See the IInterface methods and
      attributes for details.
    """

    def names(all=False):
        """Get the interface attribute names

        Return a sequence of the names of the attributes, including
        methods, included in the interface definition.

        Normally, only directly defined attributes are included. If
        a true positional or keyword argument is given, then
        attributes defined by base classes will be included.
        """

    def namesAndDescriptions(all=False):
        """Get the interface attribute names and descriptions

        Return a sequence of the names and descriptions of the
        attributes, including methods, as name-value pairs, included
        in the interface definition.

        Normally, only directly defined attributes are included. If
        a true positional or keyword argument is given, then
        attributes defined by base classes will be included.
        """

    def __getitem__(name):
        """Get the description for a name

        If the named attribute is not defined, a KeyError is raised.
        """

    def direct(name):
        """Get the description for the name if it was defined by the interface

        If the interface doesn't define the name, returns None.
        """

    def validateInvariants(obj, errors=None):
        """Validate invariants

        Validate object to defined invariants. If errors is None,
        raises first Invalid error; if errors is a list, appends all errors
        to list, then raises Invalid with the errors as the first element
        of the "args" tuple."""

    def __contains__(name):
        """Test whether the name is defined by the interface"""

    def __iter__():
        """Return an iterator over the names defined by the interface

        The names iterated include all of the names defined by the
        interface directly and indirectly by base interfaces.
        """

    __module__ = Attribute("""The name of the module defining the interface""")
class IDeclaration(ISpecification):
    """Interface declaration

    Declarations are used to express the interfaces implemented by
    classes or provided by objects.
    """

    def __contains__(interface):
        """Test whether an interface is in the specification

        Return true if the given interface is one of the interfaces in
        the specification and false otherwise.
        """

    def __iter__():
        """Return an iterator for the interfaces in the specification
        """

    def flattened():
        """Return an iterator of all included and extended interfaces

        An iterator is returned for all interfaces either included in
        or extended by interfaces included in the specifications
        without duplicates. The interfaces are in "interface
        resolution order". The interface resolution order is such that
        base interfaces are listed after interfaces that extend them
        and, otherwise, interfaces are included in the order that they
        were defined in the specification.
        """

    def __sub__(interfaces):
        """Create an interface specification with some interfaces excluded

        The argument can be an interface or an interface
        specification. The interface or interfaces given in a
        specification are subtracted from the interface specification.

        Removing an interface that is not in the specification does
        not raise an error. Doing so has no effect.

        Removing an interface also removes sub-interfaces of the interface.
        """

    def __add__(interfaces):
        """Create an interface specification with some interfaces added

        The argument can be an interface or an interface
        specification. The interface or interfaces given in a
        specification are added to the interface specification.

        Adding an interface that is already in the specification does
        not raise an error. Doing so has no effect.
        """

    def __nonzero__():
        """Return a true value if the interface specification is non-empty
        """
class IInterfaceDeclaration(Interface):
    """Declare and check the interfaces of objects

    The functions defined in this interface are used to declare the
    interfaces that objects provide and to query the interfaces that have
    been declared.

    Interfaces can be declared for objects in two ways:

    - Interfaces are declared for instances of the object's class

    - Interfaces are declared for the object directly.

    The interfaces declared for an object are, therefore, the union of
    interfaces declared for the object directly and the interfaces
    declared for instances of the object's class.

    Note that we say that a class implements the interfaces provided
    by its instances. An instance can also provide interfaces
    directly. The interfaces provided by an object are the union of
    the interfaces provided directly and the interfaces implemented by
    the class.
    """

    def providedBy(ob):
        """Return the interfaces provided by an object

        This is the union of the interfaces directly provided by an
        object and interfaces implemented by its class.

        The value returned is an IDeclaration.
        """

    def implementedBy(class_):
        """Return the interfaces implemented for a class' instances

        The value returned is an IDeclaration.
        """

    def classImplements(class_, *interfaces):
        """Declare additional interfaces implemented for instances of a class

        The arguments after the class are one or more interfaces or
        interface specifications (IDeclaration objects).

        The interfaces given (including the interfaces in the
        specifications) are added to any interfaces previously
        declared.

        Consider the following example::

          class C(A, B):
             ...

          classImplements(C, I1, I2)

        Instances of ``C`` provide ``I1``, ``I2``, and whatever interfaces
        instances of ``A`` and ``B`` provide.
        """

    def implementer(*interfaces):
        """Create a decorator for declaring interfaces implemented by a factory

        A callable is returned that makes an implements declaration on
        objects passed to it.
        """

    def classImplementsOnly(class_, *interfaces):
        """Declare the only interfaces implemented by instances of a class

        The arguments after the class are one or more interfaces or
        interface specifications (IDeclaration objects).

        The interfaces given (including the interfaces in the
        specifications) replace any previous declarations.

        Consider the following example::

          class C(A, B):
             ...

          classImplements(C, IA, IB, IC)
          classImplementsOnly(C, I1, I2)

        Instances of ``C`` provide only ``I1`` and ``I2``, regardless of
        whatever interfaces instances of ``A`` and ``B`` implement.
        """

    def implementer_only(*interfaces):
        """Create a decorator for declaring the only interfaces implemented

        A callable is returned that makes an implements declaration on
        objects passed to it.
        """

    def directlyProvidedBy(object):
        """Return the interfaces directly provided by the given object

        The value returned is an IDeclaration.
        """

    def directlyProvides(object, *interfaces):
        """Declare interfaces declared directly for an object

        The arguments after the object are one or more interfaces or
        interface specifications (IDeclaration objects).

        The interfaces given (including the interfaces in the
        specifications) replace interfaces previously
        declared for the object.

        Consider the following example::

          class C(A, B):
             ...

          ob = C()
          directlyProvides(ob, I1, I2)

        The object, ``ob`` provides ``I1``, ``I2``, and whatever interfaces
        instances have been declared for instances of ``C``.

        To remove directly provided interfaces, use ``directlyProvidedBy`` and
        subtract the unwanted interfaces. For example::

          directlyProvides(ob, directlyProvidedBy(ob)-I2)

        removes I2 from the interfaces directly provided by
        ``ob``. The object, ``ob`` no longer directly provides ``I2``,
        although it might still provide ``I2`` if its class
        implements ``I2``.

        To add directly provided interfaces, use ``directlyProvidedBy`` and
        include additional interfaces. For example::

          directlyProvides(ob, directlyProvidedBy(ob), I2)

        adds I2 to the interfaces directly provided by ob.
        """

    def alsoProvides(object, *interfaces):
        """Declare additional interfaces directly for an object::

          alsoProvides(ob, I1)

        is equivalent to::

          directlyProvides(ob, directlyProvidedBy(ob), I1)
        """

    def noLongerProvides(object, interface):
        """Remove an interface from the list of an object's directly
        provided interfaces::

          noLongerProvides(ob, I1)

        is equivalent to::

          directlyProvides(ob, directlyProvidedBy(ob)-I1)

        with the exception that if ``I1`` is an interface that is
        provided by ``ob`` through the class's implementation,
        ValueError is raised.
        """

    def implements(*interfaces):
        """Declare interfaces implemented by instances of a class

        This function is called in a class definition (Python 2.x only).

        The arguments are one or more interfaces or interface
        specifications (IDeclaration objects).

        The interfaces given (including the interfaces in the
        specifications) are added to any interfaces previously
        declared.

        Previous declarations include declarations for base classes
        unless implementsOnly was used.

        This function is provided for convenience. It provides a more
        convenient way to call classImplements. For example::

          implements(I1)

        is equivalent to calling::

          classImplements(C, I1)

        after the class has been created.

        Consider the following example (Python 2.x only)::

          class C(A, B):
            implements(I1, I2)

        Instances of ``C`` implement ``I1``, ``I2``, and whatever interfaces
        instances of ``A`` and ``B`` implement.
        """

    def implementsOnly(*interfaces):
        """Declare the only interfaces implemented by instances of a class

        This function is called in a class definition (Python 2.x only).

        The arguments are one or more interfaces or interface
        specifications (IDeclaration objects).

        Previous declarations including declarations for base classes
        are overridden.

        This function is provided for convenience. It provides a more
        convenient way to call classImplementsOnly. For example::

          implementsOnly(I1)

        is equivalent to calling::

          classImplementsOnly(I1)

        after the class has been created.

        Consider the following example (Python 2.x only)::

          class C(A, B):
            implementsOnly(I1, I2)

        Instances of ``C`` implement ``I1``, ``I2``, regardless of what
        instances of ``A`` and ``B`` implement.
        """

    def classProvides(*interfaces):
        """Declare interfaces provided directly by a class

        This function is called in a class definition.

        The arguments are one or more interfaces or interface
        specifications (IDeclaration objects).

        The given interfaces (including the interfaces in the
        specifications) are used to create the class's direct-object
        interface specification. An error will be raised if the
        class already has a direct interface specification. In other
        words, it is an error to call this function more than once
        in a class definition.

        Note that the given interfaces have nothing to do with the
        interfaces implemented by instances of the class.

        This function is provided for convenience. It provides a more
        convenient way to call directlyProvides for a class. For example::

          classProvides(I1)

        is equivalent to calling::

          directlyProvides(theclass, I1)

        after the class has been created.
        """

    def provider(*interfaces):
        """A class decorator version of classProvides"""

    def moduleProvides(*interfaces):
        """Declare interfaces provided by a module

        This function is used in a module definition.

        The arguments are one or more interfaces or interface
        specifications (IDeclaration objects).

        The given interfaces (including the interfaces in the
        specifications) are used to create the module's direct-object
        interface specification. An error will be raised if the module
        already has an interface specification. In other words, it is
        an error to call this function more than once in a module
        definition.

        This function is provided for convenience. It provides a more
        convenient way to call directlyProvides for a module. For example::

          moduleProvides(I1)

        is equivalent to::

          directlyProvides(sys.modules[__name__], I1)
        """

    def Declaration(*interfaces):
        """Create an interface specification

        The arguments are one or more interfaces or interface
        specifications (IDeclaration objects).

        A new interface specification (IDeclaration) with
        the given interfaces is returned.
        """
class IAdapterRegistry(Interface):
    """Provide an interface-based registry for adapters

    This registry registers objects that are in some sense "from" a
    sequence of specification to an interface and a name.

    No specific semantics are assumed for the registered objects,
    however, the most common application will be to register factories
    that adapt objects providing required specifications to a provided
    interface.
    """

    def register(required, provided, name, value):
        """Register a value

        A value is registered for a *sequence* of required specifications, a
        provided interface, and a name, which must be text.
        """

    def registered(required, provided, name=_BLANK):
        """Return the component registered for the given interfaces and name

        name must be text.

        Unlike the lookup method, this method won't retrieve
        components registered for more specific required interfaces or
        less specific provided interfaces.

        If no component was registered exactly for the given
        interfaces and name, then None is returned.
        """

    def lookup(required, provided, name='', default=None):
        """Lookup a value

        A value is looked up based on a *sequence* of required
        specifications, a provided interface, and a name, which must be
        text.
        """

    def queryMultiAdapter(objects, provided, name=_BLANK, default=None):
        """Adapt a sequence of objects to a named, provided, interface
        """

    def lookup1(required, provided, name=_BLANK, default=None):
        """Lookup a value using a single required interface

        A value is looked up based on a single required
        specification, a provided interface, and a name, which must be
        text.
        """

    def queryAdapter(object, provided, name=_BLANK, default=None):
        """Adapt an object using a registered adapter factory.
        """

    def adapter_hook(provided, object, name=_BLANK, default=None):
        """Adapt an object using a registered adapter factory.

        name must be text.
        """

    def lookupAll(required, provided):
        """Find all adapters from the required to the provided interfaces

        An iterable object is returned that provides name-value two-tuples.
        """

    def names(required, provided):
        """Return the names for which there are registered objects
        """

    def subscribe(required, provided, subscriber, name=_BLANK):
        """Register a subscriber

        A subscriber is registered for a *sequence* of required
        specifications, a provided interface, and a name.

        Multiple subscribers may be registered for the same (or
        equivalent) interfaces.
        """

    def subscriptions(required, provided, name=_BLANK):
        """Get a sequence of subscribers

        Subscribers for a *sequence* of required interfaces, and a provided
        interface are returned.
        """

    def subscribers(objects, provided, name=_BLANK):
        """Get a sequence of subscription adapters
        """
# begin formerly in zope.component
# Raised by the ``get*`` lookup APIs (e.g. getAdapter, getUtility) when no
# matching component is registered; the ``query*`` variants return a default
# instead of raising.
class ComponentLookupError(LookupError):
    """A component could not be found."""
# Raised when an object fails interface validation (e.g. invariant checks).
class Invalid(Exception):
    """A component doesn't satisfy a promise."""
class IObjectEvent(Interface):
    """An event related to an object.

    The object that generated this event is not necessarily the object
    referred to by location.
    """

    object = Attribute("The subject of the event.")
@implementer(IObjectEvent)
class ObjectEvent(object):
    """Base implementation of IObjectEvent: an event about one object."""

    def __init__(self, object):
        # The subject of the event, exposed as the ``object`` attribute
        # required by IObjectEvent.
        self.object = object
class IComponentLookup(Interface):
    """Component Manager for a Site

    This object manages the components registered at a particular site. The
    definition of a site is intentionally vague.
    """

    adapters = Attribute(
        "Adapter Registry to manage all registered adapters.")

    utilities = Attribute(
        "Adapter Registry to manage all registered utilities.")

    def queryAdapter(object, interface, name=_BLANK, default=None):
        """Look for a named adapter to an interface for an object

        If a matching adapter cannot be found, returns the default.
        """

    def getAdapter(object, interface, name=_BLANK):
        """Look for a named adapter to an interface for an object

        If a matching adapter cannot be found, a ComponentLookupError
        is raised.
        """

    def queryMultiAdapter(objects, interface, name=_BLANK, default=None):
        """Look for a multi-adapter to an interface for multiple objects

        If a matching adapter cannot be found, returns the default.
        """

    def getMultiAdapter(objects, interface, name=_BLANK):
        """Look for a multi-adapter to an interface for multiple objects

        If a matching adapter cannot be found, a ComponentLookupError
        is raised.
        """

    def getAdapters(objects, provided):
        """Look for all matching adapters to a provided interface for objects

        Return an iterable of name-adapter pairs for adapters that
        provide the given interface.
        """

    def subscribers(objects, provided):
        """Get subscribers

        Subscribers are returned that provide the provided interface
        and that depend on and are computed from the sequence of
        required objects.
        """

    def handle(*objects):
        """Call handlers for the given objects

        Handlers registered for the given objects are called.
        """

    def queryUtility(interface, name='', default=None):
        """Look up a utility that provides an interface.

        If one is not found, returns default.
        """

    def getUtilitiesFor(interface):
        """Look up the registered utilities that provide an interface.

        Returns an iterable of name-utility pairs.
        """

    def getAllUtilitiesRegisteredFor(interface):
        """Return all registered utilities for an interface

        This includes overridden utilities.

        An iterable of utility instances is returned. No names are
        returned.
        """
class IRegistration(Interface):
    """A registration-information object
    """

    registry = Attribute("The registry having the registration")

    name = Attribute("The registration name")

    info = Attribute("""Information about the registration

    This is information deemed useful to people browsing the
    configuration of a system. It could, for example, include
    commentary or information about the source of the configuration.
    """)
class IUtilityRegistration(IRegistration):
    """Information about the registration of a utility
    """

    factory = Attribute("The factory used to create the utility. Optional.")

    component = Attribute("The object registered")

    provided = Attribute("The interface provided by the component")
class _IBaseAdapterRegistration(IRegistration):
    """Information about the registration of an adapter
    """

    factory = Attribute("The factory used to create adapters")

    required = Attribute("""The adapted interfaces

    This is a sequence of interfaces adapted by the registered
    factory. The factory will be called with a sequence of objects, as
    positional arguments, that provide these interfaces.
    """)

    provided = Attribute("""The interface provided by the adapters.

    This interface is implemented by the factory
    """)
class IAdapterRegistration(_IBaseAdapterRegistration):
    """Information about the registration of an adapter
    """
class ISubscriptionAdapterRegistration(_IBaseAdapterRegistration):
    """Information about the registration of a subscription adapter
    """
class IHandlerRegistration(IRegistration):

    handler = Attribute("An object called used to handle an event")

    required = Attribute("""The handled interfaces

    This is a sequence of interfaces handled by the registered
    handler. The handler will be called with a sequence of objects, as
    positional arguments, that provide these interfaces.
    """)
class IRegistrationEvent(IObjectEvent):
    """An event that involves a registration"""
@implementer(IRegistrationEvent)
class RegistrationEvent(ObjectEvent):
    """There has been a change in a registration
    """

    def __repr__(self):
        # Render the concrete event class name followed by the repr of
        # the registration object the event is about.
        return "%s event:\n%r" % (self.__class__.__name__, self.object)
class IRegistered(IRegistrationEvent):
    """A component or factory was registered
    """
# Concrete event published when a component or factory is registered.
@implementer(IRegistered)
class Registered(RegistrationEvent):
    pass
class IUnregistered(IRegistrationEvent):
    """A component or factory was unregistered
    """
# Concrete event published when a component or factory is unregistered.
@implementer(IUnregistered)
class Unregistered(RegistrationEvent):
    """A component or factory was unregistered
    """
class IComponentRegistry(Interface):
    """Register components
    """

    def registerUtility(component=None, provided=None, name=_BLANK,
                        info=_BLANK, factory=None):
        """Register a utility

        factory
           Factory for the component to be registered.

        component
           The registered component

        provided
           This is the interface provided by the utility. If the
           component provides a single interface, then this
           argument is optional and the component-implemented
           interface will be used.

        name
           The utility name.

        info
           An object that can be converted to a string to provide
           information about the registration.

        Only one of component and factory can be used.

        A Registered event is generated with an IUtilityRegistration.
        """

    def unregisterUtility(component=None, provided=None, name=_BLANK,
                          factory=None):
        """Unregister a utility

        A boolean is returned indicating whether the registry was
        changed. If the given component is None and there is no
        component registered, or if the given component is not
        None and is not registered, then the function returns
        False, otherwise it returns True.

        factory
           Factory for the component to be unregistered.

        component
           The registered component. The given component can be
           None, in which case any component registered to provide
           the given provided interface with the given name is
           unregistered.

        provided
           This is the interface provided by the utility. If the
           component is not None and provides a single interface,
           then this argument is optional and the
           component-implemented interface will be used.

        name
           The utility name.

        Only one of component and factory can be used.

        An Unregistered event is generated with an IUtilityRegistration.
        """

    def registeredUtilities():
        """Return an iterable of IUtilityRegistration instances.

        These registrations describe the current utility registrations
        in the object.
        """

    def registerAdapter(factory, required=None, provided=None, name=_BLANK,
                        info=_BLANK):
        """Register an adapter factory

        Parameters:

        factory
            The object used to compute the adapter

        required
            This is a sequence of specifications for objects to be
            adapted. If omitted, then the value of the factory's
            __component_adapts__ attribute will be used. The
            __component_adapts__ attribute is normally set in class
            definitions using the adapts function, or for callables
            using the adapter decorator. If the factory doesn't have
            a __component_adapts__ attribute, then this argument is
            required.

        provided
            This is the interface provided by the adapter and
            implemented by the factory. If the factory
            implements a single interface, then this argument is
            optional and the factory-implemented interface will be
            used.

        name
            The adapter name.

        info
           An object that can be converted to a string to provide
           information about the registration.

        A Registered event is generated with an IAdapterRegistration.
        """

    def unregisterAdapter(factory=None, required=None,
                          provided=None, name=_BLANK):
        """Unregister an adapter factory

        A boolean is returned indicating whether the registry was
        changed. If the given component is None and there is no
        component registered, or if the given component is not
        None and is not registered, then the function returns
        False, otherwise it returns True.

        Parameters:

        factory
            This is the object used to compute the adapter. The
            factory can be None, in which case any factory
            registered to implement the given provided interface
            for the given required specifications with the given
            name is unregistered.

        required
            This is a sequence of specifications for objects to be
            adapted. If the factory is not None and the required
            arguments is omitted, then the value of the factory's
            __component_adapts__ attribute will be used. The
            __component_adapts__ attribute is normally set in class
            definitions using the adapts function, or for callables
            using the adapter decorator. If the factory is None or
            doesn't have a __component_adapts__ attribute, then this
            argument is required.

        provided
            This is the interface provided by the adapter and
            implemented by the factory. If the factory is not
            None and implements a single interface, then this
            argument is optional and the factory-implemented
            interface will be used.

        name
            The adapter name.

        An Unregistered event is generated with an IAdapterRegistration.
        """

    def registeredAdapters():
        """Return an iterable of IAdapterRegistration instances.

        These registrations describe the current adapter registrations
        in the object.
        """

    def registerSubscriptionAdapter(factory, required=None, provides=None,
                                    name=_BLANK, info=''):
        """Register a subscriber factory

        Parameters:

        factory
            The object used to compute the adapter

        required
            This is a sequence of specifications for objects to be
            adapted. If omitted, then the value of the factory's
            __component_adapts__ attribute will be used. The
            __component_adapts__ attribute is normally set in class
            definitions using the adapts function, or for callables
            using the adapter decorator. If the factory doesn't have
            a __component_adapts__ attribute, then this argument is
            required.

        provides
            This is the interface provided by the adapter and
            implemented by the factory. If the factory implements
            a single interface, then this argument is optional and
            the factory-implemented interface will be used.

        name
            The adapter name.

            Currently, only the empty string is accepted. Other
            strings will be accepted in the future when support for
            named subscribers is added.

        info
           An object that can be converted to a string to provide
           information about the registration.

        A Registered event is generated with an
        ISubscriptionAdapterRegistration.
        """

    def unregisterSubscriptionAdapter(factory=None, required=None,
                                      provides=None, name=_BLANK):
        """Unregister a subscriber factory.

        A boolean is returned indicating whether the registry was
        changed. If the given component is None and there is no
        component registered, or if the given component is not
        None and is not registered, then the function returns
        False, otherwise it returns True.

        Parameters:

        factory
            This is the object used to compute the adapter. The
            factory can be None, in which case any factories
            registered to implement the given provided interface
            for the given required specifications with the given
            name are unregistered.

        required
            This is a sequence of specifications for objects to be
            adapted. If the factory is not None and the required
            arguments is omitted, then the value of the factory's
            __component_adapts__ attribute will be used. The
            __component_adapts__ attribute is normally set in class
            definitions using the adapts function, or for callables
            using the adapter decorator. If the factory is None or
            doesn't have a __component_adapts__ attribute, then this
            argument is required.

        provides
            This is the interface provided by the adapter and
            implemented by the factory. If the factory is not
            None and implements a single interface, then this argument
            is optional and the factory-implemented interface will
            be used.

        name
            The adapter name.

            Currently, only the empty string is accepted. Other
            strings will be accepted in the future when support for
            named subscribers is added.

        An Unregistered event is generated with an
        ISubscriptionAdapterRegistration.
        """

    def registeredSubscriptionAdapters():
        """Return an iterable of ISubscriptionAdapterRegistration instances.

        These registrations describe the current subscription adapter
        registrations in the object.
        """

    def registerHandler(handler, required=None, name=_BLANK, info=''):
        """Register a handler.

        A handler is a subscriber that doesn't compute an adapter
        but performs some function when called.

        Parameters:

        handler
            The object used to handle some event represented by
            the objects passed to it.

        required
            This is a sequence of specifications for objects to be
            adapted. If omitted, then the value of the factory's
            __component_adapts__ attribute will be used. The
            __component_adapts__ attribute is normally set in class
            definitions using the adapts function, or for callables
            using the adapter decorator. If the factory doesn't have
            a __component_adapts__ attribute, then this argument is
            required.

        name
            The handler name.

            Currently, only the empty string is accepted. Other
            strings will be accepted in the future when support for
            named handlers is added.

        info
           An object that can be converted to a string to provide
           information about the registration.

        A Registered event is generated with an IHandlerRegistration.
        """

    def unregisterHandler(handler=None, required=None, name=_BLANK):
        """Unregister a handler.

        A handler is a subscriber that doesn't compute an adapter
        but performs some function when called.

        A boolean is returned indicating whether the registry was
        changed.

        Parameters:

        handler
            This is the object used to handle some event
            represented by the objects passed to it. The handler
            can be None, in which case any handlers registered for
            the given required specifications with the given name
            are unregistered.

        required
            This is a sequence of specifications for objects to be
            adapted. If omitted, then the value of the factory's
            __component_adapts__ attribute will be used. The
            __component_adapts__ attribute is normally set in class
            definitions using the adapts function, or for callables
            using the adapter decorator. If the factory doesn't have
            a __component_adapts__ attribute, then this argument is
            required.

        name
            The handler name.

            Currently, only the empty string is accepted. Other
            strings will be accepted in the future when support for
            named handlers is added.

        An Unregistered event is generated with an IHandlerRegistration.
        """

    def registeredHandlers():
        """Return an iterable of IHandlerRegistration instances.

        These registrations describe the current handler registrations
        in the object.
        """
# Combines read access (IComponentLookup) and write access
# (IComponentRegistry) into the full component-registry contract.
class IComponents(IComponentLookup, IComponentRegistry):
    """Component registration and access
    """
# end formerly in zope.component
| 14,449 |
5,169 |
{
"name": "YINMapping",
"version": "1.0.0",
"summary": "YINMapping ios kvo 响应编程",
"description": "ios kvo 响应编程 使用便捷",
"homepage": "https://github.com/wangyin1/YINMapping",
"license": "MIT (LICENSE)",
"authors": {
"wangyin1": ""
},
"platforms": {
"ios": "9.0"
},
"source": {
"git": "https://github.com/wangyin1/YINMapping.git",
"tag": "1.0.0"
},
"source_files": [
"Classes",
"Classes/**/*.{h,m}"
],
"exclude_files": "Classes/Exclude",
"public_header_files": "Classes/**/*.h"
}
| 276 |
1,091 |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.cli.net;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.Lists;
import org.apache.karaf.shell.api.action.Argument;
import org.apache.karaf.shell.api.action.Command;
import org.apache.karaf.shell.api.action.Completion;
import org.apache.karaf.shell.api.action.Option;
import org.apache.karaf.shell.api.action.lifecycle.Service;
import org.onosproject.cli.AbstractShellCommand;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.PortNumber;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.device.PortStatistics;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.onosproject.cli.net.DevicesListCommand.getSortedDevices;
import static org.onosproject.net.DeviceId.deviceId;
/**
* Lists port statistic of all ports in the system.
*/
@Service
@Command(scope = "onos", name = "portstats",
        description = "Lists statistics of all ports in the system")
public class DevicePortStatsCommand extends AbstractShellCommand {

    @Option(name = "-nz", aliases = "--nonzero", description = "Show only non-zero portstats",
            required = false, multiValued = false)
    private boolean nonzero = false;

    @Option(name = "-d", aliases = "--delta",
            description = "Show delta port statistics,"
                    + "only for the last polling interval",
            required = false, multiValued = false)
    private boolean delta = false;

    @Option(name = "-t", aliases = "--table",
            description = "Show delta port statistics in table format "
                    + "using human readable unit",
            required = false, multiValued = false)
    private boolean table = false;

    @Argument(index = 0, name = "uri", description = "Device ID",
            required = false, multiValued = false)
    @Completion(DeviceIdCompleter.class)
    String uri = null;

    @Argument(index = 1, name = "portNumber", description = "Port Number",
            required = false, multiValued = false)
    @Completion(PortNumberCompleter.class)
    String portNumberStr = null;

    // Parsed from portNumberStr in doExecute(); null means "all ports".
    PortNumber portNumber = null;

    private static final String FORMAT =
            " port=%s, pktRx=%s, pktTx=%s, bytesRx=%s, bytesTx=%s, pktRxDrp=%s, pktTxDrp=%s, Dur=%s%s";

    @Override
    protected void doExecute() {
        DeviceService deviceService = get(DeviceService.class);

        if (portNumberStr != null) {
            portNumber = PortNumber.fromString(portNumberStr);
        }

        if (uri == null) {
            // No device given: report on every device, sorted for stable output.
            if (outputJson()) {
                if (delta) {
                    print("%s", jsonPortStatsDelta(deviceService, getSortedDevices(deviceService)));
                } else {
                    print("%s", jsonPortStats(deviceService, getSortedDevices(deviceService)));
                }
            } else {
                for (Device d : getSortedDevices(deviceService)) {
                    if (delta) {
                        if (table) {
                            printPortStatsDeltaTable(d.id(), deviceService.getPortDeltaStatistics(d.id()));
                        } else {
                            printPortStatsDelta(d.id(), deviceService.getPortDeltaStatistics(d.id()));
                        }
                    } else {
                        printPortStats(d.id(), deviceService.getPortStatistics(d.id()));
                    }
                }
            }
        } else {
            // A specific device was requested.
            Device d = deviceService.getDevice(deviceId(uri));
            if (d == null) {
                error("No such device %s", uri);
            } else if (outputJson()) {
                if (delta) {
                    print("%s", jsonPortStatsDelta(d.id(), new ObjectMapper(),
                            deviceService.getPortDeltaStatistics(d.id())));
                } else {
                    print("%s", jsonPortStats(d.id(), new ObjectMapper(), deviceService.getPortStatistics(d.id())));
                }
            } else if (delta) {
                if (table) {
                    printPortStatsDeltaTable(d.id(), deviceService.getPortDeltaStatistics(d.id()));
                } else {
                    printPortStatsDelta(d.id(), deviceService.getPortDeltaStatistics(d.id()));
                }
            } else {
                printPortStats(d.id(), deviceService.getPortStatistics(d.id()));
            }
        }
    }

    /**
     * Produces JSON array containing portstats of the specified devices.
     *
     * @param deviceService device service
     * @param devices collection of devices
     * @return JSON Array
     */
    protected JsonNode jsonPortStats(DeviceService deviceService, Iterable<Device> devices) {
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode result = mapper.createArrayNode();

        for (Device device : devices) {
            result.add(jsonPortStats(device.id(), mapper, deviceService.getPortStatistics(device.id())));
        }

        return result;
    }

    /**
     * Produces JSON object containing portstats of the specified device.
     *
     * @param deviceId device id
     * @param mapper shared object mapper used to build the nodes
     * @param portStats collection of port statistics
     * @return JSON object with "deviceId" and a "portStats" array
     */
    private JsonNode jsonPortStats(DeviceId deviceId, ObjectMapper mapper, Iterable<PortStatistics> portStats) {
        ObjectNode result = mapper.createObjectNode();
        ArrayNode portStatsNode = mapper.createArrayNode();

        for (PortStatistics stat : sortByPort(portStats)) {
            if (isIrrelevant(stat)) {
                continue;
            }
            if (nonzero && stat.isZero()) {
                continue;
            }
            portStatsNode.add(mapper.createObjectNode()
                    .put("port", stat.portNumber().toString())
                    .put("pktRx", stat.packetsReceived())
                    .put("pktTx", stat.packetsSent())
                    .put("bytesRx", stat.bytesReceived())
                    .put("bytesTx", stat.bytesSent())
                    .put("pktRxDrp", stat.packetsRxDropped())
                    .put("pktTxDrp", stat.packetsTxDropped())
                    .put("Dur", stat.durationSec())
                    .set("annotations", annotations(mapper, stat.annotations())));
        }
        result.put("deviceId", deviceId.toString());
        result.set("portStats", portStatsNode);

        return result;
    }

    /**
     * Produces JSON array containing delta portstats of the specified devices.
     *
     * @param deviceService device service
     * @param devices collection of devices
     * @return JSON Array
     */
    protected JsonNode jsonPortStatsDelta(DeviceService deviceService, Iterable<Device> devices) {
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode result = mapper.createArrayNode();

        for (Device device : devices) {
            result.add(jsonPortStatsDelta(device.id(), mapper, deviceService.getPortDeltaStatistics(device.id())));
        }

        return result;
    }

    /**
     * Produces JSON object containing delta portstats of the specified device id.
     *
     * @param deviceId device id
     * @param mapper shared object mapper used to build the nodes
     * @param portStats collection of port statistics
     * @return JSON object with "deviceId" and a "portStats" array
     */
    private JsonNode jsonPortStatsDelta(DeviceId deviceId, ObjectMapper mapper, Iterable<PortStatistics> portStats) {
        ObjectNode result = mapper.createObjectNode();
        ArrayNode portStatsNode = mapper.createArrayNode();

        for (PortStatistics stat : sortByPort(portStats)) {
            if (isIrrelevant(stat)) {
                continue;
            }
            if (nonzero && stat.isZero()) {
                continue;
            }
            float duration = ((float) stat.durationSec()) +
                    (((float) stat.durationNano()) / TimeUnit.SECONDS.toNanos(1));
            // Guard against a zero-length polling interval so the JSON never
            // carries Infinity/NaN rates (consistent with printPortStatsDeltaTable).
            float rateRx = duration > 0 ? stat.bytesReceived() * 8 / duration : 0;
            float rateTx = duration > 0 ? stat.bytesSent() * 8 / duration : 0;
            portStatsNode.add(mapper.createObjectNode()
                    .put("port", stat.portNumber().toString())
                    .put("pktRx", stat.packetsReceived())
                    .put("pktTx", stat.packetsSent())
                    .put("bytesRx", stat.bytesReceived())
                    .put("bytesTx", stat.bytesSent())
                    .put("rateRx", String.format("%.1f", rateRx))
                    .put("rateTx", String.format("%.1f", rateTx))
                    .put("pktRxDrp", stat.packetsRxDropped())
                    .put("pktTxDrp", stat.packetsTxDropped())
                    .put("interval", String.format("%.3f", duration)));
        }
        result.put("deviceId", deviceId.toString());
        result.set("portStats", portStatsNode);

        return result;
    }

    /**
     * Prints Port Statistics.
     *
     * @param deviceId the device whose ports are reported
     * @param portStats cumulative statistics for that device's ports
     */
    private void printPortStats(DeviceId deviceId, Iterable<PortStatistics> portStats) {
        print("deviceId=%s", deviceId);
        for (PortStatistics stat : sortByPort(portStats)) {
            if (isIrrelevant(stat)) {
                continue;
            }
            if (nonzero && stat.isZero()) {
                continue;
            }
            print(FORMAT, stat.portNumber(), stat.packetsReceived(), stat.packetsSent(), stat.bytesReceived(),
                    stat.bytesSent(), stat.packetsRxDropped(), stat.packetsTxDropped(), stat.durationSec(),
                    annotations(stat.annotations()));
        }
    }

    // True when a specific port was requested and this entry is for a different one.
    private boolean isIrrelevant(PortStatistics stat) {
        // TODO revisit logical port (e.g., ALL) handling
        return portNumber != null && !portNumber.equals(stat.portNumber());
    }

    /**
     * Prints Port delta statistics.
     *
     * @param deviceId the device whose ports are reported
     * @param portStats delta statistics for the last polling interval
     */
    private void printPortStatsDelta(DeviceId deviceId, Iterable<PortStatistics> portStats) {
        final String formatDelta = " port=%s, pktRx=%s, pktTx=%s, bytesRx=%s, bytesTx=%s,"
                + " rateRx=%s, rateTx=%s, pktRxDrp=%s, pktTxDrp=%s, interval=%s";
        print("deviceId=%s", deviceId);
        for (PortStatistics stat : sortByPort(portStats)) {
            if (isIrrelevant(stat)) {
                continue;
            }
            if (nonzero && stat.isZero()) {
                continue;
            }
            float duration = ((float) stat.durationSec()) +
                    (((float) stat.durationNano()) / TimeUnit.SECONDS.toNanos(1));
            // Guard against division by zero; previously this path (unlike the
            // table variant) could print Infinity/NaN for a zero interval.
            float rateRx = duration > 0 ? stat.bytesReceived() * 8 / duration : 0;
            float rateTx = duration > 0 ? stat.bytesSent() * 8 / duration : 0;
            print(formatDelta, stat.portNumber(),
                    stat.packetsReceived(),
                    stat.packetsSent(),
                    stat.bytesReceived(),
                    stat.bytesSent(),
                    String.format("%.1f", rateRx),
                    String.format("%.1f", rateTx),
                    stat.packetsRxDropped(),
                    stat.packetsTxDropped(),
                    String.format("%.3f", duration));
        }
    }

    /**
     * Prints human readable table with delta Port Statistics for specific device.
     *
     * @param deviceId the device whose ports are reported
     * @param portStats delta statistics for the last polling interval
     */
    private void printPortStatsDeltaTable(DeviceId deviceId, Iterable<PortStatistics> portStats) {
        final String formatDeltaTable = "|%5s | %7s | %7s | %7s | %7s | %7s | %7s | %7s | %7s |%9s |";
        print("+---------------------------------------------------------------------------------------------------+");
        print("| DeviceId = %-86s |", deviceId);
        print("|---------------------------------------------------------------------------------------------------|");
        print("| | Receive | Transmit | Time [s] |");
        print("| Port | Packets | Bytes | Rate bps | Drop | Packets | Bytes | Rate bps | Drop | Interval |");
        print("|---------------------------------------------------------------------------------------------------|");

        for (PortStatistics stat : sortByPort(portStats)) {
            if (isIrrelevant(stat)) {
                continue;
            }
            if (nonzero && stat.isZero()) {
                continue;
            }
            float duration = ((float) stat.durationSec()) +
                    (((float) stat.durationNano()) / TimeUnit.SECONDS.toNanos(1));
            float rateRx = duration > 0 ? stat.bytesReceived() * 8 / duration : 0;
            float rateTx = duration > 0 ? stat.bytesSent() * 8 / duration : 0;
            print(formatDeltaTable, stat.portNumber(),
                    humanReadable(stat.packetsReceived()),
                    humanReadable(stat.bytesReceived()),
                    humanReadableBps(rateRx),
                    humanReadable(stat.packetsRxDropped()),
                    humanReadable(stat.packetsSent()),
                    humanReadable(stat.bytesSent()),
                    humanReadableBps(rateTx),
                    humanReadable(stat.packetsTxDropped()),
                    String.format("%.3f", duration));
        }
        print("+---------------------------------------------------------------------------------------------------+");
    }

    /**
     * Converts bytes to human readable string with Kilo, Mega, Giga, etc.
     *
     * @param bytes input byte array
     * @return human readble string
     */
    public static String humanReadable(long bytes) {
        int unit = 1000;
        if (bytes < unit) {
            return String.format("%s ", bytes);
        }
        // Largest possible long (~9.2e18) yields exp == 6 ('E'), so no clamp needed.
        int exp = (int) (Math.log(bytes) / Math.log(unit));
        Character pre = ("KMGTPE").charAt(exp - 1);
        return String.format("%.2f%s", bytes / Math.pow(unit, exp), pre);
    }

    /**
     * Converts bps to human readable format.
     *
     * @param bps input rate
     * @return human readble string
     */
    public static String humanReadableBps(float bps) {
        int unit = 1000;
        if (bps < unit) {
            return String.format("%.0f ", bps);
        }
        // Clamp to the largest known prefix: a float can reach ~3.4e38, which
        // would otherwise index past the end of "KMGTPE" and throw.
        int exp = Math.min((int) (Math.log(bps) / Math.log(unit)), 6);
        Character pre = ("KMGTPE").charAt(exp - 1);
        return String.format("%.2f%s", bps / Math.pow(unit, exp), pre);
    }

    // Returns the statistics as a list sorted by ascending port number.
    private static List<PortStatistics> sortByPort(Iterable<PortStatistics> portStats) {
        List<PortStatistics> portStatsList = Lists.newArrayList(portStats);

        portStatsList.sort(Comparator.comparing(ps -> ps.portNumber().toLong()));
        return portStatsList;
    }
}
| 6,865 |
2,350 |
import unreal_engine as ue
from unreal_engine.structs import KAggregateGeom
# Operate on the first asset currently selected in the editor.
mesh = ue.get_selected_assets()[0]

# Replace the existing aggregate geometry with a fresh, empty one
# before regenerating the k-DOP hulls.
mesh.BodySetup.AggGeom = KAggregateGeom()

# Run every k-DOP generator, in the same order as the original script.
for generate_kdop in (
    mesh.static_mesh_generate_kdop26,
    mesh.static_mesh_generate_kdop18,
    mesh.static_mesh_generate_kdop10x,
    mesh.static_mesh_generate_kdop10y,
    mesh.static_mesh_generate_kdop10z,
):
    generate_kdop()
| 146 |
313 |
<gh_stars>100-1000
{"status_id":8045695227396096,"text":"Boktijiw ru mavaru kincu ra bimup vupnof jopiz huref mero hodum ze zo fi ekejuac jekkur hoku nun. #vuwas","user":{"user_id":3237519353184256,"name":"<NAME>","screen_name":"@azfig","created_at":119047738,"followers_count":9,"friends_count":28,"favourites_count":21},"created_at":1415725733,"favorite_count":52,"retweet_count":76,"entities":{"hashtags":[{"text":"#vuwas","indices":[8,27]}]},"in_reply_to_status_id":null}
| 189 |
1,339 |
import pickle
from datetime import datetime, timedelta
from pathlib import Path
from cryptography.fernet import Fernet
from robin_stocks.tda.globals import DATA_DIR_NAME, PICKLE_NAME
from robin_stocks.tda.helper import (request_data, set_login_state,
update_session)
from robin_stocks.tda.urls import URLS
def login_first_time(encryption_passcode, client_id, authorization_token, refresh_token):
    """ Stores log in information in a pickle file on the computer. After being used once,
    user can call login() to automatically read in information from pickle file and refresh
    authorization tokens when needed.

    :param encryption_passcode: Encryption key created by generate_encryption_passcode().
    :type encryption_passcode: str
    :param client_id: The Consumer Key for the API account.
    :type client_id: str
    :param authorization_token: The authorization code returned from post request to https://developer.tdameritrade.com/authentication/apis/post/token-0
    :type authorization_token: str
    :param refresh_token: The refresh code returned from post request to https://developer.tdameritrade.com/authentication/apis/post/token-0
    :type refresh_token: str

    """
    if type(encryption_passcode) is str:
        encryption_passcode = encryption_passcode.encode()
    cipher = Fernet(encryption_passcode)
    # Make sure the storage directory and pickle file exist before writing.
    storage_dir = Path.home().joinpath(DATA_DIR_NAME)
    storage_dir.mkdir(parents=True, exist_ok=True)
    pickle_path = storage_dir.joinpath(PICKLE_NAME)
    if not pickle_path.exists():
        pickle_path.touch()
    # Encrypt the credentials and persist them together with fresh timestamps.
    timestamp = datetime.now()
    payload = {
        'authorization_token': cipher.encrypt(authorization_token.encode()),
        'refresh_token': cipher.encrypt(refresh_token.encode()),
        'client_id': cipher.encrypt(client_id.encode()),
        'authorization_timestamp': timestamp,
        'refresh_timestamp': timestamp
    }
    with pickle_path.open("wb") as pickle_file:
        pickle.dump(payload, pickle_file)
def login(encryption_passcode):
    """ Set the authorization token so the API can be used. Gets a new authorization token
    every 30 minutes using the refresh token. Gets a new refresh token every 60 days.

    :param encryption_passcode: Encryption key created by generate_encryption_passcode().
    :type encryption_passcode: str
    :returns: The "Bearer <access token>" string that was stored in the session headers.
    :raises FileExistsError: If login_first_time() has never been called (no pickle file).
    :raises ValueError: If the stored refresh token has been rejected by the token endpoint.

    """
    if type(encryption_passcode) is str:
        encryption_passcode = encryption_passcode.encode()
    cipher_suite = Fernet(encryption_passcode)
    # Check that file exists before trying to read from it.
    data_dir = Path.home().joinpath(DATA_DIR_NAME)
    pickle_path = data_dir.joinpath(PICKLE_NAME)
    if not pickle_path.exists():
        raise FileExistsError(
            "Please Call login_first_time() to create pickle file.")
    # Read the information from the pickle file.
    with pickle_path.open("rb") as pickle_file:
        pickle_data = pickle.load(pickle_file)
        access_token = cipher_suite.decrypt(pickle_data['authorization_token']).decode()
        refresh_token = cipher_suite.decrypt(pickle_data['refresh_token']).decode()
        client_id = cipher_suite.decrypt(pickle_data['client_id']).decode()
        authorization_timestamp = pickle_data['authorization_timestamp']
        refresh_timestamp = pickle_data['refresh_timestamp']

    def _persist_tokens(new_access_token, new_refresh_token, refresh_stamp):
        # Re-encrypt and write the token state back to the pickle file.
        # refresh_stamp is datetime.now() when the refresh token itself was
        # renewed, otherwise the previously stored refresh timestamp.
        with pickle_path.open("wb") as pickle_file:
            pickle.dump(
                {
                    'authorization_token': cipher_suite.encrypt(new_access_token.encode()),
                    'refresh_token': cipher_suite.encrypt(new_refresh_token.encode()),
                    'client_id': cipher_suite.encrypt(client_id.encode()),
                    'authorization_timestamp': datetime.now(),
                    'refresh_timestamp': refresh_stamp
                }, pickle_file)

    # Authorization tokens expire after 30 mins. Refresh tokens expire after 90 days,
    # but you need to request a fresh authorization and refresh token before it expires.
    authorization_delta = timedelta(seconds=1800)
    refresh_delta = timedelta(days=60)
    url = URLS.oauth()
    # If it has been longer than 60 days. Get a new refresh and authorization token.
    # Else if it has been longer than 30 minutes, get only a new authorization token.
    if (datetime.now() - refresh_timestamp > refresh_delta):
        payload = {
            "grant_type": "refresh_token",
            "access_type": "offline",
            "refresh_token": refresh_token,
            "client_id": client_id
        }
        data, _ = request_data(url, payload, True)
        # Bug fix: use "or" here — a response missing EITHER token previously
        # slipped past this check ("and") and raised a bare KeyError below.
        if "access_token" not in data or "refresh_token" not in data:
            raise ValueError(
                "Refresh token is no longer valid. Call login_first_time() to get a new refresh token.")
        access_token = data["access_token"]
        refresh_token = data["refresh_token"]
        # Both tokens were renewed, so reset the refresh timestamp as well.
        _persist_tokens(access_token, refresh_token, datetime.now())
    elif (datetime.now() - authorization_timestamp > authorization_delta):
        payload = {
            "grant_type": "refresh_token",
            "refresh_token": refresh_token,
            "client_id": client_id
        }
        data, _ = request_data(url, payload, True)
        if "access_token" not in data:
            raise ValueError(
                "Refresh token is no longer valid. Call login_first_time() to get a new refresh token.")
        access_token = data["access_token"]
        # Only the access token was renewed: keep the original refresh timestamp.
        _persist_tokens(access_token, refresh_token, refresh_timestamp)
    # Store authorization token in session information to be used with API calls.
    auth_token = "Bearer {0}".format(access_token)
    update_session("Authorization", auth_token)
    update_session("apikey", client_id)
    set_login_state(True)
    return auth_token
def generate_encryption_passcode():
    """ Returns an encryption key to be used for logging in.

    :returns: A freshly generated Fernet key, decoded to ``str`` (not bytes);
        login() and login_first_time() re-encode it before use.

    """
    return Fernet.generate_key().decode()
| 2,627 |
1,929 |
from .res_layer import ResLayer
from .res_i3d_layer import ResI3DLayer

# Public re-exports of this package: the residual-layer builders defined
# in the sibling modules above.
__all__ = [
    'ResLayer', 'ResI3DLayer'
]
| 48 |
335 |
<reponame>Safal08/Hacktoberfest-1
{
"word": "Vertiginous",
"definitions": [
"Extremely high or steep.",
"Relating to or affected by vertigo."
],
"parts-of-speech": "Adjective"
}
| 95 |
4,772 |
package example.repo;
import example.model.Customer887;
import java.util.List;
import org.springframework.data.repository.CrudRepository;
/**
 * Spring Data CRUD repository for {@code Customer887} entities keyed by {@code Long} ids.
 */
public interface Customer887Repository extends CrudRepository<Customer887, Long> {

    // Derived query method: the implementation (lookup by last name) is
    // generated by Spring Data from the method name.
    List<Customer887> findByLastName(String lastName);
}
| 83 |
469 |
/*******************************************************************************
* Copyright 2019 See AUTHORS file
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.mini2Dx.ui.navigation;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.Before;
import org.junit.Test;
import org.mini2Dx.ui.element.Actionable;
import org.mini2Dx.ui.element.Button;
import org.mini2Dx.ui.layout.ScreenSize;
import com.badlogic.gdx.Input.Keys;
import junit.framework.Assert;
/**
 * Unit tests for {@link GridUiNavigation}
 */
public class GridUiNavigationTest {
    private final int COLUMNS = 3;
    private final int ROWS = 3;

    private final Mockery mockery = new Mockery();
    private final GridUiNavigation navigation = new GridUiNavigation(COLUMNS);
    private final Actionable [][] elements = new Actionable[COLUMNS][ROWS];

    @Before
    public void setUp() {
        mockery.setImposteriser(ClassImposteriser.INSTANCE);

        // Create a mock Actionable per grid cell; every mock must register the
        // navigation as a hover listener when added to the grid.
        for (int x = 0; x < COLUMNS; x++) {
            for (int y = 0; y < ROWS; y++) {
                elements[x][y] = mockery.mock(Actionable.class, "actionable-" + x + "," + y);

                final Actionable actionable = elements[x][y];
                mockery.checking(new Expectations() {
                    {
                        atLeast(1).of(actionable).addHoverListener(navigation);
                        allowing(actionable).invokeEndHover();
                    }
                });
            }
        }
    }

    @Test
    public void testSetXY() {
        addElementsToGrid();

        for (int x = 0; x < COLUMNS; x++) {
            for (int y = 0; y < ROWS; y++) {
                Assert.assertEquals(elements[x][y], navigation.get(x, y));
            }
        }
    }

    @Test
    public void testNavigate() {
        addElementsToGrid();

        // Walk past each edge of the grid; the cursor must stop at the border.
        Actionable lastActionable = null;
        for (int x = 0; x <= COLUMNS; x++) {
            lastActionable = navigation.navigate(Keys.RIGHT);
        }
        Assert.assertEquals(elements[2][0], lastActionable);

        for (int y = 0; y <= ROWS; y++) {
            lastActionable = navigation.navigate(Keys.DOWN);
        }
        Assert.assertEquals(elements[2][2], lastActionable);

        for (int x = 0; x <= COLUMNS; x++) {
            lastActionable = navigation.navigate(Keys.LEFT);
        }
        Assert.assertEquals(elements[0][2], lastActionable);

        for (int y = 0; y <= ROWS; y++) {
            lastActionable = navigation.navigate(Keys.UP);
        }
        Assert.assertEquals(elements[0][0], lastActionable);
    }

    @Test
    public void testCursorReset() {
        addElementsToGrid();

        Actionable lastActionable = null;
        for (int x = 0; x <= COLUMNS; x++) {
            lastActionable = navigation.navigate(Keys.RIGHT);
        }
        Assert.assertEquals(elements[2][0], lastActionable);

        // Re-layout must preserve the cursor position.
        navigation.layout(ScreenSize.XS);
        Assert.assertEquals(elements[2][0], navigation.getCursor());
    }

    @Test
    public void testRemoveAllWithNullValues() {
        expectRemoveHoverListenerOnAll();

        final Actionable mockedActionable = mockery.mock(Actionable.class);
        mockery.checking(new Expectations() {
            {
                oneOf(mockedActionable).addHoverListener(navigation);
                oneOf(mockedActionable).removeHoverListener(navigation);
            }
        });
        navigation.set(2, 3, mockedActionable);

        addElementsToGrid();
        navigation.removeAll();
    }

    @Test
    public void testNavigateWithNullItems() {
        expectRemoveHoverListenerOnAll();

        String elementId = "actionable-" + 2 + "," + 3;
        final Actionable mockedActionable = mockery.mock(Actionable.class, elementId);
        mockery.checking(new Expectations() {
            {
                oneOf(mockedActionable).addHoverListener(navigation);
                oneOf(mockedActionable).invokeEndHover();
                atLeast(1).of(mockedActionable).getId();
                will(returnValue(elementId));
            }
        });
        navigation.set(2, 3, mockedActionable);

        Assert.assertEquals(mockedActionable, navigation.updateCursor(mockedActionable.getId()));
    }

    @Test
    public void testNavigateUpFromNullElement() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[1][1].getId());
        navigation.set(1, 1, null);
        Assert.assertEquals(navigationElements[1][0], navigation.navigate(Keys.UP));
    }

    @Test
    public void testNavigateDownFromNullElement() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[1][1].getId());
        navigation.set(1, 1, null);
        Assert.assertEquals(navigationElements[1][2], navigation.navigate(Keys.DOWN));
    }

    @Test
    public void testNavigateLeftFromNullElement() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[1][1].getId());
        navigation.set(1, 1, null);
        Assert.assertEquals(navigationElements[0][1], navigation.navigate(Keys.LEFT));
    }

    @Test
    public void testNavigateRightFromNullElement() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[1][1].getId());
        navigation.set(1, 1, null);
        Assert.assertEquals(navigationElements[2][1], navigation.navigate(Keys.RIGHT));
    }

    @Test
    public void testNavigateUpOverNullElement() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[1][2].getId());
        navigation.set(1, 1, null);
        Assert.assertEquals(navigationElements[1][0], navigation.navigate(Keys.UP));
    }

    @Test
    public void testNavigateDownOverNullElement() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[1][0].getId());
        navigation.set(1, 1, null);
        Assert.assertEquals(navigationElements[1][2], navigation.navigate(Keys.DOWN));
    }

    @Test
    public void testNavigateLeftOverNullElement() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[2][1].getId());
        navigation.set(1, 1, null);
        Assert.assertEquals(navigationElements[0][1], navigation.navigate(Keys.LEFT));
    }

    @Test
    public void testNavigateRightOverNullElement() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[0][1].getId());
        navigation.set(1, 1, null);
        Assert.assertEquals(navigationElements[2][1], navigation.navigate(Keys.RIGHT));
    }

    @Test
    public void testNavigateDownNoElementToMoveTo() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[2][0].getId());
        navigation.set(2, 1, null);
        navigation.set(2, 2, null);
        // Entire column below is empty, so the cursor must stay in place.
        Assert.assertEquals(navigationElements[2][0], navigation.navigate(Keys.DOWN));
    }

    @Test
    public void testNavigateLeftNoElementToMoveTo() {
        Button[][] navigationElements = createButtonGrid();
        navigation.updateCursor(navigationElements[0][0].getId());
        navigation.set(1, 0, null);
        navigation.set(2, 0, null);
        // NOTE(review): despite the method name this navigates RIGHT (as the
        // original test did) — the rest of the row is empty, cursor stays put.
        Assert.assertEquals(navigationElements[0][0], navigation.navigate(Keys.RIGHT));
    }

    @Test
    public void testSetNullElement() {
        expectRemoveHoverListenerOnAll();
        navigation.set(2, 3, null);
        Assert.assertEquals(12, navigation.getNavigationSize());
    }

    // Populates the grid with the mock Actionables created in setUp().
    private void addElementsToGrid() {
        for (int x = 0; x < COLUMNS; x++) {
            for (int y = 0; y < ROWS; y++) {
                navigation.set(x, y, elements[x][y]);
            }
        }
    }

    // Builds a COLUMNS x ROWS grid of real Buttons (ids "button-x,y"),
    // installs them into the navigation, and returns the array for assertions.
    private Button[][] createButtonGrid() {
        Button[][] navigationElements = new Button[COLUMNS][ROWS];
        for (int x = 0; x < COLUMNS; x++) {
            for (int y = 0; y < ROWS; y++) {
                navigationElements[x][y] = new Button("button-" + x + "," + y);
                navigation.set(x, y, navigationElements[x][y]);
            }
        }
        return navigationElements;
    }

    // Expects removeHoverListener(navigation) to fire exactly 9 times for each
    // mock created in setUp() (same cardinality as the original inline loops).
    private void expectRemoveHoverListenerOnAll() {
        for (int x = 0; x < COLUMNS; x++) {
            for (int y = 0; y < ROWS; y++) {
                final Actionable actionable = elements[x][y];
                mockery.checking(new Expectations() {
                    {
                        exactly(9).of(actionable).removeHoverListener(navigation);
                    }
                });
            }
        }
    }
}
| 4,053 |
12,278 |
<gh_stars>1000+
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2020 ArangoDB GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author <NAME>
////////////////////////////////////////////////////////////////////////////////
#ifndef IRESEARCH_FILTER_VISITOR_H
#define IRESEARCH_FILTER_VISITOR_H
#include "shared.hpp"
namespace iresearch {
struct sub_reader;
struct term_reader;
struct seek_term_iterator;
//////////////////////////////////////////////////////////////////////////////
/// @class filter_visitor
/// @brief base filter visitor interface
//////////////////////////////////////////////////////////////////////////////
struct IRESEARCH_API filter_visitor {
  virtual ~filter_visitor() = default;

  //////////////////////////////////////////////////////////////////////////////
  /// @brief makes preparations for a visitor
  /// @param segment the segment (sub_reader) currently being traversed
  /// @param field   the reader of the field the terms belong to
  /// @param terms   iterator positioned over the field's terms
  //////////////////////////////////////////////////////////////////////////////
  virtual void prepare(const sub_reader& segment,
                       const term_reader& field,
                       const seek_term_iterator& terms) = 0;

  //////////////////////////////////////////////////////////////////////////////
  /// @brief applies actions to a current term iterator
  /// @param boost boost value to associate with the current term
  //////////////////////////////////////////////////////////////////////////////
  virtual void visit(boost_t boost) = 0;
}; // filter_visitor
}
#endif // IRESEARCH_FILTER_VISITOR_H
| 505 |
445 |
<filename>processors/ARM/gdb-8.3.1/gdb/gnu-nat.h
/* Common things used by the various *gnu-nat.c files
Copyright (C) 1995-2019 Free Software Foundation, Inc.
Written by <NAME> <<EMAIL>>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
#ifndef GNU_NAT_H
#define GNU_NAT_H
#include "defs.h"
/* Work around conflict between Mach's 'thread_info' function, and GDB's
'thread_info' class. Make the former available as 'mach_thread_info'. */
#define thread_info mach_thread_info
/* Mach headers are not yet ready for C++ compilation. */
extern "C"
{
#include <mach.h>
}
#undef thread_info
/* Divert 'mach_thread_info' to the original Mach 'thread_info' function. */
extern __typeof__ (mach_thread_info) mach_thread_info asm ("thread_info");
#include <unistd.h>
#include "inf-child.h"
struct inf;
extern struct inf *gnu_current_inf;
/* Converts a GDB pid to a struct proc. */
struct proc *inf_tid_to_thread (struct inf *inf, int tid);
typedef void (inf_threads_ftype) (struct proc *thread, void *arg);
/* Call F for every thread in inferior INF, passing ARG as second parameter. */
void inf_threads (struct inf *inf, inf_threads_ftype *f, void *arg);
/* Makes sure that INF's thread list is synced with the actual process. */
int inf_update_procs (struct inf *inf);
/* A proc is either a thread, or the task (there can only be one task proc
because it always has the same TID, PROC_TID_TASK). */
/* Represents either a single thread or the whole task; the task entry is
   distinguished by the sentinel tid PROC_TID_TASK.  */
struct proc
  {
    thread_t port;		/* The task or thread port.  */
    int tid;			/* The GDB pid (actually a thread id).  */
    int num;			/* An id number for threads, to print.  */

    mach_port_t saved_exc_port;	/* The task/thread's real exception port.  */
    mach_port_t exc_port;	/* Our replacement exception port, installed so
				   the debugger intercepts exceptions.  */

    int sc;			/* Desired suspend count.   */
    int cur_sc;			/* Implemented suspend count.  */
    int run_sc;			/* Default sc when the program is running.  */
    int pause_sc;		/* Default sc when gdb has control.  */
    int resume_sc;		/* Sc resulting from the last resume.  */
    int detach_sc;		/* SC to leave around when detaching
				   from program.  */

    thread_state_data_t state;	/* Registers, &c.  */
    int state_valid:1;		/* True if STATE is up to date.  */
    int state_changed:1;	/* True if STATE was modified and must be
				   written back to the thread.  */

    int aborted:1;		/* True if thread_abort has been called.  */
    int dead:1;			/* We happen to know it's actually dead.  */

    /* Bit mask of registers fetched by gdb.  This is used when we re-fetch
       STATE after aborting the thread, to detect that gdb may have out-of-date
       information.  */
    unsigned long fetched_regs;

    struct inf *inf;		/* Where we come from.  */

    struct proc *next;		/* Next proc in the inferior's linked list.  */
  };
/* The task has a thread entry with this TID. */
#define PROC_TID_TASK (-1)
#define proc_is_task(proc) ((proc)->tid == PROC_TID_TASK)
#define proc_is_thread(proc) ((proc)->tid != PROC_TID_TASK)
extern int __proc_pid (struct proc *proc);
/* Make sure that the state field in PROC is up to date, and return a
pointer to it, or 0 if something is wrong. If WILL_MODIFY is true,
makes sure that the thread is stopped and aborted first, and sets
the state_changed field in PROC to true. */
extern thread_state_t proc_get_state (struct proc *proc, int will_modify);
/* Return printable description of proc. */
extern char *proc_string (struct proc *proc);
#define proc_debug(_proc, msg, args...) \
do { struct proc *__proc = (_proc); \
debug ("{proc %d/%d %s}: " msg, \
__proc_pid (__proc), __proc->tid, \
host_address_to_string (__proc) , ##args); } while (0)
extern int gnu_debug_flag;
#define debug(msg, args...) \
do { if (gnu_debug_flag) \
fprintf_unfiltered (gdb_stdlog, "%s:%d: " msg "\r\n", \
__FILE__ , __LINE__ , ##args); } while (0)
/* A prototype generic GNU/Hurd target.  The client can override it
   with local methods.  */

struct gnu_nat_target : public inf_child_target
{
  void attach (const char *, int) override;

  /* We attach to already-running tasks without waiting for an initial
     stop event.  */
  bool attach_no_wait () override
  { return true; }

  void detach (inferior *, int) override;
  void resume (ptid_t, int, enum gdb_signal) override;
  ptid_t wait (ptid_t, struct target_waitstatus *, int) override;

  /* Transfer memory/objects between GDB and the inferior task.  */
  enum target_xfer_status xfer_partial (enum target_object object,
					const char *annex,
					gdb_byte *readbuf,
					const gdb_byte *writebuf,
					ULONGEST offset, ULONGEST len,
					ULONGEST *xfered_len) override;

  int find_memory_regions (find_memory_region_ftype func, void *data)
    override;

  void kill () override;

  void create_inferior (const char *, const std::string &,
			char **, int) override;

  void mourn_inferior () override;

  bool thread_alive (ptid_t ptid) override;

  const char *pid_to_str (ptid_t) override;

  void stop (ptid_t) override;
};
#endif /* GNU_NAT_H */
| 1,901 |
4,303 |
// This file simulates the actual hexagon DMA driver functions to run
// the DMA Examples on host machine. The definitions in this file are
// a weak reference so that these will be called only in case of
// unavailability of actual DMA functions.
#include "HalideRuntime.h"
#define WEAK __attribute__((weak))
#include "../../src/runtime/hexagon_dma_pool.h"
#include "../../src/runtime/mini_hexagon_dma.h"
#include <assert.h>
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>
#define HALIDE_MOCK_DMA_DEBUG
// Mock Global Descriptor
// Mirrors the packed hardware DMA descriptor layout: stWord0/stWord1
// correspond to the hardware words; stWord2 is extra (mock-side) space
// that remembers the pixel format so the software copy in
// nDmaWrapper_Move() can compute the bytes-per-pixel.
typedef struct {
    struct {
        uintptr_t des_pointer;  // for chain to next "desc" or NULL to terminate the chain
        uint32 dst_pix_fmt : 3;
        uint32 dst_is_ubwc : 1;
        uint32 src_pix_fmt : 3;
        uint32 src_is_ubwc : 1;
        uint32 dst_is_tcm : 1;  // 1 => DDR->L2 (read); 0 => L2->DDR (write)
        uint32 _unused0 : 3;
        uint32 src_is_tcm : 1;
        uint32 _unused1 : 3;
        uint32 dst_pix_padding : 1;
        uint32 _unused2 : 3;
        uint32 src_pix_padding : 1;
        uint32 _unused3 : 11;
        uint32 frm_height : 16;
        uint32 frm_width : 16;
        uint32 roiY : 16;  // ROI origin within the frame
        uint32 roiX : 16;
    } stWord0;
    struct {
        uint32 roiH : 16;  // ROI extent
        uint32 roiW : 16;
        uint32 src_roi_stride : 16;  // strides are in pixels, not bytes
        uint32 dst_roi_stride : 16;
        uintptr_t src_frm_base_addr;
        uintptr_t dst_frm_base_addr;
        uint32 src_roi_start_addr : 32;
        uint32 dst_roi_start_addr : 32;
        uint32 ubwc_stat_pointer : 32;  // use reserved3 for gralloc ubwc_stat_pointer
    } stWord1;
    struct {
        uint32 pix_fmt;  // t_eDmaFmt value, mock-only bookkeeping
        uint32 _unused0;
        uint32 _unused1;
        uint32 _unused2;
    } stWord2;
} t_st_hw_descriptor;
// Mock DMA engine handle: holds the head of the descriptor chain that
// is currently queued on this engine (NULL when idle).
typedef struct {
    int x;  // in case we want to keep a count
    t_st_hw_descriptor *ptr;  // head of the queued descriptor chain
} dma_handle_t;
// Host-side stand-in for tearing down the Hexagon L2 pool: nothing is
// actually pooled in the mock, so this only traces the call and
// reports success (0).
int halide_hexagon_free_l2_pool(void *user_context) {
    halide_print(user_context, "halide_hexagon_free_l2_pool mock implementation \n");
    return 0;
}
// Bytes per pixel for each supported DMA format: the 8-bit NV12/NV124R
// families use 1 byte/pixel, the 10-bit P010/TP10 families use 2.
static int nDmaPixelSize(int pix_fmt) {
    int bytes_per_pixel = 0;
    switch (pix_fmt) {
    case eDmaFmt_RawData:
    case eDmaFmt_NV12:
    case eDmaFmt_NV12_Y:
    case eDmaFmt_NV12_UV:
    case eDmaFmt_NV124R:
    case eDmaFmt_NV124R_Y:
    case eDmaFmt_NV124R_UV:
        bytes_per_pixel = 1;
        break;
    case eDmaFmt_P010:
    case eDmaFmt_P010_Y:
    case eDmaFmt_P010_UV:
    case eDmaFmt_TP10:
    case eDmaFmt_TP10_Y:
    case eDmaFmt_TP10_UV:
        bytes_per_pixel = 2;
        break;
    }
    // Any other format is a programming error.
    assert(bytes_per_pixel != 0);
    return bytes_per_pixel;
}
// Host-side mock of HAP_cache_lock: there is no real TCM/cache locking,
// so the "locked" region is just heap memory.  A zero-size request
// yields NULL (matching the original mock's behavior).
//
// The real driver returns the region's physical address through
// paddr_ptr.  The original mock left *paddr_ptr uninitialized, so
// callers reading it got garbage; on the host the virtual address
// stands in for the physical one.
void *HAP_cache_lock(unsigned int size, void **paddr_ptr) {
    void *alloc = 0;
    if (size != 0) {
        alloc = malloc(size);
    }
    if (paddr_ptr != NULL) {
        *paddr_ptr = alloc;  // mock "physical" address == virtual address
    }
    return alloc;
}
// Mock unlock: releases the heap block handed out by HAP_cache_lock().
// Returns 0 on success, 1 when given a NULL pointer.
int HAP_cache_unlock(void *vaddr_ptr) {
    if (vaddr_ptr == 0) {
        return 1;
    }
    free(vaddr_ptr);
    return 0;
}
// Mock power vote: the host build has no DMA clocks to vote for, so the
// call is only traced and always succeeds.
// NOTE(review): cornercase is uint32 but is printed with %d; harmless
// for small values, but %u would be the matching conversion.
int32 nDmaWrapper_PowerVoting(uint32 cornercase) {
    printf("In nDmaWrapper_PowerVoting %d \n", cornercase);
    return 0;
}
// Allocate a mock DMA engine handle with an empty descriptor chain.
// Returns NULL if the allocation fails -- the original dereferenced the
// malloc() result unconditionally, crashing on OOM instead of letting
// the caller observe the failure.
t_DmaWrapper_DmaEngineHandle hDmaWrapper_AllocDma(void) {
    dma_handle_t *handle = (dma_handle_t *)malloc(sizeof(dma_handle_t));
    if (handle != NULL) {
        handle->ptr = NULL;  // no descriptors queued yet
    }
    return (void *)handle;
}
// Release an engine handle.  The descriptor chain must already have been
// retired via nDmaWrapper_Wait()/nDmaWrapper_FinishFrame(), hence the
// assertion that no chain is attached.
int32 nDmaWrapper_FreeDma(t_DmaWrapper_DmaEngineHandle dma_handle) {
    dma_handle_t *engine = (dma_handle_t *)dma_handle;
    assert(engine != NULL);
    assert(engine->ptr == NULL);
    free(engine);
    return 0;
}
// Execute every queued descriptor synchronously with memcpy.  The real
// driver starts an asynchronous hardware transfer here; the mock copies
// the whole ROI immediately, so a later nDmaWrapper_Wait() has nothing
// left to wait for.  Returns 0 (also for a NULL handle).
int32 nDmaWrapper_Move(t_DmaWrapper_DmaEngineHandle handle) {
    if (handle != 0) {
        dma_handle_t *dma_handle = (dma_handle_t *)handle;
        t_st_hw_descriptor *desc = dma_handle->ptr;
        // Walk the chain built by nDmaWrapper_DmaTransferSetup().
        while (desc != NULL) {
            unsigned char *src_addr = reinterpret_cast<unsigned char *>(desc->stWord1.src_frm_base_addr);
            unsigned char *dst_addr = reinterpret_cast<unsigned char *>(desc->stWord1.dst_frm_base_addr);
            int x = desc->stWord0.roiX;
            int y = desc->stWord0.roiY;
            int w = desc->stWord1.roiW;
            int h = desc->stWord1.roiH;
            int pixelsize = nDmaPixelSize(desc->stWord2.pix_fmt);
            int y_offset;
            // For chroma-plane formats the plane starts frm_height rows
            // into the same frame buffer (NV12-style layout), so frame
            // rows are offset by the frame height.
            if (desc->stWord2.pix_fmt == eDmaFmt_NV12_UV ||
                desc->stWord2.pix_fmt == eDmaFmt_NV124R_UV ||
                desc->stWord2.pix_fmt == eDmaFmt_P010_UV ||
                desc->stWord2.pix_fmt == eDmaFmt_TP10_UV) {
                y_offset = desc->stWord0.frm_height;
            } else {
                y_offset = 0;
            }
#ifdef HALIDE_MOCK_DMA_DEBUG
            fprintf(stderr, "Processing descriptor %p -- DMAREAD: %d src_addr: %p dst_addr: %p ROI(X: %u, Y: %u, W: %u, H: %u) FrameStride: %u, CacheRoiStride %u, Frm(W: %u, H: %u), y_offset: %u\n",
                    desc, desc->stWord0.dst_is_tcm, src_addr, dst_addr, x, y, w, h,
                    desc->stWord1.src_roi_stride, desc->stWord1.dst_roi_stride, desc->stWord0.frm_width, desc->stWord0.frm_height, y_offset);
            int cnt = 0;  // log first few lines
#endif
            for (int yii = 0; yii < h; yii++) {
                // per line copy
                int ydst = yii * desc->stWord1.dst_roi_stride * pixelsize;
                int ysrc = yii * desc->stWord1.src_roi_stride * pixelsize;
                int len = w * pixelsize;
                int frame_offset = 0;
                // dst_is_tcm selects the direction: on a read (DDR->L2)
                // the ROI offset is applied on the source/frame side, on
                // a write (L2->DDR) on the destination/frame side.
                if (desc->stWord0.dst_is_tcm) {
                    frame_offset = (x + (y_offset + y) * desc->stWord1.src_roi_stride) * pixelsize + ysrc;
                    memcpy(&dst_addr[ydst], &src_addr[frame_offset], len);
                } else {
                    frame_offset = (x + (y_offset + y) * desc->stWord1.dst_roi_stride) * pixelsize + ydst;
                    memcpy(&dst_addr[frame_offset], &src_addr[ysrc], len);
                }
#ifdef HALIDE_MOCK_DMA_DEBUG
#define DBG_LOG_LINES 2
                if (cnt++ < DBG_LOG_LINES) {
                    fprintf(stderr, "Processing line -- yii: %u ydst: %u frame_offset: %u ysrc: %u len: %u\n",
                            yii, ydst, frame_offset, ysrc, len);
                } else {
                    if (cnt == (h - DBG_LOG_LINES)) cnt = 0;  // log last few lines
                }
#endif
            }
            desc = reinterpret_cast<t_st_hw_descriptor *>(desc->stWord0.des_pointer);
        }
    }
    return 0;
}
// Wait for outstanding transfers.  Mock transfers complete synchronously
// inside nDmaWrapper_Move(), so waiting reduces to detaching the
// (already executed) descriptor chain from the handle.
int32 nDmaWrapper_Wait(t_DmaWrapper_DmaEngineHandle dma_handle) {
    dma_handle_t *engine = (dma_handle_t *)dma_handle;
    assert(engine != NULL);
    engine->ptr = NULL;
    return 0;
}
// End-of-frame notification.  Like nDmaWrapper_Wait(), the mock only has
// to drop the handle's descriptor-chain association.
int32 nDmaWrapper_FinishFrame(t_DmaWrapper_DmaEngineHandle dma_handle) {
    dma_handle_t *engine = (dma_handle_t *)dma_handle;
    assert(engine != NULL);
    engine->ptr = NULL;
    return 0;
}
// Report the walk (step) size the engine prefers for a format.  The mock
// imposes no alignment restriction: aligning to 1 leaves the caller's
// requested dimensions unchanged (assumes align(x, 1) == x -- TODO
// confirm against the align() helper).
int32 nDmaWrapper_GetRecommendedWalkSize(t_eDmaFmt fmt, bool is_ubwc,
                                         t_StDmaWrapper_RoiAlignInfo *walk_size) {
    walk_size->u16H = align(walk_size->u16H, 1);
    walk_size->u16W = align(walk_size->u16W, 1);
    return 0;
}
// Recommended stride (in pixels) for the intermediate L2 buffer.  The
// mock handles only linear (non-UBWC) layouts, so the stride is simply
// the ROI width.
int32 nDmaWrapper_GetRecommendedIntermBufStride(t_eDmaFmt fmt,
                                                t_StDmaWrapper_RoiAlignInfo *roi_size,
                                                bool is_ubwc) {
    // UBWC Not Supported
    assert(is_ubwc == 0);
    return roi_size->u16W;
}
// Queue one transfer by appending the caller-provided descriptor buffer
// to the engine handle's chain; the actual copy runs later, in
// nDmaWrapper_Move().  Returns 0 on success, 1 on a NULL handle or
// missing descriptor buffer.
int32 nDmaWrapper_DmaTransferSetup(t_DmaWrapper_DmaEngineHandle handle, t_StDmaWrapper_DmaTransferSetup *dma_transfer_parm) {
    if (handle == 0)
        return 1;
    if (dma_transfer_parm->pDescBuf == NULL)
        return 1;

    // Add it to the linked list of dma_handle->ptr.  Appending at the
    // tail keeps descriptors in submission order.
    dma_handle_t *dma_handle = (dma_handle_t *)handle;
    t_st_hw_descriptor *temp = dma_handle->ptr;
    t_st_hw_descriptor *desc = (t_st_hw_descriptor *)dma_transfer_parm->pDescBuf;
    desc->stWord0.des_pointer = 0;

    if (temp != NULL) {
        while (temp->stWord0.des_pointer != 0) {
            temp = reinterpret_cast<t_st_hw_descriptor *>(temp->stWord0.des_pointer);
        }
        temp->stWord0.des_pointer = reinterpret_cast<uintptr_t>(desc);
    } else {
        dma_handle->ptr = desc;
    }

    int mul_factor = 1;
    switch (dma_transfer_parm->eFmt) {  // chroma fmt
    case eDmaFmt_NV12_UV:
    case eDmaFmt_NV124R_UV:
    case eDmaFmt_P010_UV:
    case eDmaFmt_TP10_UV: {
        // DMA Driver halves the Y offset and height so that only half the size of roi luma is transferred for chroma
        // Adjusting for that behavior
        mul_factor = 2;
    } break;
    case eDmaFmt_RawData:
    case eDmaFmt_NV12:
    case eDmaFmt_NV12_Y:
    case eDmaFmt_NV124R:
    case eDmaFmt_NV124R_Y:
    case eDmaFmt_P010:
    case eDmaFmt_P010_Y:
    case eDmaFmt_TP10:
    case eDmaFmt_TP10_Y:
    default:
        break;
    }

    // Fill in the descriptor words from the transfer parameters.
    desc->stWord0.dst_is_ubwc = dma_transfer_parm->bIsFmtUbwc;
    desc->stWord0.dst_is_tcm = (dma_transfer_parm->eTransferType == eDmaWrapper_DdrToL2) ? 1 : 0;
    desc->stWord0.frm_height = dma_transfer_parm->u16FrameH;
    desc->stWord0.frm_width = dma_transfer_parm->u16FrameW;
    desc->stWord0.roiX = dma_transfer_parm->u16RoiX;
    desc->stWord0.roiY = dma_transfer_parm->u16RoiY / mul_factor;
    desc->stWord1.roiH = dma_transfer_parm->u16RoiH / mul_factor;
    desc->stWord1.roiW = dma_transfer_parm->u16RoiW;

    if (desc->stWord0.dst_is_tcm) {
        // Read (DDR -> L2): the frame buffer is the source side.
        desc->stWord1.dst_frm_base_addr = reinterpret_cast<uintptr_t>(dma_transfer_parm->pTcmDataBuf);
        // must always point to start of frame buffer, and not start of component
        desc->stWord1.src_frm_base_addr = reinterpret_cast<uintptr_t>(dma_transfer_parm->pFrameBuf);
        desc->stWord1.src_roi_stride = dma_transfer_parm->u16FrameStride;
        desc->stWord1.dst_roi_stride = dma_transfer_parm->u16RoiStride;
    } else {
        // Write (L2 -> DDR): the frame buffer is the destination side.
        desc->stWord1.src_frm_base_addr = reinterpret_cast<uintptr_t>(dma_transfer_parm->pTcmDataBuf);
        // must always point to start of frame buffer, and not start of component
        desc->stWord1.dst_frm_base_addr = reinterpret_cast<uintptr_t>(dma_transfer_parm->pFrameBuf);
        // We are in dma write so dst roi stride is the frame stride and src stride is tcm stride
        desc->stWord1.dst_roi_stride = dma_transfer_parm->u16FrameStride;
        desc->stWord1.src_roi_stride = dma_transfer_parm->u16RoiStride;
    }
    desc->stWord2.pix_fmt = dma_transfer_parm->eFmt;
    return 0;
}
// Size (in bytes) of the descriptor buffer needed for nsize transfers.
// Each descriptor occupies 64 bytes; planar YUV formats (luma + chroma)
// need a second descriptor, so they count double.
int32 nDmaWrapper_GetDescbuffsize(t_eDmaFmt *fmt, uint16 nsize) {
    int32 extra_descriptors = 0;
    for (int32 i = 0; i < nsize; i++) {
        switch (fmt[i]) {
        case eDmaFmt_NV12:
        case eDmaFmt_TP10:
        case eDmaFmt_NV124R:
        case eDmaFmt_P010:
            extra_descriptors += 1;
            break;
        default:
            break;
        }
    }
    return (nsize + extra_descriptors) * 64;
}
// Mock stub: always reports 0.  NOTE(review): callers presumably size
// the intermediate buffer from the recommended stride instead --
// confirm against the host DMA example code.
int32 nDmaWrapper_GetRecommendedIntermBufSize(t_eDmaFmt eFmtId, bool bUse16BitPaddingInL2,
                                              t_StDmaWrapper_RoiAlignInfo *pStRoiSize,
                                              bool bIsUbwc, uint16 u16IntermBufStride) {
    return 0;
}
| 5,626 |
384 |
<reponame>sekikn/tez<filename>tez-mapreduce/src/main/java/org/apache/tez/mapreduce/hadoop/InputSplitInfoMem.java<gh_stars>100-1000
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.mapreduce.hadoop;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.security.Credentials;
import org.apache.tez.dag.api.TaskLocationHint;
import org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRSplitsProto;
import org.apache.tez.common.Preconditions;
/**
 * Represents InputSplitInfo for splits generated to memory. </p>
 *
 * Since splits are generated in memory, the getSplitsMetaInfoFile and
 * getSplitsFile are not supported.
 *
 */
public class InputSplitInfoMem implements InputSplitInfo {

  /** True when wrapping new-API (mapreduce) splits, false for old-API (mapred). */
  private final boolean isNewSplit;
  /** Number of tasks derived from the generated splits. */
  private final int numTasks;
  private final Credentials credentials;
  private final Configuration conf;
  private final List<TaskLocationHint> taskLocationHints;

  private org.apache.hadoop.mapreduce.InputSplit[] newFormatSplits;
  private org.apache.hadoop.mapred.InputSplit[] oldFormatSplits;

  // TaskLocationHints accepted as a parameter since InputSplits don't have rack
  // info, and it can't always be derived.
  public InputSplitInfoMem(org.apache.hadoop.mapreduce.InputSplit[] newSplits,
      List<TaskLocationHint> taskLocationHints, int numTasks, Credentials credentials,
      Configuration conf) {
    this.isNewSplit = true;
    this.newFormatSplits = newSplits;
    this.taskLocationHints = taskLocationHints;
    this.numTasks = numTasks;
    this.credentials = credentials;
    this.conf = conf;
  }

  // TaskLocationHints accepted as a parameter since InputSplits don't have rack
  // info, and it can't always be derived.
  public InputSplitInfoMem(org.apache.hadoop.mapred.InputSplit[] oldSplits,
      List<TaskLocationHint> taskLocationHints, int numTasks, Credentials credentials,
      Configuration conf) {
    this.isNewSplit = false;
    this.oldFormatSplits = oldSplits;
    this.taskLocationHints = taskLocationHints;
    this.numTasks = numTasks;
    this.credentials = credentials;
    this.conf = conf;
  }

  @Override
  public List<TaskLocationHint> getTaskLocationHints() {
    return this.taskLocationHints;
  }

  /** Not supported: in-memory splits are never written to disk. */
  @Override
  public Path getSplitsMetaInfoFile() {
    throw new UnsupportedOperationException("Not supported for Type: "
        + getType());
  }

  /** Not supported: in-memory splits are never written to disk. */
  @Override
  public Path getSplitsFile() {
    throw new UnsupportedOperationException("Not supported for Type: "
        + getType());
  }

  @Override
  public int getNumTasks() {
    return this.numTasks;
  }

  @Override
  public Type getType() {
    return Type.MEM;
  }

  /**
   * Serializes the held splits into an {@link MRSplitsProto} on each call.
   * Checked serialization failures are rethrown as RuntimeException.
   */
  @Override
  public MRSplitsProto getSplitsProto() {
    if (isNewSplit) {
      try {
        return createSplitsProto(newFormatSplits, new SerializationFactory(conf));
      } catch (IOException | InterruptedException e) {
        // Multi-catch: both failure modes were handled identically before.
        throw new RuntimeException(e);
      }
    } else {
      try {
        return createSplitsProto(oldFormatSplits);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }

  @Override
  public Credentials getCredentials() {
    return this.credentials;
  }

  @Override
  public boolean holdsNewFormatSplits() {
    return this.isNewSplit;
  }

  @Override
  public org.apache.hadoop.mapreduce.InputSplit[] getNewFormatSplits() {
    Preconditions
        .checkState(
            isNewSplit == true,
            "Cannot fetch newSplits for an instance handling oldFormatSplits. Use holdsNewFormatSplits() to check type");
    return newFormatSplits;
  }

  @Override
  public org.apache.hadoop.mapred.InputSplit[] getOldFormatSplits() {
    // Message fixed: this accessor returns the OLD format splits (the
    // original message wrongly said "newSplits" / "newFormatSplits").
    Preconditions
        .checkState(
            isNewSplit == false,
            "Cannot fetch oldSplits for an instance handling newFormatSplits. Use holdsNewFormatSplits() to check type");
    return oldFormatSplits;
  }

  /** Serializes new-API splits via the configured serialization factory. */
  private static MRSplitsProto createSplitsProto(
      org.apache.hadoop.mapreduce.InputSplit[] newSplits,
      SerializationFactory serializationFactory) throws IOException,
      InterruptedException {
    MRSplitsProto.Builder splitsBuilder = MRSplitsProto.newBuilder();

    for (org.apache.hadoop.mapreduce.InputSplit newSplit : newSplits) {
      splitsBuilder.addSplits(MRInputHelpers.createSplitProto(newSplit, serializationFactory));
    }
    return splitsBuilder.build();
  }

  /** Serializes old-API (Writable-based) splits. */
  private static MRSplitsProto createSplitsProto(
      org.apache.hadoop.mapred.InputSplit[] oldSplits) throws IOException {
    MRSplitsProto.Builder splitsBuilder = MRSplitsProto.newBuilder();
    for (org.apache.hadoop.mapred.InputSplit oldSplit : oldSplits) {
      splitsBuilder.addSplits(MRInputHelpers.createSplitProto(oldSplit));
    }
    return splitsBuilder.build();
  }
}
| 1,954 |
361 |
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------#
# Copyright © 2015-2016 VMware, Inc. All Rights Reserved. #
# #
# Licensed under the BSD 2-Clause License (the “License”); you may not use #
# this file except in compliance with the License. #
# #
# The BSD 2-Clause License #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions are met:#
# #
# - Redistributions of source code must retain the above copyright notice, #
# this list of conditions and the following disclaimer. #
# #
# - Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in the #
# documentation and/or other materials provided with the distribution. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"#
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF #
# THE POSSIBILITY OF SUCH DAMAGE. #
# ----------------------------------------------------------------------------#
from liota.core.package_manager import LiotaPackage
from liota.lib.utilities.utility import read_user_config
# Packages this package depends on; the liota package manager loads them
# before executing this one.
dependencies = ["edge_systems/dell5k/edge_system"]
class PackageClass(LiotaPackage):
    """
    This package creates a AWSIoT DCC object and registers edge system on
    AWSIoT to acquire "registered edge system", i.e. aws_iot_edge_system.
    """

    def run(self, registry):
        """
        The execution function of a liota package.

        Establishes connection with AWSIoT DCC using MqttDccComms.

        :param registry: the instance of ResourceRegistryPerPackage of the package
        :return:
        """
        import copy
        from liota.dccs.aws_iot import AWSIoT
        from liota.dcc_comms.mqtt_dcc_comms import MqttDccComms
        from liota.lib.transports.mqtt import QoSDetails
        from liota.lib.utilities.identity import Identity
        from liota.lib.utilities.tls_conf import TLSConf

        # Acquire resources from registry
        # Creating a copy of edge_system object to keep original object "clean"
        edge_system = copy.copy(registry.get("edge_system"))

        # Get values from configuration file
        config_path = registry.get("package_conf")
        config = read_user_config(config_path + '/sampleProp.conf')

        # Encapsulates Identity.  AWS IoT authenticates with X.509 client
        # certificates, so no username/password is used.
        identity = Identity(root_ca_cert=config['broker_root_ca_cert'], username=None, password=None,
                            cert_file=config['edge_system_cert_file'], key_file=config['edge_system_key_file'])

        # Encapsulate TLS parameters
        tls_conf = TLSConf(config['cert_required'], config['tls_version'], config['cipher'])

        # Encapsulate QoS related parameters
        qos_details = QoSDetails(config['in_flight'], config['queue_size'], config['retry'])

        # Connecting to AWSIoT
        # Publish topic for all Metrics will be 'liota/generated_local_uuid_of_edge_system/request'
        # Create and pass custom MqttMessagingAttributes object to MqttDccComms to have custom topic
        self.aws_iot = AWSIoT(MqttDccComms(edge_system_name=edge_system.name,
                                           url=config['BrokerIP'], port=config['BrokerPort'], identity=identity,
                                           tls_conf=tls_conf,
                                           qos_details=qos_details,
                                           clean_session=True,
                                           # BUG FIX: the transport must come from the config;
                                           # the literal list ['transport'] was passed before.
                                           protocol=config['protocol'], transport=config['transport'],
                                           conn_disconn_timeout=config['ConnectDisconnectTimeout']),
                              enclose_metadata=True)

        # Register edge system (gateway)
        aws_iot_edge_system = self.aws_iot.register(edge_system)

        registry.register("aws_iot", self.aws_iot)
        registry.register("aws_iot_edge_system", aws_iot_edge_system)

    def clean_up(self):
        """
        The clean up function of a liota package.

        Disconnects from AWSIoT DCC.

        :return:
        """
        self.aws_iot.comms.client.disconnect()
| 2,575 |
2,472 |
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
// This source code is licensed under both the GPLv2 (found in the
// COPYING file in the root directory) and Apache 2.0 License
// (found in the LICENSE.Apache file in the root directory).
#include <jni.h>
#include <string>
#include "rocksdb/comparator.h"
#include "rocksdb/slice.h"
#include "include/org_rocksdb_NativeComparatorWrapperTest_NativeStringComparatorWrapper.h"
namespace rocksdb {
class NativeComparatorWrapperTestStringComparator
: public Comparator {
const char* Name() const {
return "NativeComparatorWrapperTestStringComparator";
}
int Compare(
const Slice& a, const Slice& b) const {
return a.ToString().compare(b.ToString());
}
void FindShortestSeparator(
std::string* start, const Slice& limit) const {
return;
}
void FindShortSuccessor(
std::string* key) const {
return;
}
};
} // end of rocksdb namespace
/*
 * Class:     org_rocksdb_NativeComparatorWrapperTest_NativeStringComparatorWrapper
 * Method:    newStringComparator
 * Signature: ()J
 */
jlong Java_org_rocksdb_NativeComparatorWrapperTest_00024NativeStringComparatorWrapper_newStringComparator(
    JNIEnv* env , jobject jobj) {
  // Heap-allocate the comparator and hand its address to Java as a jlong.
  // NOTE(review): ownership passes to the Java wrapper, which is expected
  // to free it via its native dispose path -- confirm in the Java test.
  auto* comparator =
      new rocksdb::NativeComparatorWrapperTestStringComparator();
  return reinterpret_cast<jlong>(comparator);
}
| 469 |
605 |
package com.sohu.tv.mq.cloud.service;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.sohu.tv.mq.cloud.bo.TopicTraffic;
import com.sohu.tv.mq.cloud.dao.DelayMessageDao;
import com.sohu.tv.mq.cloud.util.Result;
/**
 * Delay-message service: queries traffic statistics for delayed messages.
 *
 * @Description:
 * @author zhehongyuan
 * @date April 23, 2019
 */
@Service
public class DelayMessageService {

    private Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private DelayMessageDao delayMessageDao;

    /**
     * Delay-message traffic uses the figures reported by the client.
     * Returns a DB-error result if the query fails.
     *
     * @param tid topic id
     * @param createDate date key (int form -- NOTE(review): exact format
     *                   assumed to match DelayMessageDao; confirm there)
     */
    public Result<List<TopicTraffic>> selectDelayMessageTraffic(long tid, int createDate) {
        List<TopicTraffic> topicTrafficList = null;
        try {
            topicTrafficList = delayMessageDao.selectTopicTraffic(tid, createDate);
        } catch (Exception e) {
            logger.error("selectDelayMessageTraffic err, tid:{}, createDate:{}", tid, createDate, e);
            return Result.getDBErrorResult(e);
        }
        return Result.getResult(topicTrafficList);
    }

    /**
     * Delay-message traffic uses the figures reported by the client.
     * Fetches the traffic of one topic at a specific date and time.
     *
     * @param tid topic id
     * @param date date key
     * @param time time-of-day key
     * @return
     */
    public Result<TopicTraffic> query(long tid, int date, String time) {
        TopicTraffic topicTraffic = null;
        try {
            topicTraffic = delayMessageDao.selectByIdListDateTime(tid, date, time);
        } catch (Exception e) {
            logger.error("query traffic err, tid:{},date:{},time:{}", tid, date, time, e);
            return Result.getDBErrorResult(e);
        }
        return Result.getResult(topicTraffic);
    }

    /**
     * All ids in idList must belong to delay-message topics; filtering is
     * done by the caller.  Queries each topic individually and collects
     * the successful results (failed lookups are silently skipped).
     *
     * @param idList topic ids
     * @param date date key
     * @param time time-of-day key
     * @return
     */
    public Result<List<TopicTraffic>> query(List<Long> idList, int date, String time) {
        if (idList == null || idList.isEmpty()) {
            return Result.getResult(null);
        }
        List<TopicTraffic> list = new ArrayList<TopicTraffic>(idList.size());
        for (Long tid : idList) {
            Result<TopicTraffic> topicTraffic = query(tid, date, time);
            if (topicTraffic.isOK()) {
                list.add(topicTraffic.getResult());
            }
        }
        return Result.getResult(list);
    }

    /**
     * Queries the daily (total) delay-message traffic of a topic.
     *
     * @param tid topic id
     * @param date date key
     * @return
     */
    public Result<TopicTraffic> query(long tid, int date) {
        TopicTraffic topicTraffic = null;
        try {
            topicTraffic = delayMessageDao.selectTotalTraffic(tid, date);
        } catch (Exception e) {
            logger.error("query traffic err, tid:{},date:{}", tid, date, e);
            return Result.getDBErrorResult(e);
        }
        return Result.getResult(topicTraffic);
    }
}
| 1,554 |
1,738 |
<reponame>jeikabu/lumberyard
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.
#ifndef CRYINCLUDE_CRYSYSTEM_VALIDATOR_H
#define CRYINCLUDE_CRYSYSTEM_VALIDATOR_H
#pragma once
//////////////////////////////////////////////////////////////////////////
// Default validator implementation.
// Routes engine warnings to an interactive message box (Windows only),
// letting a developer abort the app, continue, or mute further boxes.
//////////////////////////////////////////////////////////////////////////
struct SDefaultValidator
    : public IValidator
{
    CSystem* m_pSystem;
    SDefaultValidator(CSystem* system)
        : m_pSystem(system) {};
    virtual void Report(SValidatorRecord& record)
    {
        if (record.text)
        {
            // Once the user picks "Ignore", suppress every later box for
            // the rest of the session (function-local, shared by all calls).
            static bool bNoMsgBoxOnWarnings = false;
            // A leading '!' forces the box even when the sys_warnings
            // cvar is off.
            if ((record.text[0] == '!') || (m_pSystem->m_sysWarnings && m_pSystem->m_sysWarnings->GetIVal() != 0))
            {
                if (g_cvars.sys_no_crash_dialog)
                {
                    return;
                }
                if (bNoMsgBoxOnWarnings)
                {
                    return;
                }

#ifdef WIN32
                // Minimize a fullscreen render window first so the modal
                // box is actually visible.
                ICVar* pFullscreen = (gEnv && gEnv->pConsole) ? gEnv->pConsole->GetCVar("r_Fullscreen") : 0;
                if (pFullscreen && pFullscreen->GetIVal() != 0 && gEnv->pRenderer && gEnv->pRenderer->GetHWND())
                {
                    ::ShowWindow((HWND)gEnv->pRenderer->GetHWND(), SW_MINIMIZE);
                }
                string strMessage = record.text;
                strMessage += "\n---------------------------------------------\nAbort - terminate application\nRetry - continue running the application\nIgnore - don't show this message box any more";

                switch (::MessageBox(NULL, strMessage.c_str(), "CryEngine Warning", MB_ABORTRETRYIGNORE | MB_DEFBUTTON2 | MB_ICONWARNING | MB_SYSTEMMODAL))
                {
                case IDABORT:
                    m_pSystem->GetIConsole()->Exit ("User abort requested during showing the warning box with the following message: %s", record.text);
                    break;
                case IDRETRY:
                    break;
                case IDIGNORE:
                    bNoMsgBoxOnWarnings = true;
                    m_pSystem->m_sysWarnings->Set(0);
                    break;
                }
#endif
            }
        }
    }
};
#endif // CRYINCLUDE_CRYSYSTEM_VALIDATOR_H
| 1,286 |
545 |
/*
* PSP Software Development Kit - https://github.com/pspdev
* -----------------------------------------------------------------------
* Licensed under the BSD license, see LICENSE in PSPSDK root for details.
*
* pspvfpu.h - Prototypes for the VFPU library
*
* Copyright (c) 2005 <NAME> <<EMAIL>>
*
*/
#include <malloc.h>
#include <string.h>
#include "pspthreadman.h"
#include "pspvfpu.h"
#define NMAT 8
/* Saved VFPU register state for one client: fpregs shadows all eight
   4x4 matrices; the two bitmasks track which shadows/hardware matrices
   currently belong to this context.  */
struct pspvfpu_context {
	float fpregs[4*4*NMAT] __attribute__((aligned(VFPU_ALIGNMENT)));

	/*
	  States a matrix can be in:
	  owned	valid
	    0	  X	context has no knowledge of the matrix
	    1	  1	context is using matrix, and wants it preserved
	    1	  0	context is using matrix temporarily
	 */
	vfpumatrixset_t valid;	/* which matrices are valid in this context */
	vfpumatrixset_t owned;	/* which matrices are in the VFPU at the moment */
};
/* XXX This should be per-thread info */
static struct pspvfpu_context *users[NMAT];
/* Spill VFPU matrix MAT into the context's fpregs shadow, one quad-word
   store (sv.q) per row.  The switch is required because the matrix
   number must appear as a literal in the instruction text.  */
static void save_matrix(struct pspvfpu_context *c, int mat)
{
#define SV(N) \
	asm("sv.q c"#N"00, 0 + %0, wt\n" \
	    "sv.q c"#N"10, 16 + %0, wt\n" \
	    "sv.q c"#N"20, 32 + %0, wt\n" \
	    "sv.q c"#N"30, 48 + %0, wt\n" \
	    : "=m" (c->fpregs[N * 4*4]) \
	    : : "memory")

	switch(mat) {
	case 0:	SV(0); break;
	case 1:	SV(1); break;
	case 2:	SV(2); break;
	case 3:	SV(3); break;
	case 4:	SV(4); break;
	case 5:	SV(5); break;
	case 6:	SV(6); break;
	case 7:	SV(7); break;
	}
#undef SV
}
/* Restore VFPU matrix MAT from the context's fpregs shadow, one
   quad-word load (lv.q) per row -- the inverse of save_matrix().  */
static void load_matrix(const struct pspvfpu_context *c, int mat)
{
#define LV(N) \
	asm("lv.q c"#N"00, 0 + %0\n" \
	    "lv.q c"#N"10, 16 + %0\n" \
	    "lv.q c"#N"20, 32 + %0\n" \
	    "lv.q c"#N"30, 48 + %0\n" \
	    : : "m" (c->fpregs[N * 4*4]) \
	    : "memory")

	switch(mat) {
	case 0:	LV(0); break;
	case 1:	LV(1); break;
	case 2:	LV(2); break;
	case 3:	LV(3); break;
	case 4:	LV(4); break;
	case 5:	LV(5); break;
	case 6:	LV(6); break;
	case 7:	LV(7); break;
	}
#undef LV
}
/*
  Switch the VFPU's register state for the current context.  This means:
  1. save any other context's matrices in the set (keepset | tempset)
  2. load the current context's valid keepset (keepset & c->valid)
  3. mark the current context as owning (keepset | tempset), and having keepset valid

  Note, a NULL context is a valid special case.  It means that the
  caller doesn't care about long-term matrix use, but does want to
  claim a temporary matrix.
 */
void pspvfpu_use_matrices(struct pspvfpu_context *c, vfpumatrixset_t keepset, vfpumatrixset_t tempset)
{
	vfpumatrixset_t saveset, loadset;

	/* If a matrix is both in the keepset and tempset, drop it
	   from tempset */
	tempset &= ~keepset;

	if (c != NULL) {
		/* If the context already has a matrix owned, we
		   don't need to handle it */
		keepset &= ~c->owned;
		tempset &= ~c->owned;

		saveset = keepset | tempset; /* save everything we use */
		loadset = keepset & c->valid; /* only reload valid matrices */

		c->owned |= saveset;	/* will be true by the time we're done */
		c->valid |= keepset;	/* likewise */
		c->valid &= ~tempset;	/* temporaries aren't valid */
	} else {
		saveset = keepset | tempset;
		loadset = 0;	/* no context, nothing to reload */
	}

	/* Walk the bits of saveset/loadset in parallel, matrix by matrix. */
	int m = 0;
	while(saveset) {
		/* skip to the next member of the matrix set
		   (ctz -> count trailing zeros; the number of zeros in the LSB) */
		int skip = __builtin_ctz(saveset);

		m += skip;
		saveset >>= skip;
		loadset >>= skip;

		/* we need to save everyone else's use of saveset
		   matrices */
		if (users[m] != NULL) {
			struct pspvfpu_context *other = users[m];

			/* only spill a matrix the other context wants kept */
			if (other->valid & (1 << m))
				save_matrix(other, m);
			other->owned &= ~(1 << m);
			other->valid &= ~(1 << m);
		}

		/* reload matrix values if necessary */
		if (loadset & 1)
			load_matrix(c, m);

		/* we own all the matrices we're about to use */
		users[m] = c;

		saveset &= ~1;	/* that one's done */
	}
}
/*
Create a new context, and make sure the matrices in matrixset are
ready for use.
*/
/* Create a fresh, empty VFPU context for the calling thread.
   Returns NULL if the kernel refuses the VFPU attribute or the
   (VFPU-aligned) allocation fails.  */
struct pspvfpu_context *pspvfpu_initcontext(void)
{
	/* Ask the kernel to preserve this thread's VFPU state across
	   context switches; without it the context would be useless. */
	if (sceKernelChangeCurrentThreadAttr(0, PSP_THREAD_ATTR_VFPU) < 0)
		return NULL;

	struct pspvfpu_context *ctx = memalign(VFPU_ALIGNMENT, sizeof(*ctx));
	if (ctx == NULL)
		return NULL;

	/* Nothing owned, nothing valid yet. */
	ctx->owned = 0;
	ctx->valid = 0;
	return ctx;
}
/* Destroy a context.  Any ownership records pointing at it are cleared
   first so a later pspvfpu_use_matrices() never touches freed memory. */
void pspvfpu_deletecontext(struct pspvfpu_context *c)
{
	int m;

	for (m = 0; m < NMAT; m++) {
		if (users[m] == c)
			users[m] = NULL;
	}

	free(c);
}
| 1,936 |
1,831 |
<filename>logdevice/common/configuration/EpochMetaDataVersion.cpp
/**
* Copyright (c) 2017-present, Facebook, Inc. and its affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "logdevice/common/configuration/EpochMetaDataVersion.h"
#include "logdevice/common/configuration/ServerConfig.h"
namespace facebook { namespace logdevice { namespace epoch_metadata_version {
// Returns the epoch-metadata version that should be written: the value
// pinned in the server config if present, otherwise the library's
// CURRENT version.  Asserts the result is a writable version.
type versionToWrite(const std::shared_ptr<ServerConfig>& server_cfg) {
  ld_check(server_cfg);
  auto version_in_config =
      server_cfg->getMetaDataLogsConfig().metadata_version_to_write;
  // value_or() replaces the has_value()/value() ternary with the
  // equivalent single-call idiom.
  const type res = version_in_config.value_or(CURRENT);
  ld_check(validToWrite(res));
  return res;
}
}}} // namespace facebook::logdevice::epoch_metadata_version
| 273 |
679 |
<gh_stars>100-1000
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_basegfx.hxx"
#include <osl/diagnose.h>
#include <basegfx/polygon/b3dpolypolygon.hxx>
#include <basegfx/polygon/b3dpolygon.hxx>
#include <rtl/instance.hxx>
#include <basegfx/matrix/b2dhommatrix.hxx>
#include <basegfx/matrix/b3dhommatrix.hxx>
#include <functional>
#include <vector>
#include <algorithm>
//////////////////////////////////////////////////////////////////////////////
// Shared implementation (copy-on-write payload) behind basegfx::B3DPolyPolygon.
// Holds the contained polygons in a plain vector and provides the primitive
// operations the public facade forwards to.
class ImplB3DPolyPolygon
{
    typedef ::std::vector< ::basegfx::B3DPolygon > PolygonVector;

    // the polygons this poly-polygon is composed of
    PolygonVector maPolygons;

public:
    ImplB3DPolyPolygon() : maPolygons()
    {
    }

    // construct with a single polygon as initial content
    ImplB3DPolyPolygon(const ::basegfx::B3DPolygon& rToBeCopied) :
        maPolygons(1,rToBeCopied)
    {
    }

    bool operator==(const ImplB3DPolyPolygon& rPolygonList) const
    {
        // same polygon count?
        if(maPolygons.size() != rPolygonList.maPolygons.size())
            return false;

        // compare polygon content
        if(maPolygons != rPolygonList.maPolygons)
            return false;

        return true;
    }

    // read access; nIndex must be < count() (not range-checked here)
    const ::basegfx::B3DPolygon& getB3DPolygon(sal_uInt32 nIndex) const
    {
        return maPolygons[nIndex];
    }

    void setB3DPolygon(sal_uInt32 nIndex, const ::basegfx::B3DPolygon& rPolygon)
    {
        maPolygons[nIndex] = rPolygon;
    }

    // insert nCount copies of rPolygon at position nIndex
    void insert(sal_uInt32 nIndex, const ::basegfx::B3DPolygon& rPolygon, sal_uInt32 nCount)
    {
        if(nCount)
        {
            // add nCount copies of rPolygon
            PolygonVector::iterator aIndex(maPolygons.begin());
            aIndex += nIndex;
            maPolygons.insert(aIndex, nCount, rPolygon);
        }
    }

    // insert all polygons of rPolyPolygon at position nIndex
    void insert(sal_uInt32 nIndex, const ::basegfx::B3DPolyPolygon& rPolyPolygon)
    {
        const sal_uInt32 nCount = rPolyPolygon.count();

        if(nCount)
        {
            // add nCount polygons from rPolyPolygon
            maPolygons.reserve(maPolygons.size() + nCount);
            PolygonVector::iterator aIndex(maPolygons.begin());
            aIndex += nIndex;

            // NOTE(review): each vector::insert formally invalidates aIndex;
            // this relies on the reserve() above preventing reallocation.
            // Consider using the iterator returned by insert() instead.
            for(sal_uInt32 a(0L); a < nCount; a++)
            {
                maPolygons.insert(aIndex, rPolyPolygon.getB3DPolygon(a));
                aIndex++;
            }
        }
    }

    // remove nCount polygons starting at position nIndex
    void remove(sal_uInt32 nIndex, sal_uInt32 nCount)
    {
        if(nCount)
        {
            // remove polygon data
            PolygonVector::iterator aStart(maPolygons.begin());
            aStart += nIndex;
            const PolygonVector::iterator aEnd(aStart + nCount);
            maPolygons.erase(aStart, aEnd);
        }
    }

    sal_uInt32 count() const
    {
        return maPolygons.size();
    }

    // propagate the closed state to every contained polygon
    void setClosed(bool bNew)
    {
        for(sal_uInt32 a(0L); a < maPolygons.size(); a++)
        {
            maPolygons[a].setClosed(bNew);
        }
    }

    // reverse the point order of every contained polygon
    void flip()
    {
        std::for_each( maPolygons.begin(),
                       maPolygons.end(),
                       std::mem_fun_ref( &::basegfx::B3DPolygon::flip ));
    }

    void removeDoublePoints()
    {
        std::for_each( maPolygons.begin(),
                       maPolygons.end(),
                       std::mem_fun_ref( &::basegfx::B3DPolygon::removeDoublePoints ));
    }

    void transform(const ::basegfx::B3DHomMatrix& rMatrix)
    {
        for(sal_uInt32 a(0L); a < maPolygons.size(); a++)
        {
            maPolygons[a].transform(rMatrix);
        }
    }

    void clearBColors()
    {
        for(sal_uInt32 a(0L); a < maPolygons.size(); a++)
        {
            maPolygons[a].clearBColors();
        }
    }

    void transformNormals(const ::basegfx::B3DHomMatrix& rMatrix)
    {
        for(sal_uInt32 a(0L); a < maPolygons.size(); a++)
        {
            maPolygons[a].transformNormals(rMatrix);
        }
    }

    void clearNormals()
    {
        for(sal_uInt32 a(0L); a < maPolygons.size(); a++)
        {
            maPolygons[a].clearNormals();
        }
    }

    // note: name keeps the historical misspelling of the public API
    void transformTextureCoordiantes(const ::basegfx::B2DHomMatrix& rMatrix)
    {
        for(sal_uInt32 a(0L); a < maPolygons.size(); a++)
        {
            maPolygons[a].transformTextureCoordiantes(rMatrix);
        }
    }

    void clearTextureCoordinates()
    {
        for(sal_uInt32 a(0L); a < maPolygons.size(); a++)
        {
            maPolygons[a].clearTextureCoordinates();
        }
    }

    // let each contained polygon detach from shared implementation data
    void makeUnique()
    {
        std::for_each( maPolygons.begin(),
                       maPolygons.end(),
                       std::mem_fun_ref( &::basegfx::B3DPolygon::makeUnique ));
    }
};
//////////////////////////////////////////////////////////////////////////////
namespace basegfx
{
    // Process-wide, lazily created default implementation object; all
    // default-constructed (and cleared) B3DPolyPolygon instances share it
    // via copy-on-write until first mutation.
    namespace { struct DefaultPolyPolygon : public rtl::Static<B3DPolyPolygon::ImplType, 
                                                               DefaultPolyPolygon> {}; }

    B3DPolyPolygon::B3DPolyPolygon() :
        mpPolyPolygon(DefaultPolyPolygon::get())
    {
    }

    B3DPolyPolygon::B3DPolyPolygon(const B3DPolyPolygon& rPolyPolygon) :
        mpPolyPolygon(rPolyPolygon.mpPolyPolygon)
    {
    }

    B3DPolyPolygon::B3DPolyPolygon(const B3DPolygon& rPolygon) :
        mpPolyPolygon( ImplB3DPolyPolygon(rPolygon) )
    {
    }

    B3DPolyPolygon::~B3DPolyPolygon()
    {
    }

    B3DPolyPolygon& B3DPolyPolygon::operator=(const B3DPolyPolygon& rPolyPolygon)
    {
        // shallow copy; actual data is shared copy-on-write
        mpPolyPolygon = rPolyPolygon.mpPolyPolygon;
        return *this;
    }

    // detach from any shared implementation and propagate to the polygons
    void B3DPolyPolygon::makeUnique()
    {
        mpPolyPolygon.make_unique();
        mpPolyPolygon->makeUnique();
    }

    bool B3DPolyPolygon::operator==(const B3DPolyPolygon& rPolyPolygon) const
    {
        // fast path: both handles share the same implementation object
        if(mpPolyPolygon.same_object(rPolyPolygon.mpPolyPolygon))
            return true;

        return ((*mpPolyPolygon) == (*rPolyPolygon.mpPolyPolygon));
    }

    bool B3DPolyPolygon::operator!=(const B3DPolyPolygon& rPolyPolygon) const
    {
        return !(*this == rPolyPolygon);
    }

    sal_uInt32 B3DPolyPolygon::count() const
    {
        return mpPolyPolygon->count();
    }

    B3DPolygon B3DPolyPolygon::getB3DPolygon(sal_uInt32 nIndex) const
    {
        OSL_ENSURE(nIndex < mpPolyPolygon->count(), "B3DPolyPolygon access outside range (!)");

        return mpPolyPolygon->getB3DPolygon(nIndex);
    }

    void B3DPolyPolygon::setB3DPolygon(sal_uInt32 nIndex, const B3DPolygon& rPolygon)
    {
        OSL_ENSURE(nIndex < mpPolyPolygon->count(), "B3DPolyPolygon access outside range (!)");

        // only copy-on-write when the content actually changes
        if(getB3DPolygon(nIndex) != rPolygon)
            mpPolyPolygon->setB3DPolygon(nIndex, rPolygon);
    }

    // true if any contained polygon carries per-vertex colors
    bool B3DPolyPolygon::areBColorsUsed() const
    {
        for(sal_uInt32 a(0L); a < mpPolyPolygon->count(); a++)
        {
            if((mpPolyPolygon->getB3DPolygon(a)).areBColorsUsed())
            {
                return true;
            }
        }

        return false;
    }

    void B3DPolyPolygon::clearBColors()
    {
        if(areBColorsUsed())
            mpPolyPolygon->clearBColors();
    }

    void B3DPolyPolygon::transformNormals(const B3DHomMatrix& rMatrix)
    {
        // identity transform would be a no-op; avoid the copy-on-write
        if(!rMatrix.isIdentity())
            mpPolyPolygon->transformNormals(rMatrix);
    }

    // true if any contained polygon carries per-vertex normals
    bool B3DPolyPolygon::areNormalsUsed() const
    {
        for(sal_uInt32 a(0L); a < mpPolyPolygon->count(); a++)
        {
            if((mpPolyPolygon->getB3DPolygon(a)).areNormalsUsed())
            {
                return true;
            }
        }

        return false;
    }

    void B3DPolyPolygon::clearNormals()
    {
        if(areNormalsUsed())
            mpPolyPolygon->clearNormals();
    }

    void B3DPolyPolygon::transformTextureCoordiantes(const B2DHomMatrix& rMatrix)
    {
        if(!rMatrix.isIdentity())
            mpPolyPolygon->transformTextureCoordiantes(rMatrix);
    }

    // true if any contained polygon carries texture coordinates
    bool B3DPolyPolygon::areTextureCoordinatesUsed() const
    {
        for(sal_uInt32 a(0L); a < mpPolyPolygon->count(); a++)
        {
            if((mpPolyPolygon->getB3DPolygon(a)).areTextureCoordinatesUsed())
            {
                return true;
            }
        }

        return false;
    }

    void B3DPolyPolygon::clearTextureCoordinates()
    {
        if(areTextureCoordinatesUsed())
            mpPolyPolygon->clearTextureCoordinates();
    }

    void B3DPolyPolygon::insert(sal_uInt32 nIndex, const B3DPolygon& rPolygon, sal_uInt32 nCount)
    {
        OSL_ENSURE(nIndex <= mpPolyPolygon->count(), "B3DPolyPolygon Insert outside range (!)");

        if(nCount)
            mpPolyPolygon->insert(nIndex, rPolygon, nCount);
    }

    void B3DPolyPolygon::append(const B3DPolygon& rPolygon, sal_uInt32 nCount)
    {
        if(nCount)
            mpPolyPolygon->insert(mpPolyPolygon->count(), rPolygon, nCount);
    }

    void B3DPolyPolygon::insert(sal_uInt32 nIndex, const B3DPolyPolygon& rPolyPolygon)
    {
        OSL_ENSURE(nIndex <= mpPolyPolygon->count(), "B3DPolyPolygon Insert outside range (!)");

        if(rPolyPolygon.count())
            mpPolyPolygon->insert(nIndex, rPolyPolygon);
    }

    void B3DPolyPolygon::append(const B3DPolyPolygon& rPolyPolygon)
    {
        if(rPolyPolygon.count())
            mpPolyPolygon->insert(mpPolyPolygon->count(), rPolyPolygon);
    }

    void B3DPolyPolygon::remove(sal_uInt32 nIndex, sal_uInt32 nCount)
    {
        OSL_ENSURE(nIndex + nCount <= mpPolyPolygon->count(), "B3DPolyPolygon Remove outside range (!)");

        if(nCount)
            mpPolyPolygon->remove(nIndex, nCount);
    }

    void B3DPolyPolygon::clear()
    {
        // reset to the shared default implementation (cheap, no deallocation
        // of the shared default object)
        mpPolyPolygon = DefaultPolyPolygon::get();
    }

    bool B3DPolyPolygon::isClosed() const
    {
        bool bRetval(true);

        // PolyPolygon is closed when all contained Polygons are closed or
        // no Polygon exists.
        for(sal_uInt32 a(0L); bRetval && a < mpPolyPolygon->count(); a++)
        {
            if(!(mpPolyPolygon->getB3DPolygon(a)).isClosed())
            {
                bRetval = false;
            }
        }

        return bRetval;
    }

    void B3DPolyPolygon::setClosed(bool bNew)
    {
        if(bNew != isClosed())
            mpPolyPolygon->setClosed(bNew);
    }

    void B3DPolyPolygon::flip()
    {
        mpPolyPolygon->flip();
    }

    bool B3DPolyPolygon::hasDoublePoints() const
    {
        bool bRetval(false);

        for(sal_uInt32 a(0L); !bRetval && a < mpPolyPolygon->count(); a++)
        {
            if((mpPolyPolygon->getB3DPolygon(a)).hasDoublePoints())
            {
                bRetval = true;
            }
        }

        return bRetval;
    }

    void B3DPolyPolygon::removeDoublePoints()
    {
        if(hasDoublePoints())
            mpPolyPolygon->removeDoublePoints();
    }

    void B3DPolyPolygon::transform(const B3DHomMatrix& rMatrix)
    {
        if(mpPolyPolygon->count() && !rMatrix.isIdentity())
        {
            mpPolyPolygon->transform(rMatrix);
        }
    }
} // end of namespace basegfx
// eof
| 4,410 |
2,151 |
<reponame>zipated/src
#!/usr/bin/env python
# Copyright 2016 The Shaderc Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Updates build-version.inc in the current directory, unless the update is
# identical to the existing content.
#
# Args: <shaderc_dir> <spirv-tools_dir> <glslang_dir>
#
# For each directory, there will be a line in build-version.inc containing that
# directory's "git describe" output enclosed in double quotes and appropriately
# escaped.
from __future__ import print_function
import datetime
import os.path
import subprocess
import sys
OUTFILE = 'build-version.inc'
def command_output(cmd, dir):
    """Runs a command in a directory and returns its standard output stream.

    Captures the standard error stream and includes it in the error message
    on failure.

    Args:
      cmd: The command as an argument list, e.g. ['git', 'describe'].
      dir: The working directory to run the command in.

    Returns:
      The command's standard output as bytes.

    Raises:
      RuntimeError: If the command fails to launch or exits with a non-zero
        status.  The message carries the captured stderr for diagnosis.
    """
    try:
        p = subprocess.Popen(cmd,
                             cwd=dir,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    except OSError as e:
        # Normalize launch failures (missing binary, bad cwd) to the
        # documented RuntimeError instead of leaking OSError.
        raise RuntimeError('Failed to launch %s in %s: %s' % (cmd, dir, e))
    (stdout, stderr) = p.communicate()
    if p.returncode != 0:
        # Surface stderr so build failures are actionable, not opaque.
        raise RuntimeError('Failed to run %s in %s: %s' %
                           (cmd, dir, stderr.decode('utf-8', 'replace')))
    return stdout
def describe(dir):
    """Returns a string describing the current Git HEAD version as descriptively
    as possible.

    Runs 'git describe', or alternately 'git rev-parse HEAD', in dir. If
    successful, returns the output; otherwise returns 'unknown hash, <date>'."""
    try:
        # decode() is needed here for Python3 compatibility. In Python2,
        # str and bytes are the same type, but not in Python3.
        # Popen.communicate() returns a bytes instance, which needs to be
        # decoded into text data first in Python3. And this decode() won't
        # hurt Python2.
        return command_output(['git', 'describe'], dir).rstrip().decode()
    except Exception:
        # Catch Exception (not a bare except) so SystemExit and
        # KeyboardInterrupt still propagate.
        try:
            return command_output(
                ['git', 'rev-parse', 'HEAD'], dir).rstrip().decode()
        except Exception:
            # Not a git checkout (or git unavailable): fall back to a
            # date-stamped placeholder.
            return 'unknown hash, ' + datetime.date.today().isoformat()
def main():
    """Regenerates build-version.inc from the three project checkouts.

    Leaves the file untouched when its content is already current, so build
    systems watching the timestamp do not trigger spurious rebuilds.
    """
    if len(sys.argv) != 4:
        print(
            'usage: {0} <shaderc_dir> <spirv-tools_dir> <glslang_dir>'.format(sys.argv[0]))
        sys.exit(1)

    projects = ['shaderc', 'spirv-tools', 'glslang']
    tags = [describe(p).replace('"', '\\"')
            for p in sys.argv[1:]]
    new_content = ''.join([
        '"{} {}\\n"\n'.format(p, t)
        for (p, t) in zip(projects, tags)])

    # Use context managers so file handles are closed deterministically
    # (the previous bare open() leaked handles on non-CPython runtimes).
    if os.path.isfile(OUTFILE):
        with open(OUTFILE, 'r') as f:
            if new_content == f.read():
                sys.exit(0)
    with open(OUTFILE, 'w') as f:
        f.write(new_content)
| 1,182 |
1,529 |
<gh_stars>1000+
/*
* Copyright (c) 2016 咖枯 <<EMAIL> | <EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.kaku.colorfulnews.mvp.presenter.impl;
import com.kaku.colorfulnews.mvp.entity.NewsSummary;
import com.kaku.colorfulnews.common.LoadNewsType;
import com.kaku.colorfulnews.mvp.interactor.NewsListInteractor;
import com.kaku.colorfulnews.mvp.interactor.impl.NewsListInteractorImpl;
import com.kaku.colorfulnews.listener.RequestCallBack;
import com.kaku.colorfulnews.mvp.presenter.NewsListPresenter;
import com.kaku.colorfulnews.mvp.presenter.base.BasePresenterImpl;
import com.kaku.colorfulnews.mvp.view.NewsListView;
import java.util.List;
import javax.inject.Inject;
/**
* @author 咖枯
* @version 1.0 2016/5/19
*/
/**
 * Presenter for the news-list screen: mediates between {@link NewsListView}
 * and the {@link NewsListInteractor} that loads pages of {@link NewsSummary}
 * items, implementing the request callback itself.
 */
public class NewsListPresenterImpl extends BasePresenterImpl<NewsListView, List<NewsSummary>>
        implements NewsListPresenter, RequestCallBack<List<NewsSummary>> {

    private NewsListInteractor<List<NewsSummary>> mNewsListInteractor;
    // request parameters forwarded to the interactor
    private String mNewsType;
    private String mNewsId;
    // paging offset; advanced by 20 (the page size) after each successful load
    private int mStartPage;
    // set to true after the first request succeeds; used to suppress the
    // progress indicator on subsequent requests
    private boolean misFirstLoad;
    // true while the current request is a refresh, false for load-more
    private boolean mIsRefresh = true;

    @Inject
    public NewsListPresenterImpl(NewsListInteractorImpl newsListInteractor) {
        mNewsListInteractor = newsListInteractor;
    }

    @Override
    public void onCreate() {
        // kick off the initial load as soon as the view is attached
        if (mView != null) {
            loadNewsData();
        }
    }

    @Override
    public void beforeRequest() {
        // only show the blocking progress indicator before the first
        // successful load; later requests refresh in place
        if (!misFirstLoad) {
            mView.showProgress();
        }
    }

    @Override
    public void onError(String errorMsg) {
        super.onError(errorMsg);
        if (mView != null) {
            // report the failure in terms of the operation that was running
            int loadType = mIsRefresh ? LoadNewsType.TYPE_REFRESH_ERROR : LoadNewsType.TYPE_LOAD_MORE_ERROR;
            mView.setNewsList(null, loadType);
        }
    }

    @Override
    public void success(List<NewsSummary> items) {
        misFirstLoad = true;
        // advance the paging offset only when data actually arrived
        if (items != null) {
            mStartPage += 20;
        }
        int loadType = mIsRefresh ? LoadNewsType.TYPE_REFRESH_SUCCESS : LoadNewsType.TYPE_LOAD_MORE_SUCCESS;
        if (mView != null) {
            mView.setNewsList(items, loadType);
            mView.hideProgress();
        }
    }

    @Override
    public void setNewsTypeAndId(String newsType, String newsId) {
        mNewsType = newsType;
        mNewsId = newsId;
    }

    @Override
    public void refreshData() {
        // restart paging from the beginning
        mStartPage = 0;
        mIsRefresh = true;
        loadNewsData();
    }

    @Override
    public void loadMore() {
        mIsRefresh = false;
        loadNewsData();
    }

    private void loadNewsData() {
        // keep the subscription in mSubscription (managed by the base presenter)
        mSubscription = mNewsListInteractor.loadNews(this, mNewsType, mNewsId, mStartPage);
    }
}
| 1,268 |
528 |
from opytimizer.optimizers.evolutionary import FOA

# One should declare a hyperparameters object based
# on the desired algorithm that will be used
params = {
    'life_time': 6,       # maximum age a tree may reach -- TODO confirm against FOA paper
    'area_limit': 30,     # maximum number of trees kept in the forest -- verify
    'LSC': 1,             # presumably "local seeding changes" -- confirm semantics
    'GSC': 1,             # presumably "global seeding changes" -- confirm semantics
    'transfer_rate': 0.1  # fraction involved in global seeding -- TODO confirm
}

# Creates a FOA optimizer
o = FOA(params=params)
| 118 |
2,313 |
<gh_stars>1000+
/*
* Copyright (c) 2017-2018,2021 Arm Limited.
*
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef ARM_COMPUTE_TEST_MEASUREMENT
#define ARM_COMPUTE_TEST_MEASUREMENT
#include "../Utils.h"
#include "arm_compute/core/Error.h"
#include <list>
#include <ostream>
#include <string>
namespace arm_compute
{
namespace test
{
namespace framework
{
/** Generic measurement that stores values as either double or long long int. */
struct Measurement
{
    /** Measurement value */
    struct Value
    {
        /** Constructor
         *
         * @param[in] is_floating Will the value stored be floating point ?
         */
        Value(bool is_floating)
            : v{ 0 }, is_floating_point(is_floating)
        {
        }

        /** Add the value stored to the stream as a string
         */
        friend std::ostream &operator<<(std::ostream &os, const Value &value)
        {
            if(value.is_floating_point)
            {
                os << arithmetic_to_string(value.v.floating_point, 4);
            }
            else
            {
                os << arithmetic_to_string(value.v.integer);
            }
            return os;
        }

        /** Convert the value stored to string
         */
        std::string to_string() const
        {
            std::stringstream ss;
            ss << *this;
            return ss.str();
        }

        // NOTE(review): the arithmetic and comparison operators below read
        // whichever union member matches *this* value's is_floating_point
        // flag; they assume both operands share the same representation.
        // Mixing an integer and a floating Value would read the wrong
        // union member -- confirm callers never mix representations.

        /** Add with another value and return the sum
         *
         * @param[in] b Other value
         *
         * @return Sum of the stored value + b
         */
        Value operator+(Value b) const
        {
            if(is_floating_point)
            {
                b.v.floating_point += v.floating_point;
            }
            else
            {
                b.v.integer += v.integer;
            }
            return b;
        }

        /** Subtract with another value and return the result
         *
         * @param[in] b Other value
         *
         * @return Result of the stored value - b
         */
        Value operator-(Value b) const
        {
            if(is_floating_point)
            {
                b.v.floating_point -= v.floating_point;
            }
            else
            {
                b.v.integer -= v.integer;
            }
            return b;
        }

        /** Multiple with another value and return the result
         *
         * @param[in] b Other value
         *
         * @return Result of the stored value * b
         */
        Value operator*(Value b) const
        {
            if(is_floating_point)
            {
                b.v.floating_point *= v.floating_point;
            }
            else
            {
                b.v.integer *= v.integer;
            }
            return b;
        }

        /** Return the stored value divided by an integer.
         *
         * @param[in] b Integer to divide the value by.
         *
         * @return Stored value / b
         */
        Value operator/(int b) const
        {
            Value res(is_floating_point);
            if(is_floating_point)
            {
                res.v.floating_point = v.floating_point / b;
            }
            else
            {
                res.v.integer = v.integer / b;
            }
            return res;
        }

        /** Subtract another value and return the updated stored value.
         *
         * @param[in] b Other value
         *
         * @return The updated stored value
         */
        Value &operator-=(const Value &b)
        {
            if(is_floating_point)
            {
                v.floating_point -= b.v.floating_point;
            }
            else
            {
                v.integer -= b.v.integer;
            }
            return *this;
        }

        /** Compare the stored value with another value
         *
         * @param[in] b Value to compare against
         *
         * @return The result of stored value < b
         */
        bool operator<(const Value &b) const
        {
            if(is_floating_point)
            {
                return v.floating_point < b.v.floating_point;
            }
            else
            {
                return v.integer < b.v.integer;
            }
        }

        /** Get the relative standard deviation to a given distribution as a percentage.
         *
         * @param[in] variance The variance of the distribution.
         * @param[in] mean     The mean of the distribution.
         *
         * @return the relative standard deviation.
         */
        static double relative_standard_deviation(const Value &variance, const Value &mean)
        {
            if(variance.is_floating_point)
            {
                return 100.0 * sqrt(variance.v.floating_point) / mean.v.floating_point;
            }
            else
            {
                return 100.0 * sqrt(static_cast<double>(variance.v.integer)) / mean.v.integer;
            }
        }

        /** Stored value */
        union
        {
            double        floating_point;
            long long int integer;
        } v;
        bool is_floating_point; /**< Is the stored value floating point or integer ? */
    };

    /** Compare the stored value with another value
     *
     * @param[in] b Value to compare against
     *
     * @return The result of stored value < b
     */
    bool operator<(const Measurement &b) const
    {
        return _value < b.value();
    }

    /** Stream output operator to print the measurement.
     *
     * Prints value and unit.
     *
     * @param[out] os          Output stream.
     * @param[in]  measurement Measurement.
     *
     * @return the modified output stream.
     */
    friend inline std::ostream &operator<<(std::ostream &os, const Measurement &measurement)
    {
        os << measurement._value << " " << measurement._unit;
        return os;
    }

    /** Constructor to store a floating point value
     *
     * @param[in] v    Value to store
     * @param[in] unit Unit of @p v
     * @param[in] raw  (Optional) The raw value(s) @p was generated from.
     */
    template < typename Floating, typename std::enable_if < !std::is_integral<Floating>::value, int >::type = 0 >
    Measurement(Floating v, std::string unit, std::list<std::string> raw = {})
        : _unit(unit), _raw_data(std::move(raw)), _value(true)
    {
        _value.v.floating_point = static_cast<double>(v);
        // default the raw data to the formatted value itself
        if(_raw_data.empty())
        {
            _raw_data = { _value.to_string() };
        }
    }

    /** Constructor to store an integer value
     *
     * @param[in] v    Value to store
     * @param[in] unit Unit of @p v
     * @param[in] raw  (Optional) The raw value(s) @p was generated from.
     */
    template <typename Integer, typename std::enable_if<std::is_integral<Integer>::value, int>::type = 0>
    Measurement(Integer v, std::string unit, std::list<std::string> raw = {})
        : _unit(unit), _raw_data(std::move(raw)), _value(false)
    {
        _value.v.integer = static_cast<long long int>(v);
        // default the raw data to the formatted value itself
        if(_raw_data.empty())
        {
            _raw_data = { _value.to_string() };
        }
    }

    /** Accessor for the unit of the measurement
     *
     * @return Unit of the measurement
     */
    const std::string &unit() const
    {
        return _unit;
    }

    /** Accessor for the raw data
     *
     * @return The raw data
     */
    const std::list<std::string> &raw_data() const
    {
        return _raw_data;
    }

    /** Accessor for the stored value
     *
     * @return The stored value
     */
    const Value &value() const
    {
        return _value;
    }

private:
    std::string            _unit;
    std::list<std::string> _raw_data;
    Value                  _value;
};
} // namespace framework
} // namespace test
} // namespace arm_compute
#endif /* ARM_COMPUTE_TEST_MEASUREMENT */
| 4,228 |
32,544 |
package com.baeldung.lagom.helloworld.weather.api;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
/**
 * Canned weather summary messages. {@link #forToday()} selects one of the
 * defined constants uniformly at random.
 */
public enum WeatherStats {

    STATS_RAINY("Going to Rain, Take Umbrella"),
    STATS_HUMID("Going to be very humid, Take Water");

    private final String message;

    /** Immutable snapshot of every constant, computed once at class load. */
    private static final List<WeatherStats> ALL =
            Collections.unmodifiableList(Arrays.asList(values()));
    private static final Random PICKER = new Random();

    WeatherStats(String text) {
        this.message = text;
    }

    /** @return a uniformly random weather stat. */
    public static WeatherStats forToday() {
        return ALL.get(PICKER.nextInt(ALL.size()));
    }

    /** @return the human-readable message for this stat. */
    public String getMessage() {
        return message;
    }
}
| 264 |
330 |
<gh_stars>100-1000
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Privunit definitions.
This is the code to be used to simulate the privunit algorithm.
The privunit algorithm was introduced by Bhowmick et al in "Protection Against
Reconstruction and Its Applications in Private Federated Learning" -
https://arxiv.org/pdf/1812.00984.pdf.
"""
import numpy as np
import scipy.special as sc
def find_best_gamma(d, eps):
"""This function finds the best gamma in an iterative fashion.
Gamma is essentially the parameter in the privunit algorithm that specifies
the distance from the equator (see figure 2 in the original paper linked
above). The best gamma here refers to the one that achieves maximum accuracy.
Gamma always adheres to (16a) or (16b) in the original paper (linked above).
Args:
d: Number of dimensions.
eps: The privacy parameter epsilon.
Returns:
gamma: The best gamma.
flag: Flag indicating how best gamma was calculated - True if 16a was used,
and False is 16b was used.
"""
flag = False
gamma_a = (np.exp(eps) - 1) / (np.exp(eps) + 1) * np.sqrt(np.pi / (2 * d - 2))
# Calculate an upper bound on gamma as the initialization step.
gamma_b = min(np.exp(eps) / (6 * np.sqrt(d)), 1)
while eps < 1 / 2 * np.log(
d * 36) - (d - 1) / 2 * np.log(1 - gamma_b**2) + np.log(gamma_b):
gamma_b = gamma_b / 1.01
if gamma_b > np.sqrt(2 / d):
gamma = max(gamma_b, gamma_a)
else:
gamma = gamma_a
if gamma == gamma_a:
flag = True
return gamma, flag
def get_privunit_densities(d, gamma, p):
"""Compute the constants that the conditional density is proportional to.
The conditional density of z (i.e., the output of the privunit) given x (i.e.,
the input to the privunit) is proportional to c1 if the inner product between
x and z is more than gamma and is proportional to c2 otherwise.
Args:
d: The number of dimensions.
gamma: The best gamma.
p : The probability with which an unit vector is sampled from the shaded
spherical cap associated with the input (see the original paper).
Returns:
c1: The factor that the conditional density of z given x is proportional to
if the inner product between x and z is more than gamma.
c2: The factor that the conditional density of z given x is proportional to
if the inner product between x and z is less than gamma.
"""
c1 = 2 * p / (sc.betainc((d - 1) / 2, 1 / 2, (1 - gamma**2)))
c2 = 2 * (1 - p) / (2 - sc.betainc((d - 1) / 2, 1 / 2, (1 - gamma**2)))
return c1, c2
def getm(d, gamma, p):
"""Get the parameter m (eq (15) in the paper) in the privunit mechanism."""
alpha = (d - 1) / 2
tau = (1 + gamma) / 2
if d > 1000:
# For large d, Stirling's formula is used to approximate eq (15).
m = (d - 2) / (d - 1) * (1 - gamma**2)**alpha / (
2 * np.sqrt(np.pi * (d - 3) / 2)) * (
p / (1 - sc.betainc(alpha, alpha, tau)) -
(1 - p) / sc.betainc(alpha, alpha, tau))
else:
# For small d, eq (15) is used directly
m = ((1 - gamma**2)**alpha) * (
(p / (sc.betainc(alpha, alpha, 1) - sc.betainc(alpha, alpha, tau))) -
((1 - p) / sc.betainc(alpha, alpha, tau))) / (
(2**(d - 2)) * (d - 1) * sc.beta(alpha, alpha))
return m
def get_optimized_budget(epsilon, d):
  """Finds the budget split in (0, 1) that maximizes the norm parameter m.

  Sweeps candidate splits 0.01..0.99; for each, part of epsilon buys gamma
  and the rest buys p, and the split with the largest resulting m wins.
  """
  candidates = np.linspace(0.01, 0.99, 99)
  scores = np.zeros(len(candidates))
  for idx, budget in enumerate(candidates):
    gamma, _ = find_best_gamma(d, budget * epsilon)
    p = np.exp((1 - budget) * epsilon) / (1 + np.exp((1 - budget) * epsilon))
    scores[idx] = getm(d, gamma, p)
  return candidates[np.argmax(scores)]
def apply_privunit(x, eps, budget):
  """This function applies the privunit mechanism.

  The privunit mechanism produces an unbiased estimator of x that has
  a small norm and is eps-differentially private. See algortihm 1 in the
  original paper (linked above).

  Args:
    x: The 2-dimensional array to be privatized, one column per vector.
      (Columns are presumably unit vectors -- TODO confirm with callers.)
    eps: The privacy factor epsilon.
    budget: The default budget splitting between the gamma and p parameters.

  Returns:
    x_perturbed: The x privatized using privunit. This has the same dimensions
      as x.
    m: The scalar norm that x_perturbed should be divided with to get
      an unbiased estimator.
  """
  # x is laid out with d-dimensional vectors as columns.
  (d, n) = x.shape
  gamma, _ = find_best_gamma(d, budget * eps)
  # p spends the remaining (1 - budget) share of epsilon.
  p = np.exp((1 - budget) * eps) / (1 + np.exp((1 - budget) * eps))
  m = getm(d, gamma, p)
  x_perturbed = np.zeros((d, n))
  for i in range(n):
    u = x[:, i]
    if np.random.uniform(0, 1) < p:
      # With probability p: rejection-sample a uniform unit vector whose
      # |<u, v>| clears gamma, then flip it into u's halfspace.
      while True:
        v = np.random.normal(0, 1, (1, d))
        v /= np.linalg.norm(v)
        if np.abs(np.inner(u, v)) >= gamma:
          break
      if np.inner(u, v) < 0:
        v = -v
    else:
      # Otherwise: rejection-sample a unit vector outside the cap
      # (<u, v> < gamma).
      while True:
        v = np.random.normal(0, 1, (1, d))
        v /= np.linalg.norm(v)
        if np.inner(u, v) < gamma:
          break
    # Pre-divide by m so the caller's estimator is already unbiased.
    x_perturbed[:, i] = v / m
  return x_perturbed, m
| 2,051 |
679 |
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_forms.hxx"
#include "attributedispatcher.hxx"
/** === begin UNO includes === **/
/** === end UNO includes === **/
#include <editeng/editview.hxx>
//........................................................................
namespace frm
{
//........................................................................
using namespace ::com::sun::star::uno;
using namespace ::com::sun::star::frame;
using namespace ::com::sun::star::lang;
using namespace ::com::sun::star::util;
using namespace ::com::sun::star::beans;
//====================================================================
//= OAttributeDispatcher
//====================================================================
//--------------------------------------------------------------------
OAttributeDispatcher::OAttributeDispatcher( EditView& _rView, AttributeId _nAttributeId, const URL& _rURL,
IMultiAttributeDispatcher* _pMasterDispatcher )
:ORichTextFeatureDispatcher( _rView, _rURL )
,m_pMasterDispatcher( _pMasterDispatcher )
,m_nAttributeId( _nAttributeId )
{
OSL_ENSURE( m_pMasterDispatcher, "OAttributeDispatcher::OAttributeDispatcher: invalid master dispatcher!" );
}
//--------------------------------------------------------------------
OAttributeDispatcher::~OAttributeDispatcher( )
{
acquire();
dispose();
}
//--------------------------------------------------------------------
void OAttributeDispatcher::disposing( ::osl::ClearableMutexGuard& _rClearBeforeNotify )
{
m_pMasterDispatcher = NULL;
ORichTextFeatureDispatcher::disposing( _rClearBeforeNotify );
}
//--------------------------------------------------------------------
void OAttributeDispatcher::fillFeatureEventFromAttributeState( FeatureStateEvent& _rEvent, const AttributeState& _rState ) const
{
if ( _rState.eSimpleState == eChecked )
_rEvent.State <<= (sal_Bool)sal_True;
else if ( _rState.eSimpleState == eUnchecked )
_rEvent.State <<= (sal_Bool)sal_False;
}
//--------------------------------------------------------------------
FeatureStateEvent OAttributeDispatcher::buildStatusEvent() const
{
FeatureStateEvent aEvent( ORichTextFeatureDispatcher::buildStatusEvent() );
aEvent.IsEnabled = getEditView() ? !getEditView()->IsReadOnly() : sal_False;
AttributeState aState;
if ( m_pMasterDispatcher )
aState = m_pMasterDispatcher->getState( m_nAttributeId );
fillFeatureEventFromAttributeState( aEvent, aState );
return aEvent;
}
//--------------------------------------------------------------------
// Executes the attribute on the master dispatcher.  Arguments are not
// supported for attribute features; in debug builds their presence is
// reported as an error.
void SAL_CALL OAttributeDispatcher::dispatch( const URL& _rURL, const Sequence< PropertyValue >& _rArguments ) throw (RuntimeException)
{
    ::osl::MutexGuard aGuard( m_aMutex );
    checkDisposed();

    // Silence "unused parameter" warnings in non-debug builds.
    (void)_rURL;
    (void)_rArguments;

    OSL_ENSURE( _rURL.Complete == getFeatureURL().Complete, "OAttributeDispatcher::dispatch: invalid URL!" );
#if OSL_DEBUG_LEVEL > 0
    if ( _rArguments.getLength() )
    {
        ::rtl::OString sMessage( "OAttributeDispatcher::dispatch: found arguments, but can't handle arguments at all" );
        sMessage += "\n (URL: ";
        sMessage += ::rtl::OString( _rURL.Complete.getStr(), _rURL.Complete.getLength(), RTL_TEXTENCODING_ASCII_US );
        sMessage += ")";
        DBG_ERROR( sMessage.getStr() );
    }
#endif
    // NOTE(review): NULL argument presumably means "toggle/apply default" —
    // confirm against the master dispatcher's executeAttribute contract.
    if ( m_pMasterDispatcher )
        m_pMasterDispatcher->executeAttribute( m_nAttributeId, NULL );
}
//--------------------------------------------------------------------
// Callback invoked when the attribute's state changes: rebuilds the status
// event and broadcasts it to all registered XStatusListeners.  The passed
// state is ignored; buildStatusEvent() re-queries the master dispatcher.
void OAttributeDispatcher::onAttributeStateChanged( AttributeId _nAttributeId, const AttributeState& /*_rState*/ )
{
    OSL_ENSURE( _nAttributeId == m_nAttributeId, "OAttributeDispatcher::onAttributeStateChanged: wrong attribute!" );
    (void)_nAttributeId;  // only used by the assertion above

    FeatureStateEvent aEvent( buildStatusEvent() );
    ::cppu::OInterfaceIteratorHelper aIter( getStatusListeners() );
    while ( aIter.hasMoreElements() )
        doNotify( static_cast< XStatusListener* >( aIter.next() ), aEvent );
}
//........................................................................
} // namespace frm
//........................................................................
| 1,742 |
12,887 |
<gh_stars>1000+
package com.zhisheng.connectors.cassandra.streaming;
import java.util.Objects;

import com.datastax.driver.mapping.annotations.Column;
import com.datastax.driver.mapping.annotations.Table;

import lombok.AllArgsConstructor;
/**
* Desc:
* Created by zhisheng on 2019-08-04
* blog:http://www.54tianzhisheng.cn/
* 微信公众号:zhisheng
*/
@Table(keyspace = "test", name = "message")
@AllArgsConstructor
public class Message {
    private static final long serialVersionUID = 1123119384361005680L;

    // Message payload, mapped to the Cassandra "body" column; may be null
    // (the no-arg constructor leaves it null).
    @Column(name = "body")
    private String message;

    /** No-arg constructor required by the object mapper; leaves the body null. */
    public Message() {
        this(null);
    }

    /** @return the message body, possibly null */
    public String getMessage() {
        return message;
    }

    /** @param word new message body, may be null */
    public void setMessage(String word) {
        this.message = word;
    }

    /**
     * Two messages are equal when their bodies are equal.
     * Fixed: previously dereferenced {@code this.message} directly, throwing
     * a NullPointerException for messages created via the no-arg constructor.
     */
    @Override
    public boolean equals(Object other) {
        if (other instanceof Message) {
            Message that = (Message) other;
            return Objects.equals(this.message, that.message);
        }
        return false;
    }

    /** Null-safe hash consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return Objects.hashCode(message);
    }
}
| 419 |
4,071 |
<filename>blaze/blaze/operator/op/reshape_op.h
/*
* \file reshape_op.h
* \brief The reshape operation
*/
#pragma once
#include "blaze/operator/operator.h"
#include "blaze/common/exception.h"
#include "blaze/common/types.h"
namespace blaze {
// Reshapes Input(0) according to the shape spec in Input(1):
//   entry == 0  -> keep the corresponding input dimension
//   entry <  0  -> infer this dimension from the remaining element count
//                  (at most one negative entry is allowed)
//   entry >  0  -> use the entry as the literal dimension
// The output aliases the input's buffer (RefReshape); no data is copied.
template <class Context>
class ReshapeOp : public Operator<Context> {
 public:
  USE_OPERATOR_FUNCTIONS(Context);

  ReshapeOp(const OperatorDef& def, Workspace* workspace) :
      Operator<Context>(def, workspace) { }

  bool RunOnDevice() override {
    Blob* x = this->Input(0);
    Blob* shape_blob = this->Input(1);
    Blob* y = this->Output(0);

    // Collect the requested shape from the second input blob.
    std::vector<int32_t> rshape;
    for (size_t i = 0; i < shape_blob->size(); ++i) {
      rshape.push_back(shape_blob->as<int32_t>()[i]);
    }
    const std::vector<size_t>& shape = x->shape();
    std::vector<size_t> new_shape(rshape.size());
    int unknown_pos = -1;   // index of the single inferred dimension, if any
    size_t known_size = 1;  // product of all explicitly known dimensions
    for (size_t i = 0; i < rshape.size(); ++i) {
      if (rshape[i] == 0) {
        new_shape[i] = shape[i]; known_size *= new_shape[i];
      } else if (rshape[i] < 0) {
        CHECK_EQ(unknown_pos, -1, "can not support two unknown dims");
        unknown_pos = i;
      } else {
        new_shape[i] = rshape[i]; known_size *= new_shape[i];
      }
    }
    if (unknown_pos >= 0) {
      // Derive the unknown dimension so the total element count is preserved.
      new_shape[unknown_pos] = x->size() / known_size;
    } else {
      // Without an inferred dimension the explicit shape must match exactly.
      CHECK_EQ(known_size, x->size(), "known_size=", known_size, " x->size()=", x->size());
    }
    y->RefReshape(new_shape, x->as<char>());
    return true;
  }
};
} // namespace blaze
| 684 |
531 |
/**
* Copyright (c) 2011-2021, JFXtras
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* Neither the name of the organization nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL JFXTRAS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package jfxtras.scene.control.agenda.icalendar.misc;
import java.time.LocalDate;
import java.time.temporal.Temporal;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import org.junit.Ignore;
import org.junit.Test;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.control.Label;
import javafx.util.Pair;
import jfxtras.internal.scene.control.skin.agenda.icalendar.base24hour.EditChoiceDialog;
import jfxtras.scene.control.agenda.icalendar.OSInfo;
import jfxtras.scene.control.agenda.icalendar.OSInfo.OS;
import jfxtras.scene.control.agenda.icalendar.editors.ChangeDialogOption;
import jfxtras.test.AssertNode;
import jfxtras.test.JFXtrasGuiTest;
import jfxtras.test.TestUtil;
// GUI test for the edit-choice dialog shown when a recurring calendar
// component is modified (change ONE / ALL / THIS_AND_FUTURE occurrences).
@Ignore // fails with NPE
public class ComponentChangeDialogTest extends JFXtrasGuiTest
{
    private ResourceBundle resources;

    // Example date ranges presented for each change option in the dialog.
    private static final Map<ChangeDialogOption, Pair<Temporal,Temporal>> EXAMPLE_MAP = makeExampleMap();
    private static Map<ChangeDialogOption, Pair<Temporal,Temporal>> makeExampleMap()
    {
        Map<ChangeDialogOption, Pair<Temporal,Temporal>> exampleMap = new LinkedHashMap<>();
        exampleMap.put(ChangeDialogOption.ALL, new Pair<Temporal, Temporal>(LocalDate.of(2016, 5, 25), null));
        exampleMap.put(ChangeDialogOption.ONE, new Pair<Temporal, Temporal>(LocalDate.of(2016, 5, 25), LocalDate.of(2016, 5, 25)));
        exampleMap.put(ChangeDialogOption.THIS_AND_FUTURE, new Pair<Temporal, Temporal>(LocalDate.of(2016, 6, 25), null));
        return exampleMap;
    }

    /** Minimal root node; the dialog under test is created inside the test itself. */
    @Override
    public Parent getRootNode()
    {
        resources = ResourceBundle.getBundle("jfxtras.ICalendarAgenda", Locale.getDefault());
        return new Label();
    }

    /** Shows the dialog, checks its geometry, then cancels it. */
    @Test
    public void canDisplayDialog()
    {
        TestUtil.runThenWaitForPaintPulse( () ->
        {
            EditChoiceDialog dialog = new EditChoiceDialog(EXAMPLE_MAP, resources);
            dialog.show();
        });
        Node n = find("#editChoiceDialog");
        // Expected dialog size differs between Linux and other platforms.
        double w = (OSInfo.MY_OS == OS.UNIX) ? 345.0 : 288.0;
        double h = (OSInfo.MY_OS == OS.UNIX) ? 166.0 : 158.0;
        //AssertNode.generateSource("n", n, null, false, jfxtras.test.AssertNode.A.XYWH);
        new AssertNode(n).assertXYWH(0.0, 0.0, w, h, 0.01);
        clickOn("#changeDialogCancelButton");
    }
}
| 1,378 |
922 |
// tag::include[]
package org.hibernate.validator.referenceguide.chapter12.purelycomposed;
//end::include[]
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import jakarta.validation.Constraint;
import jakarta.validation.OverridesAttribute;
import jakarta.validation.Payload;
import jakarta.validation.ReportAsSingleViolation;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraintvalidation.SupportedValidationTarget;
import jakarta.validation.constraintvalidation.ValidationTarget;
import static java.lang.annotation.ElementType.ANNOTATION_TYPE;
import static java.lang.annotation.ElementType.CONSTRUCTOR;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
//tag::include[]
/**
 * Purely composed constraint: a valid invoice amount is non-null
 * ({@code @NotNull}) and at least the configured minimum ({@code @Min}).
 * {@code @ReportAsSingleViolation} makes a failing validation report only
 * this annotation's message instead of the composing constraints' messages.
 */
@Min(value = 0)
@NotNull
@Target({ METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER })
@Retention(RUNTIME)
@Documented
@Constraint(validatedBy = {})
@SupportedValidationTarget(ValidationTarget.ANNOTATED_ELEMENT)
@ReportAsSingleViolation
public @interface ValidInvoiceAmount {
    String message() default "{org.hibernate.validator.referenceguide.chapter11.purelycomposed."
            + "ValidInvoiceAmount.message}";

    Class<?>[] groups() default {};

    Class<? extends Payload>[] payload() default {};

    // Forwarded to @Min.value: the minimum allowed amount (inclusive).
    @OverridesAttribute(constraint = Min.class, name = "value")
    long value();
}
//end::include[]
| 504 |
3,294 |
<reponame>BaruaSourav/docs<filename>samples/snippets/cpp/VS_Snippets_Misc/NVC_MFC_StateCollection/cpp/resource.h
//{{NO_DEPENDENCIES}}
// Microsoft Developer Studio generated include file.
// Used by StateCollection.rc
//
#define IDP_OLE_INIT_FAILED 100
#define IDS_MAIN_TOOLBAR 101
#define IDR_MAINFRAME 128
#define IDR_STATECTYPE 129
#define IDR_CONTEXT_MENU 130
#define IDR_POPUP_TOOLBAR 131
#define IDB_WORKSPACE 147
#define IDC_COMPANY_URL 1041
#define IDD_ABOUTBOX 999
#define ID_VIEW_CUSTOMIZE 32770
#define ID_VIEW_TOOLBARS 32771
#define ID_VIEW_WORKSPACE 32803
#define ID_VIEW_OUTPUT 32804
#define ID_TOOLS_ENTRY 32805
#define ID_USER_TOOL1 32806
#define ID_USER_TOOL2 32807
#define ID_USER_TOOL3 32808
#define ID_USER_TOOL4 32809
#define ID_USER_TOOL5 32810
#define ID_USER_TOOL6 32811
#define ID_USER_TOOL7 32812
#define ID_USER_TOOL8 32813
#define ID_USER_TOOL9 32814
#define ID_USER_TOOL10 32815
#define ID_SAVE_DEBUG_CONF 32822
#define ID_SAVE_REGULAR_CONF 32823
#define ID_LOAD_DEBUG_CONF 32824
#define ID_LOAD_REGULAR_CONF 32825
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_3D_CONTROLS 1
#define _APS_NEXT_RESOURCE_VALUE 153
#define _APS_NEXT_COMMAND_VALUE 32826
#define _APS_NEXT_CONTROL_VALUE 1042
#define _APS_NEXT_SYMED_VALUE 107
#endif
#endif
| 1,052 |
464 |
<reponame>sailxjx/DI-engine
from .competitive_rl_env import CompetitiveRlEnv
| 29 |
631 |
<reponame>Ar-2/activejdbc
package org.javalite.validation.length;
/**
* The attribute length must be between the given minimum and maximum length (inclusive).
*/
/**
 * Length validation option: the attribute length must be between the given
 * minimum and maximum length (inclusive).  Instances are immutable.
 */
public class Range implements LengthOption {

    // Inclusive bounds, fixed at construction time.
    private final int min;
    private final int max;

    private Range(int min, int max) {
        this.min = min;
        this.max = max;
    }

    /**
     * Creates a range with the given inclusive bounds.
     *
     * @param min minimum allowed length (inclusive)
     * @param max maximum allowed length (inclusive)
     * @return a new {@code Range}
     */
    public static Range of(int min, int max) {
        return new Range(min, max);
    }

    /**
     * @param fieldValue the value whose length is checked; must not be null
     * @return true when {@code min <= fieldValue.length() <= max}
     */
    @Override
    public boolean validate(String fieldValue) {
        int fieldValueLength = fieldValue.length();
        return fieldValueLength >= min && fieldValueLength <= max;
    }

    @Override
    public String getParametrizedMessage() {
        return "attribute should have a length between {0} and {1} (inclusive)";
    }

    @Override
    public Object[] getMessageParameters() {
        return new Object[]{min, max};
    }
}
| 316 |
3,702 |
<filename>gpcontrib/orafce/utility.c
/*
This code implements one part of functonality of
free available library PL/Vision. Please look www.quest.com
Original author: <NAME>, 1996 - 2002
PostgreSQL implementation author: <NAME>, 2006-2018
This module is under BSD Licence
History:
1.0. first public version 22. September 2006
*/
#include "postgres.h"
#include "utils/builtins.h"
#include "utils/numeric.h"
#include "string.h"
#include "stdlib.h"
#include "utils/pg_locale.h"
#include "mb/pg_wchar.h"
#include "lib/stringinfo.h"
#include "catalog/pg_type.h"
#include "libpq/pqformat.h"
#include "utils/array.h"
#include "utils/memutils.h"
#include "utils/lsyscache.h"
#include "access/tupmacs.h"
#include "orafce.h"
#include "builtins.h"
#include "utils/elog.h"
PG_FUNCTION_INFO_V1(dbms_utility_format_call_stack0);
PG_FUNCTION_INFO_V1(dbms_utility_format_call_stack1);
/*
 * Renders the PostgreSQL error-context stack (the PL/pgSQL call stack) as
 * text.  mode selects the output format:
 *   'o' - Oracle-like listing with a header, function OID printed in hex
 *   'p' - like 'o' but the OID printed in decimal, no header
 *   's' - comma-separated: oid,line,object name
 *
 * The context is obtained by starting (but never raising) an ERROR report:
 * errstart() opens the report, the registered error-context callbacks fill
 * in edata->context, then the data is copied and the error state flushed.
 */
static char*
dbms_utility_format_call_stack(char mode)
{
	MemoryContext oldcontext = CurrentMemoryContext;
	ErrorData *edata;
	ErrorContextCallback *econtext;
	StringInfo sinfo;
#if PG_VERSION_NUM >= 130000
	errstart(ERROR, TEXTDOMAIN);
#else
	errstart(ERROR, __FILE__, __LINE__, PG_FUNCNAME_MACRO, TEXTDOMAIN);
#endif
	MemoryContextSwitchTo(oldcontext);
	/* Invoke every registered callback so the context text is produced. */
	for (econtext = error_context_stack;
	     econtext != NULL;
	     econtext = econtext->previous)
		(*econtext->callback) (econtext->arg);
	edata = CopyErrorData();
	FlushErrorState();
	/* Parse edata->context into a more traditional call-stack layout. */
	sinfo = makeStringInfo();
	switch (mode)
	{
	case 'o':
		appendStringInfoString(sinfo, "----- PL/pgSQL Call Stack -----\n");
		appendStringInfoString(sinfo, " object line object\n");
		appendStringInfoString(sinfo, " handle number name\n");
		break;
	}
	if (edata->context)
	{
		char *start = edata->context;
		/* Process the context one '\n'-terminated line at a time. */
		while (*start)
		{
			char *oname = "anonymous object";
			char *line = "";
			char *eol = strchr(start, '\n');
			Oid fnoid = InvalidOid;
			/* first, solve multilines */
			if (eol)
				*eol = '\0';
			/* Only PL/pgSQL context lines are recognized. */
			if (strncmp(start, "PL/pgSQL function ",18) == 0)
			{
				char *p1, *p2;
				if ((p1 = strstr(start, "function \"")))
				{
					/* Older server format: name in double quotes. */
					p1 += strlen("function \"");
					if ((p2 = strchr(p1, '"')))
					{
						*p2++ = '\0';
						oname = p1;
						start = p2;
					}
				}
				else if ((p1 = strstr(start, "function ")))
				{
					/* Newer format: regprocedure-style signature;
					 * resolve it to a function OID. */
					p1 += strlen("function ");
					if ((p2 = strchr(p1, ')')))
					{
						char c = *++p2;
						*p2 = '\0';
						oname = pstrdup(p1);
						fnoid = DatumGetObjectId(DirectFunctionCall1(regprocedurein,
						                         CStringGetDatum(oname)));
						*p2 = c;
						start = p2;
					}
				}
				if ((p1 = strstr(start, "line ")))
				{
					size_t p2i;
					char c;
					p1 += strlen("line ");
					p2i = strspn(p1, "0123456789");
					/* temporarily terminate the digit run, then restore */
					c = p1[p2i];
					p1[p2i] = '\0';
					line = pstrdup(p1);
					p1[p2i] = c;
				}
			}
			switch (mode)
			{
			case 'o':
				appendStringInfo(sinfo, "%8x %5s function %s", (int)fnoid, line, oname);
				break;
			case 'p':
				appendStringInfo(sinfo, "%8d %5s function %s", (int)fnoid, line, oname);
				break;
			case 's':
				appendStringInfo(sinfo, "%d,%s,%s", (int)fnoid, line, oname);
				break;
			}
			if (eol)
			{
				start = eol + 1;
				appendStringInfoChar(sinfo, '\n');
			}
			else
				break;
		}
	}
	return sinfo->data;
}
/* dbms_utility.format_call_stack() - default Oracle-like format ('o'). */
Datum
dbms_utility_format_call_stack0(PG_FUNCTION_ARGS)
{
	PG_RETURN_TEXT_P(cstring_to_text(dbms_utility_format_call_stack('o')));
}
/*
 * dbms_utility.format_call_stack(mode) - mode must be exactly one character,
 * one of 'o', 'p' or 's'; anything else raises an invalid-parameter error.
 */
Datum
dbms_utility_format_call_stack1(PG_FUNCTION_ARGS)
{
	text *arg = PG_GETARG_TEXT_P(0);
	char mode;
	/* The argument must be a single character. */
	if ((1 != VARSIZE(arg) - VARHDRSZ))
		ereport(ERROR,
			(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
			 errmsg("invalid parameter"),
			 errdetail("Allowed only chars [ops].")));
	mode = *VARDATA(arg);
	/* Reject unsupported mode characters. */
	switch (mode)
	{
	case 'o':
	case 'p':
	case 's':
		break;
	default:
		ereport(ERROR,
			(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
			 errmsg("invalid parameter"),
			 errdetail("Allowed only chars [ops].")));
	}
	PG_RETURN_TEXT_P(cstring_to_text(dbms_utility_format_call_stack(mode)));
}
| 1,958 |
3,866 |
/**
 * Module descriptor for the kotlinx-serialization HOCON format bindings.
 * NOTE(review): the three dependencies are `requires transitive`, presumably
 * because their types appear in this module's exported API — confirm.
 */
module kotlinx.serialization.hocon {
    requires transitive kotlin.stdlib;
    requires transitive kotlinx.serialization.core;
    requires transitive typesafe.config;

    exports kotlinx.serialization.hocon;
}
| 74 |
1,405 |
package com.tencent.tmsecure.module.antitheft;
import com.tencent.tmsecure.common.ManagerCreator;
/**
 * Keep-alive class referencing the public antitheft API in one place.
 * NOTE(review): presumably exists so ProGuard does not strip these entry
 * points, and is never executed at runtime — confirm against the build
 * configuration before relying on this.
 */
public final class Proguard {
    /** Touches every AntitheftManager method so the symbols stay referenced. */
    public final void callAllMethods() {
        AntitheftManager antitheftManager = (AntitheftManager) ManagerCreator.getManager(AntitheftManager.class);
        antitheftManager.getHelperNumber();
        antitheftManager.getPassword();
        antitheftManager.handleSmsCommand(null, null, null);
        antitheftManager.setHelperNumber(null);
        antitheftManager.setPassword(null);
    }
}
| 191 |
5,378 |
<reponame>st--/jupytext
import pytest
import jupytext
from jupytext.compare import compare
from .utils import list_notebooks
@pytest.mark.parametrize("nb_file", list_notebooks("Rmd"))
def test_identity_write_read(nb_file, no_jupytext_version_number):
    """Reading an Rmd file and writing it back must round-trip byte-for-byte."""
    # Raw text of the reference notebook on disk.
    with open(nb_file) as fp:
        rmd = fp.read()
    nb = jupytext.reads(rmd, "Rmd")
    rmd2 = jupytext.writes(nb, "Rmd")
    # compare() raises with a readable diff on any mismatch.
    compare(rmd2, rmd)
def test_two_blank_lines_as_cell_separator():
    """Two consecutive blank lines in Rmd markdown start a new markdown cell."""
    # The two blank lines between "text" and "And a new cell" are the cell
    # separator under test; they had been lost from this literal (without
    # them jupytext produces a single cell and every assertion below fails).
    rmd = """Some markdown
text


And a new cell
"""
    nb = jupytext.reads(rmd, "Rmd")
    assert len(nb.cells) == 2
    assert nb.cells[0].cell_type == "markdown"
    assert nb.cells[1].cell_type == "markdown"
    assert nb.cells[0].source == "Some markdown\ntext"
    assert nb.cells[1].source == "And a new cell"
| 360 |
5,169 |
<filename>Specs/9/c/d/LBTestCode/1.2.1/LBTestCode.podspec.json
{
"name": "LBTestCode",
"version": "1.2.1",
"summary": "just do a demo that we can learn how to create a cocoapods personal",
"description": "这只是一个破洞仓库简单的demo,并没有实际的意思just do a demo that \t\t\t\twe can learn how,just do a demo that we can learn how",
"homepage": "https://github.com/LeonLeeboy/LBTestCode",
"license": "MIT",
"authors": {
"Leon": "<EMAIL>"
},
"platforms": {
"ios": "8.0"
},
"source": {
"git": "https://github.com/LeonLeeboy/LBTestCode.git",
"tag": "1.2.1"
},
"source_files": "LBTestCode/LBTestCode/source/**/Headers/*",
"vendored_frameworks": "LBTestCode/LBTestCode/source/LBFrameWorkDemo.framework",
"frameworks": "UIKit"
}
| 338 |
648 |
<reponame>mehrdad-shokri/fluxcapacitor
#ifndef _UEVENT_H
#define _UEVENT_H

struct uevent;

/* Invoked when `sd` becomes ready for the events in `mask` (UEVENT_READ /
 * UEVENT_WRITE); `userdata` is the pointer passed to uevent_yield(). */
typedef int (*uevent_callback_t)(struct uevent *uevent, int sd, int mask, void *userdata);

/* State of a simple select()-based event loop. */
struct uevent {
	int max_slots;   /* presumably the slot-table capacity — confirm in .c */
	int curr_slot;
	int used_slots;  /* number of currently registered descriptors */
	/* Per-descriptor callback registry, indexed by file descriptor. */
	struct {
		uevent_callback_t callback;
		void *userdata;
	} fdmap[__FD_SETSIZE];
	fd_set readfds;   /* descriptors watched for readability */
	fd_set writefds;  /* descriptors watched for writability */
	int max_fd;       /* highest registered fd, as needed by select() */
};

/* Event-mask bits for uevent_yield() and the callback's `mask` argument. */
enum {
	UEVENT_WRITE = 1 << 0,
	UEVENT_READ = 1 << 1
};

/* Initializes the caller-allocated structure; returns the same pointer. */
struct uevent *uevent_new(struct uevent *uevent);
/* Runs the event loop. */
int uevent_loop(struct uevent *uevent);
/* Performs a single select() iteration with an optional timeout. */
int uevent_select(struct uevent *uevent, struct timeval *timeout);
/* Registers `callback` for `fd` on the events in `mask`. */
int uevent_yield(struct uevent *uevent, int fd, int mask, uevent_callback_t callback, void *userdata);
/* Unregisters `fd` from the loop. */
void uevent_clear(struct uevent *uevent, int fd);

#endif // _UEVENT_H
| 326 |
6,224 |
<reponame>maxvankessel/zephyr
/**
* Copyright (c) 2018 Linaro
* Copyright (c) 2020 ATL Electronics
*
* SPDX-License-Identifier: Apache-2.0
*/
#define DT_DRV_COMPAT inventek_eswifi_uart
#include "eswifi_log.h"
LOG_MODULE_DECLARE(LOG_MODULE_NAME);
#include <zephyr.h>
#include <kernel.h>
#include <device.h>
#include <string.h>
#include <errno.h>
#include <sys/ring_buffer.h>
#include <drivers/gpio.h>
#include <drivers/uart.h>
#include "eswifi.h"
#define ESWIFI_RING_BUF_SIZE 2048
enum eswifi_uart_fsm {
ESWIFI_UART_FSM_WAIT_CR,
ESWIFI_UART_FSM_WAIT_LF,
ESWIFI_UART_FSM_WAIT_MARK,
ESWIFI_UART_FSM_WAIT_SPACE,
ESWIFI_UART_FSM_END,
};
struct eswifi_uart_data {
const struct device *dev;
enum eswifi_uart_fsm fsm;
size_t rx_count;
size_t rx_buf_size;
char *rx_buf;
/* RX Ring Buf */
uint8_t iface_rb_buf[ESWIFI_RING_BUF_SIZE];
struct ring_buf rx_rb;
};
static struct eswifi_uart_data eswifi_uart0; /* Static instance */
/* Drains and discards any pending bytes from the UART RX FIFO. */
static void eswifi_iface_uart_flush(struct eswifi_uart_data *uart)
{
	uint8_t c;

	while (uart_fifo_read(uart->dev, &c, 1) > 0) {
		continue;
	}
}
/*
 * UART RX interrupt handler: moves bytes from the hardware FIFO into the
 * receive ring buffer.  Uses the ring buffer's claim/finish API so data is
 * read directly into ring-buffer storage without an intermediate copy.
 */
static void eswifi_iface_uart_isr(const struct device *uart_dev,
				  void *user_data)
{
	struct eswifi_uart_data *uart = &eswifi_uart0; /* Static instance */
	int rx = 0;
	uint8_t *dst;
	uint32_t partial_size = 0;
	uint32_t total_size = 0;

	ARG_UNUSED(user_data);

	while (uart_irq_update(uart->dev) &&
	       uart_irq_rx_ready(uart->dev)) {
		if (!partial_size) {
			/* Claim a contiguous chunk of ring-buffer space. */
			partial_size = ring_buf_put_claim(&uart->rx_rb, &dst,
							  UINT32_MAX);
		}
		if (!partial_size) {
			/* Ring buffer full: drop the pending FIFO bytes. */
			LOG_ERR("Rx buffer doesn't have enough space");
			eswifi_iface_uart_flush(uart);
			break;
		}
		rx = uart_fifo_read(uart->dev, dst, partial_size);
		if (rx <= 0) {
			continue;
		}
		dst += rx;
		total_size += rx;
		partial_size -= rx;
	}

	/* Commit everything read during this invocation. */
	ring_buf_put_finish(&uart->rx_rb, total_size);
}
/* Maps an eswifi_uart_fsm state to a single-letter tag for log output;
 * unknown states map to '?'. */
static char get_fsm_char(int fsm)
{
	char tag = '?';

	if (fsm == ESWIFI_UART_FSM_WAIT_CR) {
		tag = 'C';
	} else if (fsm == ESWIFI_UART_FSM_WAIT_LF) {
		tag = 'L';
	} else if (fsm == ESWIFI_UART_FSM_WAIT_MARK) {
		tag = 'M';
	} else if (fsm == ESWIFI_UART_FSM_WAIT_SPACE) {
		tag = 'S';
	} else if (fsm == ESWIFI_UART_FSM_END) {
		tag = 'E';
	}

	return tag;
}
/*
 * Drains the RX ring buffer, copying bytes into uart->rx_buf (when a
 * response buffer was supplied) and advancing the prompt-matching state
 * machine.  The state machine looks for the byte sequence "\r\n> "; once it
 * is seen, uart->fsm is ESWIFI_UART_FSM_END and the response is complete.
 *
 * Returns 0 on success, -ENOMEM when the response buffer has filled up.
 */
static int eswifi_uart_get_resp(struct eswifi_uart_data *uart)
{
	uint8_t c;

	while (ring_buf_get(&uart->rx_rb, &c, 1) > 0) {
		LOG_DBG("FSM: %c, RX: 0x%02x : %c",
			get_fsm_char(uart->fsm), c, c);

		/* Capture the byte into the caller's response buffer. */
		if (uart->rx_buf_size > 0) {
			uart->rx_buf[uart->rx_count++] = c;

			if (uart->rx_count == uart->rx_buf_size) {
				return -ENOMEM;
			}
		}

		/* Advance the "\r\n> " prompt matcher; a '\r' anywhere
		 * restarts the match at the WAIT_LF stage. */
		switch (uart->fsm) {
		case ESWIFI_UART_FSM_WAIT_CR:
			if (c == '\r') {
				uart->fsm = ESWIFI_UART_FSM_WAIT_LF;
			}
			break;
		case ESWIFI_UART_FSM_WAIT_LF:
			if (c == '\n') {
				uart->fsm = ESWIFI_UART_FSM_WAIT_MARK;
			} else if (c != '\r') {
				uart->fsm = ESWIFI_UART_FSM_WAIT_CR;
			}
			break;
		case ESWIFI_UART_FSM_WAIT_MARK:
			if (c == '>') {
				uart->fsm = ESWIFI_UART_FSM_WAIT_SPACE;
			} else if (c == '\r') {
				uart->fsm = ESWIFI_UART_FSM_WAIT_LF;
			} else {
				uart->fsm = ESWIFI_UART_FSM_WAIT_CR;
			}
			break;
		case ESWIFI_UART_FSM_WAIT_SPACE:
			if (c == ' ') {
				uart->fsm = ESWIFI_UART_FSM_END;
			} else if (c == '\r') {
				uart->fsm = ESWIFI_UART_FSM_WAIT_LF;
			} else {
				uart->fsm = ESWIFI_UART_FSM_WAIT_CR;
			}
			break;
		default:
			break;
		}
	}

	return 0;
}
/*
 * Polls the RX stream until the command prompt has been fully received or
 * ~1 minute has elapsed (60000 polls, 1 ms sleep each).
 *
 * Returns the number of captured response bytes on success, or a negative
 * errno (-ETIMEDOUT on timeout, or the error from eswifi_uart_get_resp()).
 */
static int eswifi_uart_wait_prompt(struct eswifi_uart_data *uart)
{
	unsigned int max_retries = 60 * 1000; /* 1 minute */
	int err;

	while (--max_retries) {
		err = eswifi_uart_get_resp(uart);
		if (err) {
			LOG_DBG("Err: 0x%08x - %d", err, err);
			return err;
		}

		if (uart->fsm == ESWIFI_UART_FSM_END) {
			LOG_DBG("Success!");
			return uart->rx_count;
		}

		/* allow other threads to be scheduled */
		k_sleep(K_MSEC(1));
	}

	LOG_DBG("Timeout");
	return -ETIMEDOUT;
}
/*
 * Sends cmd (clen bytes) over the UART and synchronously waits for the
 * module's prompt, capturing up to rlen response bytes into rsp.
 *
 * Returns the number of captured bytes, or a negative errno.
 */
static int eswifi_uart_request(struct eswifi_dev *eswifi, char *cmd,
			       size_t clen, char *rsp, size_t rlen)
{
	struct eswifi_uart_data *uart = eswifi->bus_data;
	int count;
	int err;

	LOG_DBG("cmd=%p (%u byte), rsp=%p (%u byte)", cmd, clen, rsp, rlen);

	/* Send CMD byte by byte using polled TX. */
	for (count = 0; count < clen; count++) {
		uart_poll_out(uart->dev, cmd[count]);
	}

	/* Reset the prompt state machine and the response bookkeeping
	 * before waiting for the reply. */
	uart->fsm = ESWIFI_UART_FSM_WAIT_CR;
	uart->rx_count = 0;
	uart->rx_buf = rsp;
	uart->rx_buf_size = rlen;

	err = eswifi_uart_wait_prompt(uart);

	if (err > 0) {
		LOG_HEXDUMP_DBG(uart->rx_buf, uart->rx_count, "Stream");
	}

	return err;
}
/*
 * Binds the UART device, installs the RX interrupt handler and prepares
 * the receive ring buffer.  Called through eswifi_bus_ops_uart.init.
 *
 * Returns 0 on success, -ENODEV when the UART device cannot be resolved.
 */
int eswifi_uart_init(struct eswifi_dev *eswifi)
{
	struct eswifi_uart_data *uart = &eswifi_uart0; /* Static instance */

	uart->dev = device_get_binding(DT_INST_BUS_LABEL(0));
	if (!uart->dev) {
		LOG_ERR("Failed to initialize uart driver");
		return -ENODEV;
	}

	eswifi->bus_data = uart;

	/* Quiesce IRQs and drop stale bytes before installing the ISR. */
	uart_irq_rx_disable(uart->dev);
	uart_irq_tx_disable(uart->dev);

	eswifi_iface_uart_flush(uart);

	uart_irq_callback_set(uart->dev, eswifi_iface_uart_isr);
	uart_irq_rx_enable(uart->dev);

	ring_buf_init(&uart->rx_rb, sizeof(uart->iface_rb_buf),
		      uart->iface_rb_buf);

	LOG_DBG("success");
	return 0;
}
/* eSWiFi bus-operation table for the UART transport. */
static struct eswifi_bus_ops eswifi_bus_ops_uart = {
	.init = eswifi_uart_init,
	.request = eswifi_uart_request,
};

/* Returns the UART implementation of the eSWiFi bus interface. */
struct eswifi_bus_ops *eswifi_get_bus(void)
{
	return &eswifi_bus_ops_uart;
}
| 2,599 |
2,151 |
<reponame>zipated/src
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/sync/sessions/sync_sessions_web_contents_router.h"
#include "chrome/browser/sync/sessions/sync_sessions_web_contents_router_factory.h"
#include "chrome/browser/ui/sync/tab_contents_synced_tab_delegate.h"
#include "chrome/test/base/testing_profile.h"
#include "content/public/test/test_browser_thread_bundle.h"
#include "content/public/test/web_contents_tester.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace sync_sessions {
// Records whether the start-sync flare callback was invoked.
class StartSyncFlareMock {
 public:
  StartSyncFlareMock() {}
  ~StartSyncFlareMock() {}

  // Matches the signature expected by InjectStartSyncFlare; the concrete
  // model type is irrelevant to these tests.
  void StartSyncFlare(syncer::ModelType type) { was_run_ = true; }

  bool was_run() { return was_run_; }

 private:
  bool was_run_ = false;
};
// Harness that wires a SyncSessionsWebContentsRouter to a testing profile
// and provides a throwaway WebContents to notify it with.
class SyncSessionsWebContentsRouterTest : public testing::Test {
 protected:
  ~SyncSessionsWebContentsRouterTest() override {}

  void SetUp() override {
    router_ =
        SyncSessionsWebContentsRouterFactory::GetInstance()->GetForProfile(
            &profile_);
    test_contents_ =
        content::WebContentsTester::CreateTestWebContents(&profile_, nullptr);
  }

  SyncSessionsWebContentsRouter* router() { return router_; }
  content::WebContents* test_contents() { return test_contents_.get(); }

 private:
  content::TestBrowserThreadBundle thread_bundle_;
  TestingProfile profile_;
  SyncSessionsWebContentsRouter* router_;  // not owned; comes from the factory
  std::unique_ptr<content::WebContents> test_contents_;
};
// The flare must not fire for tabs without a synced-tab delegate, nor for
// notifications that are not load-completed events.
TEST_F(SyncSessionsWebContentsRouterTest, FlareNotRun) {
  StartSyncFlareMock mock;
  router()->InjectStartSyncFlare(
      base::Bind(&StartSyncFlareMock::StartSyncFlare, base::Unretained(&mock)));

  // There's no delegate for the tab, so the flare shouldn't run.
  router()->NotifyTabModified(test_contents(), false);
  EXPECT_FALSE(mock.was_run());

  TabContentsSyncedTabDelegate::CreateForWebContents(test_contents());

  // There's a delegate for the tab, but it's not a load completed event, so the
  // flare still shouldn't run.
  router()->NotifyTabModified(test_contents(), false);
  EXPECT_FALSE(mock.was_run());
}
// Make sure we don't crash when there's not a flare: the router must
// tolerate NotifyTabModified() without an injected callback.
TEST_F(SyncSessionsWebContentsRouterTest, FlareNotSet) {
  TabContentsSyncedTabDelegate::CreateForWebContents(test_contents());
  router()->NotifyTabModified(test_contents(), false);
}
// Disabled on android due to complexity of creating a full TabAndroid object
// for a unit test. The logic being tested here isn't directly affected by
// platform-specific peculiarities.
#if !defined(OS_ANDROID)
// Positive case: with a delegate present and a load-completed notification,
// the injected flare must run.
TEST_F(SyncSessionsWebContentsRouterTest, FlareRunsForLoadCompleted) {
  TabContentsSyncedTabDelegate::CreateForWebContents(test_contents());

  StartSyncFlareMock mock;
  router()->InjectStartSyncFlare(
      base::Bind(&StartSyncFlareMock::StartSyncFlare, base::Unretained(&mock)));

  // There's a delegate for the tab, and it's a load completed event, so the
  // flare should run.
  router()->NotifyTabModified(test_contents(), true);
  EXPECT_TRUE(mock.was_run());
}
#endif  // !defined(OS_ANDROID)
} // namespace sync_sessions
| 1,064 |
376 |
<reponame>cjsmeele/Kvasir<filename>Lib/Chip/LPC11Cxx_v9.hpp
#pragma once
#include <Chip/CM0/NXP/LPC11Cxx_v9/I2C.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/WWDT.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/UART.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/CT16B0.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/CT16B1.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/CT32B0.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/CT32B1.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/ADC.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/PMU.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/FLASHCTRL.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/SPI0.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/IOCON.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/SYSCON.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/C_CAN.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/SPI1.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/GPIO0.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/GPIO1.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/GPIO2.hpp>
#include <Chip/CM0/NXP/LPC11Cxx_v9/GPIO3.hpp>
| 572 |
318 |
{
"variants": {
"type=bottom": {
"model": "techreborn:block/storage/aluminum_storage_block_slab"
},
"type=double": {
"model": "techreborn:block/storage/aluminum_storage_block"
},
"type=top": {
"model": "techreborn:block/storage/aluminum_storage_block_slab_top"
}
}
}
| 120 |
4,879 |
<reponame>smartyw/organicmaps
#pragma once
#include "topography_generator/utils/contours.hpp"
#include "geometry/latlon.hpp"
#include <algorithm>
#include <deque>
#include <list>
#include <vector>
#include <unordered_map>
namespace topography_generator
{
// Accumulates isoline segments per level and assembles them into contours
// (polylines).  Segments arrive via AddSegment() and are joined by matching
// endpoints; BeginLine()/EndLine() bracket one batch of segments, with
// EndLine(true) finalizing all still-active contours.
class ContoursBuilder
{
public:
  ContoursBuilder(size_t levelsCount, std::string const & debugId);

  // Adds one isoline segment for the 0-based level index |levelInd|.
  void AddSegment(size_t levelInd, ms::LatLon const & beginPos, ms::LatLon const & endPos);

  void BeginLine();
  void EndLine(bool finalLine);

  // Converts the finalized lat/lon contours to mercator coordinates and
  // groups them by level value (minValue + levelIndex * valueStep).
  template <typename ValueType>
  void GetContours(ValueType minValue, ValueType valueStep,
                   std::unordered_map<ValueType, std::vector<Contour>> & contours)
  {
    contours.clear();
    for (size_t i = 0; i < m_finalizedContours.size(); ++i)
    {
      auto const levelValue = minValue + i * valueStep;
      auto const & contoursList = m_finalizedContours[i];
      for (auto const & contour : contoursList)
      {
        Contour contourMerc;
        contourMerc.reserve(contour.size());
        std::transform(contour.begin(), contour.end(), std::back_inserter(contourMerc),
                       [](ms::LatLon const & pt){ return mercator::FromLatLon(pt); });
        contours[levelValue].emplace_back(std::move(contourMerc));
      }
    }
  }

private:
  // A contour under construction.  NOTE(review): deque presumably allows
  // cheap extension at both ends when a segment attaches to the contour's
  // start point — confirm in the .cpp.
  using ContourRaw = std::deque<ms::LatLon>;
  using ContoursList = std::list<ContourRaw>;

  struct ActiveContour
  {
    explicit ActiveContour(ContourRaw && isoline)
      : m_countour(std::move(isoline))
    {}
    ContourRaw m_countour;
    bool m_active = true;
  };
  using ActiveContoursList = std::list<ActiveContour>;
  using ActiveContourIter = ActiveContoursList::iterator;

  // Locate an active contour whose start/end point equals |pos|.
  ActiveContourIter FindContourWithStartPoint(size_t levelInd, ms::LatLon const & pos);
  ActiveContourIter FindContourWithEndPoint(size_t levelInd, ms::LatLon const & pos);

  size_t const m_levelsCount;
  std::vector<ContoursList> m_finalizedContours;     // per level
  std::vector<ActiveContoursList> m_activeContours;  // per level, still growing
  std::string m_debugId;
};
} // namespace topography_generator
| 779 |
1,034 |
package com.android.reverse.smali;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import javax.annotation.Nonnull;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenSource;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.CommonTreeNodeStream;
import org.jf.dexlib2.writer.builder.DexBuilder;
import org.jf.dexlib2.writer.io.FileDataStore;
import org.jf.smali.LexerErrorInterface;
import org.jf.smali.smaliFlexLexer;
import org.jf.smali.smaliParser;
import org.jf.smali.smaliTreeWalker;
import com.android.reverse.collecter.ModuleContext;
import com.android.reverse.util.Logger;
import com.google.common.collect.Lists;
public class DexFileBuilder {
public static boolean buildDexFile(String smaliPath,String dexFileName) {
int jobs = 8;
boolean allowOdex = false;
boolean verboseErrors = false;
boolean printTokens = false;
int apiLevel = ModuleContext.getInstance().getApiLevel();
try {
LinkedHashSet<File> filesToProcess = new LinkedHashSet<File>();
File argFile = new File(smaliPath);
if (!argFile.exists()) {
throw new RuntimeException("Cannot find file or directory \""
+ smaliPath + "\"");
}
if (argFile.isDirectory()) {
getSmaliFilesInDir(argFile, filesToProcess);
}
boolean errors = false;
final DexBuilder dexBuilder = DexBuilder.makeDexBuilder(apiLevel);
ExecutorService executor = Executors.newFixedThreadPool(jobs);
List<Future<Boolean>> tasks = Lists.newArrayList();
final boolean finalVerboseErrors = verboseErrors;
final boolean finalPrintTokens = printTokens;
final boolean finalAllowOdex = allowOdex;
final int finalApiLevel = apiLevel;
for (final File file : filesToProcess) {
tasks.add(executor.submit(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
return assembleSmaliFile(file, dexBuilder,
finalVerboseErrors, finalPrintTokens,
finalAllowOdex, finalApiLevel);
}
}));
}
for (Future<Boolean> task : tasks) {
while (true) {
try {
if (!task.get()) {
errors = true;
}
} catch (InterruptedException ex) {
continue;
}
break;
}
}
executor.shutdown();
if (errors) {
Logger.log("build the dexfile error0");
return false;
}
dexBuilder.writeTo(new FileDataStore(new File(dexFileName)));
Logger.log("build the dexfile ok");
return true;
} catch (RuntimeException ex) {
Logger.log("build the dexfile error1");
return false;
} catch (Throwable ex) {
Logger.log("build the dexfile error2");
return false;
}
}
private static void getSmaliFilesInDir(@Nonnull File dir,
@Nonnull Set<File> smaliFiles) {
File[] files = dir.listFiles();
if (files != null) {
for (File file : files) {
if (file.isDirectory()) {
getSmaliFilesInDir(file, smaliFiles);
} else if (file.getName().endsWith(".smali")) {
smaliFiles.add(file);
}
}
}
}
/**
 * Lexes, parses and tree-walks a single smali file, emitting its classes
 * into the shared {@code dexBuilder}.
 *
 * @param smaliFile     the .smali source file to assemble
 * @param dexBuilder    shared builder receiving the assembled classes
 * @param verboseErrors forwarded to the parser/tree walker
 * @param printTokens   when true, dumps every non-hidden token to stdout
 * @param allowOdex     whether odex instructions are accepted
 * @param apiLevel      target Android API level
 * @return true when the file assembled without lexer/parser/walker errors
 */
private static boolean assembleSmaliFile(File smaliFile,
        DexBuilder dexBuilder, boolean verboseErrors, boolean printTokens,
        boolean allowOdex, int apiLevel) throws Exception {
    FileInputStream fis = new FileInputStream(smaliFile.getAbsolutePath());
    InputStreamReader reader = new InputStreamReader(fis, "UTF-8");
    try {
        LexerErrorInterface lexer = new smaliFlexLexer(reader);
        ((smaliFlexLexer) lexer).setSourceFile(smaliFile);
        CommonTokenStream tokens = new CommonTokenStream((TokenSource) lexer);
        if (printTokens) {
            tokens.getTokens();
            for (int i = 0; i < tokens.size(); i++) {
                Token token = tokens.get(i);
                if (token.getChannel() == smaliParser.HIDDEN) {
                    continue;
                }
                System.out.println(smaliParser.tokenNames[token.getType()]
                        + ": " + token.getText());
            }
        }
        smaliParser parser = new smaliParser(tokens);
        parser.setVerboseErrors(verboseErrors);
        parser.setAllowOdex(allowOdex);
        parser.setApiLevel(apiLevel);
        smaliParser.smali_file_return result = parser.smali_file();
        if (parser.getNumberOfSyntaxErrors() > 0
                || lexer.getNumberOfSyntaxErrors() > 0) {
            return false;
        }
        CommonTree t = result.getTree();
        CommonTreeNodeStream treeStream = new CommonTreeNodeStream(t);
        treeStream.setTokenStream(tokens);
        smaliTreeWalker dexGen = new smaliTreeWalker(treeStream);
        dexGen.setVerboseErrors(verboseErrors);
        dexGen.setDexBuilder(dexBuilder);
        dexGen.smali_file();
        return dexGen.getNumberOfSyntaxErrors() == 0;
    } finally {
        // BUGFIX: the reader (and its underlying stream) was never closed,
        // leaking one file handle per assembled smali file.
        reader.close();
    }
}
}
| 1,845 |
10,876 |
<reponame>eerimoq/vcpkg<gh_stars>1000+
{
"name": "argagg",
"version-string": "0.4.6",
"port-version": 1,
"description": "A simple C++11 command line argument parser"
}
| 72 |
892 |
{
"schema_version": "1.2.0",
"id": "GHSA-555r-hp4v-qj62",
"modified": "2021-12-28T00:01:11Z",
"published": "2021-12-22T00:00:26Z",
"aliases": [
"CVE-2021-44423"
],
"details": "An out-of-bounds read vulnerability exists when reading a BMP file using Open Design Alliance (ODA) Drawings Explorer before 2022.12. The specific issue exists after loading BMP files. Unchecked input data from a crafted BMP file leads to an out-of-bounds read. An attacker can leverage this vulnerability to execute code in the context of the current process.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-44423"
},
{
"type": "WEB",
"url": "https://www.opendesign.com/security-advisories"
}
],
"database_specific": {
"cwe_ids": [
"CWE-125"
],
"severity": "HIGH",
"github_reviewed": false
}
}
| 385 |
575 |
<reponame>jason-fox/Fast-RTPS
// Copyright 2020 Proyectos y Sistemas de Mantenimiento SL (eProsima).
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <gtest/gtest.h>
#include <rtps/history/TopicPayloadPool.hpp>
#include <tuple>
using namespace eprosima::fastrtps::rtps;
using namespace ::testing;
using namespace std;
constexpr uint32_t max_num_reserves = 1000u;
// Parameterized fixture: every test runs once per combination of
// (initial pool size, max pool size, payload size, memory policy).
class TopicPayloadPoolTests : public TestWithParam<tuple<uint32_t, uint32_t, uint32_t, MemoryManagementPolicy_t>>
{
protected:

    TopicPayloadPoolTests()
        : pool(nullptr)
        , test_input_pool_size(10)
        , test_input_max_pool_size(0)
        , payload_size(128)
        , memory_policy(MemoryManagementPolicy_t::PREALLOCATED_MEMORY_MODE)
        , expected_pool_size_for_writers(0)
        , expected_pool_size_for_readers(0)
        , expected_finite_max_pool_size(0)
        , num_of_infinite_histories(0)
    {
    }

    virtual ~TopicPayloadPoolTests()
    {
    }

    virtual void SetUp()
    {
        // Unpack the parameters for this run of the parameterized suite.
        test_input_pool_size = get<0>(GetParam());
        test_input_max_pool_size = get<1>(GetParam());
        payload_size = get<2>(GetParam());
        memory_policy = get<3>(GetParam());

        std::cout << "[init:" << test_input_pool_size
                  << " max:" << test_input_max_pool_size
                  << " size:" << payload_size
                  << " policy:" << memory_policy
                  << "]" << std::endl;

        PoolConfig config{ memory_policy, payload_size, test_input_pool_size, test_input_max_pool_size };
        pool = TopicPayloadPool::get(config);
    }

    virtual void TearDown()
    {
        pool.reset();
    }

    /**
     * Checks the sizes of the pool when no payload has been ever reserved.
     * In order to perform a check on the maximum size of the pool, this method
     * reserves all the possible payloads, thus affecting to the final size of the pool.
     * So to this, calling this method twice may fail, depending on the configuration.
     */
    void check_initial_sizes()
    {
        // Compute the expected sizes.
        // A single unlimited (max == 0) history makes the whole pool unbounded.
        uint32_t expected_max_pool_size =
                num_of_infinite_histories == 0 ? expected_finite_max_pool_size : 0;
        uint32_t expected_pool_size = 0;
        switch (memory_policy)
        {
            case MemoryManagementPolicy_t::PREALLOCATED_MEMORY_MODE:
            case MemoryManagementPolicy_t::PREALLOCATED_WITH_REALLOC_MEMORY_MODE:
                // Preallocating policies allocate up-front for every history.
                expected_pool_size = expected_pool_size_for_writers + expected_pool_size_for_readers;
                break;
            case MemoryManagementPolicy_t::DYNAMIC_RESERVE_MEMORY_MODE:
            case MemoryManagementPolicy_t::DYNAMIC_REUSABLE_MEMORY_MODE:
                // Dynamic policies allocate lazily, so nothing exists yet.
                expected_pool_size = 0;
                break;
        }

        check_sizes(expected_pool_size, expected_max_pool_size);
    }

    /**
     * Checks the sizes of the pool after the maximum size of the pool has been
     * reached and all payloads have been released later.
     * Calling this method twice should not fail.
     */
    void check_final_sizes()
    {
        // Compute the expected sizes
        uint32_t expected_max_pool_size =
                num_of_infinite_histories == 0 ? expected_finite_max_pool_size : 0;
        uint32_t expected_pool_size = 0;
        switch (memory_policy)
        {
            // These policies do not free released payloads
            case MemoryManagementPolicy_t::PREALLOCATED_MEMORY_MODE:
            case MemoryManagementPolicy_t::PREALLOCATED_WITH_REALLOC_MEMORY_MODE:
            case MemoryManagementPolicy_t::DYNAMIC_REUSABLE_MEMORY_MODE:
                expected_pool_size = expected_max_pool_size;
                if (expected_max_pool_size == 0)
                {
                    // Unbounded pool: check_sizes reserved max_num_reserves + 1.
                    expected_pool_size = max_num_reserves + 1;
                }
                break;
            // This policy frees released payloads
            case MemoryManagementPolicy_t::DYNAMIC_RESERVE_MEMORY_MODE:
                expected_pool_size = 0;
                break;
        }

        check_sizes(expected_pool_size, expected_max_pool_size);
    }

    void check_sizes(
            uint32_t expected_pool_size,
            uint32_t expected_max_pool_size)
    {
        // Check the reserved sizes
        ASSERT_EQ(pool->payload_pool_allocated_size(), expected_pool_size);

        // Check the maximum sizes
        // As there is no public interface exposing this data,
        // we reserve caches until the pool reaches its maximum
        std::vector<CacheChange_t*> cache_changes;
        uint32_t num_reserves = expected_max_pool_size;
        if (expected_max_pool_size == 0)
        {
            // Unbounded pool: just reserve "a lot" of payloads.
            num_reserves = max_num_reserves;
        }

        //Reserve to the expected maximum
        for (uint32_t i = 0; i < num_reserves; i++)
        {
            uint32_t data_size = i * 16 + 1u;
            CacheChange_t* ch = new CacheChange_t();
            cache_changes.push_back(ch);
            ASSERT_TRUE(pool->get_payload(data_size, *ch));
            ASSERT_NE(ch->serializedPayload.data, nullptr);
            // Each policy has its own guarantee on the resulting payload size.
            switch (memory_policy)
            {
                case MemoryManagementPolicy_t::PREALLOCATED_MEMORY_MODE:
                    ASSERT_EQ(ch->serializedPayload.max_size, payload_size);
                    break;
                case MemoryManagementPolicy_t::PREALLOCATED_WITH_REALLOC_MEMORY_MODE:
                    ASSERT_GE(ch->serializedPayload.max_size, max(payload_size, data_size));
                    break;
                case MemoryManagementPolicy_t::DYNAMIC_RESERVE_MEMORY_MODE:
                    ASSERT_EQ(ch->serializedPayload.max_size, data_size);
                    break;
                case MemoryManagementPolicy_t::DYNAMIC_REUSABLE_MEMORY_MODE:
                    ASSERT_GE(ch->serializedPayload.max_size, data_size);
                    break;
            }
        }

        //Try to reserve one more
        if (expected_max_pool_size == 0)
        {
            // Unbounded: one more reserve must succeed.
            CacheChange_t* ch = new CacheChange_t();
            cache_changes.push_back(ch);
            ASSERT_TRUE(pool->get_payload(payload_size, *ch));
        }
        else
        {
            // Bounded: the pool must reject a reserve beyond its maximum.
            CacheChange_t* ch = new CacheChange_t();
            ASSERT_FALSE(pool->get_payload(payload_size, *ch));
            delete ch;
        }

        // Get the same payloads for another cache change
        for (uint32_t i = 0; i < num_reserves; i++)
        {
            CacheChange_t* ch = new CacheChange_t();
            cache_changes.push_back(ch);
            ch->writerGUID = GUID_t(GuidPrefix_t(), 1);
            ch->sequenceNumber = SequenceNumber_t(0, i);
            IPayloadPool* owner = cache_changes[i]->payload_owner();
            // Sharing a payload must not allocate: same data pointer, same owner.
            ASSERT_TRUE(pool->get_payload(cache_changes[i]->serializedPayload, owner, *ch));
            ASSERT_NE(ch->serializedPayload.data, nullptr);
            ASSERT_EQ(ch->serializedPayload.data, cache_changes[i]->serializedPayload.data);
            ASSERT_EQ(ch->payload_owner(), owner);
        }

        for (CacheChange_t* ch : cache_changes)
        {
            ASSERT_TRUE(pool->release_payload(*ch));
            delete ch;
        }
        cache_changes.clear();
    }

    void do_reserve_history(
            uint32_t new_reserved_size,
            uint32_t new_reserved_max_size,
            bool is_reader)
    {
        // Do the reserve
        PoolConfig config{ memory_policy, 0, new_reserved_size, new_reserved_max_size };
        ASSERT_TRUE(pool->reserve_history(config, is_reader));

        // Update the expected pool sizes
        if (new_reserved_max_size == 0)
        {
            ++num_of_infinite_histories;
        }
        else
        {
            // A history never shrinks below its initial size.
            expected_finite_max_pool_size +=
                    (new_reserved_max_size < new_reserved_size ? new_reserved_size : new_reserved_max_size);
        }
        if (is_reader)
        {
            expected_pool_size_for_readers += new_reserved_size;
        }
        else
        {
            expected_pool_size_for_writers += new_reserved_size;
        }
    }

    void do_release_history(
            uint32_t new_released_size,
            uint32_t new_released_max_size,
            bool is_reader)
    {
        // Do the release
        PoolConfig config{ memory_policy, 0, new_released_size, new_released_max_size };
        ASSERT_TRUE(pool->release_history(config, is_reader));

        // Update the expected pool sizes (mirror image of do_reserve_history)
        if (new_released_max_size == 0)
        {
            --num_of_infinite_histories;
        }
        else
        {
            expected_finite_max_pool_size -=
                    (new_released_max_size < new_released_size ? new_released_size : new_released_max_size);
        }
        if (is_reader)
        {
            expected_pool_size_for_readers -= new_released_size;
        }
        else
        {
            expected_pool_size_for_writers -= new_released_size;
        }
    }

    /**
     * - Reserves a reader history with the limits configured on the fixture
     * - Reserves a writer history with the limits configured on the fixture
     * - Reserves the testing history with the configuration provided on the arguments
     * - Checks that the pool size and limits are correct
     * - Releases the testing history
     * - Checks that the pool size and limits are correct
     */
    void do_history_test (
            uint32_t size,
            uint32_t max_size,
            bool is_reader,
            uint32_t num_times = 2u)
    {
        // Repeating the whole cycle checks that releases leave no residue.
        for (uint32_t i = 0; i < num_times; i++)
        {
            // First history reserved for a reader.
            do_reserve_history(test_input_pool_size, test_input_max_pool_size, true);

            // Another history reserved for a writer.
            do_reserve_history(test_input_pool_size, test_input_max_pool_size, false);

            // Another history reserved requested limits.
            do_reserve_history(size, max_size, is_reader);

            check_initial_sizes();

            // Release the last history
            do_release_history(size, max_size, is_reader);

            check_final_sizes();

            // Release the first two histories
            do_release_history(test_input_pool_size, test_input_max_pool_size, true);
            do_release_history(test_input_pool_size, test_input_max_pool_size, false);
        }

        // After all histories are gone the pool must be completely empty.
        EXPECT_EQ(pool->payload_pool_available_size(), 0u);
        EXPECT_EQ(pool->payload_pool_allocated_size(), 0u);
    }

    std::unique_ptr<ITopicPayloadPool> pool; //< The pool under test

    uint32_t test_input_pool_size;           //< Pool size given to the parametric test
    uint32_t test_input_max_pool_size;       //< Max pool size given to the parametric test
    uint32_t payload_size;                   //< Payload size given to the parametric test
    MemoryManagementPolicy_t memory_policy;  //< Memory policy size given to the parametric test

    uint32_t expected_pool_size_for_writers; //< Initial pool size due to writers (sum of all writers)
    uint32_t expected_pool_size_for_readers; //< Initial pool size due to readers (max of all readers)
    uint32_t expected_finite_max_pool_size;  //< Expected max pool size without counting the infinite histories
    uint32_t num_of_infinite_histories;      //< Number of infinite histories reserved
};
TEST_P(TopicPayloadPoolTests, reserve_history_reader_same_size)
{
    // Reader history using exactly the fixture's configured limits.
    do_history_test(test_input_pool_size, test_input_max_pool_size, true);
}
TEST_P(TopicPayloadPoolTests, reserve_history_reader_lower_size)
{
    // Reader history with limits below the fixture's configuration.
    do_history_test(test_input_pool_size / 2, test_input_max_pool_size / 5, true);
}
TEST_P(TopicPayloadPoolTests, reserve_history_reader_larger_size)
{
    // Reader history with limits above the fixture's configuration.
    do_history_test(test_input_pool_size * 2, test_input_max_pool_size * 5, true);
}
TEST_P(TopicPayloadPoolTests, reserve_history_writer_same_size)
{
    // Writer history using exactly the fixture's configured limits.
    do_history_test(test_input_pool_size, test_input_max_pool_size, false);
}
TEST_P(TopicPayloadPoolTests, reserve_history_writer_lower_size)
{
    // Writer history with limits below the fixture's configuration.
    do_history_test(test_input_pool_size / 2, test_input_max_pool_size / 5, false);
}
TEST_P(TopicPayloadPoolTests, reserve_history_writer_larger_size)
{
    // Writer history with limits above the fixture's configuration.
    do_history_test(test_input_pool_size * 2, test_input_max_pool_size * 5, false);
}
TEST_P(TopicPayloadPoolTests, release_history_reader_infinite_size)
{
    // Reserve and release a reader history with unlimited (0) max size.
    do_history_test(test_input_pool_size, 0, true);
}
TEST_P(TopicPayloadPoolTests, release_history_reader_finite_size)
{
    // Reserve and release a reader history with a finite max size.
    do_history_test(test_input_pool_size, 100, true);
}
TEST_P(TopicPayloadPoolTests, release_history_writer_infinite_size)
{
    // Reserve and release a writer history with unlimited (0) max size.
    do_history_test(test_input_pool_size, 0, false);
}
TEST_P(TopicPayloadPoolTests, release_history_writer_finite_size)
{
    // Reserve and release a writer history with a finite max size.
    do_history_test(test_input_pool_size, 100, false);
}
#ifdef INSTANTIATE_TEST_SUITE_P
#define GTEST_INSTANTIATE_TEST_MACRO(x, y, z) INSTANTIATE_TEST_SUITE_P(x, y, z)
#else
#define GTEST_INSTANTIATE_TEST_MACRO(x, y, z) INSTANTIATE_TEST_CASE_P(x, y, z, )
#endif // ifdef INSTANTIATE_TEST_SUITE_P
GTEST_INSTANTIATE_TEST_MACRO(
TopicPayloadPoolTests,
TopicPayloadPoolTests,
Combine(Values(0, 10, 20, 30),
Values(0, 10, 20, 30),
Values(128, 256, 512, 1024),
Values(MemoryManagementPolicy_t::PREALLOCATED_MEMORY_MODE,
MemoryManagementPolicy_t::PREALLOCATED_WITH_REALLOC_MEMORY_MODE,
MemoryManagementPolicy_t::DYNAMIC_RESERVE_MEMORY_MODE,
MemoryManagementPolicy_t::DYNAMIC_REUSABLE_MEMORY_MODE))
);
// Standard GoogleTest entry point: parse gtest command-line flags,
// then run every instantiated parameterized test.
int main(
        int argc,
        char** argv)
{
    testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}
| 6,958 |
5,250 |
<reponame>jiandiao/flowable-engine
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.engine.impl.runtime;
import java.util.Collections;
import java.util.List;
/**
* @author <NAME>
*/
public class MovePlanItemInstanceIdContainer {

    // Plan item instances to be moved.
    protected List<String> planItemInstanceIds;
    // Target plan item definitions the instances are moved to.
    protected List<String> moveToPlanItemDefinitionIds;
    // Optional assignee applied to the moved plan items (may be null).
    protected String newAssigneeId;

    /** Moves one plan item instance to one plan item definition. */
    public MovePlanItemInstanceIdContainer(String singlePlanItemInstanceId, String moveToPlanItemDefinitionId) {
        this(singlePlanItemInstanceId, moveToPlanItemDefinitionId, null);
    }

    /** Moves one plan item instance to one plan item definition with a new assignee. */
    public MovePlanItemInstanceIdContainer(String singlePlanItemInstanceId, String moveToPlanItemDefinitionId, String newAssigneeId) {
        this.newAssigneeId = newAssigneeId;
        this.planItemInstanceIds = Collections.singletonList(singlePlanItemInstanceId);
        this.moveToPlanItemDefinitionIds = Collections.singletonList(moveToPlanItemDefinitionId);
    }

    /** Moves several plan item instances to one plan item definition. */
    public MovePlanItemInstanceIdContainer(List<String> planItemInstanceIds, String moveToPlanItemDefinitionId) {
        this(planItemInstanceIds, moveToPlanItemDefinitionId, null);
    }

    /** Moves several plan item instances to one plan item definition with a new assignee. */
    public MovePlanItemInstanceIdContainer(List<String> planItemInstanceIds, String moveToPlanItemDefinitionId, String newAssigneeId) {
        this.newAssigneeId = newAssigneeId;
        this.planItemInstanceIds = planItemInstanceIds;
        this.moveToPlanItemDefinitionIds = Collections.singletonList(moveToPlanItemDefinitionId);
    }

    /** Moves one plan item instance to several plan item definitions. */
    public MovePlanItemInstanceIdContainer(String singlePlanItemInstanceId, List<String> moveToPlanItemDefinitionIds) {
        this(singlePlanItemInstanceId, moveToPlanItemDefinitionIds, null);
    }

    /** Moves one plan item instance to several plan item definitions with a new assignee. */
    public MovePlanItemInstanceIdContainer(String singlePlanItemInstanceId, List<String> moveToPlanItemDefinitionIds, String newAssigneeId) {
        this.newAssigneeId = newAssigneeId;
        this.planItemInstanceIds = Collections.singletonList(singlePlanItemInstanceId);
        this.moveToPlanItemDefinitionIds = moveToPlanItemDefinitionIds;
    }

    public List<String> getPlanItemInstanceIds() {
        return planItemInstanceIds;
    }

    public void setPlanItemInstanceIds(List<String> planItemInstanceIds) {
        this.planItemInstanceIds = planItemInstanceIds;
    }

    public List<String> getMoveToPlanItemDefinitionIds() {
        return moveToPlanItemDefinitionIds;
    }

    public void setMoveToPlanItemDefinitionIds(List<String> moveToPlanItemDefinitionIds) {
        this.moveToPlanItemDefinitionIds = moveToPlanItemDefinitionIds;
    }

    public String getNewAssigneeId() {
        return newAssigneeId;
    }

    public void setNewAssigneeId(String newAssigneeId) {
        this.newAssigneeId = newAssigneeId;
    }
}
| 1,071 |
2,272 |
//
// HttpRequest.h
// TSLocationManager
//
// Created by <NAME> on 2019-10-31.
// Copyright © 2019 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "HttpResponse.h"
// Represents a single HTTP upload of queued location records.
@interface HttpRequest : NSObject

// Payload for the request (presumably built from the records — confirm in the implementation).
@property(nonatomic) id requestData;
// Destination endpoint of the upload (presumably taken from plugin configuration — verify).
@property(nonatomic) NSURL *url;

// Builds and executes a request for the given records; the callback receives
// the request instance and the resulting HttpResponse.
+(void) execute:(NSArray*)records callback:(void(^)(HttpRequest*, HttpResponse*))callback;
// Initializes a request for the given records with the same callback contract.
-(instancetype) initWithRecords:(NSArray*)records callback:(void(^)(HttpRequest*, HttpResponse*))callback;

@end
| 170 |
678 |
/**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/GeoServices.framework/GeoServices
*/
#import <GeoServices/XXUnknownSuperclass.h>
__attribute__((visibility("hidden")))
@interface GEOThrottleState : XXUnknownSuperclass {
unsigned _requestCount; // 4 = 0x4
double _lastResetTime; // 8 = 0x8
}
@property(assign, nonatomic) double lastResetTime; // G=0x1f505; S=0x1f51d; @synthesize=_lastResetTime
@property(assign, nonatomic) unsigned requestCount; // G=0x1f4e5; S=0x1f4f5; @synthesize=_requestCount
// declared property setter: - (void)setLastResetTime:(double)time; // 0x1f51d
// declared property getter: - (double)lastResetTime; // 0x1f505
// declared property setter: - (void)setRequestCount:(unsigned)count; // 0x1f4f5
// declared property getter: - (unsigned)requestCount; // 0x1f4e5
@end
| 316 |
406 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/*
* ExpressionDataset.java
*
* Created on October 18, 2007, 2:20 PM
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*/
package org.broad.igv.data.expression;
import org.broad.igv.Globals;
import org.broad.igv.data.Dataset;
import org.broad.igv.feature.genome.Genome;
import org.broad.igv.track.TrackProperties;
import org.broad.igv.track.TrackType;
import org.broad.igv.util.ParsingUtils;
import java.util.HashMap;
import java.util.Map;
/**
* @author jrobinso
*/
public class ExpressionDataset implements Dataset {

    private String name;
    private TrackType type = TrackType.GENE_EXPRESSION;
    private Genome genome;
    private String[] columnHeadings;
    private boolean normalized = false;
    private boolean logValues = false;

    /**
     * Map column heading -> index for efficient reverse lookup
     */
    private Map<String, Integer> headingIndexMap = new HashMap<String, Integer>();

    Map<String, int[]> startLocationMap = new HashMap<String, int[]>();
    Map<String, int[]> endLocationMap = new HashMap<String, int[]>();

    private Map<String, Integer> longestFeatureMap;

    /**
     * Map of track heading -> (chromosome -> array of data values)
     */
    Map<String, Map<String, float[]>> dataMap = new HashMap<String, Map<String, float[]>>();

    /**
     * Map of chromosome -> array of feature names
     */
    Map<String, String[]> featureNameMap = new HashMap<String, String[]>();

    private TrackProperties trackProperties = new TrackProperties();

    /**
     * Creates a new instance of ExpressionDataset
     *
     * @param genome genome the expression data is mapped against
     */
    public ExpressionDataset(Genome genome) {
        this.genome = genome;
    }

    // Todo -- implement. Fixed range used as a placeholder for display scaling.
    public float getDataMin() {
        return -3f;
    }

    public float getDataMax() {
        return 3f;
    }

    /**
     * Sets the sample/track headings and rebuilds the reverse-lookup index.
     */
    public void setColumnHeadings(String[] columnHeadings) {
        this.columnHeadings = columnHeadings;
        for (int i = 0; i < columnHeadings.length; i++) {
            headingIndexMap.put(columnHeadings[i], i);
        }
    }

    public String[] getTrackNames() {
        return columnHeadings;
    }

    public String getName() {
        return name;
    }

    public void setType(TrackType type) {
        this.type = type;
    }

    public TrackType getType() {
        return type;
    }

    public boolean isEmpty() {
        return startLocationMap.isEmpty();
    }

    public String[] getChromosomes() {
        return startLocationMap.keySet().toArray(new String[0]);
    }

    public void setFeatureNames(String chr, String[] names) {
        this.featureNameMap.put(chr, names);
    }

    public String[] getFeatureNames(String chr) {
        return featureNameMap.get(chr);
    }

    public void setStartLocations(String chr, int[] startLocations) {
        this.startLocationMap.put(chr, startLocations);
    }

    public int[] getStartLocations(String chr) {
        return startLocationMap.get(chr);
    }

    public void setEndLocations(String chr, int[] endLocations) {
        this.endLocationMap.put(chr, endLocations);
    }

    public int[] getEndLocations(String chr) {
        // The whole-genome pseudo-chromosome has no per-feature end locations.
        if (chr.equals(Globals.CHR_ALL)) {
            return null;
        }
        return endLocationMap.get(chr);
    }

    public boolean isLogNormalized() {
        return normalized;
    }

    /**
     * Stores the data values of one track for one chromosome,
     * creating the per-track map lazily.
     */
    public void setData(String heading, String chr, float[] data) {
        Map<String, float[]> tmp = dataMap.get(heading);
        if (tmp == null) {
            tmp = new HashMap<String, float[]>();
            dataMap.put(heading, tmp);
        }
        tmp.put(chr, data);
    }

    public float[] getData(String heading, String chr) {
        Map<String, float[]> tmp = dataMap.get(heading);
        if (tmp != null) {
            return tmp.get(chr);
        }
        return null;
    }

    public void setName(String name) {
        this.name = name;
    }

    public boolean isLogValues() {
        return logValues;
    }

    public void setLogValues(boolean logValues) {
        this.logValues = logValues;
    }

    public void setNormalized(boolean normalized) {
        this.normalized = normalized;
    }

    public TrackProperties getTrackProperties() {
        return trackProperties;
    }

    public void setTrackLine(String trackLine) {
        ParsingUtils.parseTrackLine(trackLine, trackProperties);
    }

    /**
     * Returns the longest feature length for a chromosome: 1000 when no map
     * was provided at all, 1 when the chromosome is unknown.
     */
    public Integer getLongestFeature(String chr) {
        return longestFeatureMap == null ? 1000 :
                longestFeatureMap.containsKey(chr) ? longestFeatureMap.get(chr) : 1;
    }

    public void setLongestFeatureMap(Map<String, Integer> longestFeatureMap) {
        this.longestFeatureMap = longestFeatureMap;
    }
}
| 2,109 |
2,151 |
<filename>app/src/main/java/org/chromium/shape_detection/InterfaceRegistrar.java<gh_stars>1000+
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.shape_detection;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.mojo.system.impl.CoreImpl;
import org.chromium.services.service_manager.InterfaceRegistry;
import org.chromium.shape_detection.mojom.BarcodeDetection;
import org.chromium.shape_detection.mojom.FaceDetectionProvider;
import org.chromium.shape_detection.mojom.TextDetection;
@JNINamespace("shape_detection")
class InterfaceRegistrar {
    // Called from native code with a raw Mojo message-pipe handle; wires up
    // the Java implementations of the shape-detection Mojo interfaces.
    @CalledByNative
    static void createInterfaceRegistryForContext(int nativeHandle) {
        // Note: The bindings code manages the lifetime of this object, so it
        // is not necessary to hold on to a reference to it explicitly.
        InterfaceRegistry registry = InterfaceRegistry.create(
                CoreImpl.getInstance().acquireNativeHandle(nativeHandle).toMessagePipeHandle());
        // One factory per Mojo interface exposed by this service.
        registry.addInterface(BarcodeDetection.MANAGER, new BarcodeDetectionImpl.Factory());
        registry.addInterface(
                FaceDetectionProvider.MANAGER, new FaceDetectionProviderImpl.Factory());
        registry.addInterface(TextDetection.MANAGER, new TextDetectionImpl.Factory());
    }
}
| 485 |
421 |
<reponame>hamarb123/dotnet-api-docs
// <snippet1>
using namespace System;
using namespace System::IO;
// C++/CLI documentation sample: demonstrates FileInfo::Directory and
// DirectoryInfo::GetFileSystemInfos by listing the directory that
// contains a newly created file.
int main()
{
   // Open an existing file, or create a new one.
   FileInfo^ fi = gcnew FileInfo( "temp.txt" );

   // Determine the full path of the file just created.
   DirectoryInfo^ di = fi->Directory;

   // Figure out what other entries are in that directory.
   array<FileSystemInfo^>^fsi = di->GetFileSystemInfos();
   Console::WriteLine( "The directory '{0}' contains the following files and directories:", di->FullName );

   // Print the names of all the files and subdirectories of that directory.
   // Uses a non-generic IEnumerator, so each element must be cast back.
   Collections::IEnumerator^ myEnum = fsi->GetEnumerator();
   while ( myEnum->MoveNext() )
   {
      FileSystemInfo^ info = safe_cast<FileSystemInfo^>(myEnum->Current);
      Console::WriteLine( info->Name );
   }
}
//This code produces output similar to the following;
//results may vary based on the computer/file structure/etc.:
//
//The directory 'C:\Visual Studio 2005\release' contains the following files
//and directories:
//fileinfodirectory.exe
//fileinfodirectory.pdb
//newTemp.txt
//
// </snippet1>
| 414 |
475 |
/*
* Copyright (C) 2020 The zfoo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package com.zfoo.util.math;
/**
* @author jaysunxiao
* @version 3.0
*/
public class Combinatorics {// |kɒmbinəˋtɒ:riks|n.组合学
//输出数组a的startIndex~endIndex(包括端点)的全排列
public void permutation(int[] a, int startIndex, int endIndex) {
if (startIndex == endIndex) {
for (int i = 0; i <= endIndex; i++) {
System.out.print(a[i]);
}
System.out.println();
}
for (int i = startIndex; i <= endIndex; i++) {
swap(a, i, startIndex);
permutation(a, startIndex + 1, endIndex);
swap(a, i, startIndex);
}
}
//输出数组a的全排列
public void permutation(int[] a) {
permutation(a, 0, a.length - 1);
}
//public void Combination(int[] a,int[] b,int)
//交换数组中的两个元素
public void swap(int[] a, int xIndex, int yIndex) {
int temp = a[xIndex];
a[xIndex] = a[yIndex];
a[yIndex] = temp;
}
}
| 685 |
679 |
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_sc.hxx"
// System - Includes -----------------------------------------------------
// INCLUDE ---------------------------------------------------------------
#include "hintwin.hxx"
#include "global.hxx"
#define HINT_LINESPACE 2
#define HINT_INDENT 3
#define HINT_MARGIN 4
//==================================================================
// Builds the hint popup: measures the bold title and each CR-separated
// message line to compute the window's output size.
ScHintWindow::ScHintWindow( Window* pParent, const String& rTit, const String& rMsg ) :
    Window( pParent, WinBits( WB_BORDER ) ),
    aTitle( rTit ),
    aMessage( rMsg )
{
    aMessage.ConvertLineEnd( LINEEND_CR );

    // Light yellow, like the notes in detfunc.cxx
    Color aYellow( 255,255,192 );       // light yellow
    SetBackground( aYellow );

    aTextFont = GetFont();
    aTextFont.SetTransparent( sal_True );
    aTextFont.SetWeight( WEIGHT_NORMAL );
    aHeadFont = aTextFont;
    aHeadFont.SetWeight( WEIGHT_BOLD );

    // Measure the title in the bold heading font.
    SetFont( aHeadFont );
    Size aHeadSize( GetTextWidth( aTitle ), GetTextHeight() );
    SetFont( aTextFont );

    Size aTextSize;
    xub_StrLen nIndex = 0;
    // GetToken advances nIndex; it becomes STRING_NOTFOUND after the last line.
    while ( nIndex != STRING_NOTFOUND )
    {
        String aLine = aMessage.GetToken( 0, CHAR_CR, nIndex );
        Size aLineSize( GetTextWidth( aLine ), GetTextHeight() );
        nTextHeight = aLineSize.Height();
        aTextSize.Height() += nTextHeight;
        if ( aLineSize.Width() > aTextSize.Width() )
            aTextSize.Width() = aLineSize.Width();
    }
    aTextSize.Width() += HINT_INDENT;

    aTextStart = Point( HINT_MARGIN + HINT_INDENT,
                        aHeadSize.Height() + HINT_MARGIN + HINT_LINESPACE );

    // Window must fit the wider of title and message, plus margins.
    Size aWinSize( Max( aHeadSize.Width(), aTextSize.Width() ) + 2 * HINT_MARGIN + 1,
                    aHeadSize.Height() + aTextSize.Height() + HINT_LINESPACE + 2 * HINT_MARGIN + 1 );
    SetOutputSizePixel( aWinSize );
}
ScHintWindow::~ScHintWindow()
{
    // Nothing to release: members clean up via their own destructors.
}
// Draws the bold title at the top, then each CR-separated message line
// below it, advancing by the line height measured in the constructor.
void __EXPORT ScHintWindow::Paint( const Rectangle& /* rRect */ )
{
    SetFont( aHeadFont );
    DrawText( Point(HINT_MARGIN,HINT_MARGIN), aTitle );

    SetFont( aTextFont );
    xub_StrLen nIndex = 0;
    Point aLineStart = aTextStart;
    // GetToken advances nIndex; it becomes STRING_NOTFOUND after the last line.
    while ( nIndex != STRING_NOTFOUND )
    {
        String aLine = aMessage.GetToken( 0, CHAR_CR, nIndex );
        DrawText( aLineStart, aLine );
        aLineStart.Y() += nTextHeight;
    }
}
| 1,029 |
1,362 |
from __future__ import absolute_import, print_function, division, unicode_literals
from xcessiv.presets.cvsetting import k_fold
extraction_default_source = """\"\"\"In this code block, you must define the function `extract_{}_dataset`.
`extract_{}_dataset` must take no arguments and return a tuple (X, y), where X is a
Numpy array with shape (n_samples, n_features) corresponding to the features of your
{} dataset and y is the Numpy array corresponding to the ground truth labels of each
sample.
\"\"\"
def extract_{}_dataset():
return [[1, 2], [2, 1]], [0, 1]
"""
meta_feature_generation_default_source = k_fold['source']
DEFAULT_EXTRACTION_MAIN_DATASET = {
"source": extraction_default_source.format('main', 'main', 'main', 'main')
}
DEFAULT_EXTRACTION_TEST_DATASET = {
"method": None,
"source": extraction_default_source.format('test', 'test', 'test', 'test')
}
DEFAULT_EXTRACTION_META_FEATURE_GENERATION = {
"source": meta_feature_generation_default_source
}
tpot_learner_docstring = '''"""
The following code is directly from the TPOT learner's `export` function.
You must modify it in order to conform to the Xcessiv format.
The only relevant lines are the lines required to define `exported_pipeline`.
You may remove all others and rename `exported_pipeline` to `base_learner`.
"""
'''
| 429 |
348 |
<reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Versigny","circ":"1ère circonscription","dpt":"Aisne","inscrits":328,"abs":165,"votants":163,"blancs":11,"nuls":4,"exp":148,"res":[{"nuance":"FN","nom":"<NAME>","voix":75},{"nuance":"REM","nom":"<NAME>","voix":73}]}
| 107 |
5,169 |
<filename>Specs/b/b/1/trunk2/0.0.3/trunk2.podspec.json
{
"name": "trunk2",
"version": "0.0.3",
"summary": "开源高效处理圆角的扩展,包括UIImageView、UIView、UIButton、UIImage的扩展API,可根据图片颜色生成图片带任意圆角,可给UIButton根据不同状态处理图片",
"description": "支持生成带任意圆角的图片,支持背景色设置以解决图层混合问题;支持UIButton根据不同状态设置图片及背景图片;支持图片裁剪成任意大小及添加任意圆角且不会离屏渲染;支持根据颜色生成图片且可带任意圆角;支持给任意继承于UIView的控件添加圆角,通过mask来实现;更多功能请查看API。",
"homepage": "https://git.oschina.net/qyu",
"license": "MIT",
"authors": {
"huangyibiao": ""
},
"source": {
"git": "https://git.oschina.net/qyu/trunk2.git",
"tag": "0.0.3"
},
"source_files": [
"testView",
"*.{h,m}"
],
"requires_arc": true,
"platforms": {
"ios": "6.0"
}
}
| 645 |
8,107 |
"""Functionality related to displaying the profile report in Jupyter notebooks."""
import html
import uuid
from pathlib import Path
from typing import TYPE_CHECKING, Union
if TYPE_CHECKING:
from IPython.core.display import HTML
from IPython.lib.display import IFrame
from pandas_profiling import ProfileReport
from pandas_profiling.config import IframeAttribute, Settings
def get_notebook_iframe_srcdoc(config: Settings, profile: ProfileReport) -> "HTML":
    """Render the report as an inline iframe using the ``srcdoc`` attribute.

    The full report HTML is escaped and embedded directly in the iframe tag,
    so no temporary file is written.

    Args:
        config: Settings
        profile: The profile report object

    Returns:
        IPython HTML object.
    """
    from IPython.core.display import HTML

    iframe_settings = config.notebook.iframe
    escaped_report = html.escape(profile.to_html())
    markup = '<iframe width="{}" height="{}" srcdoc="{}" frameborder="0" allowfullscreen></iframe>'.format(
        iframe_settings.width, iframe_settings.height, escaped_report
    )
    return HTML(markup)
def get_notebook_iframe_src(config: Settings, profile: ProfileReport) -> "IFrame":
    """Get the IPython IFrame object

    Args:
        config: Settings
        profile: The profile report object

    Returns:
        IPython IFrame object.
    """
    # Write the rendered report to a uniquely named file so the iframe can
    # reference it by URL (`src`) instead of embedding the HTML inline.
    tmp_file = Path("./ipynb_tmp") / f"{uuid.uuid4().hex}.html"
    tmp_file.parent.mkdir(exist_ok=True)
    profile.to_file(tmp_file)
    from IPython.lib.display import IFrame

    # NOTE(review): files in ./ipynb_tmp are never removed here — presumably
    # cleaned up elsewhere or left for the user; confirm.
    return IFrame(
        str(tmp_file),
        width=config.notebook.iframe.width,
        height=config.notebook.iframe.height,
    )
def get_notebook_iframe(
    config: Settings, profile: ProfileReport
) -> Union["IFrame", "HTML"]:
    """Display the profile report in an iframe in the Jupyter notebook

    Dispatches on the configured iframe attribute: ``src`` serves the report
    from a temporary file, ``srcdoc`` embeds the escaped HTML inline.

    Args:
        config: Settings
        profile: The profile report object

    Returns:
        Displays the Iframe
    """
    attribute = config.notebook.iframe.attribute
    if attribute == IframeAttribute.src:
        return get_notebook_iframe_src(config, profile)
    if attribute == IframeAttribute.srcdoc:
        return get_notebook_iframe_srcdoc(config, profile)
    raise ValueError(
        f'Iframe Attribute can be "src" or "srcdoc" (current: {attribute}).'
    )
| 816 |
530 |
/*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The contents of this file are subject to the terms of either the Universal Permissive License
* v 1.0 as shown at http://oss.oracle.com/licenses/upl
*
* or the following license:
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided with
* the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
* WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.openjdk.jmc.console.uitest;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.List;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import org.openjdk.jmc.test.jemmy.MCJemmyTestBase;
import org.openjdk.jmc.test.jemmy.MCUITestRule;
import org.openjdk.jmc.test.jemmy.misc.base.wrappers.MCJemmyBase;
import org.openjdk.jmc.test.jemmy.misc.wrappers.JmxConsole;
import org.openjdk.jmc.test.jemmy.misc.wrappers.MC;
import org.openjdk.jmc.test.jemmy.misc.wrappers.MCButton;
import org.openjdk.jmc.test.jemmy.misc.wrappers.MCHyperlink;
import org.openjdk.jmc.test.jemmy.misc.wrappers.MCLabel;
import org.openjdk.jmc.test.jemmy.misc.wrappers.MCTable;
/**
* Class for testing the JMX Console Threads page
*/
public class ThreadsTabTest extends MCJemmyTestBase {
	// Maximum allowed drift (seconds) between the two refresh time stamp labels.
	private static final int TIMESTAMP_LABEL_MAX_SPAN_SECONDS = 3;
	// Sanity bounds for the number of rows expected in the live threads table.
	private static final int MIN_THREADS = 10;
	private static final int MAX_THREADS = 150;
	private static String DEAD_LOCK = "Deadlock Detection";
	private static String REFRESH_LIVE_THREADS = "Refresh Live Threads";
	private static String LIVE_THREADS = "Live Threads ";
	private static String REFRESH_STACK_TRACE = "Refresh Stack Trace";
	private static String STACK_TRACE = "Stack traces for selected threads ";
	private static MCTable threadsTable;

	@ClassRule
	public static MCUITestRule classTestRule = new MCUITestRule(verboseRuleOutput) {
		@Override
		public void before() {
			// Connect to the target JVM and open the Threads tab once for all tests.
			MC.jvmBrowser.connect();
			JmxConsole.selectTab(JmxConsole.Tabs.THREADS);
			threadsTable = MCTable.getByColumnHeader("Thread Name");
		}
	};

	@Test
	public void testAllocationOptionUnchecked() {
		// ensure that the Allocation checkbox is unchecked
		MCButton.getByLabel("Allocation").setState(false);
		// read all cells in the column "Allocated Bytes"
		List<String> columnData = threadsTable.getColumnItemTexts("Allocated Memory");
		// verify that the number of cells is sane and that they only contain "Not Enabled"
		assertSaneNumberOfRows(columnData);
		for (String data : columnData) {
			Assert.assertEquals("Not Enabled", data);
		}
	}

	@Test
	public void testAllocationOptionChecked() {
		// ensure that the Allocation checkbox is checked
		MCButton.getByLabel("Allocation").setState(true);
		// read all cells in the column "Allocated Bytes"
		List<String> columnData = threadsTable.getColumnItemTexts("Allocated Memory");
		// verify that the cells contain data that can be parsed as Double
		assertSaneNumberOfRows(columnData);
		verifyDoubles(columnData);
	}

	@Test
	public void testTimestampLabelsUpdating() {
		MCButton.getByLabel(DEAD_LOCK).click();
		MCHyperlink.getByTooltip(REFRESH_LIVE_THREADS).click();
		verifyRefreshing(LIVE_THREADS, true);
		MCHyperlink.getByTooltip(REFRESH_LIVE_THREADS).click();
		verifyRefreshing(LIVE_THREADS, false);
		// We need to select the table here so that the stack trace view is rendered.
		// threadsTable.mouse().click(1, new Point(50, 50));
		threadsTable.click();
		MCJemmyBase.waitForIdle();
		MCHyperlink.getByTooltip(REFRESH_STACK_TRACE).click();
		verifyRefreshing(STACK_TRACE, true);
		MCHyperlink.getByTooltip(REFRESH_STACK_TRACE).click();
		verifyRefreshing(STACK_TRACE, false);
		// make sure that the labels are equal (or at least close)
		Assert.assertTrue("Time labels for " + LIVE_THREADS + "and " + STACK_TRACE + "should be equal",
				fuzzyTimeLabelCompare(getRefreshTime(LIVE_THREADS), getRefreshTime(STACK_TRACE),
						TIMESTAMP_LABEL_MAX_SPAN_SECONDS));
	}

	// Asserts that the thread table row count lies within [MIN_THREADS, MAX_THREADS].
	private void assertSaneNumberOfRows(List<String> columnData) {
		int numOfThreads = columnData.size();
		System.out.println("Threads. Current number of threads: " + numOfThreads);
		Assert.assertFalse("There are less than " + MIN_THREADS + " rows in the threads list",
				numOfThreads < MIN_THREADS);
		Assert.assertFalse("There are more than " + MAX_THREADS + " rows in the threads list",
				numOfThreads > MAX_THREADS);
	}

	/**
	 * This method takes an array of Strings and verifies that the strings can be transformed to
	 * Double. The string versions of the doubles come from the UI and have spaces in them.
	 */
	private void verifyDoubles(List<String> values) {
		StringBuilder sb = new StringBuilder();
		for (String s : values) {
			// Strip all non-digit characters (group separators, spaces, units).
			for (String number : s.split("\\D")) {
				sb.append(number);
			}
			Assert.assertNotNull("fail to make a digit of the string: " + sb.toString(), Double.valueOf(sb.toString()));
			// Bug fix: the previous code used sb.delete(0, sb.length() - 1),
			// which left the last character behind and corrupted every
			// subsequent iteration's parsed value.
			sb.setLength(0);
		}
	}

	// Samples the time stamp label twice, 6 s apart; when condition is true the
	// two samples must be equal, otherwise they must differ.
	// NOTE(review): clicking the refresh link appears to toggle auto-refresh —
	// confirm against the console UI semantics.
	private void verifyRefreshing(String label, boolean condition) {
		MCJemmyBase.waitForIdle();
		String a = getRefreshTime(label);
		sleep(6000);
		String b = getRefreshTime(label);
		if (condition) {
			Assert.assertTrue(buildAssertionMessageString(label, true, a, b), a.equals(b));
		} else {
			Assert.assertFalse(buildAssertionMessageString(label, false, a, b), a.equals(b));
		}
	}

	// Builds the assertion failure message for verifyRefreshing.
	private String buildAssertionMessageString(String label, boolean equality, String first, String second) {
		return "Time stamp for label \"" + label.trim() + "\" should" + (equality ? "" : " not")
				+ " be equal between samples (but was " + first + " and " + second + ")";
	}

	// Returns true when the two time stamps are within maxSpanSeconds of each other.
	private boolean fuzzyTimeLabelCompare(String first, String second, int maxSpanSeconds) {
		DateFormat df = DateFormat.getTimeInstance(DateFormat.MEDIUM);
		boolean fuzzyMatch = true;
		try {
			if (Math.abs(df.parse(first).getTime() - df.parse(second).getTime()) > maxSpanSeconds * 1000) {
				fuzzyMatch = false;
			}
		} catch (ParseException e) {
			e.printStackTrace();
		}
		return fuzzyMatch;
	}

	// Extracts the time stamp portion that follows the given label prefix.
	private String getRefreshTime(String label) {
		MCJemmyBase.waitForIdle();
		return MCLabel.getByLabelSubstring(label).getText().split(label)[1];
	}
}
| 2,574 |
405 |
<filename>src/main/java/org/jeecgframework/web/demo/service/impl/test/JeecgNoteServiceImpl.java
package org.jeecgframework.web.demo.service.impl.test;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.jeecgframework.web.demo.service.test.JeecgNoteServiceI;
import org.jeecgframework.core.common.service.impl.CommonServiceImpl;
@Service("jeecgNoteService")
@Transactional
// Empty marker implementation: all CRUD behaviour is inherited from
// CommonServiceImpl; this class exists so the service can be injected by
// name ("jeecgNoteService") through the JeecgNoteServiceI interface.
public class JeecgNoteServiceImpl extends CommonServiceImpl implements JeecgNoteServiceI {
}
| 174 |
348 |
{"nom":"Sainte-Luce-sur-Loire","circ":"5ème circonscription","dpt":"Loire-Atlantique","inscrits":10640,"abs":4442,"votants":6198,"blancs":74,"nuls":32,"exp":6092,"res":[{"nuance":"MDM","nom":"Mme <NAME>","voix":2671},{"nuance":"FI","nom":"Mme <NAME>","voix":878},{"nuance":"SOC","nom":"M. <NAME>","voix":775},{"nuance":"UDI","nom":"M. <NAME>","voix":755},{"nuance":"FN","nom":"M. <NAME>","voix":369},{"nuance":"ECO","nom":"<NAME>","voix":273},{"nuance":"DLF","nom":"Mme <NAME>","voix":79},{"nuance":"REG","nom":"M. Pierre-<NAME>","voix":60},{"nuance":"COM","nom":"M. <NAME>","voix":55},{"nuance":"EXG","nom":"M. <NAME>","voix":46},{"nuance":"DIV","nom":"M. <NAME>","voix":37},{"nuance":"DVG","nom":"Mme <NAME>","voix":36},{"nuance":"REG","nom":"<NAME>","voix":30},{"nuance":"DVD","nom":"M. <NAME>","voix":28}]}
| 331 |
7,220 |
<gh_stars>1000+
# coding=utf-8
# Copyright 2021 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests different ways to use the public tf-numpy module."""
import numpy as onp
import tensorflow as tf
import tensorflow.experimental.numpy as np1
from tensorflow.experimental import numpy as np2 # pylint: disable=reimported
np3 = tf.experimental.numpy
class PublicSymbolTest(tf.test.TestCase):
  """Smoke test: all three import styles expose the same tf-numpy module."""

  def testSimple(self):
    # `add` reached via each alias must match NumPy's own result.
    a = 0.1
    b = 0.2
    for op in [np1.add, np2.add, np3.add]:
      self.assertAllClose(onp.add(a, b), op(a, b))
if __name__ == "__main__":
  # Enable eager execution (TF1 compatibility shim) before running the tests.
  tf.compat.v1.enable_eager_execution()
  tf.test.main()
| 379 |
617 |
<reponame>RamboWu/pokerstove<gh_stars>100-1000
/**
* Copyright (c) 2012 <NAME>. All rights reserved.
* $Id$
*/
#ifndef PENUM_SIMPLE_DECK_H_
#define PENUM_SIMPLE_DECK_H_
#include <algorithm>
#include <array>
#include <cstdint>
#include <functional>
#include <random>
#include <string>

#include <pokerstove/peval/Card.h>
#include <pokerstove/peval/CardSet.h>
#include <pokerstove/peval/Rank.h> // needed for NUM_RANK
#include <pokerstove/peval/Suit.h> // needed for NUM_SUIT
#include <pokerstove/util/lastbit.h>
namespace pokerstove
{
/**
* used for removing cards from the deck
*/
struct isLive : public std::binary_function<pokerstove::CardSet,
pokerstove::CardSet,
bool>
{
bool operator()(const CardSet& c, const CardSet& dead) const
{
return !dead.contains(c);
}
};
/**
* A very simple deck of the cards.
*/
class SimpleDeck
{
public:
/**
* construct a deck that is in-order
*/
SimpleDeck()
{
for (uint8_t i = 0; i < STANDARD_DECK_SIZE; i++)
{
_deck[i] = CardSet(Card(i));
}
reset();
}
/**
* put all dealt cards back into deck, don't reorder
*/
void reset() { _current = STANDARD_DECK_SIZE; }
/**
* number of cards left in the deck
*/
size_t size() const { return _current; }
/**
* print cards in deck with un/dealt divider
*/
std::string str() const
{
std::string ret;
for (uint i = 0; i < STANDARD_DECK_SIZE; i++)
{
if (i == _current)
ret += "/";
ret = ret + _deck[i].str();
}
if (_current == STANDARD_DECK_SIZE)
ret += "/";
return ret;
}
pokerstove::CardSet deal(size_t ncards)
{
// TODO: fix and test this code, edge cases clearly at risk here
if (ncards == 0)
return pokerstove::CardSet();
_current -= static_cast<uint>(ncards);
CardSet* pcur = &_deck[_current];
const CardSet* pend = pcur + ncards;
CardSet cards(*pcur++);
while (pcur < pend)
cards |= *pcur++;
return cards;
}
pokerstove::CardSet dead() const
{
pokerstove::CardSet cs;
for (size_t i = _current; i < STANDARD_DECK_SIZE; i++)
cs.insert(_deck[i]);
return cs;
}
/**
* Move all cards which are not live to the end of the deck
*/
void remove(const pokerstove::CardSet& cards)
{
int decr = CardSet(cards | dead()).size();
stable_partition(_deck.begin(), _deck.end(), bind2nd(isLive(), cards));
_current = STANDARD_DECK_SIZE - decr;
}
/**
* look at ith card from the top of the deck
*/
CardSet operator[](size_t i) const { return _deck[i]; }
void shuffle()
{
std::random_shuffle(_deck.begin(), _deck.end());
reset(); //_current = 0;
}
/**
* peek at the set of cards defined by the mask
*/
CardSet peek(uint64_t mask) const
{
#ifdef WIN32
// disable the unary negation of unsigned int
#pragma warning(disable : 4146)
#endif
CardSet ret;
uint32_t lower = static_cast<uint32_t>(mask & UINT32_C(0xFFFFFFFF));
uint32_t upper =
static_cast<uint32_t>((mask & UINT64_C(0xFFFFFFFF00000000)) >> 32);
while (lower)
{
ret |= _deck[lastbit(lower)];
lower ^= (lower & -lower);
}
const CardSet* top = &_deck[32];
while (upper)
{
ret |= top[lastbit(upper)];
upper ^= (upper & -upper);
}
#ifdef WIN32
// set back to default
#pragma warning(default : 4146)
#endif
return ret;
}
private:
// these are the data which track info about the deck
std::array<CardSet, STANDARD_DECK_SIZE> _deck;
size_t _current;
};
} // namespace pokerstove
#endif // PENUM_SIMPLE_DECK_H_
| 1,925 |
797 |
<filename>src/main/com/intellij/lang/jsgraphql/types/schema/idl/SchemaTypeExtensionsChecker.java
/*
The MIT License (MIT)
Copyright (c) 2015 <NAME> and Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.intellij.lang.jsgraphql.types.schema.idl;
import com.intellij.lang.jsgraphql.types.GraphQLError;
import com.intellij.lang.jsgraphql.types.Internal;
import com.intellij.lang.jsgraphql.types.language.*;
import com.intellij.lang.jsgraphql.types.schema.idl.errors.*;
import com.intellij.lang.jsgraphql.types.util.FpKit;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static com.intellij.lang.jsgraphql.types.DirectivesUtil.nonRepeatableDirectivesOnly;
import static com.intellij.lang.jsgraphql.types.schema.idl.SchemaTypeChecker.checkNamedUniqueness;
import static com.intellij.lang.jsgraphql.types.util.FpKit.mergeFirst;
/**
* A support class to help break up the large SchemaTypeChecker class. This handles
* the checking of "type extensions"
*/
@Internal
class SchemaTypeExtensionsChecker {
    // Entry point: runs every per-kind extension check, sharing the directive
    // definition map so repeatable directives can be distinguished.
    void checkTypeExtensions(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry) {
        Map<String, DirectiveDefinition> directiveDefinitionMap = typeRegistry.getDirectiveDefinitions();

        checkObjectTypeExtensions(errors, typeRegistry, directiveDefinitionMap);
        checkInterfaceTypeExtensions(errors, typeRegistry, directiveDefinitionMap);
        checkUnionTypeExtensions(errors, typeRegistry, directiveDefinitionMap);
        checkEnumTypeExtensions(errors, typeRegistry, directiveDefinitionMap);
        checkScalarTypeExtensions(errors, typeRegistry, directiveDefinitionMap);
        checkInputObjectTypeExtensions(errors, typeRegistry, directiveDefinitionMap);
    }
/*
* Object type extensions have the potential to be invalid if incorrectly defined.
*
* The named type must already be defined and must be an Object type.
* The fields of an Object type extension must have unique names; no two fields may share the same name.
* Any fields of an Object type extension must not be already defined on the original Object type.
* Any directives provided must not already apply to the original Object type.
* Any interfaces provided must not be already implemented by the original Object type.
* The resulting extended object type must be a super-set of all interfaces it implements.
*/
    // Validates every `extend type` block: base type must exist, fields /
    // field arguments / non-repeatable directives must be unique, and no
    // field may be redefined across sibling extensions or the base type.
    private void checkObjectTypeExtensions(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry,Map<String, DirectiveDefinition> directiveDefinitionMap) {
        typeRegistry.objectTypeExtensions()
                .forEach((name, extensions) -> {
                            checkTypeExtensionHasCorrespondingType(errors, typeRegistry, name, extensions, ObjectTypeDefinition.class);
                            checkTypeExtensionDirectiveRedefinition(errors, typeRegistry, name, extensions, ObjectTypeDefinition.class, directiveDefinitionMap);

                            extensions.forEach(extension -> {
                                List<FieldDefinition> fieldDefinitions = extension.getFieldDefinitions();
                                // field unique ness
                                checkNamedUniqueness(errors, extension.getFieldDefinitions(), FieldDefinition::getName,
                                        (namedField, fieldDef) -> new NonUniqueNameError(extension, fieldDef));

                                // field arg unique ness
                                extension.getFieldDefinitions().forEach(fld -> checkNamedUniqueness(errors, fld.getInputValueDefinitions(), InputValueDefinition::getName,
                                        (namedField, inputValueDefinition) -> new NonUniqueArgumentError(extension, fld, namedField)));

                                // directive checks
                                extension.getFieldDefinitions().forEach(fld -> checkNamedUniqueness(errors, nonRepeatableDirectivesOnly(directiveDefinitionMap, fld.getDirectives()), Directive::getName,
                                        (directiveName, directive) -> new NonUniqueDirectiveError(extension, fld, directiveName)));

                                fieldDefinitions.forEach(fld -> fld.getDirectives().forEach(directive ->
                                        checkNamedUniqueness(errors, directive.getArguments(), Argument::getName,
                                                (argumentName, argument) -> new NonUniqueArgumentError(extension, fld, argumentName))));

                                //
                                // fields must be unique within a type extension
                                forEachBut(extension, extensions,
                                        otherTypeExt -> checkForFieldRedefinition(errors, otherTypeExt, otherTypeExt.getFieldDefinitions(), fieldDefinitions));

                                //
                                // then check for field re-defs from the base type
                                Optional<ObjectTypeDefinition> baseTypeOpt = typeRegistry.getType(extension.getName(), ObjectTypeDefinition.class);
                                baseTypeOpt.ifPresent(baseTypeDef -> checkForFieldRedefinition(errors, extension, fieldDefinitions, baseTypeDef.getFieldDefinitions()));
                            });
                        }
                );
    }
/*
* Interface type extensions have the potential to be invalid if incorrectly defined.
*
* The named type must already be defined and must be an Interface type.
* The fields of an Interface type extension must have unique names; no two fields may share the same name.
* Any fields of an Interface type extension must not be already defined on the original Interface type.
* Any Object type which implemented the original Interface type must also be a super-set of the fields of the Interface type extension (which may be due to Object type extension).
* Any directives provided must not already apply to the original Interface type.
*/
    // Same validation as for object extensions, but against the base
    // InterfaceTypeDefinition: unique fields/args/directives, and no field
    // redefinition across sibling extensions or the base interface.
    private void checkInterfaceTypeExtensions(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry,Map<String, DirectiveDefinition> directiveDefinitionMap) {
        typeRegistry.interfaceTypeExtensions()
                .forEach((name, extensions) -> {
                    checkTypeExtensionHasCorrespondingType(errors, typeRegistry, name, extensions, InterfaceTypeDefinition.class);
                    checkTypeExtensionDirectiveRedefinition(errors, typeRegistry, name, extensions, InterfaceTypeDefinition.class, directiveDefinitionMap);

                    extensions.forEach(extension -> {
                        List<FieldDefinition> fieldDefinitions = extension.getFieldDefinitions();
                        // field unique ness
                        checkNamedUniqueness(errors, extension.getFieldDefinitions(), FieldDefinition::getName,
                                (namedField, fieldDef) -> new NonUniqueNameError(extension, fieldDef));

                        // field arg unique ness
                        extension.getFieldDefinitions().forEach(fld -> checkNamedUniqueness(errors, fld.getInputValueDefinitions(), InputValueDefinition::getName,
                                (namedField, inputValueDefinition) -> new NonUniqueArgumentError(extension, fld, namedField)));

                        // directive checks
                        extension.getFieldDefinitions().forEach(fld -> checkNamedUniqueness(errors, nonRepeatableDirectivesOnly(directiveDefinitionMap,fld.getDirectives()), Directive::getName,
                                (directiveName, directive) -> new NonUniqueDirectiveError(extension, fld, directiveName)));

                        fieldDefinitions.forEach(fld -> fld.getDirectives().forEach(directive ->
                                checkNamedUniqueness(errors, directive.getArguments(), Argument::getName,
                                        (argumentName, argument) -> new NonUniqueArgumentError(extension, fld, argumentName))));

                        //
                        // fields must be unique within a type extension
                        forEachBut(extension, extensions,
                                otherTypeExt -> checkForFieldRedefinition(errors, otherTypeExt, otherTypeExt.getFieldDefinitions(), fieldDefinitions));

                        //
                        // then check for field re-defs from the base type
                        Optional<InterfaceTypeDefinition> baseTypeOpt = typeRegistry.getType(extension.getName(), InterfaceTypeDefinition.class);
                        baseTypeOpt.ifPresent(baseTypeDef -> checkForFieldRedefinition(errors, extension, fieldDefinitions, baseTypeDef.getFieldDefinitions()));
                    });
                });
    }
/*
* Union type extensions have the potential to be invalid if incorrectly defined.
*
* The named type must already be defined and must be a Union type.
* The member types of a Union type extension must all be Object base types; Scalar, Interface and Union types must not be member types of a Union. Similarly, wrapping types must not be member types of a Union.
* All member types of a Union type extension must be unique.
* All member types of a Union type extension must not already be a member of the original Union type.
* Any directives provided must not already apply to the original Union type.
*/
    // Validates `extend union` blocks: base union must exist, member type
    // names must be unique, and every member must resolve to an Object type.
    private void checkUnionTypeExtensions(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry, Map<String, DirectiveDefinition> directiveDefinitionMap) {
        typeRegistry.unionTypeExtensions()
                .forEach((name, extensions) -> {
                    checkTypeExtensionHasCorrespondingType(errors, typeRegistry, name, extensions, UnionTypeDefinition.class);
                    checkTypeExtensionDirectiveRedefinition(errors, typeRegistry, name, extensions, UnionTypeDefinition.class, directiveDefinitionMap);

                    extensions.forEach(extension -> {
                        List<TypeName> memberTypes = extension.getMemberTypes().stream()
                                .map(t -> TypeInfo.typeInfo(t).getTypeName()).collect(Collectors.toList());

                        checkNamedUniqueness(errors, memberTypes, TypeName::getName,
                                (namedMember, memberType) -> new NonUniqueNameError(extension, namedMember));

                        memberTypes.forEach(
                                memberType -> {
                                    // union members must be Object types; anything else is missing
                                    Optional<ObjectTypeDefinition> unionTypeDefinition = typeRegistry.getType(memberType, ObjectTypeDefinition.class);
                                    if (!unionTypeDefinition.isPresent()) {
                                        errors.add(new MissingTypeError("union member", extension, memberType));
                                    }
                                }
                        );
                    });
                });
    }
/*
* Enum type extensions have the potential to be invalid if incorrectly defined.
*
* The named type must already be defined and must be an Enum type.
* All values of an Enum type extension must be unique.
* All values of an Enum type extension must not already be a value of the original Enum.
* Any directives provided must not already apply to the original Enum type.
*/
    // Validates `extend enum` blocks: base enum must exist, values must be
    // unique within an extension, across sibling extensions, and against the
    // base enum's values.
    private void checkEnumTypeExtensions(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry, Map<String, DirectiveDefinition> directiveDefinitionMap) {
        typeRegistry.enumTypeExtensions()
                .forEach((name, extensions) -> {
                    checkTypeExtensionHasCorrespondingType(errors, typeRegistry, name, extensions, EnumTypeDefinition.class);
                    checkTypeExtensionDirectiveRedefinition(errors, typeRegistry, name, extensions, EnumTypeDefinition.class, directiveDefinitionMap);

                    extensions.forEach(extension -> {
                        // field unique ness
                        List<EnumValueDefinition> enumValueDefinitions = extension.getEnumValueDefinitions();
                        checkNamedUniqueness(errors, enumValueDefinitions, EnumValueDefinition::getName,
                                (namedField, enumValue) -> new NonUniqueNameError(extension, enumValue));

                        //
                        // enum values must be unique within a type extension
                        forEachBut(extension, extensions,
                                otherTypeExt -> checkForEnumValueRedefinition(errors, otherTypeExt, otherTypeExt.getEnumValueDefinitions(), enumValueDefinitions));

                        //
                        // then check for field re-defs from the base type
                        Optional<EnumTypeDefinition> baseTypeOpt = typeRegistry.getType(extension.getName(), EnumTypeDefinition.class);
                        baseTypeOpt.ifPresent(baseTypeDef -> checkForEnumValueRedefinition(errors, extension, enumValueDefinitions, baseTypeDef.getEnumValueDefinitions()));
                    });
                });
    }
/*
* Scalar type extensions have the potential to be invalid if incorrectly defined.
*
* The named type must already be defined and must be a Scalar type.
* Any directives provided must not already apply to the original Scalar type.
*/
    // Validates `extend scalar` blocks: only the base-type-exists and
    // directive-redefinition checks apply (scalars have no members).
    private void checkScalarTypeExtensions(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry, Map<String, DirectiveDefinition> directiveDefinitionMap) {
        typeRegistry.scalarTypeExtensions()
                .forEach((name, extensions) -> {
                    checkTypeExtensionHasCorrespondingType(errors, typeRegistry, name, extensions, ScalarTypeDefinition.class);
                    checkTypeExtensionDirectiveRedefinition(errors, typeRegistry, name, extensions, ScalarTypeDefinition.class, directiveDefinitionMap);
                });
    }
/*
* Input object type extensions have the potential to be invalid if incorrectly defined.
*
* The named type must already be defined and must be a Input Object type.
* All fields of an Input Object type extension must have unique names.
* All fields of an Input Object type extension must not already be a field of the original Input Object.
* Any directives provided must not already apply to the original Input Object type.
*/
    // Validates `extend input` blocks: base input type must exist, input
    // fields and their non-repeatable directives must be unique, and no input
    // field may be redefined across sibling extensions or the base type.
    private void checkInputObjectTypeExtensions(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry,Map<String, DirectiveDefinition> directiveDefinitionMap) {
        typeRegistry.inputObjectTypeExtensions()
                .forEach((name, extensions) -> {
                    checkTypeExtensionHasCorrespondingType(errors, typeRegistry, name, extensions, InputObjectTypeDefinition.class);
                    checkTypeExtensionDirectiveRedefinition(errors, typeRegistry, name, extensions, InputObjectTypeDefinition.class, directiveDefinitionMap);

                    // field redefinitions
                    extensions.forEach(extension -> {
                        List<InputValueDefinition> inputValueDefinitions = extension.getInputValueDefinitions();
                        // field unique ness
                        checkNamedUniqueness(errors, inputValueDefinitions, InputValueDefinition::getName,
                                (namedField, fieldDef) -> new NonUniqueNameError(extension, fieldDef));

                        // directive checks
                        inputValueDefinitions.forEach(fld -> checkNamedUniqueness(errors, nonRepeatableDirectivesOnly(directiveDefinitionMap, fld.getDirectives()), Directive::getName,
                                (directiveName, directive) -> new NonUniqueDirectiveError(extension, fld, directiveName)));

                        inputValueDefinitions.forEach(fld -> fld.getDirectives().forEach(directive ->
                                checkNamedUniqueness(errors, directive.getArguments(), Argument::getName,
                                        (argumentName, argument) -> new NonUniqueArgumentError(extension, fld, argumentName))));

                        //
                        // fields must be unique within a type extension
                        forEachBut(extension, extensions,
                                otherTypeExt -> checkForInputValueRedefinition(errors, otherTypeExt, otherTypeExt.getInputValueDefinitions(), inputValueDefinitions));

                        //
                        // then check for field re-defs from the base type
                        Optional<InputObjectTypeDefinition> baseTypeOpt = typeRegistry.getType(extension.getName(), InputObjectTypeDefinition.class);
                        baseTypeOpt.ifPresent(baseTypeDef -> checkForInputValueRedefinition(errors, extension, inputValueDefinitions, baseTypeDef.getInputValueDefinitions()));
                    });
                });
    }
    // Reports an error (attributed to the first extension in the list) when
    // the extended base type is not registered with the expected kind.
    private void checkTypeExtensionHasCorrespondingType(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry, String name, List<? extends TypeDefinition> extTypeList, Class<? extends TypeDefinition> targetClass) {
        TypeDefinition extensionDefinition = extTypeList.get(0);
        Optional<? extends TypeDefinition> typeDefinition = typeRegistry.getType(TypeName.newTypeName().name(name).build(), targetClass);
        if (!typeDefinition.isPresent()) {
            errors.add(new TypeExtensionMissingBaseTypeError(extensionDefinition));
        }
    }
    // Reports an error for every non-repeatable directive on an extension
    // that already appears (by name) on the base type definition.
    @SuppressWarnings("unchecked")
    private void checkTypeExtensionDirectiveRedefinition(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry, String name, List<? extends TypeDefinition> extensions, Class<? extends TypeDefinition> targetClass, Map<String, DirectiveDefinition> directiveDefinitionMap) {
        Optional<? extends TypeDefinition> typeDefinition = typeRegistry.getType(TypeName.newTypeName().name(name).build(), targetClass);
        if (typeDefinition.isPresent() && typeDefinition.get().getClass().equals(targetClass)) {
            List<Directive> directives = typeDefinition.get().getDirectives();
            // first-wins merge: duplicate names on the base type collapse to one entry
            Map<String, Directive> directiveMap = FpKit.getByName(directives, Directive::getName, mergeFirst());
            extensions.forEach(typeExt -> {
                        List<Directive> extDirectives = nonRepeatableDirectivesOnly(directiveDefinitionMap, typeExt.getDirectives());
                        extDirectives.forEach(directive -> {
                            if (directiveMap.containsKey(directive.getName())) {
                                errors.add(new TypeExtensionDirectiveRedefinitionError(typeExt, directive));
                            }
                        });
                    }
            );
        }
    }
private void checkForFieldRedefinition(List<GraphQLError> errors, TypeDefinition typeDefinition, List<FieldDefinition> fieldDefinitions, List<FieldDefinition> referenceFieldDefinitions) {
Map<String, FieldDefinition> referenceMap = FpKit.getByName(referenceFieldDefinitions, FieldDefinition::getName, mergeFirst());
fieldDefinitions.forEach(fld -> {
if (referenceMap.containsKey(fld.getName())) {
FieldDefinition redefinedField = referenceMap.get(fld.getName());
errors.add(new TypeExtensionFieldRedefinitionError(typeDefinition, fld, redefinedField));
}
});
}
private void checkForInputValueRedefinition(List<GraphQLError> errors, InputObjectTypeExtensionDefinition typeDefinition, List<InputValueDefinition> inputValueDefinitions, List<InputValueDefinition> referenceInputValues) {
Map<String, InputValueDefinition> referenceMap = FpKit.getByName(referenceInputValues, InputValueDefinition::getName, mergeFirst());
inputValueDefinitions.forEach(fld -> {
if (referenceMap.containsKey(fld.getName())) {
InputValueDefinition redefinedField = referenceMap.get(fld.getName());
errors.add(new TypeExtensionFieldRedefinitionError(typeDefinition, fld, redefinedField));
}
});
}
private void checkForEnumValueRedefinition(List<GraphQLError> errors, TypeDefinition typeDefinition, List<EnumValueDefinition> enumValueDefinitions, List<EnumValueDefinition> referenceEnumValueDefinitions) {
Map<String, EnumValueDefinition> referenceMap = FpKit.getByName(referenceEnumValueDefinitions, EnumValueDefinition::getName, mergeFirst());
enumValueDefinitions.forEach(fld -> {
if (referenceMap.containsKey(fld.getName())) {
EnumValueDefinition redefinedValue = referenceMap.get(fld.getName());
errors.add(new TypeExtensionEnumValueRedefinitionError(typeDefinition, fld, redefinedValue));
}
});
}
private <T> void forEachBut(T butThisOne, List<T> list, Consumer<T> consumer) {
for (T t : list) {
if (t == butThisOne) {
continue;
}
consumer.accept(t);
}
}
}
| 8,767 |
1,006 |
/****************************************************************************
* libs/libnx/nxtk/nxtk_fillcircletoolbar.c
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
#include <sys/types.h>
#include <debug.h>
#include <errno.h>
#include <nuttx/nx/nxglib.h>
#include <nuttx/nx/nxtk.h>
/****************************************************************************
* Pre-processor Definitions
****************************************************************************/
#define NCIRCLE_TRAPS 8
/****************************************************************************
* Public Functions
****************************************************************************/
/****************************************************************************
* Name: nxtk_fillcircletoolbar
*
* Description:
* Fill a circular region using the specified color.
*
* Input Parameters:
* hfwnd - The window handle returned by nxtk_openwindow()
* center - A pointer to the point that is the center of the circle
* radius - The radius of the circle in pixels.
* color - The color to use to fill the circle.
*
* Returned Value:
* OK on success; ERROR on failure with errno set appropriately
*
****************************************************************************/
int nxtk_fillcircletoolbar(NXWINDOW hfwnd,
                           FAR const struct nxgl_point_s *center,
                           nxgl_coord_t radius,
                           nxgl_mxpixel_t color[CONFIG_NX_NPLANES])
{
  FAR struct nxgl_trapezoid_s traps[NCIRCLE_TRAPS];
  int trap;
  int ret = OK;

  /* Decompose the circular region into NCIRCLE_TRAPS trapezoids */

  nxgl_circletraps(center, radius, traps);

  /* Render each trapezoid into the toolbar, stopping on the first failure */

  for (trap = 0; trap < NCIRCLE_TRAPS && ret == OK; trap++)
    {
      ret = nxtk_filltraptoolbar(hfwnd, &traps[trap], color);
    }

  return ret;
}
| 834 |
333 |
{
"name": "react-swipe-views-bower-brunch-example",
"version": "0.0.12",
"description": "An implementation example of react-swipe-views built with Bower and Brunch",
"main": "app/scripts/main.js",
"scripts": {
"install": "bower install",
"start": "npm install && brunch watch",
"build": "brunch build"
},
"devDependencies": {
"babel-brunch": "^4.0.0",
"bower": "^1.5.3",
"browser-sync-brunch": "0.0.9",
"brunch": "^1.7.20",
"css-brunch": "^1.7.0",
"javascript-brunch": "^1.7.1"
}
}
| 241 |
13,846 |
<gh_stars>1000+
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
//-------------------------------------------------------------------------------------------------
// <summary>
// Windows Installer XML CustomAction utility library CaScript functions
// </summary>
//-------------------------------------------------------------------------------------------------
#include "precomp.h"
static HRESULT CaScriptFileName(
__in WCA_ACTION action,
__in WCA_CASCRIPT script,
__in BOOL fImpersonated,
__in LPCWSTR wzScriptKey,
__out LPWSTR* pwzScriptName
);
/********************************************************************
WcaCaScriptCreateKey() - creates a unique script key for this
CustomAction.
********************************************************************/
extern "C" HRESULT WIXAPI WcaCaScriptCreateKey(
    __out LPWSTR* ppwzScriptKey
    )
{
    AssertSz(WcaIsInitialized(), "WcaInitialize() should have been called before calling this function.");

    HRESULT hr = S_OK;

    // The CA log name is unique per custom action invocation, so it doubles as
    // the script key used to name the on-disk cascript file.
    hr = StrAllocStringAnsi(ppwzScriptKey, WcaGetLogName(), 0, CP_ACP);
    ExitOnFailure(hr, "Failed to create script key.");

LExit:
    return hr;
}
/********************************************************************
WcaCaScriptCreate() - creates the appropriate script for this
CustomAction Script Key.
********************************************************************/
extern "C" HRESULT WIXAPI WcaCaScriptCreate(
    __in WCA_ACTION action,
    __in WCA_CASCRIPT script,
    __in BOOL fImpersonated,
    __in LPCWSTR wzScriptKey,
    __in BOOL fAppend,
    __in WCA_CASCRIPT_HANDLE* phScript
    )
{
    HRESULT hr = S_OK;
    LPWSTR pwzScriptPath = NULL;
    HANDLE hScriptFile = INVALID_HANDLE_VALUE;

    // Derive the well-known script path from action/script/impersonation/key.
    hr = CaScriptFileName(action, script, fImpersonated, wzScriptKey, &pwzScriptPath);
    ExitOnFailure(hr, "Failed to calculate script file name.");

    // OPEN_ALWAYS preserves an existing file when appending; CREATE_ALWAYS truncates.
    hScriptFile = ::CreateFileW(pwzScriptPath, GENERIC_WRITE, FILE_SHARE_READ, NULL, fAppend ? OPEN_ALWAYS : CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL | FILE_FLAG_SEQUENTIAL_SCAN, NULL);
    if (INVALID_HANDLE_VALUE == hScriptFile)
    {
        ExitWithLastError1(hr, "Failed to open CaScript: %S", pwzScriptPath);
    }

    // When appending, position at end-of-file so new writes extend the script.
    if (fAppend && INVALID_SET_FILE_POINTER == ::SetFilePointer(hScriptFile, 0, NULL, FILE_END))
    {
        ExitWithLastError(hr, "Failed to seek to end of file.");
    }

    *phScript = reinterpret_cast<WCA_CASCRIPT_HANDLE>(MemAlloc(sizeof(WCA_CASCRIPT_STRUCT), TRUE));
    ExitOnNull(*phScript, hr, E_OUTOFMEMORY, "Failed to allocate space for cascript handle.");

    // Transfer ownership of the path and file handle into the returned handle so
    // the cleanup below does not release them on success.
    (*phScript)->pwzScriptPath = pwzScriptPath;
    pwzScriptPath = NULL;
    (*phScript)->hScriptFile = hScriptFile;
    hScriptFile = INVALID_HANDLE_VALUE;

LExit:
    if (INVALID_HANDLE_VALUE != hScriptFile)
    {
        ::CloseHandle(hScriptFile);
    }

    ReleaseStr(pwzScriptPath);
    return hr;
}
/********************************************************************
WcaCaScriptOpen() - opens the appropriate script for this CustomAction
Script Key.
********************************************************************/
extern "C" HRESULT WIXAPI WcaCaScriptOpen(
    __in WCA_ACTION action,
    __in WCA_CASCRIPT script,
    __in BOOL fImpersonated,
    __in LPCWSTR wzScriptKey,
    __in WCA_CASCRIPT_HANDLE* phScript
    )
{
    HRESULT hr = S_OK;
    LPWSTR pwzScriptPath = NULL;
    HANDLE hScriptFile = INVALID_HANDLE_VALUE;

    // Derive the well-known script path from action/script/impersonation/key.
    hr = CaScriptFileName(action, script, fImpersonated, wzScriptKey, &pwzScriptPath);
    ExitOnFailure(hr, "Failed to calculate script file name.");

    // OPEN_EXISTING: unlike WcaCaScriptCreate(), the script must already exist.
    hScriptFile = ::CreateFileW(pwzScriptPath, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL | FILE_FLAG_SEQUENTIAL_SCAN, NULL);
    if (INVALID_HANDLE_VALUE == hScriptFile)
    {
        ExitWithLastError1(hr, "Failed to open CaScript: %S", pwzScriptPath);
    }

    *phScript = reinterpret_cast<WCA_CASCRIPT_HANDLE>(MemAlloc(sizeof(WCA_CASCRIPT_STRUCT), TRUE));
    ExitOnNull(*phScript, hr, E_OUTOFMEMORY, "Failed to allocate space for cascript handle.");

    // Transfer ownership into the returned handle; NULL/invalidate the locals so
    // the cleanup below does not release them on success.
    (*phScript)->pwzScriptPath = pwzScriptPath;
    pwzScriptPath = NULL;
    (*phScript)->hScriptFile = hScriptFile;
    hScriptFile = INVALID_HANDLE_VALUE;

LExit:
    if (INVALID_HANDLE_VALUE != hScriptFile)
    {
        ::CloseHandle(hScriptFile);
    }

    ReleaseStr(pwzScriptPath);
    return hr;
}
/********************************************************************
WcaCaScriptClose() - closes an open script handle.
********************************************************************/
extern "C" void WIXAPI WcaCaScriptClose(
    __in WCA_CASCRIPT_HANDLE hScript,
    __in WCA_CASCRIPT_CLOSE closeOperation
    )
{
    // Tolerate a NULL handle so callers can close unconditionally.
    if (!hScript)
    {
        return;
    }

    if (INVALID_HANDLE_VALUE != hScript->hScriptFile)
    {
        ::CloseHandle(hScript->hScriptFile);
    }

    if (hScript->pwzScriptPath)
    {
        // Optionally remove the on-disk script before releasing its path string.
        if (WCA_CASCRIPT_CLOSE_DELETE == closeOperation)
        {
            ::DeleteFileW(hScript->pwzScriptPath);
        }

        StrFree(hScript->pwzScriptPath);
    }

    MemFree(hScript);
}
/********************************************************************
WcaCaScriptReadAsCustomActionData() - read the ca script into a format
that is useable by other CA data
functions.
********************************************************************/
extern "C" HRESULT WIXAPI WcaCaScriptReadAsCustomActionData(
    __in WCA_CASCRIPT_HANDLE hScript,
    __out LPWSTR* ppwzCustomActionData
    )
{
    HRESULT hr = S_OK;
    LARGE_INTEGER liScriptSize = { 0 };
    BYTE* pbData = NULL;
    DWORD cbData = 0;

    if (!::GetFileSizeEx(hScript->hScriptFile, &liScriptSize))
    {
        ExitWithLastError(hr, "Failed to get size of ca script file.");
    }

    // The script must fit in a DWORD and hold whole WCHARs; anything else is corrupt.
    if (0 != liScriptSize.HighPart || 0 != (liScriptSize.LowPart % sizeof(WCHAR)))
    {
        hr = HRESULT_FROM_WIN32(ERROR_INVALID_DATA);
        ExitOnFailure(hr, "Invalid data read from ca script.");
    }

    cbData = liScriptSize.LowPart;

    pbData = static_cast<BYTE*>(MemAlloc(cbData, TRUE));
    ExitOnNull(pbData, hr, E_OUTOFMEMORY, "Failed to allocate memory to read in ca script.");

    // Rewind: earlier writes may have left the file pointer at end-of-file.
    if (INVALID_SET_FILE_POINTER == ::SetFilePointer(hScript->hScriptFile, 0, NULL, FILE_BEGIN))
    {
        ExitWithLastError(hr, "Failed to reset to beginning of ca script.");
    }

    // ReadFile may return fewer bytes than requested; loop until the whole
    // script is in memory or a zero-byte read signals unexpected EOF.
    DWORD cbTotalRead = 0;
    DWORD cbRead = 0;
    do
    {
        if (!::ReadFile(hScript->hScriptFile, pbData + cbTotalRead, cbData - cbTotalRead, &cbRead, NULL))
        {
            ExitWithLastError(hr, "Failed to read from ca script.");
        }

        cbTotalRead += cbRead;
    } while (cbRead && cbTotalRead < cbData);

    if (cbTotalRead != cbData)
    {
        hr = E_UNEXPECTED;
        ExitOnFailure(hr, "Failed to completely read ca script.");
    }

    // Add one to the allocated space because the data stored in the script is not
    // null terminated. After copying the memory over, we'll ensure the string is
    // null terminated.
    DWORD cchData = cbData / sizeof(WCHAR) + 1;
    hr = StrAlloc(ppwzCustomActionData, cchData);
    ExitOnFailure(hr, "Failed to copy ca script.");

    CopyMemory(*ppwzCustomActionData, pbData, cbData);
    (*ppwzCustomActionData)[cchData - 1] = L'\0';

LExit:
    ReleaseMem(pbData);
    return hr;
}
/********************************************************************
WcaCaScriptWriteString() - writes a string to the ca script.
********************************************************************/
/********************************************************************
 CaScriptWriteAll() - writes a buffer completely to the script file,
                      looping because WriteFile may perform partial
                      writes.
********************************************************************/
static HRESULT CaScriptWriteAll(
    __in HANDLE hFile,
    __in const BYTE* pbData,
    __in DWORD cbData
    )
{
    HRESULT hr = S_OK;
    DWORD cbTotalWritten = 0;

    while (cbTotalWritten < cbData)
    {
        DWORD cbWritten = 0;
        if (!::WriteFile(hFile, pbData + cbTotalWritten, cbData - cbTotalWritten, &cbWritten, NULL))
        {
            ExitWithLastError(hr, "Failed to write data to ca script.");
        }

        cbTotalWritten += cbWritten;
    }

LExit:
    return hr;
}

extern "C" HRESULT WIXAPI WcaCaScriptWriteString(
    __in WCA_CASCRIPT_HANDLE hScript,
    __in LPCWSTR wzValue
    )
{
    HRESULT hr = S_OK;
    DWORD cbFile = 0;
    WCHAR delim[] = { MAGIC_MULTISZ_DELIM }; // magic char followed by NULL terminator

    // Appending: position at end-of-file; the returned offset tells us whether
    // the file already has content.
    cbFile = ::SetFilePointer(hScript->hScriptFile, 0, NULL, FILE_END);
    if (INVALID_SET_FILE_POINTER == cbFile)
    {
        ExitWithLastError(hr, "Failed to move file pointer to end of file.");
    }

    // If there is existing data in the file, append the magic delimiter
    // before adding our new data on the end of the file.
    if (0 < cbFile)
    {
        hr = CaScriptWriteAll(hScript->hScriptFile, reinterpret_cast<const BYTE*>(delim), sizeof(delim));
        ExitOnFailure(hr, "Failed to write delimiter to ca script.");
    }

    // Write the string's characters only (no terminating null; the delimiter
    // written above separates values when the script is read back).
    hr = CaScriptWriteAll(hScript->hScriptFile, reinterpret_cast<const BYTE*>(wzValue), lstrlenW(wzValue) * sizeof(WCHAR));
    ExitOnFailure(hr, "Failed to write value to ca script.");

LExit:
    return hr;
}
/********************************************************************
WcaCaScriptWriteNumber() - writes a number to the ca script.
********************************************************************/
extern "C" HRESULT WIXAPI WcaCaScriptWriteNumber(
    __in WCA_CASCRIPT_HANDLE hScript,
    __in DWORD dwValue
    )
{
    HRESULT hr = S_OK;
    WCHAR wzNumber[13] = { 0 }; // large enough for any unsigned 32-bit decimal

    // Format the DWORD as decimal text, then append it like any other string.
    hr = ::StringCchPrintfW(wzNumber, countof(wzNumber), L"%u", dwValue);
    ExitOnFailure(hr, "Failed to convert number into string.");

    hr = WcaCaScriptWriteString(hScript, wzNumber);
    ExitOnFailure(hr, "Failed to write number to script.");

LExit:
    return hr;
}
/********************************************************************
WcaCaScriptFlush() - best effort function to get script written to
disk.
********************************************************************/
extern "C" void WIXAPI WcaCaScriptFlush(
    __in WCA_CASCRIPT_HANDLE hScript
    )
{
    // Best effort only: the return value is deliberately ignored.
    ::FlushFileBuffers(hScript->hScriptFile);
}
/********************************************************************
WcaCaScriptCleanup() - best effort clean-up of any cascripts left
over from this install/uninstall.
********************************************************************/
extern "C" void WIXAPI WcaCaScriptCleanup(
    __in LPCWSTR wzProductCode,
    __in BOOL fImpersonated
    )
{
    HRESULT hr = S_OK;
    WCHAR wzTempPath[MAX_PATH];
    LPWSTR pwzWildCardPath = NULL;
    WIN32_FIND_DATAW fd = { 0 };
    HANDLE hff = INVALID_HANDLE_VALUE;
    LPWSTR pwzDeletePath = NULL;

    // Scripts live in %TEMP% when impersonated and %WINDIR%\Installer\ otherwise;
    // this must mirror the path logic in CaScriptFileName().
    if (fImpersonated)
    {
        if (!::GetTempPathW(countof(wzTempPath), wzTempPath))
        {
            ExitWithLastError(hr, "Failed to get temp path.");
        }
    }
    else
    {
        if (!::GetWindowsDirectoryW(wzTempPath, countof(wzTempPath)))
        {
            ExitWithLastError(hr, "Failed to get windows path.");
        }

        hr = ::StringCchCatW(wzTempPath, countof(wzTempPath), L"\\Installer\\");
        ExitOnFailure(hr, "Failed to concat Installer directory on windows path string.");
    }

    // Matches every script for this product regardless of key or suffix.
    hr = StrAllocFormatted(&pwzWildCardPath, L"%swix%s.*.???", wzTempPath, wzProductCode);
    ExitOnFailure(hr, "Failed to allocate wildcard path to ca scripts.");

    hff = ::FindFirstFileW(pwzWildCardPath, &fd);
    if (INVALID_HANDLE_VALUE == hff)
    {
        ExitWithLastError1(hr, "Failed to find files with pattern: %S", pwzWildCardPath);
    }

    do
    {
        // Best effort: log and continue on any per-file failure.
        hr = StrAllocFormatted(&pwzDeletePath, L"%s%s", wzTempPath, fd.cFileName);
        if (SUCCEEDED(hr))
        {
            if (!::DeleteFileW(pwzDeletePath))
            {
                DWORD er = ::GetLastError();
                WcaLog(LOGMSG_VERBOSE, "Failed to clean up CAScript file: %S, er: %d", fd.cFileName, er);
            }
        }
        else
        {
            WcaLog(LOGMSG_VERBOSE, "Failed to allocate path to clean up CAScript file: %S, hr: 0x%x", fd.cFileName, hr);
        }
    } while(::FindNextFileW(hff, &fd));

LExit:
    // Bug fix: the original tested "INVALID_HANDLE_VALUE == hff", which called
    // FindClose() on an invalid handle and leaked every valid find handle.
    if (INVALID_HANDLE_VALUE != hff)
    {
        ::FindClose(hff);
    }

    ReleaseStr(pwzDeletePath);
    ReleaseStr(pwzWildCardPath);
    return;
}
static HRESULT CaScriptFileName(
    __in WCA_ACTION action,
    __in WCA_CASCRIPT script,
    __in BOOL fImpersonated,
    __in LPCWSTR wzScriptKey,
    __out LPWSTR* ppwzScriptName
    )
{
    HRESULT hr = S_OK;
    WCHAR wzTempPath[MAX_PATH];
    LPWSTR pwzProductCode = NULL;
    // Three-character suffix encodes install/uninstall, scheduled/rollback and
    // user/machine so every combination maps to a distinct file.
    WCHAR chInstallOrUninstall = action == WCA_ACTION_INSTALL ? L'i' : L'u';
    WCHAR chScheduledOrRollback = script == WCA_CASCRIPT_SCHEDULED ? L's' : L'r';
    WCHAR chUserOrMachine = fImpersonated ? L'u' : L'm';

    // Impersonated (per-user) scripts go in %TEMP%; machine scripts go in
    // %WINDIR%\Installer\. WcaCaScriptCleanup() must mirror this logic.
    if (fImpersonated)
    {
        if (!::GetTempPathW(countof(wzTempPath), wzTempPath))
        {
            ExitWithLastError(hr, "Failed to get temp path.");
        }
    }
    else
    {
        if (!::GetWindowsDirectoryW(wzTempPath, countof(wzTempPath)))
        {
            ExitWithLastError(hr, "Failed to get windows path.");
        }

        hr = ::StringCchCatW(wzTempPath, countof(wzTempPath), L"\\Installer\\");
        ExitOnFailure(hr, "Failed to concat Installer directory on windows path string.");
    }

    hr = WcaGetProperty(L"ProductCode", &pwzProductCode);
    ExitOnFailure(hr, "Failed to get ProductCode.");

    // Final shape: <dir>wix<ProductCode>.<scriptKey>.<s|r><u|m><i|u>
    hr = StrAllocFormatted(ppwzScriptName, L"%swix%s.%s.%c%c%c", wzTempPath, pwzProductCode, wzScriptKey, chScheduledOrRollback, chUserOrMachine, chInstallOrUninstall);
    ExitOnFailure(hr, "Failed to allocate path to ca script.");

LExit:
    ReleaseStr(pwzProductCode);
    return hr;
}
| 5,690 |
1,127 |
<reponame>bhcsayx/libfuzzer-workshop<gh_stars>1000+
#!/usr/bin/env python2
import os
import subprocess
WORK_DIR = 'work'
def checkOutput(s):
    """Return True when the tool's output looks clean.

    A run is considered bad if it mentions a segmentation fault or contains
    the word "error" in any letter case.
    """
    looks_bad = ('Segmentation fault' in s) or ('error' in s.lower())
    return not looks_bad
# Re-run every corpus testcase through the ASan-instrumented pdfium_test binary
# and report the testcases whose output looks like a crash or error.
corpus_dir = os.path.join(WORK_DIR, 'corpus')
corpus_filenames = os.listdir(corpus_dir)

for f in corpus_filenames:
    testcase_path = os.path.join(corpus_dir, f)
    cmd = ['bin/asan/pdfium_test', testcase_path]
    # Merge stderr into stdout so sanitizer reports are captured together
    # with the tool's normal output.
    process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    output = process.communicate()[0]
    if not checkOutput(output):
        # Suspicious run: print the testcase path and its full output for triage.
        print testcase_path
        print output
        print '-' * 80
| 303 |
315 |
<reponame>g-r-a-n-t/py-libp2p
import pytest
from libp2p.security.noise.messages import NoiseHandshakePayload
from libp2p.tools.factories import noise_conn_factory, noise_handshake_payload_factory
DATA_0 = b"data_0"
DATA_1 = b"1" * 1000
DATA_2 = b"data_2"
@pytest.mark.trio
async def test_noise_transport(nursery):
    """Smoke test: a Noise-secured connection pair can be established at all."""
    async with noise_conn_factory(nursery):
        pass
@pytest.mark.trio
async def test_noise_connection(nursery):
    """Bytes written on one side of a Noise connection arrive intact on the
    other side, including a 1000-byte payload (DATA_1) and a write issued
    after earlier reads completed (DATA_2)."""
    async with noise_conn_factory(nursery) as conns:
        local_conn, remote_conn = conns
        # Two writes queued back-to-back, then read in the same order.
        await local_conn.write(DATA_0)
        await local_conn.write(DATA_1)
        assert DATA_0 == (await remote_conn.read(len(DATA_0)))
        assert DATA_1 == (await remote_conn.read(len(DATA_1)))
        await local_conn.write(DATA_2)
        assert DATA_2 == (await remote_conn.read(len(DATA_2)))
def test_noise_handshake_payload():
    """Round-trip: serializing then deserializing yields an equal payload."""
    original = noise_handshake_payload_factory()
    round_tripped = NoiseHandshakePayload.deserialize(original.serialize())
    assert original == round_tripped
| 450 |
3,212 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.registry.variable;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import org.apache.nifi.registry.ComponentVariableRegistry;
import org.apache.nifi.registry.VariableDescriptor;
import org.apache.nifi.registry.VariableRegistry;
/**
 * A {@link ComponentVariableRegistry} that defines no variables of its own and
 * delegates every lookup to a parent {@link VariableRegistry}.
 */
public class StandardComponentVariableRegistry implements ComponentVariableRegistry {
    // Volatile because setParent() may run on a different thread than readers.
    private volatile VariableRegistry parent;

    /**
     * @param parent the registry lookups fall back to; must not be null
     */
    public StandardComponentVariableRegistry(final VariableRegistry parent) {
        this.parent = Objects.requireNonNull(parent);
    }

    /**
     * @return the variables defined directly on this registry; this
     *         implementation defines none and always returns an empty map
     */
    @Override
    public Map<VariableDescriptor, String> getVariableMap() {
        return Collections.emptyMap();
    }

    @Override
    public VariableRegistry getParent() {
        return parent;
    }

    @Override
    public void setParent(final VariableRegistry parentRegistry) {
        // NOTE(review): unlike the constructor, this accepts null; a null parent
        // would make getVariableValue throw an NPE on fallback.
        this.parent = parentRegistry;
    }

    /**
     * Returns the descriptor instance stored in this registry whose name equals
     * the given name, or null when the name is null or not present.
     */
    @Override
    public VariableDescriptor getVariableKey(final String name) {
        if (name == null) {
            return null;
        }

        final VariableDescriptor spec = new VariableDescriptor(name);
        for (final Map.Entry<VariableDescriptor, String> entry : getVariableMap().entrySet()) {
            if (entry.getKey().equals(spec)) {
                return entry.getKey();
            }
        }

        return null;
    }

    /**
     * Looks up a variable by name, falling back to the parent registry when the
     * variable is not defined locally.
     */
    @Override
    public String getVariableValue(final String name) {
        if (name == null) {
            return null;
        }

        // Delegate to the descriptor overload so both lookup paths stay in sync
        // (previously this method duplicated the map-then-parent logic).
        return getVariableValue(new VariableDescriptor(name));
    }

    /**
     * Looks up a variable by descriptor, falling back to the parent registry
     * when the variable is not defined locally.
     */
    @Override
    public String getVariableValue(final VariableDescriptor descriptor) {
        if (descriptor == null) {
            return null;
        }

        final String value = getVariableMap().get(descriptor);
        if (value != null) {
            return value;
        }

        return parent.getVariableValue(descriptor);
    }
}
| 1,000 |
387 |
"""
##################################################################################################
# Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved.
# Filename : LSTM.py
# Abstract : Implementations of Bidirectional LSTM
# Current Version: 1.0.0
# Date : 2021-05-01
##################################################################################################
"""
import torch.nn as nn
from davarocr.davar_common.models.builder import CONNECTS
@CONNECTS.register_module()
class BidirectionalLSTM(nn.Module):
    """
    input : visual feature [batch_size x T x input_size]
    output : contextual feature [batch_size x T x output_size]

    Usage sample:
        sequence_module=dict(
             type='CascadeRNN',
             mode=1,
             repeat=2,
             rnn_modules=[
                 dict(
                     type='BidirectionalLSTM',
                     input_size=512,
                     hidden_size=256,
                     output_size=512,
                     ),
                 dict(
                     type='BidirectionalLSTM',
                     input_size=512,
                     hidden_size=256,
                     output_size=512,
                     ),
             ]),
    """
    def __init__(self, input_size,
                 hidden_size,
                 output_size=None,
                 num_layers=1,
                 dropout=0,
                 bidirectional=False,
                 batch_first=True,
                 with_linear=False):
        """
        Args:
            input_size (int): input feature dim
            hidden_size (int): hidden state dim (per direction)
            output_size (int): output feature dim; required when with_linear=True
            num_layers (int): layers of the LSTM
            dropout (float): probability of the dropout
            bidirectional (bool): bidirectional interface, default False
            batch_first (bool): feature format ''(batch, seq, feature)''
            with_linear (bool): whether to combine linear layer with LSTM
        """
        super(BidirectionalLSTM, self).__init__()
        self.with_linear = with_linear
        self.rnn = nn.LSTM(input_size,
                           hidden_size,
                           num_layers=num_layers,
                           dropout=dropout,
                           bidirectional=bidirectional,
                           batch_first=batch_first)

        # text recognition the specified structure LSTM with linear
        if self.with_linear:
            # Bug fix: the LSTM emits hidden_size features per direction, so the
            # projection input must be sized by the direction count. The previous
            # hard-coded `hidden_size * 2` crashed at forward() time whenever
            # with_linear=True was combined with bidirectional=False.
            num_directions = 2 if bidirectional else 1
            self.linear = nn.Linear(hidden_size * num_directions, output_size)

    def init_weights(self, pretrained=None):
        """
        Args:
            pretrained (str): model path of the pre_trained model

        Returns:
        """

    def forward(self, input_feature):
        """
        Args:
            input_feature (Torch.Tensor): visual feature [batch_size x T x input_size]

        Returns:
            Torch.Tensor: LSTM output contextual feature [batch_size x T x output_size]
        """
        # Compact the (possibly fragmented) LSTM weights for efficient execution.
        self.rnn.flatten_parameters()
        recurrent, _ = self.rnn(input_feature)  # batch_size x T x input_size -> batch_size x T x (num_directions*hidden_size)

        if self.with_linear:
            output = self.linear(recurrent)  # batch_size x T x output_size
            return output
        return recurrent
| 1,585 |
444 |
from angr import concretization_strategies
import angr
class InstrumentationMixin:
    """Mixin that installs Kuafffp concretization strategies and inspection
    breakpoints onto an angr SimState (Python 2 codebase)."""

    def enable_custom_concretize(self, state):
        """
        obsolete -- replaced by add_concretization_strategy()
        :param state: angr SimState whose concretization strategy is swapped
        :return: None
        """
        print('[+] switching current concretization_strategy to Kuafffp...')
        state.inspect.address_concretization_strategy = self.kuafffp_concretizer
        # Blocks until the operator presses Enter (Python 2 raw_input).
        raw_input()
        return

    def add_concretization_strategy(self, state, name_concretization_strategy):
        # Prepend (index 0) so the chosen strategy is consulted before angr's
        # default read strategies.
        if name_concretization_strategy == 'kuafffp':
            print 'using kuafffp concretization'
            # Concretizes into a single user-space page; limit=1 keeps one solution.
            state.memory.read_strategies.insert(
                0, concretization_strategies.SimConcretizationStrategyKuafffp(mapped_addr=
                                                                              self.userspace_base,
                                                                              length=4096,
                                                                              limit=1))
        elif name_concretization_strategy == 'kuafffp2':  # more conservative concretization strategy
            print 'using kuafffp2 concretization2'
            # Wider window (num_user_pages pages) walked in 0x100-byte steps.
            state.memory.read_strategies.insert(
                0, concretization_strategies.SimConcretizationStrategyKuafffp2(mapped_addr=
                                                                               self.userspace_base,
                                                                               length=4096*self.num_user_pages,
                                                                               limit=1,
                                                                               step=0x100
                                                                               )
            )
        elif name_concretization_strategy == 'kuafffp3':  # more conservative concretization strategy
            print 'using kuafffp3 concretization3'
            # Steers concretization toward the UAF object base address.
            state.memory.read_strategies.insert(0, angr.concretization_strategies.mycontrolled_data.MySimConcretizationStrategyControlledData(\
                1, [self.uaf_object_base]))
        return

    def add_instrumentation(self, s):
        # Optional per-instruction breakpoints supplied by the caller.
        if len(self.extra_bp) > 0:  # TODO such bp is really bad
            for bp in self.extra_bp:
                s.inspect.b('instruction', when=angr.BP_BEFORE
                            , instruction=bp
                            , action=self.track_bp)
        # Core tracking hooks: memory accesses, calls and fresh symbolic variables.
        s.inspect.b('mem_read', when=angr.BP_BEFORE, action=self.track_reads)
        s.inspect.b('mem_write', when=angr.BP_BEFORE, action=self.track_writes)
        s.inspect.b('call', when=angr.BP_BEFORE, action=self.track_call)
        s.inspect.b('symbolic_variable', when=angr.BP_AFTER, action=self.track_symbolic_variable)
        #s.inspect.b('address_concretization', when=angr.BP_BEFORE, action=self.address_concretization_before)
        #s.inspect.b('address_concretization', when=angr.BP_AFTER, action=self.address_concretization_after)
| 1,701 |
3,263 |
<filename>criteria/common/test/org/immutables/criteria/backend/JavaBeanNamingTest.java
/*
* Copyright 2019 Immutables Authors and Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.immutables.criteria.backend;
import org.immutables.criteria.expression.Path;
import org.junit.jupiter.api.Test;
import static org.immutables.check.Checkers.check;
class JavaBeanNamingTest {

    // Strategy under test: maps JavaBean accessor methods to property names.
    private final JavaBeanNaming naming = new JavaBeanNaming();

    @Test
    void bean() throws NoSuchMethodException {
        // get/is prefixes are stripped and the next letter decapitalized, except
        // when the remainder is all-caps (getURL -> "URL", isBOOLEAN -> "BOOLEAN").
        // Non-accessors keep their method name unchanged.
        check(naming.name(path("getA"))).is("a");
        check(naming.name(path("getFoo"))).is("foo");
        check(naming.name(path("notABean"))).is("notABean");
        check(naming.name(path("getURL"))).is("URL");
        check(naming.name(path("isBool"))).is("bool");
        check(naming.name(path("isBOOLEAN"))).is("BOOLEAN");
        // "is" getters must return boolean, so isNotBoolean is not an accessor.
        check(naming.name(path("isNotBoolean"))).is("isNotBoolean");
    }

    @Test
    void booleans() throws NoSuchMethodException {
        // IS_GETTER recognizes zero-arg get*/is* methods; is* additionally
        // requires a boolean return type.
        check(!JavaBeanNaming.IS_GETTER.test(JavaBean.class.getDeclaredMethod("isNotBoolean")));
        check(JavaBeanNaming.IS_GETTER.test(JavaBean.class.getDeclaredMethod("isBOOLEAN")));
        check(JavaBeanNaming.IS_GETTER.test(JavaBean.class.getDeclaredMethod("isBool")));
        check(JavaBeanNaming.IS_GETTER.test(JavaBean.class.getDeclaredMethod("getURL")));
        check(!JavaBeanNaming.IS_GETTER.test(JavaBean.class.getDeclaredMethod("notABean")));
        check(JavaBeanNaming.IS_GETTER.test(JavaBean.class.getDeclaredMethod("getA")));
    }

    // Resolves a Path for the named fixture method below.
    private static Path path(String name) throws NoSuchMethodException {
        return Path.ofMember(JavaBean.class.getDeclaredMethod(name));
    }

    // Fixture: the exact signatures (arity, prefix, return type) define which
    // methods count as accessors in the tests above.
    static class JavaBean {

        // Has a parameter, so it is not a getter despite the "get" prefix.
        public String getNotABean(String foo) {
            return "";
        }

        public String notABean() {
            return "";
        }

        public String getA() {
            return "";
        }

        public String getFoo() {
            return "";
        }

        public String getURL() {
            return "";
        }

        public boolean isBool() {
            return true;
        }

        public boolean isBOOLEAN() {
            return true;
        }

        /**
         * This is not a getter because return type is not boolean
         */
        public int isNotBoolean() {
            return 0;
        }
    }
}
| 983 |
4,538 |
/*
* Copyright (C) 2015-2017 Alibaba Group Holding Limited
*/
#include <stdio.h>
#include <stdlib.h>
#include "k_api.h"
#if AOS_COMP_DEBUG
#include "aos/debug.h"
extern uint32_t debug_task_id_now();
extern void debug_cpu_stop(void);
#endif
#if (RHINO_CONFIG_MM_TLF > 0)
extern k_mm_region_t g_mm_region[];
extern int g_region_num;
#if RHINO_CONFIG_MM_DEBUG
#if (RHINO_CONFIG_MM_TRACE_LVL > 0)
volatile uint32_t g_kmm_bt = 0;
int backtrace_now_get(void *trace[], int size, int offset);
void kmm_bt_disable(void)
{
    /* Record that allocator backtrace tracing was turned off via KV config. */
    g_kmm_bt = KMM_BT_SET_BY_KV;
}
/* check bt status
* ret 0 : enable
* ret 1 : disable
* */
int kmm_bt_check(void)
{
    /* Disabled exactly when kmm_bt_disable() stored the KV marker. */
    return (g_kmm_bt == KMM_BT_SET_BY_KV);
}
#endif
void kmm_error(uint32_t mm_status_locked)
{
    /* Dump allocator state for post-mortem analysis, then raise a fatal
     * kernel error; this function does not return control to the caller path.
     */
    dumpsys_mm_info_func(mm_status_locked);
    k_err_proc(RHINO_SYS_FATAL_ERR);
}
#endif
void k_mm_init(void)
{
    uint32_t region_idx;

    /* Region 0 seeds the heap head; every remaining region is appended to it. */
    (void)krhino_init_mm_head(&g_kmm_head, g_mm_region[0].start, g_mm_region[0].len);

    for (region_idx = 1; region_idx < g_region_num; region_idx++) {
        krhino_add_mm_region(g_kmm_head, g_mm_region[region_idx].start, g_mm_region[region_idx].len);
    }
}
/* init a region, contain 3 mmblk
* -------------------------------------------------------------------
* | k_mm_list_t | k_mm_region_info_t | k_mm_list_t | free space |k_mm_list_t|
* -------------------------------------------------------------------
*
* "regionaddr" and "len" is aligned by caller
*/
RHINO_INLINE k_mm_list_t *init_mm_region(void *regionaddr, size_t len)
{
    k_mm_list_t *midblk, *lastblk, *firstblk;
    k_mm_region_info_t *region;

    /* first mmblk for region info; marked used so it can never be merged/freed */
    firstblk = (k_mm_list_t *) regionaddr;
    firstblk->prev  = NULL;
    firstblk->buf_size = MM_ALIGN_UP(sizeof(k_mm_region_info_t))
                         | MM_BUFF_USED | MM_BUFF_PREV_USED;

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    firstblk->dye      = MM_DYE_USED;
    firstblk->owner_id = MM_OWNER_ID_SELF;
    firstblk->trace_id = 0;
    firstblk->owner    = 0;
#endif

    /*last mmblk for stop merge */
    lastblk = (k_mm_list_t *)((char *)regionaddr + len - MMLIST_HEAD_SIZE);

    /*middle mmblk for heap use */
    midblk = MM_GET_NEXT_BLK(firstblk);

    /* midblk's usable size is everything between its buffer and lastblk */
    midblk->buf_size = ((char *)lastblk - (char *)midblk->mbinfo.buffer)
                       | MM_BUFF_USED | MM_BUFF_PREV_USED;

    midblk->mbinfo.free_ptr.prev = midblk->mbinfo.free_ptr.next = 0;

    /*last mmblk for stop merge */
    lastblk->prev = midblk;

    /* set alloced, can't be merged */
    lastblk->buf_size = 0 | MM_BUFF_USED | MM_BUFF_PREV_FREE;

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    lastblk->dye      = MM_DYE_USED;
    lastblk->owner_id = MM_OWNER_ID_SELF;
    lastblk->trace_id = 0;
    /* magic value lets debug dumps recognize the region terminator */
    lastblk->owner    = MM_LAST_BLK_MAGIC;
#endif

    /* region bookkeeping lives inside firstblk's buffer */
    region = (k_mm_region_info_t *)firstblk->mbinfo.buffer;
    region->next = 0;
    region->end  = lastblk;

    return firstblk;
}
/* 2^(N + MM_MIN_BIT) <= size < 2^(1 + N + MM_MIN_BIT) */
/* Map a byte count to its freelist level:
 * 2^(N + MM_MIN_BIT) <= size < 2^(1 + N + MM_MIN_BIT)  ->  level N.
 * Sizes below the smallest class collapse to level 0; sizes above the
 * largest class yield -1 (not serviceable). */
static int32_t size_to_level(size_t size)
{
    size_t msb_pos = 32 - krhino_clz32(size);

    if (msb_pos > MM_MAX_BIT) {
        /* too large for any segregated list */
        return -1;
    }

    return (msb_pos < MM_MIN_BIT) ? 0 : (int32_t)(msb_pos - MM_MIN_BIT);
}
#if (K_MM_STATISTIC > 0)
static void addsize(k_mm_head *mmhead, size_t size, size_t req_size)
{
int32_t level;
if (mmhead->free_size > size) {
mmhead->free_size -= size;
} else {
mmhead->free_size = 0;
}
mmhead->used_size += size;
if (mmhead->used_size > mmhead->maxused_size) {
mmhead->maxused_size = mmhead->used_size;
}
if (req_size > 0) {
level = size_to_level(req_size);
if (level != -1) {
mmhead->alloc_times[level]++;
}
}
}
static void removesize(k_mm_head *mmhead, size_t size)
{
if (mmhead->used_size > size) {
mmhead->used_size -= size;
} else {
mmhead->used_size = 0;
}
mmhead->free_size += size;
}
/* used_size++, free_size--, maybe maxused_size++ */
#define stats_addsize(mmhead, size, req_size) addsize(mmhead, size, req_size)
/* used_size--, free_size++ */
#define stats_removesize(mmhead, size) removesize(mmhead, size)
#else
#define stats_addsize(mmhead, size, req_size) do {} while (0)
#define stats_removesize(mmhead, size) do {} while (0)
#endif
/* Build a heap head inside [addr, addr + len) and return it via ppmmhead.
 * The buffer is aligned internally; it must be big enough for the head,
 * the fixed-size block pool (RHINO_CONFIG_MM_TLF_BLK_SIZE) and a minimum
 * of allocatable space, otherwise RHINO_MM_POOL_SIZE_ERR is returned.
 */
kstat_t krhino_init_mm_head(k_mm_head **ppmmhead, void *addr, size_t len)
{
    k_mm_list_t *nextblk;
    k_mm_list_t *firstblk;
    k_mm_head *pmmhead;
    void *orig_addr;

#if (RHINO_CONFIG_MM_BLK > 0)
    mblk_pool_t *mmblk_pool;
    kstat_t stat;
#endif

    NULL_PARA_CHK(ppmmhead);
    NULL_PARA_CHK(addr);

    memset(addr, 0, len);

    /* check paramters, addr and len need algin
     * 1. the length at least need RHINO_CONFIG_MM_TLF_BLK_SIZE for fixed size memory block
     * 2. and also ast least have 1k for user alloced
     */
    orig_addr = addr;
    addr = (void *) MM_ALIGN_UP((size_t)addr);
    /* shrink len by whatever the front alignment consumed */
    len -= (size_t)addr - (size_t)orig_addr;
    len = MM_ALIGN_DOWN(len);

    if (len == 0
        || len < MM_MIN_HEAP_SIZE + RHINO_CONFIG_MM_TLF_BLK_SIZE
        || len > MM_MAX_SIZE) {
        return RHINO_MM_POOL_SIZE_ERR;
    }

    pmmhead = (k_mm_head *)addr;

    /* Zeroing the memory head */
    memset(pmmhead, 0, sizeof(k_mm_head));

#if (RHINO_CONFIG_MM_REGION_MUTEX > 0)
    krhino_mutex_create(&pmmhead->mm_mutex, "mm_mutex");
#else
    krhino_spin_lock_init(&pmmhead->mm_lock);
#endif

    /* the heap proper starts right after the head structure */
    firstblk = init_mm_region((void *)((size_t)addr + MM_ALIGN_UP(sizeof(k_mm_head))),
                              MM_ALIGN_DOWN(len - sizeof(k_mm_head)));

    pmmhead->regioninfo = (k_mm_region_info_t *)firstblk->mbinfo.buffer;

    /* the middle blk created by init_mm_region, still marked used */
    nextblk = MM_GET_NEXT_BLK(firstblk);

    *ppmmhead = pmmhead;

    /*mark it as free and set it to bitmap*/
#if (RHINO_CONFIG_MM_DEBUG > 0u)
    nextblk->dye = MM_DYE_USED;
    nextblk->owner_id = MM_OWNER_ID_SELF;
    nextblk->trace_id = 0;
    nextblk->owner = 0;
#endif

    /* release free blk */
    k_mm_free(pmmhead, nextblk->mbinfo.buffer);

#if (K_MM_STATISTIC > 0)
    /* set the counters to the real post-setup state (k_mm_free above ran
     * against the zeroed head) */
    pmmhead->free_size = MM_GET_BUF_SIZE(nextblk);
    pmmhead->used_size = len - MM_GET_BUF_SIZE(nextblk);
    pmmhead->maxused_size = pmmhead->used_size;
#endif

#if (RHINO_CONFIG_MM_BLK > 0)
    pmmhead->fix_pool = NULL;
    /* carve the fixed-size block pool out of the fresh heap */
    mmblk_pool = k_mm_alloc(pmmhead, RHINO_CONFIG_MM_TLF_BLK_SIZE + MM_ALIGN_UP(sizeof(mblk_pool_t)));
    if (mmblk_pool) {
        stat = krhino_mblk_pool_init(mmblk_pool, "fixed_mm_blk",
                                     (void *)((size_t)mmblk_pool + MM_ALIGN_UP(sizeof(mblk_pool_t))),
                                     RHINO_CONFIG_MM_TLF_BLK_SIZE);
        if (stat == RHINO_SUCCESS) {
            pmmhead->fix_pool = mmblk_pool;
        } else {
            /* pool init failed: return the memory, run without a fix pool */
            k_mm_free(pmmhead, mmblk_pool);
        }
    }
#endif

    return RHINO_SUCCESS;
}
/* Tear down a heap head created by krhino_init_mm_head.
 * Frees no memory (the backing buffer belongs to the caller); it only
 * deletes the region mutex (when configured) and wipes the head struct. */
kstat_t krhino_deinit_mm_head(k_mm_head *mmhead)
{
#if (RHINO_CONFIG_MM_REGION_MUTEX > 0)
    krhino_mutex_del(&mmhead->mm_mutex);
#endif

    memset(mmhead, 0, sizeof(k_mm_head));

    return RHINO_SUCCESS;
}
/* Append an additional raw memory region to an existing heap.
 * The region is aligned, carved by init_mm_region(), linked into the
 * head's region list, and its free space released to the freelists.
 */
kstat_t krhino_add_mm_region(k_mm_head *mmhead, void *addr, size_t len)
{
    void *orig_addr;
    k_mm_region_info_t *region;
    k_mm_list_t *firstblk;
    k_mm_list_t *nextblk;
    cpu_cpsr_t flags_cpsr;

    (void)flags_cpsr;

    NULL_PARA_CHK(mmhead);
    NULL_PARA_CHK(addr);

    orig_addr = addr;
    addr = (void *) MM_ALIGN_UP((size_t)addr);
    /* account for the bytes lost to front alignment */
    len -= (size_t)addr - (size_t)orig_addr;
    len = MM_ALIGN_DOWN(len);

    /* must at least fit region info + three blk headers + one minimal blk */
    if (!len || len < sizeof(k_mm_region_info_t) + MMLIST_HEAD_SIZE * 3 + MM_MIN_SIZE) {
        return RHINO_MM_POOL_SIZE_ERR;
    }

    memset(addr, 0, len);

    MM_CRITICAL_ENTER(mmhead, flags_cpsr);

    firstblk = init_mm_region(addr, len);
    nextblk = MM_GET_NEXT_BLK(firstblk);

    /* Inserting the area in the list of linked areas */
    region = (k_mm_region_info_t *)firstblk->mbinfo.buffer;
    region->next = mmhead->regioninfo;
    mmhead->regioninfo = region;

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    nextblk->dye = MM_DYE_USED;
    nextblk->owner_id = MM_OWNER_ID_SELF;
    nextblk->trace_id = 0;
    nextblk->owner = 0;
#endif

#if (K_MM_STATISTIC > 0)
    /* keep "used_size" not changed.
     * change "used_size" here then k_mm_free will decrease it.
     */
    mmhead->used_size += MM_GET_BLK_SIZE(nextblk);
#endif

    MM_CRITICAL_EXIT(mmhead, flags_cpsr);

    /*mark nextblk as free*/
    k_mm_free(mmhead, nextblk->mbinfo.buffer);

    return RHINO_SUCCESS;
}
/* insert blk to freelist[level], and set freebitmap */
/* Push blk onto the head of freelist[level] for its size class and set the
 * corresponding bit in the free bitmap. Out-of-range sizes are ignored. */
static void k_mm_freelist_insert(k_mm_head *mmhead, k_mm_list_t *blk)
{
    int32_t lvl = size_to_level(MM_GET_BUF_SIZE(blk));
    k_mm_list_t *head;

    if ((lvl < 0) || (lvl >= MM_BIT_LEVEL)) {
        return;
    }

    head = mmhead->freelist[lvl];

    /* link blk in front of the current list head */
    blk->mbinfo.free_ptr.prev = NULL;
    blk->mbinfo.free_ptr.next = head;
    if (head != NULL) {
        head->mbinfo.free_ptr.prev = blk;
    }
    mmhead->freelist[lvl] = blk;

    /* list is certainly non-empty now: set its bitmap bit */
    mmhead->free_bitmap |= (1u << lvl);
}
/* Unlink blk from freelist[level] for its size class; clear the bitmap bit
 * when the list becomes empty. Out-of-range sizes are ignored. */
static void k_mm_freelist_delete(k_mm_head *mmhead, k_mm_list_t *blk)
{
    int32_t lvl = size_to_level(MM_GET_BUF_SIZE(blk));
    k_mm_list_t *prev_free;
    k_mm_list_t *next_free;

    if ((lvl < 0) || (lvl >= MM_BIT_LEVEL)) {
        return;
    }

    prev_free = blk->mbinfo.free_ptr.prev;
    next_free = blk->mbinfo.free_ptr.next;

    /* bridge the neighbours over blk */
    if (next_free != NULL) {
        next_free->mbinfo.free_ptr.prev = prev_free;
    }
    if (prev_free != NULL) {
        prev_free->mbinfo.free_ptr.next = next_free;
    }

    if (mmhead->freelist[lvl] == blk) {
        /* blk was the list head: advance the head */
        mmhead->freelist[lvl] = next_free;
        if (next_free == NULL) {
            /* list is now empty: clear its bitmap bit */
            mmhead->free_bitmap &= ~(1u << lvl);
        }
    }

    blk->mbinfo.free_ptr.prev = NULL;
    blk->mbinfo.free_ptr.next = NULL;
}
/* Return the head of the lowest non-empty freelist strictly above "level",
 * or NULL when no higher level has any free block. Any block found there is
 * guaranteed large enough for a request of class "level". */
static k_mm_list_t *find_up_level(k_mm_head *mmhead, int32_t level)
{
    uint32_t higher_map = mmhead->free_bitmap & (0xfffffffful << (level + 1));
    int32_t lvl = krhino_ctz32(higher_map);

    return (lvl < MM_BIT_LEVEL) ? mmhead->freelist[lvl] : NULL;
}
/* Allocate "size" bytes from heap "mmhead".
 * Small requests are served from the fixed-size pool when available;
 * otherwise the segregated freelists are searched (same level first or
 * higher level first, depending on RHINO_CONFIG_MM_QUICK), the chosen blk
 * is split when the remainder is big enough to stand alone, and a pointer
 * to the payload is returned. Returns NULL on bad args or exhaustion.
 */
void *k_mm_alloc(k_mm_head *mmhead, size_t size)
{
    void *retptr;
    k_mm_list_t *get_b, *new_b, *next_b;
    int32_t level;
    size_t left_size;
    size_t req_size = size;
    cpu_cpsr_t flags_cpsr;

    if (!mmhead) {
        return NULL;
    }

    if (size == 0) {
        return NULL;
    }

    MM_CRITICAL_ENTER(mmhead, flags_cpsr);

#if (RHINO_CONFIG_MM_BLK > 0)
    /* little blk, try to get from mm_pool */
    if (mmhead->fix_pool != NULL && size <= RHINO_CONFIG_MM_BLK_SIZE) {
        retptr = krhino_mblk_alloc_nolock((mblk_pool_t *)mmhead->fix_pool, size);
        if (retptr) {
            MM_CRITICAL_EXIT(mmhead, flags_cpsr);
            return retptr;
        }
        /* pool empty: fall through to the list allocator */
    }
#endif

    retptr = NULL;

    /* round up to alignment and enforce the minimum blk payload */
    size = MM_ALIGN_UP(size);
    size = size < MM_MIN_SIZE ? MM_MIN_SIZE : size;

    if ((level = size_to_level(size)) == -1) {
        /* request larger than any size class */
        goto ALLOCEXIT;
    }

#if (RHINO_CONFIG_MM_QUICK > 0)
    /* quick policy: a higher-level blk always fits, so check there first */
    /* try to find in higher level */
    get_b = find_up_level(mmhead, level);
    if (get_b == NULL) {
        /* try to find in same level */
        get_b = mmhead->freelist[level];
        while (get_b != NULL) {
            if (MM_GET_BUF_SIZE(get_b) >= size) {
                break;
            }
            get_b = get_b->mbinfo.free_ptr.next;
        }

        if (get_b == NULL) {
            /* do not find availalbe freeblk */
            goto ALLOCEXIT;
        }
    }
#else
    /* best-fit-ish policy: scan the request's own level before going up */
    /* try to find in same level */
    get_b = mmhead->freelist[level];
    while (get_b != NULL) {
        if (MM_GET_BUF_SIZE(get_b) >= size) {
            break;
        }
        get_b = get_b->mbinfo.free_ptr.next;
    }

    if (get_b == NULL) {
        /* try to find in higher level */
        get_b = find_up_level(mmhead, level);
        if (get_b == NULL) {
            /* do not find availalbe freeblk */
            goto ALLOCEXIT;
        }
    }
#endif

    k_mm_freelist_delete(mmhead, get_b);

    next_b = MM_GET_NEXT_BLK(get_b);

    /* Should the block be split? */
    if (MM_GET_BUF_SIZE(get_b) >= size + MMLIST_HEAD_SIZE + MM_MIN_SIZE) {
        /* remainder can stand alone: split it off as a new free blk */
        left_size = MM_GET_BUF_SIZE(get_b) - size - MMLIST_HEAD_SIZE;

        get_b->buf_size = size | (get_b->buf_size & MM_PRESTAT_MASK);
        new_b = MM_GET_NEXT_BLK(get_b);

        new_b->prev = get_b;
        new_b->buf_size = left_size | MM_BUFF_FREE | MM_BUFF_PREV_USED;
#if (RHINO_CONFIG_MM_DEBUG > 0u)
        new_b->dye = MM_DYE_FREE;
        new_b->owner_id = 0;
        new_b->trace_id = 0;
        new_b->owner = 0;
#endif
        next_b->prev = new_b;
        k_mm_freelist_insert(mmhead, new_b);
    } else {
        /* whole blk handed out: successor's prev is no longer free */
        next_b->buf_size &= (~MM_BUFF_PREV_FREE);
    }

    get_b->buf_size &= (~MM_BUFF_FREE); /* Now it's used */
#if (RHINO_CONFIG_MM_DEBUG > 0u)
    get_b->dye = MM_DYE_USED;
    get_b->owner_id = (uint8_t)debug_task_id_now();
    get_b->trace_id = g_mmlk_cnt;
    get_b->owner = 0;
#endif
    retptr = (void *)get_b->mbinfo.buffer;
    if (retptr != NULL) {
        stats_addsize(mmhead, MM_GET_BLK_SIZE(get_b), req_size);
    }

ALLOCEXIT:
    MM_CRITICAL_EXIT(mmhead, flags_cpsr);

    return retptr ;
}
/* Return "ptr" (obtained from k_mm_alloc/k_mm_realloc) to heap "mmhead".
 * Fixed-pool blocks go back to the pool; list blocks are coalesced with
 * free neighbours and re-inserted into the freelists. In debug builds the
 * dye fields are validated to catch double frees and buffer overruns.
 * NULL ptr / NULL head are silently ignored.
 */
void k_mm_free(k_mm_head *mmhead, void *ptr)
{
    k_mm_list_t *free_b, *next_b, *prev_b;
    cpu_cpsr_t flags_cpsr;

    (void)flags_cpsr;

    if (!ptr || !mmhead) {
        return;
    }

    MM_CRITICAL_ENTER(mmhead, flags_cpsr);

#if (RHINO_CONFIG_MM_BLK > 0)
    /* fixed-pool blocks bypass the list allocator entirely */
    if (krhino_mblk_check(mmhead->fix_pool, ptr)) {
        (void)krhino_mblk_free_nolock((mblk_pool_t *)mmhead->fix_pool, ptr);
        MM_CRITICAL_EXIT(mmhead, flags_cpsr);
        return;
    }
#endif

    free_b = MM_GET_THIS_BLK(ptr);

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    /* dye already FREE: this pointer was freed before */
    if (free_b->dye == MM_DYE_FREE) {
        /* step 1 : do not call mm_critical_exit to stop malloc by other core */
        //MM_CRITICAL_EXIT(mmhead, flags_cpsr);
        /* step 2 : freeze other core */
        debug_cpu_stop();
        /* step 3 :printk(do not use printf, maybe malloc) log */
        printk("WARNING, memory maybe double free!! 0x%x\r\n", (unsigned int)free_b);
        /* setp 4 :dumpsys memory and then go to fatal error */
        kmm_error(KMM_ERROR_LOCKED);
    }
    /* dye neither FREE nor USED: header was overwritten */
    if (free_b->dye != MM_DYE_USED) {
        //MM_CRITICAL_EXIT(mmhead, flags_cpsr);
        debug_cpu_stop();
        printk("WARNING, memory maybe corrupt!! 0x%x\r\n", (unsigned int)free_b);
        kmm_error(KMM_ERROR_LOCKED);
    }
    free_b->dye = MM_DYE_FREE;
    free_b->owner_id = 0;
    free_b->trace_id = 0;
    free_b->owner = 0;
#endif
    free_b->buf_size |= MM_BUFF_FREE;
    stats_removesize(mmhead, MM_GET_BLK_SIZE(free_b));

    /* if the blk after this freed one is freed too, merge them */
    next_b = MM_GET_NEXT_BLK(free_b);

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    /* validate the next blk's dye (and the one after, unless next is the
     * region sentinel tagged MM_LAST_BLK_MAGIC) to catch overruns early */
    if (next_b->dye != MM_DYE_FREE && next_b->dye != MM_DYE_USED) {
        //MM_CRITICAL_EXIT(mmhead, flags_cpsr);
        debug_cpu_stop();
        printk("WARNING, memory overwritten!! 0x%x 0x%x\r\n", (unsigned int)free_b, (unsigned int)next_b);
        kmm_error(KMM_ERROR_LOCKED);
    } else if (MM_LAST_BLK_MAGIC != next_b->owner) {
        k_mm_list_t *nnext_b = MM_GET_NEXT_BLK(next_b);
        if (nnext_b->dye != MM_DYE_FREE && nnext_b->dye != MM_DYE_USED) {
            debug_cpu_stop();
            printk("WARNING, nnext memory overwritten!! 0x%x 0x%x 0x%x\r\n", (unsigned int)free_b, (unsigned int)next_b,
                   (unsigned int)nnext_b);
            kmm_error(KMM_ERROR_LOCKED);
        }
    }
#endif

    if (next_b->buf_size & MM_BUFF_FREE) {
        k_mm_freelist_delete(mmhead, next_b);
        free_b->buf_size += MM_GET_BLK_SIZE(next_b);
    }

    /* if the blk before this freed one is freed too, merge them */
    if (free_b->buf_size & MM_BUFF_PREV_FREE) {
        prev_b = free_b->prev;
        k_mm_freelist_delete(mmhead, prev_b);
        prev_b->buf_size += MM_GET_BLK_SIZE(free_b);
        free_b = prev_b;
    }

    /* after merge, free to list */
    k_mm_freelist_insert(mmhead, free_b);

    /* fix up the successor's back-link and PREV_FREE flag */
    next_b = MM_GET_NEXT_BLK(free_b);
    next_b->prev = free_b;
    next_b->buf_size |= MM_BUFF_PREV_FREE;

    MM_CRITICAL_EXIT(mmhead, flags_cpsr);
}
/* Resize "oldmem" to "new_size" bytes on heap "mmhead".
 * Follows C realloc semantics: NULL oldmem acts as alloc, new_size == 0
 * acts as free. Shrinks in place (splitting off the surplus); grows in
 * place when the next blk is free and large enough; otherwise falls back
 * to alloc + copy + free. Returns NULL when a grow fails — the old block
 * is left intact in that case.
 */
void *k_mm_realloc(k_mm_head *mmhead, void *oldmem, size_t new_size)
{
    void *ptr_aux = NULL;
    uint32_t cpsize;
    k_mm_list_t *this_b, *split_b, *next_b;
    size_t old_size, split_size;
    size_t req_size = 0;
    cpu_cpsr_t flags_cpsr;

    (void)flags_cpsr;
    (void)req_size;

    /* realloc(NULL, n) == alloc(n); realloc(p, 0) == free(p) */
    if (oldmem == NULL) {
        if (new_size > 0) {
            return (void *)k_mm_alloc(mmhead, new_size);
        } else {
            return NULL;
        }
    } else if (new_size == 0) {
        k_mm_free(mmhead, oldmem);
        return NULL;
    }

    req_size = new_size;

#if (RHINO_CONFIG_MM_BLK > 0)
    /* fixed-pool block: always relocate into a fresh allocation */
    if (krhino_mblk_check(mmhead->fix_pool, oldmem)) {
        ptr_aux = k_mm_alloc(mmhead, new_size);
        if (ptr_aux) {
            int cp_len = krhino_mblk_get_size(mmhead->fix_pool, oldmem);

            /* copy no more than the old block actually held */
            cp_len = cp_len > new_size ? new_size : cp_len;
            memcpy(ptr_aux, oldmem, cp_len);

            MM_CRITICAL_ENTER(mmhead, flags_cpsr);
            (void)krhino_mblk_free_nolock((mblk_pool_t *)mmhead->fix_pool, oldmem);
            MM_CRITICAL_EXIT(mmhead, flags_cpsr);
        }
        return ptr_aux;
    }
#endif

    MM_CRITICAL_ENTER(mmhead, flags_cpsr);

    this_b = MM_GET_THIS_BLK(oldmem);
    old_size = MM_GET_BUF_SIZE(this_b);
    next_b = MM_GET_NEXT_BLK(this_b);
    new_size = MM_ALIGN_UP(new_size);
    new_size = new_size < MM_MIN_SIZE ? MM_MIN_SIZE : new_size;

    if (new_size <= old_size) {
        /* shrink blk */
        stats_removesize(mmhead, MM_GET_BLK_SIZE(this_b));
        if (next_b->buf_size & MM_BUFF_FREE) {
            /* merge next free */
            k_mm_freelist_delete(mmhead, next_b);
            old_size += MM_GET_BLK_SIZE(next_b);
            next_b = MM_GET_NEXT_BLK(next_b);
        }
        if (old_size >= new_size + MMLIST_HEAD_SIZE + MM_MIN_SIZE) {
            /* split blk */
            split_size = old_size - new_size - MMLIST_HEAD_SIZE;

            this_b->buf_size = new_size | (this_b->buf_size & MM_PRESTAT_MASK);

            split_b = MM_GET_NEXT_BLK(this_b);

            split_b->prev = this_b;
            split_b->buf_size = split_size | MM_BUFF_FREE | MM_BUFF_PREV_USED;

#if (RHINO_CONFIG_MM_DEBUG > 0u)
            split_b->dye = MM_DYE_FREE;
            split_b->owner_id = 0;
            split_b->trace_id = 0;
            split_b->owner = 0;
#endif
            next_b->prev = split_b;
            next_b->buf_size |= MM_BUFF_PREV_FREE;
            k_mm_freelist_insert(mmhead, split_b);
        }
        stats_addsize(mmhead, MM_GET_BLK_SIZE(this_b), req_size);
        ptr_aux = (void *)this_b->mbinfo.buffer;
    } else if ((next_b->buf_size & MM_BUFF_FREE)) {
        /* enlarge blk */
        if (new_size <= (old_size + MM_GET_BLK_SIZE(next_b))) {
            stats_removesize(mmhead, MM_GET_BLK_SIZE(this_b));

            /* delete next blk from freelist */
            k_mm_freelist_delete(mmhead, next_b);

            /* enlarge this blk */
            this_b->buf_size += MM_GET_BLK_SIZE(next_b);

            next_b = MM_GET_NEXT_BLK(this_b);
            next_b->prev = this_b;
            next_b->buf_size &= ~MM_BUFF_PREV_FREE;

            if (MM_GET_BUF_SIZE(this_b) >= new_size + MMLIST_HEAD_SIZE + MM_MIN_SIZE) {
                /* split blk */
                split_size = MM_GET_BUF_SIZE(this_b) - new_size - MMLIST_HEAD_SIZE;

                this_b->buf_size = new_size | (this_b->buf_size & MM_PRESTAT_MASK);

                split_b = MM_GET_NEXT_BLK(this_b);

                split_b->prev = this_b;
                split_b->buf_size = split_size | MM_BUFF_FREE | MM_BUFF_PREV_USED;

#if (RHINO_CONFIG_MM_DEBUG > 0u)
                split_b->dye = MM_DYE_FREE;
                split_b->owner_id = 0;
                split_b->trace_id = 0;
                split_b->owner = 0;
#endif
                next_b->prev = split_b;
                next_b->buf_size |= MM_BUFF_PREV_FREE;
                k_mm_freelist_insert(mmhead, split_b);
            }
            stats_addsize(mmhead, MM_GET_BLK_SIZE(this_b), req_size);
            ptr_aux = (void *)this_b->mbinfo.buffer;
        }
    }

    if (ptr_aux) {
        /* resized in place */
#if (RHINO_CONFIG_MM_DEBUG > 0u)
        this_b->dye = MM_DYE_USED;
        this_b->owner_id = (uint8_t)debug_task_id_now();
        this_b->trace_id = g_mmlk_cnt;
#endif
        MM_CRITICAL_EXIT(mmhead, flags_cpsr);
        return ptr_aux;
    }

    MM_CRITICAL_EXIT(mmhead, flags_cpsr);

    /* re alloc blk */
    ptr_aux = k_mm_alloc(mmhead, new_size);
    if (!ptr_aux) {
        return NULL;
    }

    /* copy no more than the old payload held */
    cpsize = (MM_GET_BUF_SIZE(this_b) > new_size) ? new_size : MM_GET_BUF_SIZE(this_b);

    memcpy(ptr_aux, oldmem, cpsize);
    k_mm_free(mmhead, oldmem);

    return ptr_aux;
}
#if (RHINO_CONFIG_MM_DEBUG > 0u)
/* Tag the heap block containing "addr" with "allocator" (an allocation-site
 * identifier — presumably the caller's return address, given the
 * krhino_owner_return_addr callers; confirm) so leaks and corruption can be
 * attributed. Fixed-pool blocks carry no debug header and are skipped. */
void krhino_owner_attach(void *addr, size_t allocator)
{
    k_mm_list_t *blk;
    char *PC;
    int *SP;

    /* NOTE(review): SP/PC are captured but never used below — presumably
     * kept so they are visible under a debugger; confirm before removing. */
    __asm__ volatile("mov %0, sp\n" : "=r"(SP));
    __asm__ volatile("mov %0, pc\n" : "=r"(PC));

    if (NULL == addr) {
        return;
    }

#if (RHINO_CONFIG_MM_BLK > 0)
    /* fix blk, do not support debug info */
    if (krhino_mblk_check(g_kmm_head->fix_pool, addr)) {
        return;
    }
#endif

    blk = MM_GET_THIS_BLK(addr);

#if (RHINO_CONFIG_MM_TRACE_LVL > 0)
    /* record a call backtrace only while the scheduler is running and
     * backtracing is currently allowed */
    if ((g_sys_stat == RHINO_RUNNING) &&
        (kmm_bt_check() == 0)) {
        backtrace_now_get((void **) blk->trace, RHINO_CONFIG_MM_TRACE_LVL, 2);
    } else {
        memset(blk->trace, 0, sizeof(blk->trace));
    }
#endif

    blk->owner = allocator;
}
#endif
/* Allocate "size" bytes from the global kernel heap (g_kmm_head).
 * In debug builds the MSB of "size" (AOS_UNSIGNED_INT_MSB) is a marker
 * that is stripped before allocating; when it is clear, the block owner
 * is recorded via krhino_owner_return_addr. On failure the heap state is
 * dumped once. Returns NULL on size == 0 or exhaustion.
 */
void *krhino_mm_alloc(size_t size)
{
    void *tmp;

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    uint32_t app_malloc = size & AOS_UNSIGNED_INT_MSB;
    size = size & (~AOS_UNSIGNED_INT_MSB);
#endif

    if (size == 0) {
        printf("WARNING, malloc size = 0\r\n");
        return NULL;
    }

    tmp = k_mm_alloc(g_kmm_head, size);
    if (tmp == NULL) {
#if (RHINO_CONFIG_MM_DEBUG > 0)
        static int32_t dumped;
        int32_t freesize;

        freesize = g_kmm_head->free_size;
        /* explicit casts + %lu/%ld: passing size_t/int32_t through "%d" is
         * undefined behavior on targets where they are not plain int */
        printf("WARNING, malloc failed!!!! need size:%lu, but free size:%ld\r\n",
               (unsigned long)size, (long)freesize);
        if (dumped) {
            /* dump the heap only once per boot */
            return tmp;
        }
        dumped = 1;
        /* freeze other cores, then dump heap state and trap */
        debug_cpu_stop();
        kmm_error(KMM_ERROR_UNLOCKED);
#endif
    }

#if (RHINO_CONFIG_USER_HOOK > 0)
    krhino_mm_alloc_hook(tmp, size);
#endif

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    if (app_malloc == 0) {
        /* kernel-side alloc: record the allocating call site as owner */
        krhino_owner_return_addr(tmp);
    }
#endif

    return tmp;
}
/* Free "ptr" back to the global kernel heap (g_kmm_head). NULL is a no-op
 * (k_mm_free ignores NULL pointers). */
void krhino_mm_free(void *ptr)
{
    k_mm_free(g_kmm_head, ptr);
}
/* Resize "oldmem" on the global kernel heap (g_kmm_head).
 * In debug builds the MSB of "newsize" (AOS_UNSIGNED_INT_MSB) is a marker
 * stripped before the real request; when it is clear, block ownership is
 * recorded. Follows realloc semantics via k_mm_realloc; dumps heap state
 * once when an actual resize request (newsize != 0) fails.
 */
void *krhino_mm_realloc(void *oldmem, size_t newsize)
{
    void *tmp;

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    uint32_t app_malloc = newsize & AOS_UNSIGNED_INT_MSB;
    newsize = newsize & (~AOS_UNSIGNED_INT_MSB);
#endif

    tmp = k_mm_realloc(g_kmm_head, oldmem, newsize);

#if (RHINO_CONFIG_MM_DEBUG > 0u)
    if (app_malloc == 0) {
        krhino_owner_return_addr(tmp);
    }
#endif

    /* NULL with newsize == 0 is a successful free, not a failure */
    if (tmp == NULL && newsize != 0) {
#if (RHINO_CONFIG_MM_DEBUG > 0)
        static int32_t reallocdumped;

        /* explicit cast + %lu: passing size_t through "%d" is undefined
         * behavior on targets where size_t is not plain int */
        printf("WARNING, realloc failed!!!! newsize : %lu\r\n", (unsigned long)newsize);
        if (reallocdumped) {
            /* dump the heap only once per boot */
            return tmp;
        }
        reallocdumped = 1;
        /* freeze other cores, then dump heap state and trap */
        debug_cpu_stop();
        kmm_error(KMM_ERROR_UNLOCKED);
#endif
    }

    return tmp;
}
/* Report the largest free buffer currently available on the global heap by
 * scanning the highest populated freelist level (which, by construction of
 * the size classes, holds the biggest blocks). */
size_t krhino_mm_max_free_size_get(void)
{
    k_mm_list_t *cur;
    int32_t lead_zeros;
    size_t best = 0;

    /* Deliberately lock-free: this interface is only meant to be called
     * from exception handling, where taking the heap lock could hang. */
    lead_zeros = krhino_clz32(g_kmm_head->free_bitmap);
    if (lead_zeros > 31) {
        /* bitmap empty: no free block at all */
        return 0;
    }

    /* bit (31 - lead_zeros) is the top set bit, i.e. the highest level */
    for (cur = g_kmm_head->freelist[31 - lead_zeros]; cur != NULL;
         cur = cur->mbinfo.free_ptr.next) {
        if (MM_GET_BUF_SIZE(cur) > best) {
            best = MM_GET_BUF_SIZE(cur);
        }
    }

    return best;
}
#endif
| 12,587 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.