max_stars_count (int64, 301-224k) | text (string, lengths 6-1.05M) | token_count (int64, 3-727k) |
---|---|---|
435 | {
"copyright_text": "Standard YouTube License",
"description": "PyData SF 2016\n\nPython is a great tool for performing data analysis, but often the hardest part is getting access to your data that\u2019s located in a variety of business systems - files, databases, and SaaS applications. Productionizing this process is even harder: scripts frequently fail and require precious time to fix and re-test. In this talk, I will review some open source tools I authored and show you how\n\nIn this talk we will cover: \n\n- How we created a data collection tool that can read any chaotically formatted files called \"CSV\" by guessing its structure automatically \n- Explore the plugin-based architecture that makes it easy to load data from external sources and publish to production systems. From files to business systems such as Salesforce & Mixpanel. \n- Review current plugins (over 100 released by the OSS community) and use cases \n- Explain how distributed execution enhances stability and scalability",
"duration": 2530,
"language": "eng",
"recorded": "2016-08-24",
"related_urls": [],
"speakers": [
"<NAME>"
],
"tags": [
"embulk"
],
"thumbnail_url": "https://i.ytimg.com/vi/ocuez7Mikh4/maxresdefault.jpg",
"title": "Fighting Against Chaotically Separated Values with Embulk",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=ocuez7Mikh4"
}
]
}
| 414 |
852 | <reponame>malbouis/cmssw
/*
* ProcessorBase.h
*
* Created on: Jul 28, 2017
* Author: kbunkow
*/
#ifndef L1T_OmtfP1_PROCESSORBASE_H_
#define L1T_OmtfP1_PROCESSORBASE_H_
#include "L1Trigger/L1TMuonOverlapPhase1/interface/Omtf/GoldenPatternBase.h"
#include "L1Trigger/L1TMuonOverlapPhase1/interface/Omtf/OMTFConfiguration.h"
#include "L1Trigger/L1TMuonOverlapPhase1/interface/Omtf/OMTFinput.h"
#include <memory>
class L1TMuonOverlapParams;
class SimTrack;
template <class GoldenPatternType>
class ProcessorBase {
public:
ProcessorBase(OMTFConfiguration* omtfConfig, const L1TMuonOverlapParams* omtfPatterns) : myOmtfConfig(omtfConfig) {
configure(omtfConfig, omtfPatterns);
};
///omtfConfig is not const, because the omtfConfig->setPatternPtRange is called inside the constructor
///takes the ownership of the gps (uses move)
ProcessorBase(OMTFConfiguration* omtfConfig, GoldenPatternVec<GoldenPatternType>&& gps);
virtual ~ProcessorBase() {}
///Return vector of GoldenPatterns
virtual GoldenPatternVec<GoldenPatternType>& getPatterns() { return theGPs; };
///Fill GP vec with patterns from CondFormats object
virtual bool configure(OMTFConfiguration* omtfParams, const L1TMuonOverlapParams* omtfPatterns);
///Add GoldenPattern to pattern vec.
virtual void addGP(GoldenPatternType* aGP);
///Reset all configuration parameters
virtual void resetConfiguration();
virtual void initPatternPtRange(bool firstPatFrom0);
const std::vector<OMTFConfiguration::PatternPt>& getPatternPtRange() const { return patternPts; }
virtual void printInfo() const;
protected:
///Configuration of the algorithm. This object
///does not contain the patterns data.
const OMTFConfiguration* myOmtfConfig;
///vector holding Golden Patterns
GoldenPatternVec<GoldenPatternType> theGPs;
  ///Remove hits which are outside the input range
  ///for the given processor and cone
virtual MuonStubPtrs1D restrictInput(unsigned int iProcessor,
unsigned int iCone,
unsigned int iLayer,
const OMTFinput& input);
std::vector<OMTFConfiguration::PatternPt> patternPts;
};
#endif /* L1T_OmtfP1_PROCESSORBASE_H_ */
| 818 |
1,144 | package de.metas.product.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import de.metas.acct.api.AcctSchema;
import de.metas.acct.api.IAcctSchemaDAO;
import de.metas.costing.CostingLevel;
import de.metas.costing.IProductCostingBL;
import de.metas.i18n.ITranslatableString;
import de.metas.i18n.TranslatableStrings;
import de.metas.logging.LogManager;
import de.metas.organization.OrgId;
import de.metas.product.IProductBL;
import de.metas.product.IProductDAO;
import de.metas.product.IProductDAO.ProductQuery;
import de.metas.product.ProductCategoryId;
import de.metas.product.ProductId;
import de.metas.product.ProductType;
import de.metas.uom.IUOMConversionBL;
import de.metas.uom.IUOMConversionDAO;
import de.metas.uom.IUOMDAO;
import de.metas.uom.UOMConversionContext;
import de.metas.uom.UOMPrecision;
import de.metas.uom.UomId;
import de.metas.uom.X12DE355;
import de.metas.util.Check;
import de.metas.util.Services;
import lombok.NonNull;
import org.adempiere.ad.trx.api.ITrx;
import org.adempiere.mm.attributes.AttributeSetId;
import org.adempiere.mm.attributes.api.IAttributeDAO;
import org.adempiere.model.InterfaceWrapperHelper;
import org.adempiere.service.ClientId;
import org.adempiere.service.IClientDAO;
import org.compiere.model.I_C_UOM;
import org.compiere.model.I_M_AttributeSet;
import org.compiere.model.I_M_AttributeSetInstance;
import org.compiere.model.I_M_Product;
import org.compiere.model.I_M_Product_Category;
import org.compiere.model.MAttributeSet;
import org.compiere.model.X_C_UOM;
import org.compiere.util.Env;
import org.slf4j.Logger;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import static de.metas.util.Check.assumeNotNull;
import static org.adempiere.model.InterfaceWrapperHelper.loadOutOfTrx;
public final class ProductBL implements IProductBL
{
private static final Logger logger = LogManager.getLogger(ProductBL.class);
private final IProductDAO productsRepo = Services.get(IProductDAO.class);
private final IUOMDAO uomsRepo = Services.get(IUOMDAO.class);
private final IClientDAO clientDAO = Services.get(IClientDAO.class);
private final IAttributeDAO attributesRepo = Services.get(IAttributeDAO.class);
private final IAcctSchemaDAO acctSchemasRepo = Services.get(IAcctSchemaDAO.class);
private final IProductCostingBL productCostingBL = Services.get(IProductCostingBL.class);
@Override
public I_M_Product getById(@NonNull final ProductId productId)
{
return productsRepo.getById(productId);
}
@Override
public I_M_Product getByIdInTrx(@NonNull final ProductId productId)
{
return productsRepo.getByIdInTrx(productId);
}
@Override
public ProductId getProductIdByValue(
@NonNull final OrgId orgId,
@NonNull final String productValue)
{
final ProductQuery query = ProductQuery.builder()
.orgId(orgId)
.value(productValue).build();
return productsRepo.retrieveProductIdBy(query);
}
@Override
public UOMPrecision getUOMPrecision(final I_M_Product product)
{
final UomId uomId = UomId.ofRepoId(product.getC_UOM_ID());
return uomsRepo.getStandardPrecision(uomId);
}
@Override
public UOMPrecision getUOMPrecision(@NonNull final ProductId productId)
{
final I_M_Product product = getById(productId);
return getUOMPrecision(product);
}
@Override
public String getMMPolicy(final I_M_Product product)
{
final ProductCategoryId productCategoryId = ProductCategoryId.ofRepoId(product.getM_Product_Category_ID());
final I_M_Product_Category pc = productsRepo.getProductCategoryById(productCategoryId);
String policy = pc.getMMPolicy();
if (policy == null || policy.isEmpty())
{
policy = clientDAO.retriveClient(Env.getCtx()).getMMPolicy();
}
return policy;
}
@Override
public String getMMPolicy(final int productId)
{
Check.assume(productId > 0, "productId > 0");
final I_M_Product product = loadOutOfTrx(productId, I_M_Product.class);
return getMMPolicy(product);
}
@Override
public I_C_UOM getStockUOM(@NonNull final I_M_Product product)
{
return uomsRepo.getById(product.getC_UOM_ID());
}
@Override
public I_C_UOM getStockUOM(final int productId)
{
// we don't know if the product of productId was already committed, so we can't load it out-of-trx
final I_M_Product product = InterfaceWrapperHelper.load(productId, I_M_Product.class);
Check.assumeNotNull(product, "Unable to load M_Product record for M_Product_ID={}", productId);
return Check.assumeNotNull(getStockUOM(product), "The uom for productId={} may not be null", productId);
}
/**
	 * @return UOM used for Product's Weight; never returns null
*/
public I_C_UOM getWeightUOM(final I_M_Product product)
{
// FIXME: we hardcoded the UOM for M_Product.Weight to Kilogram
return uomsRepo.getByX12DE355(X12DE355.KILOGRAM);
}
@Override
public BigDecimal getWeight(
@NonNull final I_M_Product product,
@NonNull final I_C_UOM uomTo)
{
final BigDecimal weightPerStockingUOM = product.getWeight();
if (weightPerStockingUOM.signum() == 0)
{
return BigDecimal.ZERO;
}
final I_C_UOM stockingUom = getStockUOM(product);
//
// Calculate the rate to convert from stocking UOM to "uomTo"
final UOMConversionContext uomConversionCtx = UOMConversionContext.of(product.getM_Product_ID());
final IUOMConversionBL uomConversionBL = Services.get(IUOMConversionBL.class); // don't extract it to field because IUOMConversionBL already has IProductBL as a field
final BigDecimal stocking2uomToRate = uomConversionBL.convertQty(uomConversionCtx, BigDecimal.ONE, stockingUom, uomTo);
//
// Calculate the Weight for one "uomTo"
final int weightPerUomToPrecision = getWeightUOM(product).getStdPrecision();
final BigDecimal weightPerUomTo = weightPerStockingUOM
.multiply(stocking2uomToRate)
.setScale(weightPerUomToPrecision, RoundingMode.HALF_UP);
return weightPerUomTo;
}
@Override
public boolean isService(final I_M_Product product)
{
// i.e. PRODUCTTYPE_Service, PRODUCTTYPE_Resource, PRODUCTTYPE_Online
return ProductType.ofCode(product.getProductType()).isService();
}
@Override
public boolean isStocked(@NonNull final I_M_Product product)
{
if (!product.isStocked())
{
logger.debug("isStocked - M_Product_ID={} has isStocked=false; -> return false", product.getM_Product_ID());
return false;
}
final ProductType productType = ProductType.ofCode(product.getProductType());
final boolean result = productType.isItem();
		logger.debug("isStocked - M_Product_ID={} has isStocked=true and type={}; -> return {}", product.getM_Product_ID(), productType, result);
return result;
}
@Override
public boolean isStocked(@Nullable final ProductId productId)
{
if (productId == null)
{
logger.debug("isStocked - productId=null; -> return false");
return false;
}
// NOTE: we rely on table cache config
final I_M_Product product = getById(productId);
return isStocked(product);
}
@Override
public boolean isDiverse(@NonNull final ProductId productId)
{
return productsRepo
.getById(productId, de.metas.adempiere.model.I_M_Product.class)
.isDiverse();
}
@Override
public AttributeSetId getAttributeSetId(final I_M_Product product)
{
final ProductCategoryId productCategoryId = ProductCategoryId.ofRepoIdOrNull(product.getM_Product_Category_ID());
if (productCategoryId == null) // guard against NPE which might happen in unit tests
{
return AttributeSetId.NONE;
}
final I_M_Product_Category productCategoryRecord = productsRepo.getProductCategoryById(productCategoryId);
final int attributeSetId = productCategoryRecord.getM_AttributeSet_ID();
return attributeSetId > 0 ? AttributeSetId.ofRepoId(attributeSetId) : AttributeSetId.NONE;
}
@Override
public AttributeSetId getAttributeSetId(@NonNull final ProductId productId)
{
final I_M_Product product = getById(productId);
return getAttributeSetId(product);
}
@Override
@Nullable
public I_M_AttributeSet getAttributeSetOrNull(@NonNull final ProductId productId)
{
final AttributeSetId attributeSetId = getAttributeSetId(productId);
if (attributeSetId.isNone())
{
return null;
}
return attributesRepo.getAttributeSetById(attributeSetId);
}
@Nullable
@Override
public I_M_AttributeSetInstance getCreateASI(
final Properties ctx,
final int M_AttributeSetInstance_ID,
final int M_Product_ID)
{
// Load Instance if not 0
if (M_AttributeSetInstance_ID > 0)
{
logger.debug("From M_AttributeSetInstance_ID={}", M_AttributeSetInstance_ID);
return InterfaceWrapperHelper.create(ctx, M_AttributeSetInstance_ID, I_M_AttributeSetInstance.class, ITrx.TRXNAME_None);
}
// Get new from Product
logger.debug("From M_Product_ID={}", M_Product_ID);
if (M_Product_ID <= 0)
{
return null;
}
final AttributeSetId attributeSetId = getAttributeSetId(ProductId.ofRepoId(M_Product_ID));
final I_M_AttributeSetInstance asi = InterfaceWrapperHelper.create(ctx, I_M_AttributeSetInstance.class, ITrx.TRXNAME_None);
asi.setM_AttributeSet_ID(attributeSetId.getRepoId());
return asi;
} // get
@Override
public boolean isTradingProduct(final I_M_Product product)
{
Check.assumeNotNull(product, "product not null");
return product.isPurchased()
&& product.isSold();
}
@Override
public boolean isASIMandatory(
@NonNull final I_M_Product product,
final boolean isSOTrx)
{
final ClientId adClientId = ClientId.ofRepoId(product.getAD_Client_ID());
final OrgId adOrgId = OrgId.ofRepoId(product.getAD_Org_ID());
//
// If CostingLevel is BatchLot ASI is always mandatory - check all client acct schemas
for (final AcctSchema as : acctSchemasRepo.getAllByClient(adClientId))
{
if (as.isDisallowPostingForOrg(adOrgId))
{
continue;
}
final CostingLevel costingLevel = productCostingBL.getCostingLevel(product, as);
if (CostingLevel.BatchLot == costingLevel)
{
return true;
}
}
//
// Check Attribute Set settings
final AttributeSetId attributeSetId = getAttributeSetId(product);
if (!attributeSetId.isNone())
{
final MAttributeSet mas = MAttributeSet.get(attributeSetId);
if (mas == null || !mas.isInstanceAttribute())
{
return false;
}
// Outgoing transaction
else if (isSOTrx)
{
return mas.isMandatory();
}
// Incoming transaction
else
{
// isSOTrx == false
return mas.isMandatoryAlways();
}
}
//
// Default not mandatory
return false;
}
@Override
public boolean isASIMandatory(
@NonNull final ProductId productId,
final boolean isSOTrx)
{
final I_M_Product product = getById(productId);
return isASIMandatory(product, isSOTrx);
}
@Override
public boolean isInstanceAttribute(@NonNull final ProductId productId)
{
final I_M_AttributeSet mas = getAttributeSetOrNull(productId);
return mas != null && mas.isInstanceAttribute();
}
@Override
public boolean isProductInCategory(
final ProductId productId,
final ProductCategoryId expectedProductCategoryId)
{
if (productId == null || expectedProductCategoryId == null)
{
return false;
}
final ProductCategoryId productCategoryId = productsRepo.retrieveProductCategoryByProductId(productId);
return Objects.equals(productCategoryId, expectedProductCategoryId);
}
@Override
public String getProductValueAndName(@Nullable final ProductId productId)
{
if (productId == null)
{
return "-";
}
try
{
final I_M_Product product = getById(productId);
if (product == null)
{
return "<" + productId + ">";
}
return product.getValue() + "_" + product.getName();
}
catch (final Exception ex)
{
logger.warn("No product found for {}. Returning `<{}>`.", productId, productId, ex);
return "<" + productId + ">";
}
}
@Override
public String getProductValue(@NonNull final ProductId productId)
{
final I_M_Product product = getById(productId);
if (product == null)
{
return "<" + productId + ">";
}
return product.getValue();
}
@Override
public ImmutableMap<ProductId, String> getProductValues(@NonNull final Set<ProductId> productIds)
{
if (productIds.isEmpty())
{
return ImmutableMap.of();
}
return productsRepo.getByIds(productIds)
.stream()
.collect(ImmutableMap.toImmutableMap(
product -> ProductId.ofRepoId(product.getM_Product_ID()),
product -> product.getValue()));
}
@Override
public String getProductName(@NonNull final ProductId productId)
{
final I_M_Product product = getById(productId);
if (product == null)
{
return "<" + productId + ">";
}
return product.getName();
}
@Override
public Optional<UomId> getCatchUOMId(@NonNull final ProductId productId)
{
final IUOMConversionDAO uomConversionsRepo = Services.get(IUOMConversionDAO.class);
final ImmutableSet<UomId> catchUomIds = uomConversionsRepo.getProductConversions(productId)
.getCatchUomIds();
final List<I_C_UOM> catchUOMs = uomsRepo.getByIds(catchUomIds);
final ImmutableList<UomId> catchWeightUomIds = catchUOMs.stream()
.filter(uom -> uom.isActive())
.filter(uom -> X_C_UOM.UOMTYPE_Weigth.equals(uom.getUOMType()))
.map(uom -> UomId.ofRepoId(uom.getC_UOM_ID()))
.sorted()
.collect(ImmutableList.toImmutableList());
if (catchWeightUomIds.isEmpty())
{
return Optional.empty();
}
else
{
return Optional.of(catchWeightUomIds.get(0));
}
}
@Override
public ProductType getProductType(@NonNull final ProductId productId)
{
final I_M_Product product = assumeNotNull(getById(productId), "M_Product record with M_Product_ID={} needs to exist", productId.getRepoId());
return ProductType.ofCode(product.getProductType());
}
@Override
public ProductCategoryId getDefaultProductCategoryId()
{
return productsRepo.getDefaultProductCategoryId();
}
@Override
public ITranslatableString getProductNameTrl(@NonNull final ProductId productId)
{
final I_M_Product product = getById(productId);
if (product == null)
{
return TranslatableStrings.anyLanguage("<" + productId + ">");
}
return InterfaceWrapperHelper.getModelTranslationMap(product)
.getColumnTrl(I_M_Product.COLUMNNAME_Name, product.getName());
}
@Override
public ProductId retrieveMappedProductIdOrNull(final ProductId productId, final OrgId orgId)
{
return productsRepo.retrieveMappedProductIdOrNull(productId, orgId);
}
@Override
public boolean isHaddexProduct(final ProductId productId)
{
final org.compiere.model.I_M_Product product = getById(productId);
return product.isHaddexCheck();
}
@Nullable
@Override
public I_M_AttributeSet getProductMasterDataSchemaOrNull(final ProductId productId)
{
final I_M_Product product = productsRepo.getById(productId);
final int attributeSetRepoId = product.getM_AttributeSet_ID();
final AttributeSetId attributeSetId = AttributeSetId.ofRepoIdOrNone(attributeSetRepoId);
if (attributeSetId.isNone())
{
return null;
}
return attributesRepo.getAttributeSetById(attributeSetId);
}
@Override
public ImmutableList<String> retrieveSupplierApprovalNorms(@NonNull final ProductId productId)
{
final I_M_Product product = productsRepo.getById(productId);
if(!product.isRequiresSupplierApproval())
{
return ImmutableList.of();
}
return productsRepo.retrieveSupplierApprovalNorms(productId);
}
}
| 5,672 |
56,632 | <gh_stars>1000+
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "precomp.hpp"
#include "opencv2/dnn.hpp"
#include <algorithm>
namespace cv
{
class FaceRecognizerSFImpl : public FaceRecognizerSF
{
public:
FaceRecognizerSFImpl(const String& model, const String& config, int backend_id, int target_id)
{
net = dnn::readNet(model, config);
CV_Assert(!net.empty());
net.setPreferableBackend(backend_id);
net.setPreferableTarget(target_id);
};
void alignCrop(InputArray _src_img, InputArray _face_mat, OutputArray _aligned_img) const override
{
Mat face_mat = _face_mat.getMat();
float src_point[5][2];
for (int row = 0; row < 5; ++row)
{
for(int col = 0; col < 2; ++col)
{
src_point[row][col] = face_mat.at<float>(0, row*2+col+4);
}
}
Mat warp_mat = getSimilarityTransformMatrix(src_point);
warpAffine(_src_img, _aligned_img, warp_mat, Size(112, 112), INTER_LINEAR);
};
void feature(InputArray _aligned_img, OutputArray _face_feature) override
{
        Mat inputBlob = dnn::blobFromImage(_aligned_img, 1, Size(112, 112), Scalar(0, 0, 0), true, false);
        net.setInput(inputBlob);
net.forward(_face_feature);
};
double match(InputArray _face_feature1, InputArray _face_feature2, int dis_type) const override
{
Mat face_feature1 = _face_feature1.getMat(), face_feature2 = _face_feature2.getMat();
normalize(face_feature1, face_feature1);
normalize(face_feature2, face_feature2);
if(dis_type == DisType::FR_COSINE){
return sum(face_feature1.mul(face_feature2))[0];
}else if(dis_type == DisType::FR_NORM_L2){
return norm(face_feature1, face_feature2);
}else{
throw std::invalid_argument("invalid parameter " + std::to_string(dis_type));
}
};
private:
Mat getSimilarityTransformMatrix(float src[5][2]) const {
float dst[5][2] = { {38.2946f, 51.6963f}, {73.5318f, 51.5014f}, {56.0252f, 71.7366f}, {41.5493f, 92.3655f}, {70.7299f, 92.2041f} };
float avg0 = (src[0][0] + src[1][0] + src[2][0] + src[3][0] + src[4][0]) / 5;
float avg1 = (src[0][1] + src[1][1] + src[2][1] + src[3][1] + src[4][1]) / 5;
//Compute mean of src and dst.
float src_mean[2] = { avg0, avg1 };
float dst_mean[2] = { 56.0262f, 71.9008f };
//Subtract mean from src and dst.
float src_demean[5][2];
for (int i = 0; i < 2; i++)
{
for (int j = 0; j < 5; j++)
{
src_demean[j][i] = src[j][i] - src_mean[i];
}
}
float dst_demean[5][2];
for (int i = 0; i < 2; i++)
{
for (int j = 0; j < 5; j++)
{
dst_demean[j][i] = dst[j][i] - dst_mean[i];
}
}
double A00 = 0.0, A01 = 0.0, A10 = 0.0, A11 = 0.0;
for (int i = 0; i < 5; i++)
A00 += dst_demean[i][0] * src_demean[i][0];
A00 = A00 / 5;
for (int i = 0; i < 5; i++)
A01 += dst_demean[i][0] * src_demean[i][1];
A01 = A01 / 5;
for (int i = 0; i < 5; i++)
A10 += dst_demean[i][1] * src_demean[i][0];
A10 = A10 / 5;
for (int i = 0; i < 5; i++)
A11 += dst_demean[i][1] * src_demean[i][1];
A11 = A11 / 5;
Mat A = (Mat_<double>(2, 2) << A00, A01, A10, A11);
double d[2] = { 1.0, 1.0 };
double detA = A00 * A11 - A01 * A10;
if (detA < 0)
d[1] = -1;
double T[3][3] = { {1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0} };
Mat s, u, vt, v;
SVD::compute(A, s, u, vt);
double smax = s.ptr<double>(0)[0]>s.ptr<double>(1)[0] ? s.ptr<double>(0)[0] : s.ptr<double>(1)[0];
double tol = smax * 2 * FLT_MIN;
int rank = 0;
if (s.ptr<double>(0)[0]>tol)
rank += 1;
if (s.ptr<double>(1)[0]>tol)
rank += 1;
double arr_u[2][2] = { {u.ptr<double>(0)[0], u.ptr<double>(0)[1]}, {u.ptr<double>(1)[0], u.ptr<double>(1)[1]} };
double arr_vt[2][2] = { {vt.ptr<double>(0)[0], vt.ptr<double>(0)[1]}, {vt.ptr<double>(1)[0], vt.ptr<double>(1)[1]} };
double det_u = arr_u[0][0] * arr_u[1][1] - arr_u[0][1] * arr_u[1][0];
double det_vt = arr_vt[0][0] * arr_vt[1][1] - arr_vt[0][1] * arr_vt[1][0];
if (rank == 1)
{
if ((det_u*det_vt) > 0)
{
Mat uvt = u*vt;
T[0][0] = uvt.ptr<double>(0)[0];
T[0][1] = uvt.ptr<double>(0)[1];
T[1][0] = uvt.ptr<double>(1)[0];
T[1][1] = uvt.ptr<double>(1)[1];
}
else
{
double temp = d[1];
d[1] = -1;
Mat D = (Mat_<double>(2, 2) << d[0], 0.0, 0.0, d[1]);
Mat Dvt = D*vt;
Mat uDvt = u*Dvt;
T[0][0] = uDvt.ptr<double>(0)[0];
T[0][1] = uDvt.ptr<double>(0)[1];
T[1][0] = uDvt.ptr<double>(1)[0];
T[1][1] = uDvt.ptr<double>(1)[1];
d[1] = temp;
}
}
else
{
Mat D = (Mat_<double>(2, 2) << d[0], 0.0, 0.0, d[1]);
Mat Dvt = D*vt;
Mat uDvt = u*Dvt;
T[0][0] = uDvt.ptr<double>(0)[0];
T[0][1] = uDvt.ptr<double>(0)[1];
T[1][0] = uDvt.ptr<double>(1)[0];
T[1][1] = uDvt.ptr<double>(1)[1];
}
double var1 = 0.0;
for (int i = 0; i < 5; i++)
var1 += src_demean[i][0] * src_demean[i][0];
var1 = var1 / 5;
double var2 = 0.0;
for (int i = 0; i < 5; i++)
var2 += src_demean[i][1] * src_demean[i][1];
var2 = var2 / 5;
double scale = 1.0 / (var1 + var2)* (s.ptr<double>(0)[0] * d[0] + s.ptr<double>(1)[0] * d[1]);
double TS[2];
TS[0] = T[0][0] * src_mean[0] + T[0][1] * src_mean[1];
TS[1] = T[1][0] * src_mean[0] + T[1][1] * src_mean[1];
T[0][2] = dst_mean[0] - scale*TS[0];
T[1][2] = dst_mean[1] - scale*TS[1];
T[0][0] *= scale;
T[0][1] *= scale;
T[1][0] *= scale;
T[1][1] *= scale;
Mat transform_mat = (Mat_<double>(2, 3) << T[0][0], T[0][1], T[0][2], T[1][0], T[1][1], T[1][2]);
return transform_mat;
}
private:
dnn::Net net;
};
Ptr<FaceRecognizerSF> FaceRecognizerSF::create(const String& model, const String& config, int backend_id, int target_id)
{
return makePtr<FaceRecognizerSFImpl>(model, config, backend_id, target_id);
}
} // namespace cv
| 3,867 |
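The implementation above backs OpenCV's public FaceRecognizerSF API (create, alignCrop, feature, match). The sketch below shows the intended call sequence, assuming OpenCV 4.5.4 or newer built with the objdetect and dnn modules; the model file names and image paths are placeholders, and the face rows come from cv::FaceDetectorYN rather than being hard-coded.

#include <opencv2/objdetect.hpp>   // FaceDetectorYN, FaceRecognizerSF (OpenCV >= 4.5.4)
#include <opencv2/imgcodecs.hpp>
#include <iostream>

int main() {
    // Placeholder model files: the YuNet detector and SFace recognizer ONNX models.
    auto detector   = cv::FaceDetectorYN::create("face_detection_yunet.onnx", "", cv::Size(320, 320));
    auto recognizer = cv::FaceRecognizerSF::create("face_recognition_sface.onnx", "");

    cv::Mat img1 = cv::imread("person_a.jpg");
    cv::Mat img2 = cv::imread("person_b.jpg");

    // Each detection row holds x, y, w, h, five landmark points and a score (15 floats),
    // matching the layout alignCrop() reads above (landmarks start at column 4).
    cv::Mat faces1, faces2;
    detector->setInputSize(img1.size());
    detector->detect(img1, faces1);
    detector->setInputSize(img2.size());
    detector->detect(img2, faces2);
    if (faces1.rows < 1 || faces2.rows < 1) return 1;

    // Align, embed, and compare; a higher cosine similarity means more likely the same person.
    cv::Mat aligned1, aligned2, feat1, feat2;
    recognizer->alignCrop(img1, faces1.row(0), aligned1);
    recognizer->alignCrop(img2, faces2.row(0), aligned2);
    recognizer->feature(aligned1, feat1);
    feat1 = feat1.clone();   // copy before the net runs again, since feature() reuses its output buffer
    recognizer->feature(aligned2, feat2);

    double score = recognizer->match(feat1, feat2, cv::FaceRecognizerSF::DisType::FR_COSINE);
    std::cout << "cosine similarity: " << score << std::endl;
    return 0;
}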
2,144 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.controller.api.access;
import com.google.common.annotations.VisibleForTesting;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.ws.rs.DELETE;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.ResourceInfo;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriInfo;
import org.glassfish.grizzly.http.server.Request;
/**
 * A container filter class responsible for automatic authentication of REST endpoints. Any REST endpoint annotated
 * with the {@link Authenticate} annotation will go through authentication.
*/
@javax.ws.rs.ext.Provider
public class AuthenticationFilter implements ContainerRequestFilter {
private static final Set<String> UNPROTECTED_PATHS =
new HashSet<>(Arrays.asList("", "help", "auth/info", "auth/verify", "health"));
@Inject
Provider<Request> _requestProvider;
@Inject
AccessControlFactory _accessControlFactory;
@Context
ResourceInfo _resourceInfo;
@Context
HttpHeaders _httpHeaders;
@Override
public void filter(ContainerRequestContext requestContext)
throws IOException {
Method endpointMethod = _resourceInfo.getResourceMethod();
AccessControl accessControl = _accessControlFactory.create();
String endpointUrl = _requestProvider.get().getRequestURL().toString();
UriInfo uriInfo = requestContext.getUriInfo();
// exclude public/unprotected paths
if (isBaseFile(uriInfo.getPath()) || UNPROTECTED_PATHS.contains(uriInfo.getPath())) {
return;
}
// check if authentication is required implicitly
if (accessControl.protectAnnotatedOnly() && !endpointMethod.isAnnotationPresent(Authenticate.class)) {
return;
}
// Note that table name is extracted from "path parameters" or "query parameters" if it's defined as one of the
    // following:
// - "tableName",
// - "tableNameWithType", or
// - "schemaName"
// If table name is not available, it means the endpoint is not a table-level endpoint.
Optional<String> tableName = extractTableName(uriInfo.getPathParameters(), uriInfo.getQueryParameters());
// default access type
AccessType accessType = AccessType.READ;
if (endpointMethod.isAnnotationPresent(Authenticate.class)) {
accessType = endpointMethod.getAnnotation(Authenticate.class).value();
} else if (accessControl.protectAnnotatedOnly()) {
// heuristically infer access type via javax.ws.rs annotations
if (endpointMethod.getAnnotation(POST.class) != null) {
accessType = AccessType.CREATE;
} else if (endpointMethod.getAnnotation(PUT.class) != null) {
accessType = AccessType.UPDATE;
} else if (endpointMethod.getAnnotation(DELETE.class) != null) {
accessType = AccessType.DELETE;
}
}
new AccessControlUtils().validatePermission(tableName, accessType, _httpHeaders, endpointUrl, accessControl);
}
@VisibleForTesting
Optional<String> extractTableName(MultivaluedMap<String, String> pathParameters,
MultivaluedMap<String, String> queryParameters) {
Optional<String> tableName = extractTableName(pathParameters);
if (tableName.isPresent()) {
return tableName;
}
return extractTableName(queryParameters);
}
private Optional<String> extractTableName(MultivaluedMap<String, String> mmap) {
String tableName = mmap.getFirst("tableName");
if (tableName == null) {
tableName = mmap.getFirst("tableNameWithType");
if (tableName == null) {
tableName = mmap.getFirst("schemaName");
}
}
return Optional.ofNullable(tableName);
}
private static boolean isBaseFile(String path) {
return !path.contains("/") && path.contains(".");
}
}
| 1,567 |
1,770 | package com.willowtreeapps.hyperion.sharedpreferences.detail.viewholder;
import android.content.SharedPreferences;
import android.view.KeyEvent;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
import com.willowtreeapps.hyperion.sharedpreferences.R;
public class StringPreferenceViewHolder extends PreferenceViewHolder<String> {
private final EditText editTextValue;
public StringPreferenceViewHolder(View itemView, SharedPreferences sharedPreferences) {
super(itemView);
editTextValue = itemView.findViewById(R.id.hsp_navigation_preference_value);
editTextValue.setOnEditorActionListener(new EditorListener(sharedPreferences));
}
@Override
public void bind(String preferenceKey, String preferenceValue) {
super.bind(preferenceKey, preferenceValue);
editTextValue.setText(preferenceValue);
}
private class EditorListener extends SharedPreferenceEditorListener {
EditorListener(SharedPreferences sharedPreferences) {
super(sharedPreferences);
}
@Override
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
String value = v.getText().toString();
if (!value.equals(sharedPreferences.getString(getKey(), ""))) {
sharedPreferences.edit()
.putString(getKey(), value)
.apply();
}
return true;
}
}
}
| 570 |
480 | <gh_stars>100-1000
/* Copyright (c) 2011 Nordic Semiconductor. All Rights Reserved.
*
* The information contained herein is property of Nordic Semiconductor ASA.
* Terms and conditions of usage are described in detail in NORDIC
* SEMICONDUCTOR STANDARD SOFTWARE LICENSE AGREEMENT.
*
* Licensees are granted free, non-transferable use of the information. NO
* WARRANTY of ANY KIND is provided. This heading must NOT be removed from
* the file.
*
* $LastChangedRevision: 25851 $
*/
/**
* @file
* @brief Gazell Link Layer constants and default values.
*
* NOTE! Changing values here has no effect. They are only provided as a reference.
*/
#ifndef NRF_GZLL_CONSTANTS_H__
#define NRF_GZLL_CONSTANTS_H__
/**
* @addtogroup gzll_02_api
* @{
*/
/*****************************************************************************/
/** @name Hardware resources used by Gazell */
/*****************************************************************************/
#define NRF_GZLL_HIGH_IRQ_PRIORITY 0 ///< Interrupt priority for the Gazell timer and the radio.
#define NRF_GZLL_LOW_IRQ_PRIORITY 1 ///< Interrupt priority for Gazell callback functions.
#define NRF_GZLL_SWI_IRQn SWI0_IRQn ///< Software interrupt # used for callback functions.
#define NRF_GZLL_SWI_IRQ_HANDLER SWI0_IRQHandler ///< Software interrupt handler used for callback functions.
#define NRF_GZLL_TIMER NRF_TIMER2 ///< Timer to be used as flywheel timer.
#define NRF_GZLL_TIMER_PERPOWER_Msk POWER_PERPOWER_TIMER2_Msk ///< PERPOWER mask for the timer.
#define NRF_GZLL_TIMER_IRQn TIMER2_IRQn ///< Interrupt # for the timer.
#define NRF_GZLL_TIMER_IRQ_HANDLER TIMER2_IRQHandler ///< Interrupt handler for the timer.
// In addition, Gazell uses the radio peripheral and radio interrupts.
/*
* PPI configuration
*/
#define NRF_GZLL_PPI_EEP0 (NRF_PPI -> CH0_EEP) ///< Gazell PPI event endpoint 0
#define NRF_GZLL_PPI_TEP0 (NRF_PPI -> CH0_TEP) ///< Gazell PPI task endpoint 0
#define NRF_GZLL_PPI_EEP1 (NRF_PPI -> CH1_EEP) ///< Gazell PPI event endpoint 1
#define NRF_GZLL_PPI_TEP1 (NRF_PPI -> CH1_TEP) ///< Gazell PPI task endpoint 1
#define NRF_GZLL_PPI_EEP2 (NRF_PPI -> CH2_EEP) ///< Gazell PPI event endpoint 2
#define NRF_GZLL_PPI_TEP2 (NRF_PPI -> CH2_TEP) ///< Gazell PPI task endpoint 2
#define NRF_GZLL_PPI_CHEN_MSK_0_AND_1 (0x03) ///< Channel enable/disable mask for PPI endpoint 0 and 1.
#define NRF_GZLL_PPI_CHEN_MSK_2 (0x04) ///< Channel enable/disable mask for PPI endpoint 2.
/** @} */
/*****************************************************************************/
/** @name Constant pipe and FIFO configuration */
/*****************************************************************************/
#define NRF_GZLL_CONST_PIPE_COUNT 8 ///< Number of TX pipes (at least one for each Device-Host pairs).
#define NRF_GZLL_CONST_FIFO_LENGTH 3 ///< Maximum number of packets allowed in a TX or RX FIFO.
#define NRF_GZLL_CONST_MAX_TOTAL_PACKETS 6 ///< Maximum number of packets available for reservation at any one time.
#define NRF_GZLL_CONST_MAX_PAYLOAD_LENGTH 32 ///< Maximum allowed payload length in bytes.
#define NRF_GZLL_CONST_CALLBACK_QUEUE_LENGTH 10 ///< Maximum number of notifications allowed in the callback queue.
/** @} */
/*****************************************************************************/
/** @name Default radio configuration */
/*****************************************************************************/
#define NRF_GZLL_DEFAULT_TX_POWER NRF_GZLL_TX_POWER_0_DBM ///< Default TX power.
#define NRF_GZLL_DEFAULT_DATARATE NRF_GZLL_DATARATE_2MBIT ///< Default data rate.
#define NRF_GZLL_DEFAULT_CHANNEL_TABLE {4, 25, 42, 63, 77} ///< Default channel table.
#define NRF_GZLL_DEFAULT_CHANNEL_TABLE_SIZE 5 ///< Default channel table size.
#define NRF_GZLL_CONST_MAX_CHANNEL_TABLE_SIZE 16 ///< Maximum channel table size allowed by Gazell.
/** @} */
/*****************************************************************************/
/** @name Default Address configuration */
/*****************************************************************************/
/*
Corresponds to Legacy nRFgo SDK Gazell config:
#define GZLL_DEFAULT_ADDRESS_PIPE0 {0x01, 0x04, 0x07, 0x0A, 0x0D} // {1, 4, 7, 10, 13}
#define GZLL_DEFAULT_ADDRESS_PIPE1 {0x02, 0x05, 0x08, 0x0B, 0x0E} // {2, 5, 8, 11, 14}
#define GZLL_DEFAULT_ADDRESS_PIPE2 3
#define GZLL_DEFAULT_ADDRESS_PIPE3 4
#define GZLL_DEFAULT_ADDRESS_PIPE4 5
#define GZLL_DEFAULT_ADDRESS_PIPE5 6
*/
#define NRF_GZLL_DEFAULT_FULL_ADDRESS_PIPE0 {0x01, 0x04, 0x07, 0x0A, 0x0D} ///< Corresponding legacy Gazell pipe 0 address.
#define NRF_GZLL_DEFAULT_BASE_ADDRESS_0 0x0D0A0704 ///< Default base address 0.
#define NRF_GZLL_DEFAULT_BASE_ADDRESS_1 0x0E0B0805 ///< Default base address 1.
#define NRF_GZLL_DEFAULT_PREFIX_BYTE_0 1 ///< Default prefix address pipe 0.
#define NRF_GZLL_DEFAULT_PREFIX_BYTE_1 2 ///< Default prefix address pipe 1.
#define NRF_GZLL_DEFAULT_PREFIX_BYTE_2 3 ///< Default prefix address pipe 2.
#define NRF_GZLL_DEFAULT_PREFIX_BYTE_3 4 ///< Default prefix address pipe 3.
#define NRF_GZLL_DEFAULT_PREFIX_BYTE_4 5 ///< Default prefix address pipe 4.
#define NRF_GZLL_DEFAULT_PREFIX_BYTE_5 6 ///< Default prefix address pipe 5.
#define NRF_GZLL_DEFAULT_PREFIX_BYTE_6 7 ///< Default prefix address pipe 6.
#define NRF_GZLL_DEFAULT_PREFIX_BYTE_7 8 ///< Default prefix address pipe 7.
#define NRF_GZLL_DEFAULT_BASE_ADDRESS_LENGTH NRF_GZLL_BASE_ADDRESS_LENGTH_4B ///< Default on-air base address length.
#define NRF_GZLL_DEFAULT_RX_PIPES_ENABLED 0x000000FF ///< Enabled Rx pipes. See nrf_gzll_set_rx_pipes_enabled().
/** @} */
/*****************************************************************************/
/** @name Default timeslot and synchronization configuration */
/*****************************************************************************/
#define NRF_GZLL_DEFAULT_TIMESLOT_PERIOD 600 ///< Default timeslot period.
#define NRF_GZLL_DEFAULT_TIMESLOTS_PER_CHANNEL 2 ///< Timeslots used by the Host and by the Device when communication is in sync.
#define NRF_GZLL_DEFAULT_TIMESLOTS_PER_CHANNEL_WHEN_DEVICE_OUT_OF_SYNC 15 ///< Timeslots used by the Device before communication is in sync.
#define NRF_GZLL_DEFAULT_SYNC_LIFETIME (3*NRF_GZLL_DEFAULT_CHANNEL_TABLE_SIZE*NRF_GZLL_DEFAULT_TIMESLOTS_PER_CHANNEL) ///< Number of timeslots to keep the timer running so that communication remains synchronized.
#define NRF_GZLL_DEFAULT_DEVICE_CHANNEL_SELECTION_POLICY NRF_GZLL_DEVICE_CHANNEL_SELECTION_POLICY_USE_SUCCESSFUL ///< Default Gazell Device channel selection policy.
#define NRF_GZLL_DEFAULT_MAX_TX_ATTEMPTS 0 ///< Default maximum TX attempts for each packet. A value of zero implies an unlimited number of TX attempts.
#define NRF_GZLL_DEFAULT_XOSC_CTL NRF_GZLL_XOSC_CTL_AUTO ///< Default setting for controlling the XOSC.
/** @} */
/** @} */
#endif
| 3,096 |
3,017 | package sagan.site.projects.support;
import java.time.LocalDate;
import org.springframework.util.Assert;
/**
* The support timeline for a {@link sagan.site.projects.ProjectGeneration}, given a defined {@link SupportPolicy}.
* A timeline is made of two {@link SupportPeriod}: open source support and commercial support.
*/
public class SupportTimeline {
private final LocalDate initialRelease;
private final SupportPeriod openSourceSupport;
private final SupportPeriod commercialSupport;
SupportTimeline(LocalDate initialRelease, SupportPeriod openSourceSupport, SupportPeriod commercialSupport) {
this.initialRelease = initialRelease;
this.openSourceSupport = openSourceSupport;
this.commercialSupport = commercialSupport;
}
public static Builder create(LocalDate initialRelease) {
return new Builder(initialRelease);
}
public LocalDate getInitialRelease() {
return this.initialRelease;
}
public SupportPeriod getOpenSourceSupport() {
return this.openSourceSupport;
}
public SupportPeriod getCommercialSupport() {
return this.commercialSupport;
}
@Override
public String toString() {
return "SupportTimeline{" +
"initialRelease=" + initialRelease +
", openSourceSupport=" + openSourceSupport +
", commercialSupport=" + commercialSupport +
'}';
}
public static class Builder {
private final LocalDate initialRelease;
private SupportPeriod openSourceSupport;
private SupportPeriod commercialSupport;
Builder(LocalDate initialRelease) {
this.initialRelease = initialRelease;
}
public Builder openSourceSupport(LocalDate startDate, LocalDate endDate, String reason) {
this.openSourceSupport = new SupportPeriod(startDate, endDate, reason);
return this;
}
public Builder commercialSupport(LocalDate startDate, LocalDate endDate, String reason) {
this.commercialSupport = new SupportPeriod(startDate, endDate, reason);
return this;
}
public SupportTimeline build() {
Assert.notNull(this.openSourceSupport, "openSourceSupport should not be null");
Assert.notNull(this.commercialSupport, "commercialSupport should not be null");
return new SupportTimeline(this.initialRelease, this.openSourceSupport, this.commercialSupport);
}
}
}
| 632 |
2,151 | <reponame>zipated/src<gh_stars>1000+
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/browser/api/declarative_net_request/declarative_net_request_api.h"
#include <utility>
#include "base/bind.h"
#include "base/strings/stringprintf.h"
#include "content/public/browser/browser_thread.h"
#include "extensions/browser/api/declarative_net_request/rules_monitor_service.h"
#include "extensions/browser/api/declarative_net_request/ruleset_manager.h"
#include "extensions/browser/extension_prefs.h"
#include "extensions/browser/extension_system.h"
#include "extensions/browser/info_map.h"
#include "extensions/common/api/declarative_net_request.h"
#include "extensions/common/extension_id.h"
#include "extensions/common/url_pattern.h"
#include "extensions/common/url_pattern_set.h"
namespace extensions {
namespace {
// Returns true if the given |extension| has a registered ruleset. If it
// doesn't, returns false and populates |error|.
bool HasRegisteredRuleset(content::BrowserContext* context,
const Extension* extension,
std::string* error) {
const auto* rules_monitor_service = BrowserContextKeyedAPIFactory<
declarative_net_request::RulesMonitorService>::Get(context);
DCHECK(rules_monitor_service);
if (rules_monitor_service->HasRegisteredRuleset(extension))
return true;
*error = "The extension must have a ruleset in order to call this function.";
return false;
}
} // namespace
DeclarativeNetRequestUpdateWhitelistedPagesFunction::
DeclarativeNetRequestUpdateWhitelistedPagesFunction() = default;
DeclarativeNetRequestUpdateWhitelistedPagesFunction::
~DeclarativeNetRequestUpdateWhitelistedPagesFunction() = default;
ExtensionFunction::ResponseAction
DeclarativeNetRequestUpdateWhitelistedPagesFunction::UpdateWhitelistedPages(
const std::vector<std::string>& patterns,
Action action) {
if (patterns.empty())
return RespondNow(NoArguments());
// It's ok to allow file access and to use SCHEME_ALL since this is not
// actually granting any permissions to the extension. This will only be used
// to whitelist requests.
URLPatternSet delta;
std::string error;
if (!delta.Populate(patterns, URLPattern::SCHEME_ALL,
true /*allow_file_access*/, &error)) {
return RespondNow(Error(error));
}
ExtensionPrefs* prefs = ExtensionPrefs::Get(browser_context());
URLPatternSet current_set = prefs->GetDNRWhitelistedPages(extension_id());
URLPatternSet new_set;
switch (action) {
case Action::ADD:
new_set = URLPatternSet::CreateUnion(current_set, delta);
break;
case Action::REMOVE:
new_set = URLPatternSet::CreateDifference(current_set, delta);
break;
}
if (static_cast<int>(new_set.size()) >
api::declarative_net_request::MAX_NUMBER_OF_WHITELISTED_PAGES) {
return RespondNow(Error(base::StringPrintf(
"The number of whitelisted page patterns can't exceed %d",
api::declarative_net_request::MAX_NUMBER_OF_WHITELISTED_PAGES)));
}
// Persist |new_set| as part of preferences.
prefs->SetDNRWhitelistedPages(extension_id(), new_set);
return RespondNow(NoArguments());
}
bool DeclarativeNetRequestUpdateWhitelistedPagesFunction::PreRunValidation(
std::string* error) {
return UIThreadExtensionFunction::PreRunValidation(error) &&
HasRegisteredRuleset(browser_context(), extension(), error);
}
DeclarativeNetRequestAddWhitelistedPagesFunction::
DeclarativeNetRequestAddWhitelistedPagesFunction() = default;
DeclarativeNetRequestAddWhitelistedPagesFunction::
~DeclarativeNetRequestAddWhitelistedPagesFunction() = default;
ExtensionFunction::ResponseAction
DeclarativeNetRequestAddWhitelistedPagesFunction::Run() {
using Params = api::declarative_net_request::AddWhitelistedPages::Params;
std::unique_ptr<Params> params(Params::Create(*args_));
EXTENSION_FUNCTION_VALIDATE(params);
return UpdateWhitelistedPages(params->page_patterns, Action::ADD);
}
DeclarativeNetRequestRemoveWhitelistedPagesFunction::
DeclarativeNetRequestRemoveWhitelistedPagesFunction() = default;
DeclarativeNetRequestRemoveWhitelistedPagesFunction::
~DeclarativeNetRequestRemoveWhitelistedPagesFunction() = default;
ExtensionFunction::ResponseAction
DeclarativeNetRequestRemoveWhitelistedPagesFunction::Run() {
using Params = api::declarative_net_request::AddWhitelistedPages::Params;
std::unique_ptr<Params> params(Params::Create(*args_));
EXTENSION_FUNCTION_VALIDATE(params);
return UpdateWhitelistedPages(params->page_patterns, Action::REMOVE);
}
DeclarativeNetRequestGetWhitelistedPagesFunction::
DeclarativeNetRequestGetWhitelistedPagesFunction() = default;
DeclarativeNetRequestGetWhitelistedPagesFunction::
~DeclarativeNetRequestGetWhitelistedPagesFunction() = default;
bool DeclarativeNetRequestGetWhitelistedPagesFunction::PreRunValidation(
std::string* error) {
return UIThreadExtensionFunction::PreRunValidation(error) &&
HasRegisteredRuleset(browser_context(), extension(), error);
}
ExtensionFunction::ResponseAction
DeclarativeNetRequestGetWhitelistedPagesFunction::Run() {
const ExtensionPrefs* prefs = ExtensionPrefs::Get(browser_context());
URLPatternSet current_set = prefs->GetDNRWhitelistedPages(extension_id());
return RespondNow(ArgumentList(
api::declarative_net_request::GetWhitelistedPages::Results::Create(
*current_set.ToStringVector())));
}
} // namespace extensions
| 1,831 |
881 | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.core.management.base import BaseCommand
from django.core.cache import caches
from blueapps.account.conf import APIGW_CACHE_KEY
cache = caches["login_db"]
class Command(BaseCommand):
def handle(self, **options):
cache.delete(APIGW_CACHE_KEY)
print("[APIGW] clean public key cache SUCCESS!")
| 320 |
854 | <reponame>timxor/leetcode-journal<gh_stars>100-1000
__________________________________________________________________________________________________
class Solution {
public:
TreeNode* str2tree(string s) {
if (s.empty()) return NULL;
auto found = s.find('(');
int val = (found == string::npos) ? stoi(s) : stoi(s.substr(0, found));
TreeNode *cur = new TreeNode(val);
if (found == string::npos) return cur;
int start = found, cnt = 0;
for (int i = start; i < s.size(); ++i) {
if (s[i] == '(') ++cnt;
else if (s[i] == ')') --cnt;
if (cnt == 0 && start == found) {
cur->left = str2tree(s.substr(start + 1, i - start - 1));
start = i + 1;
} else if (cnt == 0) {
cur->right = str2tree(s.substr(start + 1, i - start - 1));
}
}
return cur;
}
};
__________________________________________________________________________________________________
class Solution {
public:
TreeNode* str2tree(string s) {
if (s.empty()) return NULL;
stack<TreeNode*> st;
for (int i = 0; i < s.size(); ++i) {
int j = i;
if (s[i] == ')') st.pop();
else if ((s[i] >= '0' && s[i] <= '9') || s[i] == '-') {
while (i + 1 < s.size() && s[i + 1] >= '0' && s[i + 1] <= '9') ++i;
TreeNode *cur = new TreeNode(stoi(s.substr(j, i - j + 1)));
if (!st.empty()) {
TreeNode *t = st.top();
if (!t->left) t->left = cur;
else t->right = cur;
}
st.push(cur);
}
}
return st.top();
}
};
__________________________________________________________________________________________________
| 900 |
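Both variants above rely on the usual LeetCode TreeNode definition, which is not included in the snippet. The driver below supplies that definition as an assumption and, once either Solution class above is pasted in place of the marker comment, exercises str2tree on "4(2(3)(1))(6(5))", where each parenthesized group encodes a subtree (left child first, then right).

#include <iostream>
#include <stack>
#include <string>
using namespace std;

// Assumed LeetCode-style node definition (not part of the original snippet).
struct TreeNode {
    int val;
    TreeNode *left;
    TreeNode *right;
    TreeNode(int x) : val(x), left(nullptr), right(nullptr) {}
};

// ... either Solution class from above goes here ...

// Preorder print to verify the reconstructed tree shape.
void preorder(TreeNode* node) {
    if (!node) return;
    cout << node->val << ' ';
    preorder(node->left);
    preorder(node->right);
}

int main() {
    Solution sol;
    TreeNode* root = sol.str2tree("4(2(3)(1))(6(5))");
    preorder(root);      // expected output: 4 2 3 1 6 5
    cout << endl;
    return 0;            // nodes are intentionally not freed in this sketch
}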
12,278 | <gh_stars>1000+
// Copyright <NAME> 2015.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#ifdef _MSC_VER
# pragma warning(disable : 4756) // overflow in constant arithmetic
// Constants are too big for float case, but this doesn't matter for test.
#endif
#include <boost/math/concepts/real_concept.hpp>
#define BOOST_TEST_MAIN
#include <boost/test/unit_test.hpp>
#include <boost/test/floating_point_comparison.hpp>
#include <boost/math/special_functions/math_fwd.hpp>
#include <boost/math/constants/constants.hpp>
#include <boost/array.hpp>
#include "functor.hpp"
#include "handle_test_result.hpp"
#include "table_type.hpp"
#ifndef SC_
#define SC_(x) static_cast<typename table_type<T>::type>(BOOST_JOIN(x, L))
#endif
template <class Real, typename T>
void do_test_heuman_lambda(const T& data, const char* type_name, const char* test)
{
#if !(defined(ERROR_REPORTING_MODE) && !defined(HEUMAN_LAMBDA_FUNCTION_TO_TEST))
typedef Real value_type;
std::cout << "Testing: " << test << std::endl;
#ifdef HEUMAN_LAMBDA_FUNCTION_TO_TEST
value_type(*fp2)(value_type, value_type) = HEUMAN_LAMBDA_FUNCTION_TO_TEST;
#elif defined(BOOST_MATH_NO_DEDUCED_FUNCTION_POINTERS)
value_type (*fp2)(value_type, value_type) = boost::math::ellint_d<value_type, value_type>;
#else
value_type(*fp2)(value_type, value_type) = boost::math::heuman_lambda;
#endif
boost::math::tools::test_result<value_type> result;
result = boost::math::tools::test_hetero<Real>(
data,
bind_func<Real>(fp2, 1, 0),
extract_result<Real>(2));
handle_test_result(result, data[result.worst()], result.worst(),
type_name, "heuman_lambda", test);
std::cout << std::endl;
#endif
}
template <typename T>
void test_spots(T, const char* type_name)
{
BOOST_MATH_STD_USING
// Function values calculated on http://functions.wolfram.com/
// Note that Mathematica's EllipticE accepts k^2 as the second parameter.
static const boost::array<boost::array<T, 3>, 5> data1 = {{
{ { SC_(0.25), SC_(0.5), SC_(0.231195544262270355901990821099667428154924832224446817213200) } },
{ { SC_(-0.25), SC_(0.5), SC_(-0.231195544262270355901990821099667428154924832224446817213200) } },
{ { SC_(0), SC_(0.5), SC_(0) } },
{ { SC_(1), T(0.5), SC_(0.792745183008071035953588061452801838417979005666066982987549) } },
{ { SC_(1), T(0), SC_(0.841470984807896506652502321630298999622563060798371065672751) } },
}};
do_test_heuman_lambda<T>(data1, type_name, "Elliptic Integral Jacobi Zeta: Mathworld Data");
#include "heuman_lambda_data.ipp"
do_test_heuman_lambda<T>(heuman_lambda_data, type_name, "Elliptic Integral Heuman Lambda: Random Data");
}
| 1,196 |
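For reference, the function exercised by this harness can also be called directly. A minimal sketch, assuming Boost.Math 1.61 or newer, the dedicated header boost/math/special_functions/heuman_lambda.hpp, and the (k, phi) argument order implied by bind_func(fp2, 1, 0) above:

#include <boost/math/special_functions/heuman_lambda.hpp>
#include <iostream>
#include <iomanip>

int main() {
    double k = 0.5;     // elliptic modulus (column 1 of the Mathworld data rows)
    double phi = 0.25;  // amplitude in radians (column 0)
    // Heuman's lambda function; the result should match the tabulated value
    // 0.2311955442622703559... from the first data row above.
    std::cout << std::setprecision(17) << boost::math::heuman_lambda(k, phi) << std::endl;
    return 0;
}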
2,151 | <filename>chrome/browser/ui/app_list/internal_app/internal_app_model_builder.h
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_APP_LIST_INTERNAL_APP_INTERNAL_APP_MODEL_BUILDER_H_
#define CHROME_BROWSER_UI_APP_LIST_INTERNAL_APP_INTERNAL_APP_MODEL_BUILDER_H_
#include "base/macros.h"
#include "chrome/browser/ui/app_list/app_list_model_builder.h"
class AppListControllerDelegate;
// This class populates and maintains internal apps.
class InternalAppModelBuilder : public AppListModelBuilder {
public:
explicit InternalAppModelBuilder(AppListControllerDelegate* controller);
~InternalAppModelBuilder() override = default;
private:
// AppListModelBuilder:
void BuildModel() override;
DISALLOW_COPY_AND_ASSIGN(InternalAppModelBuilder);
};
#endif // CHROME_BROWSER_UI_APP_LIST_INTERNAL_APP_INTERNAL_APP_MODEL_BUILDER_H_
| 326 |
4,054 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include "value_type.h"
#include "value_type_spec.h"
#include <algorithm>
#include <cassert>
namespace vespalib::eval {
namespace {
using Dimension = ValueType::Dimension;
using DimensionList = std::vector<Dimension>;
size_t my_dimension_index(const std::vector<Dimension> &list, const vespalib::string &name) {
for (size_t idx = 0; idx < list.size(); ++idx) {
if (list[idx].name == name) {
return idx;
}
}
return ValueType::Dimension::npos;
}
const Dimension *find_dimension(const std::vector<Dimension> &list, const vespalib::string &name) {
size_t idx = my_dimension_index(list, name);
return (idx != ValueType::Dimension::npos) ? &list[idx] : nullptr;
}
void sort_dimensions(DimensionList &dimensions) {
std::sort(dimensions.begin(), dimensions.end(),
[](const auto &a, const auto &b){ return (a.name < b.name); });
}
bool verify_dimensions(const DimensionList &dimensions) {
for (size_t i = 0; i < dimensions.size(); ++i) {
if (dimensions[i].size == 0) {
return false;
}
if ((i > 0) && (dimensions[i - 1].name == dimensions[i].name)) {
return false;
}
}
return true;
}
struct MyReduce {
bool has_error;
std::vector<Dimension> dimensions;
MyReduce(const std::vector<Dimension> &dim_list, const std::vector<vespalib::string> &rem_list)
: has_error(false), dimensions()
{
if (!rem_list.empty()) {
size_t removed = 0;
for (const Dimension &dim: dim_list) {
if (std::find(rem_list.begin(), rem_list.end(), dim.name) == rem_list.end()) {
dimensions.push_back(dim);
} else {
++removed;
}
}
if (removed != rem_list.size()) {
has_error = true;
}
}
}
};
struct MyJoin {
bool mismatch;
DimensionList dimensions;
vespalib::string concat_dim;
MyJoin(const DimensionList &lhs, const DimensionList &rhs)
: mismatch(false), dimensions(), concat_dim() { my_join(lhs, rhs); }
MyJoin(const DimensionList &lhs, const DimensionList &rhs, vespalib::string concat_dim_in)
: mismatch(false), dimensions(), concat_dim(concat_dim_in) { my_join(lhs, rhs); }
~MyJoin();
private:
void add(const Dimension &a) {
if (a.name == concat_dim) {
if (a.is_indexed()) {
dimensions.emplace_back(a.name, a.size + 1);
} else {
mismatch = true;
}
} else {
dimensions.push_back(a);
}
}
void unify(const Dimension &a, const Dimension &b) {
if (a.name == concat_dim) {
if (a.is_indexed() && b.is_indexed()) {
dimensions.emplace_back(a.name, a.size + b.size);
} else {
mismatch = true;
}
} else if (a == b) {
add(a);
} else {
mismatch = true;
}
}
void my_join(const DimensionList &lhs, const DimensionList &rhs) {
auto pos = rhs.begin();
auto end = rhs.end();
for (const Dimension &dim: lhs) {
while ((pos != end) && (pos->name < dim.name)) {
add(*pos++);
}
if ((pos != end) && (pos->name == dim.name)) {
unify(dim, *pos++);
} else {
add(dim);
}
}
while (pos != end) {
add(*pos++);
}
}
};
MyJoin::~MyJoin() = default;
struct Renamer {
const std::vector<vespalib::string> &from;
const std::vector<vespalib::string> &to;
size_t match_cnt;
Renamer(const std::vector<vespalib::string> &from_in,
const std::vector<vespalib::string> &to_in)
: from(from_in), to(to_in), match_cnt(0) {}
const vespalib::string &rename(const vespalib::string &name) {
for (size_t i = 0; i < from.size(); ++i) {
if (name == from[i]) {
++match_cnt;
return to[i];
}
}
return name;
}
bool matched_all() const { return (match_cnt == from.size()); }
};
} // namespace vespalib::eval::<unnamed>
constexpr ValueType::Dimension::size_type ValueType::Dimension::npos;
ValueType
ValueType::error_if(bool has_error, ValueType else_type)
{
if (has_error) {
return error_type();
} else {
return else_type;
}
}
ValueType::~ValueType() = default;
bool
ValueType::is_double() const {
if (!_error && _dimensions.empty()) {
assert(_cell_type == CellType::DOUBLE);
return true;
}
return false;
}
bool
ValueType::is_sparse() const
{
if (dimensions().empty()) {
return false;
}
for (const auto &dim : dimensions()) {
if (!dim.is_mapped()) {
return false;
}
}
return true;
}
bool
ValueType::is_dense() const
{
if (dimensions().empty()) {
return false;
}
for (const auto &dim : dimensions()) {
if (!dim.is_indexed()) {
return false;
}
}
return true;
}
size_t
ValueType::count_indexed_dimensions() const
{
size_t cnt = 0;
for (const auto &dim : dimensions()) {
if (dim.is_indexed()) {
++cnt;
}
}
return cnt;
}
size_t
ValueType::count_mapped_dimensions() const
{
size_t cnt = 0;
for (const auto &dim : dimensions()) {
if (dim.is_mapped()) {
++cnt;
}
}
return cnt;
}
size_t
ValueType::dense_subspace_size() const
{
size_t size = 1;
for (const auto &dim : dimensions()) {
if (dim.is_indexed()) {
size *= dim.size;
}
}
return size;
}
std::vector<ValueType::Dimension>
ValueType::nontrivial_indexed_dimensions() const {
std::vector<ValueType::Dimension> result;
for (const auto &dim: dimensions()) {
if (dim.is_indexed() && !dim.is_trivial()) {
result.push_back(dim);
}
}
return result;
}
std::vector<ValueType::Dimension>
ValueType::mapped_dimensions() const {
std::vector<ValueType::Dimension> result;
for (const auto &dim: dimensions()) {
if (dim.is_mapped()) {
result.push_back(dim);
}
}
return result;
}
size_t
ValueType::dimension_index(const vespalib::string &name) const {
return my_dimension_index(_dimensions, name);
}
std::vector<vespalib::string>
ValueType::dimension_names() const
{
std::vector<vespalib::string> result;
for (const auto &dimension: _dimensions) {
result.push_back(dimension.name);
}
return result;
}
ValueType
ValueType::map() const
{
auto meta = cell_meta().map();
return error_if(_error, make_type(meta.cell_type, _dimensions));
}
ValueType
ValueType::reduce(const std::vector<vespalib::string> &dimensions_in) const
{
MyReduce result(_dimensions, dimensions_in);
auto meta = cell_meta().reduce(result.dimensions.empty());
return error_if(_error || result.has_error,
make_type(meta.cell_type, std::move(result.dimensions)));
}
ValueType
ValueType::peek(const std::vector<vespalib::string> &dimensions_in) const
{
MyReduce result(_dimensions, dimensions_in);
auto meta = cell_meta().peek(result.dimensions.empty());
return error_if(_error || result.has_error || dimensions_in.empty(),
make_type(meta.cell_type, std::move(result.dimensions)));
}
ValueType
ValueType::rename(const std::vector<vespalib::string> &from,
const std::vector<vespalib::string> &to) const
{
if (from.empty() || (from.size() != to.size())) {
return error_type();
}
Renamer renamer(from, to);
std::vector<Dimension> dim_list;
for (const auto &dim: _dimensions) {
dim_list.emplace_back(renamer.rename(dim.name), dim.size);
}
auto meta = cell_meta().rename();
return error_if(!renamer.matched_all(),
make_type(meta.cell_type, std::move(dim_list)));
}
ValueType
ValueType::cell_cast(CellType to_cell_type) const
{
return error_if(_error, make_type(to_cell_type, _dimensions));
}
ValueType
ValueType::make_type(CellType cell_type, std::vector<Dimension> dimensions_in)
{
if (dimensions_in.empty() && (cell_type != CellType::DOUBLE)) {
// Note: all scalar values must have cell_type double
return error_type();
}
sort_dimensions(dimensions_in);
if (!verify_dimensions(dimensions_in)) {
return error_type();
}
return ValueType(cell_type, std::move(dimensions_in));
}
ValueType
ValueType::from_spec(const vespalib::string &spec)
{
return value_type::from_spec(spec);
}
ValueType
ValueType::from_spec(const vespalib::string &spec, std::vector<ValueType::Dimension> &unsorted)
{
return value_type::from_spec(spec, unsorted);
}
vespalib::string
ValueType::to_spec() const
{
return value_type::to_spec(*this);
}
ValueType
ValueType::join(const ValueType &lhs, const ValueType &rhs)
{
MyJoin result(lhs._dimensions, rhs._dimensions);
auto meta = CellMeta::join(lhs.cell_meta(), rhs.cell_meta());
return error_if(lhs._error || rhs._error || result.mismatch,
make_type(meta.cell_type, std::move(result.dimensions)));
}
ValueType
ValueType::merge(const ValueType &lhs, const ValueType &rhs)
{
auto meta = CellMeta::merge(lhs.cell_meta(), rhs.cell_meta());
return error_if(lhs._error || rhs._error || (lhs._dimensions != rhs._dimensions),
make_type(meta.cell_type, lhs._dimensions));
}
ValueType
ValueType::concat(const ValueType &lhs, const ValueType &rhs, const vespalib::string &dimension)
{
MyJoin result(lhs._dimensions, rhs._dimensions, dimension);
if (!find_dimension(result.dimensions, dimension)) {
result.dimensions.emplace_back(dimension, 2);
}
auto meta = CellMeta::concat(lhs.cell_meta(), rhs.cell_meta());
return error_if(lhs._error || rhs._error || result.mismatch,
make_type(meta.cell_type, std::move(result.dimensions)));
}
ValueType
ValueType::either(const ValueType &one, const ValueType &other) {
return error_if(one != other, one);
}
std::ostream &
operator<<(std::ostream &os, const ValueType &type) {
return os << type.to_spec();
}
}
| 4,685 |
381 | <reponame>wslblb/TestChat
package chen.testchat.util;
import android.app.Activity;
import android.app.LoaderManager;
import android.content.CursorLoader;
import android.content.Loader;
import android.database.Cursor;
import android.os.Bundle;
import android.provider.MediaStore;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import chen.testchat.bean.ImageFolder;
import chen.testchat.bean.ImageItem;
import chen.testchat.listener.OnImageLoadListener;
/**
 * Project name:    TestChat
 * Created by:      陈锦军
 * Created on:      2016/11/29 21:57
 * QQ:              1981367757
 */
public class LoadPictureUtil {
private static final int LOAD_ALL = 0;
private List<ImageFolder> imageFolderList;
private Activity activity;
        private final String[] IMAGE_PROJECTION = { // data columns needed when querying images
                MediaStore.Images.Media.DISPLAY_NAME, // display name of the image, e.g. aaa.jpg
                MediaStore.Images.Media.DATA, // real path of the image, e.g. /storage/emulated/0/pp/downloader/wallpaper/aaa.jpg
                MediaStore.Images.Media.SIZE, // size of the image, long, e.g. 132492
                MediaStore.Images.Media.WIDTH, // width of the image, int, e.g. 1920
                MediaStore.Images.Media.HEIGHT, // height of the image, int, e.g. 1080
                MediaStore.Images.Media.MIME_TYPE, // MIME type of the image, e.g. image/jpeg
                MediaStore.Images.Media.DATE_ADDED}; // time the image was added, long, e.g. 1450518608
public void getAllImageFolder(Activity activity, final OnImageLoadListener listener) {
this.activity = activity;
imageFolderList = new ArrayList<>();
LoaderManager loaderManager = activity.getLoaderManager();
loaderManager.initLoader(LoadPictureUtil.LOAD_ALL, null, new LoaderManager.LoaderCallbacks<Cursor>() {
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
Loader<Cursor> loader = null;
if (id == LOAD_ALL) {
LogUtil.e("加载系统数据");
loader = new CursorLoader(LoadPictureUtil.this.activity, MediaStore.Images.Media.EXTERNAL_CONTENT_URI, IMAGE_PROJECTION, null, null, IMAGE_PROJECTION[6] + " DESC");
}
return loader;
}
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
LogUtil.e("加载完成");
imageFolderList.clear();
if (data != null) {
ArrayList<ImageItem> images = new ArrayList<>();
ImageItem imageItem;
while (data.moveToNext()) {
imageItem = new ImageItem();
imageItem.setName(data.getString(data.getColumnIndexOrThrow(IMAGE_PROJECTION[0])));
imageItem.setPath(data.getString(data.getColumnIndexOrThrow(IMAGE_PROJECTION[1])));
imageItem.setSize(data.getLong(data.getColumnIndexOrThrow(IMAGE_PROJECTION[2]))/1024);
imageItem.setWidth(data.getInt(data.getColumnIndexOrThrow(IMAGE_PROJECTION[3])));
imageItem.setHeight(data.getInt(data.getColumnIndexOrThrow(IMAGE_PROJECTION[4])));
imageItem.setImageType(data.getString(data.getColumnIndexOrThrow(IMAGE_PROJECTION[5])));
imageItem.setCreatedTime(data.getLong(data.getColumnIndexOrThrow(IMAGE_PROJECTION[6])));
images.add(imageItem);
ImageFolder imageFolder = new ImageFolder();
File file = FileUtil.newFile(imageItem.getPath());
imageFolder.setName(file.getParentFile().getName());
imageFolder.setPath(file.getParentFile().getAbsolutePath());
if (!imageFolderList.contains(imageFolder)) {
ArrayList<ImageItem> list = new ArrayList<>();
list.add(imageItem);
imageFolder.setDisplay(imageItem);
imageFolder.setAllImages(list);
imageFolderList.add(imageFolder);
} else {
imageFolderList.get(imageFolderList.indexOf(imageFolder)).getAllImages().add(imageItem);
}
}
if (data.getCount() > 0) {
                                                // create a new folder that holds all the images
ImageFolder allImageFolder = new ImageFolder();
allImageFolder.setName("全部图片");
allImageFolder.setPath("/");
allImageFolder.setAllImages(images);
allImageFolder.setDisplay(images.get(0));
LogUtil.e("第一个封面信息\n");
LogUtil.e("name" + images.get(0).getName() + "\n"
+ "path" + images.get(0).getPath() + "\n"
+ "size" + images.get(0).getSize() + "\n"
+ "type" + images.get(0).getImageType() + "\n");
imageFolderList.add(0, allImageFolder);
}
}
listener.onImageLoaded(imageFolderList);
}
@Override
public void onLoaderReset(Loader<Cursor> loader) {
LogUtil.e("onLoaderReset:");
}
});
}
}
| 4,330 |
417 | <gh_stars>100-1000
{"name":{"common":"Argentina","official":"Argentine Republic","native":{"grn":{"common":"Argentina","official":"Argentine Republic"},"spa":{"common":"Argentina","official":"República Argentina"}}},"demonym":"Argentinean","capital":"Buenos Aires","iso_3166_1_alpha2":"AR","iso_3166_1_alpha3":"ARG","iso_3166_1_numeric":"032","currency":{"ARS":{"iso_4217_code":"ARS","iso_4217_numeric":32,"iso_4217_name":"Argentine Peso","iso_4217_minor_unit":2}},"tld":[".ar"],"alt_spellings":["AR","Argentine Republic","República Argentina"],"languages":{"grn":"Guaraní","spa":"Spanish"},"geo":{"continent":{"SA":"South America"},"postal_code":true,"latitude":"34 00 S","latitude_desc":"-37.071964263916016","longitude":"64 00 W","longitude_desc":"-64.85450744628906","max_latitude":"-21.783333","max_longitude":"-53.65","min_latitude":"-58.116667","min_longitude":"-73.533333","area":2780400,"region":"Americas","subregion":"South America","world_region":"AMER","region_code":"019","subregion_code":"005","landlocked":false,"borders":["BOL","BRA","CHL","PRY","URY"],"independent":"Yes"},"dialling":{"calling_code":["54"],"national_prefix":"0","national_number_lengths":[8,9],"national_destination_code_lengths":[2],"international_prefix":"00"},"extra":{"geonameid":3865483,"edgar":"C1","itu":"ARG","marc":"ag","wmo":"AG","ds":"RA","fifa":"ARG","fips":"AR","gaul":12,"ioc":"ARG","cowc":"ARG","cown":160,"fao":9,"imf":213,"ar5":"LAM","address_format":"{{recipient}}\n{{street}}\n{{postalcode}} {{city}}\n{{region}}\n{{country}}","eu_member":null,"vat_rates":null,"emoji":"🇦🇷"}}
| 560 |
817 | #####################################################
# Copyright (c) <NAME> [GitHub D-X-Y], 2021.03 #
#####################################################
# pytest tests/test_loader.py -s #
#####################################################
import unittest
import tempfile
import torch
from xautodl.datasets import get_datasets
def test_simple():
xdir = tempfile.mkdtemp()
train_data, valid_data, xshape, class_num = get_datasets("cifar10", xdir, -1)
print(train_data)
print(valid_data)
xloader = torch.utils.data.DataLoader(
train_data, batch_size=256, shuffle=True, num_workers=4, pin_memory=True
)
print(xloader)
print(next(iter(xloader)))
for i, data in enumerate(xloader):
print(i)
test_simple()
| 286 |
364 | <reponame>xiaobing007/dagli
package com.linkedin.dagli.nn.result;
import com.linkedin.dagli.annotation.equality.ValueEquality;
import com.linkedin.dagli.math.mdarray.MDArray;
import com.linkedin.dagli.math.vector.DenseVector;
import com.linkedin.dagli.transformer.AbstractPreparedTransformer1WithInput;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
* Gets a {@link java.util.List} of {@link DenseVector}s as an output from an {@link NNResult}.
*
* For variable-length sequences, there's presently no way to determine the end-of-sequence, so the sequence length is
* taken as fixed. Although technically implementation-dependent, "extra" vectors beyond the actual end-of-sequence are
* likely to be 0-vectors (no non-zero elements).
*/
@ValueEquality
class VectorSequenceFromNNResult extends
AbstractPreparedTransformer1WithInput<NNResult, List<DenseVector>, VectorSequenceFromNNResult> {
private static final long serialVersionUID = 1;
private final int _outputIndex;
/**
* Creates a new instance that gets the output from a {@link NNResult} corresponding to the given index.
*
   * @param outputIndex the index of the output to be retrieved
*/
VectorSequenceFromNNResult(int outputIndex) {
_outputIndex = outputIndex;
}
@Override
public List<DenseVector> apply(NNResult nnResult) {
MDArray mdArray = nnResult.getAsMDArray(_outputIndex);
return IntStream.range(0, Math.toIntExact(mdArray.shape()[0]))
.mapToObj(i -> mdArray.subarrayAt(i).asVector())
.collect(Collectors.toList());
}
}
| 521 |
7,766 | package com.springboot.cloud.file.exception;
import com.springboot.cloud.common.core.exception.BaseException;
import com.springboot.cloud.common.core.exception.ErrorType;
/**
* @author fengdan
 * @date 2021-07-01 14:38
*/
public class MinioFileException extends BaseException {
public MinioFileException() {
super(MinioFileType.FILE_UPLOAD_FAILED);
}
public MinioFileException(String message) {
super(MinioFileType.FILE_UPLOAD_FAILED, message);
}
public MinioFileException(ErrorType errorType, String message) {
super(errorType, message);
}
}
| 220 |
684 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.explorer.ui.reports;
import org.activiti.explorer.ExplorerApp;
import org.activiti.explorer.I18nManager;
import org.activiti.explorer.Messages;
import org.activiti.explorer.ViewManager;
import org.activiti.explorer.ui.custom.ToolBar;
import org.activiti.explorer.ui.custom.ToolbarEntry.ToolbarCommand;
/**
* @author <NAME>
*/
public class ReportsMenuBar extends ToolBar {
private static final long serialVersionUID = 1L;
public static final String ENTRY_RUN_REPORTS = "runReports";
public static final String ENTRY_SAVED_REPORTS = "savedResults";
protected I18nManager i18nManager;
protected ViewManager viewManager;
public ReportsMenuBar() {
this.i18nManager = ExplorerApp.get().getI18nManager();
this.viewManager = ExplorerApp.get().getViewManager();
setWidth("100%");
initToolbarEntries();
}
protected void initToolbarEntries() {
addRunReportsToolbarEntry();
addSavedReportsToolbarEntry();
}
protected void addRunReportsToolbarEntry() {
addToolbarEntry(ENTRY_RUN_REPORTS, i18nManager.getMessage(Messages.REPORTING_MENU_RUN_REPORTS), new ToolbarCommand() {
public void toolBarItemSelected() {
viewManager.showRunReportPage();
}
});
}
protected void addSavedReportsToolbarEntry() {
addToolbarEntry(ENTRY_SAVED_REPORTS, i18nManager.getMessage(Messages.REPORTING_MENU_SAVED_REPORTS), new ToolbarCommand() {
public void toolBarItemSelected() {
viewManager.showSavedReportPage();
}
});
}
}
| 698 |
344 | <filename>src/net/dcsctp/packet/bounded_byte_writer_test.cc
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "net/dcsctp/packet/bounded_byte_writer.h"
#include <vector>
#include "api/array_view.h"
#include "rtc_base/buffer.h"
#include "rtc_base/checks.h"
#include "rtc_base/gunit.h"
#include "test/gmock.h"
namespace dcsctp {
namespace {
using ::testing::ElementsAre;
TEST(BoundedByteWriterTest, CanWriteData) {
std::vector<uint8_t> data(14);
BoundedByteWriter<8> writer(data);
writer.Store32<0>(0x01020304);
writer.Store16<4>(0x0506);
writer.Store8<6>(0x07);
writer.Store8<7>(0x08);
uint8_t variable_data[] = {0, 0, 0, 0, 3, 0};
writer.CopyToVariableData(variable_data);
BoundedByteWriter<6> sub = writer.sub_writer<6>(0);
sub.Store32<0>(0x09000000);
sub.Store16<2>(0x0102);
BoundedByteWriter<2> sub2 = writer.sub_writer<2>(4);
sub2.Store8<1>(0x04);
EXPECT_THAT(data, ElementsAre(1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4));
}
} // namespace
} // namespace dcsctp
| 529 |
2,151 | // Copyright (c) 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef QUICHE_SPDY_PLATFORM_API_SPDY_STRING_H_
#define QUICHE_SPDY_PLATFORM_API_SPDY_STRING_H_
#include "net/spdy/platform/impl/spdy_string_impl.h"
namespace spdy {
using SpdyString = SpdyStringImpl;
} // namespace spdy
#endif // QUICHE_SPDY_PLATFORM_API_SPDY_STRING_H_
| 178 |
1,356 | package org.cloud.sonic.controller.models.enums;
import java.io.Serializable;
/**
* @author JayWenStar
 * @date 2022/3/13 1:49 PM
*/
public enum ConditionEnum implements SonicEnum<Integer>, Serializable {
/**
     * No condition
*/
NONE(0, "none"),
/**
     * if condition
*/
IF(1, "if"),
/**
     * else if condition
*/
ELSE_IF(2, "else_if"),
/**
     * else condition
*/
ELSE(3, "else"),
/**
     * while condition
*/
WHILE(4, "while");
private final Integer value;
private final String name;
ConditionEnum(int value, String name) {
this.value = value;
this.name = name;
}
@Override
public Integer getValue() {
return value;
}
public String getName() {
return name;
}
}
| 378 |
14,668 | <reponame>chromium/chromium
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/test/base/always_on_top_window_killer_win.h"
#include "base/memory/raw_ptr.h"
#include <Windows.h>
#include <ios>
#include <string>
#include "base/command_line.h"
#include "base/cxx17_backports.h"
#include "base/files/file_path.h"
#include "base/logging.h"
#include "chrome/test/base/process_lineage_win.h"
#include "chrome/test/base/save_desktop_snapshot_win.h"
#include "ui/display/win/screen_win.h"
namespace {
constexpr char kDialogFoundBeforeTest[] =
"There is an always on top dialog on the desktop. This was most likely "
"caused by a previous test and may cause this test to fail. Trying to "
"close it;";
constexpr char kDialogFoundPostTest[] =
"There is an always on top dialog on the desktop after this test timed "
"out. This was most likely caused by this test and may cause future tests "
"to fail, trying to close it;";
constexpr char kWindowFoundBeforeTest[] =
"There is an always on top window on the desktop. This may have been "
"caused by a previous test and may cause this test to fail;";
constexpr char kWindowFoundPostTest[] =
"There is an always on top window on the desktop after this test timed "
"out. This may have been caused by this test or a previous test and may "
"cause flakes;";
// A window enumerator that searches for always-on-top windows. A snapshot of
// the screen is saved if any unexpected on-top windows are found.
class WindowEnumerator {
public:
// |run_type| influences which log message is used. |child_command_line|, only
// specified when |run_type| is AFTER_TEST_TIMEOUT, is the command line of the
// child process that timed out.
WindowEnumerator(RunType run_type,
const base::CommandLine* child_command_line);
WindowEnumerator(const WindowEnumerator&) = delete;
WindowEnumerator& operator=(const WindowEnumerator&) = delete;
void Run();
private:
// An EnumWindowsProc invoked by EnumWindows once for each window.
static BOOL CALLBACK OnWindowProc(HWND hwnd, LPARAM l_param);
// Returns true if |hwnd| is an always-on-top window.
static bool IsTopmostWindow(HWND hwnd);
// Returns the class name of |hwnd| or an empty string in case of error.
static std::wstring GetWindowClass(HWND hwnd);
// Returns true if |class_name| is the name of a system dialog.
static bool IsSystemDialogClass(const std::wstring& class_name);
// Returns true if |class_name| is the name of a window owned by the Windows
// shell.
static bool IsShellWindowClass(const std::wstring& class_name);
// Main processing function run for each window.
BOOL OnWindow(HWND hwnd);
const base::FilePath output_dir_;
const RunType run_type_;
const raw_ptr<const base::CommandLine> child_command_line_;
bool saved_snapshot_ = false;
};
WindowEnumerator::WindowEnumerator(RunType run_type,
const base::CommandLine* child_command_line)
: output_dir_(base::CommandLine::ForCurrentProcess()->GetSwitchValuePath(
kSnapshotOutputDir)),
run_type_(run_type),
child_command_line_(child_command_line) {}
void WindowEnumerator::Run() {
if (run_type_ == RunType::AFTER_TEST_TIMEOUT && !output_dir_.empty()) {
base::FilePath snapshot_file = SaveDesktopSnapshot(output_dir_);
if (!snapshot_file.empty()) {
saved_snapshot_ = true;
std::wostringstream sstream;
sstream << "Screen snapshot saved to file: \"" << snapshot_file.value()
<< "\" after timeout of test";
if (child_command_line_) {
sstream << " process with command line: \""
<< child_command_line_->GetCommandLineString() << "\".";
} else {
sstream << ".";
}
LOG(ERROR) << sstream.str();
}
}
::EnumWindows(&OnWindowProc, reinterpret_cast<LPARAM>(this));
}
// static
BOOL CALLBACK WindowEnumerator::OnWindowProc(HWND hwnd, LPARAM l_param) {
return reinterpret_cast<WindowEnumerator*>(l_param)->OnWindow(hwnd);
}
// static
bool WindowEnumerator::IsTopmostWindow(HWND hwnd) {
const LONG ex_styles = ::GetWindowLong(hwnd, GWL_EXSTYLE);
return (ex_styles & WS_EX_TOPMOST) != 0;
}
// static
std::wstring WindowEnumerator::GetWindowClass(HWND hwnd) {
wchar_t buffer[257]; // Max is 256.
buffer[base::size(buffer) - 1] = L'\0';
int name_len = ::GetClassName(hwnd, &buffer[0], base::size(buffer));
if (name_len <= 0 || static_cast<size_t>(name_len) >= base::size(buffer))
return std::wstring();
return std::wstring(&buffer[0], name_len);
}
// static
bool WindowEnumerator::IsSystemDialogClass(const std::wstring& class_name) {
return class_name == L"#32770";
}
// static
bool WindowEnumerator::IsShellWindowClass(const std::wstring& class_name) {
// 'Button' is the start button, 'Shell_TrayWnd' the taskbar, and
// 'Shell_SecondaryTrayWnd' is the taskbar on non-primary displays.
return class_name == L"Button" || class_name == L"Shell_TrayWnd" ||
class_name == L"Shell_SecondaryTrayWnd";
}
BOOL WindowEnumerator::OnWindow(HWND hwnd) {
const BOOL kContinueIterating = TRUE;
if (!::IsWindowVisible(hwnd) || ::IsIconic(hwnd) || !IsTopmostWindow(hwnd))
return kContinueIterating;
std::wstring class_name = GetWindowClass(hwnd);
if (class_name.empty())
return kContinueIterating;
// Ignore specific windows owned by the shell.
if (IsShellWindowClass(class_name))
return kContinueIterating;
// All other always-on-top windows may be problematic, but in theory tests
// should not be creating an always on top window that outlives the test.
// Prepare details of the command line of the test that timed out (if
// provided), the process owning the window, and the location of a snapshot
// taken of the screen.
std::wstring details;
if (LOG_IS_ON(ERROR)) {
std::wostringstream sstream;
if (!IsSystemDialogClass(class_name))
sstream << " window class name: " << class_name << ";";
if (child_command_line_) {
sstream << " subprocess command line: \""
<< child_command_line_->GetCommandLineString() << "\";";
}
// Save a snapshot of the screen if one hasn't already been saved and an
// output directory was specified.
base::FilePath snapshot_file;
if (!saved_snapshot_ && !output_dir_.empty()) {
snapshot_file = SaveDesktopSnapshot(output_dir_);
if (!snapshot_file.empty())
saved_snapshot_ = true;
}
DWORD process_id = 0;
GetWindowThreadProcessId(hwnd, &process_id);
ProcessLineage lineage = ProcessLineage::Create(process_id);
if (!lineage.IsEmpty())
sstream << " owning process lineage: " << lineage.ToString() << ";";
if (!snapshot_file.empty()) {
sstream << " screen snapshot saved to file: \"" << snapshot_file.value()
<< "\";";
}
details = sstream.str();
}
// System dialogs may be present if a child process triggers an assert(), for
// example.
if (IsSystemDialogClass(class_name)) {
LOG(ERROR) << (run_type_ == RunType::BEFORE_SHARD ? kDialogFoundBeforeTest
: kDialogFoundPostTest)
<< details;
// We don't own the dialog, so we can't destroy it. CloseWindow()
// results in iconifying the window. An alternative may be to focus it,
// then send return and wait for close. As we reboot machines running
// interactive ui tests at least every 12 hours we're going with the
// simple for now.
CloseWindow(hwnd);
} else {
LOG(ERROR) << (run_type_ == RunType::BEFORE_SHARD ? kWindowFoundBeforeTest
: kWindowFoundPostTest)
<< details;
// Try to strip the style and iconify the window.
if (::SetWindowLongPtr(
hwnd, GWL_EXSTYLE,
::GetWindowLong(hwnd, GWL_EXSTYLE) & ~WS_EX_TOPMOST)) {
LOG(ERROR) << "Stripped WS_EX_TOPMOST.";
} else {
PLOG(ERROR) << "Failed to strip WS_EX_TOPMOST";
}
if (::ShowWindow(hwnd, SW_FORCEMINIMIZE))
LOG(ERROR) << "Minimized window.";
else
PLOG(ERROR) << "Failed to minimize window";
}
return kContinueIterating;
}
} // namespace
void KillAlwaysOnTopWindows(RunType run_type,
const base::CommandLine* child_command_line) {
WindowEnumerator(run_type, child_command_line).Run();
}
| 3,142 |
1,139 | <filename>Android/HelloMVP/app/src/main/java/com/journaldev/hellomvp/MainContract.java<gh_stars>1000+
package com.journaldev.hellomvp;
/**
* Created by anupamchugh on 11/08/17.
*/
public interface MainContract {
interface MainView {
void showProgress();
void hideProgress();
void setQuote(String string);
}
interface GetQuoteInteractor {
interface OnFinishedListener {
void onFinished(String string);
}
void getNextQuote(OnFinishedListener onFinishedListener);
}
interface Presenter {
void onButtonClick();
void onDestroy();
}
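    // Illustrative flow (comment added for clarity; not part of the original
    // contract): a Presenter implementation typically handles onButtonClick()
    // by calling MainView.showProgress() and GetQuoteInteractor.getNextQuote(),
    // and in OnFinishedListener.onFinished(quote) it calls
    // MainView.setQuote(quote) followed by MainView.hideProgress().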
}
| 253 |
2,996 | // Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.rendering.gltf.model;
/**
* Details where to source the override values
*/
public class GLTFSparseValues {
private int bufferView;
private int byteOffset;
/**
* @return The offset when reading from the buffer view
*/
public int getByteOffset() {
return byteOffset;
}
/**
* @return The index of the buffer view to source the values from
*/
public int getBufferView() {
return bufferView;
}
}
| 199 |
565 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Integration tests for tvae.
These tests only ensure that the software does not crash and that
the API works as expected in terms of input and output data formats,
but correctness of the data values and the internal behavior of the
model are not checked.
"""
import pandas as pd
from sklearn import datasets
from ctgan.synthesizers.tvae import TVAESynthesizer
def test_tvae(tmpdir):
iris = datasets.load_iris()
data = pd.DataFrame(iris.data, columns=iris.feature_names)
data['class'] = pd.Series(iris.target).map(iris.target_names.__getitem__)
tvae = TVAESynthesizer(epochs=10)
tvae.fit(data, ['class'])
path = str(tmpdir / 'test_tvae.pkl')
tvae.save(path)
tvae = TVAESynthesizer.load(path)
sampled = tvae.sample(100)
assert sampled.shape == (100, 5)
assert isinstance(sampled, pd.DataFrame)
assert set(sampled.columns) == set(data.columns)
assert set(sampled.dtypes) == set(data.dtypes)
def test_drop_last_false():
data = pd.DataFrame({
'1': ['a', 'b', 'c'] * 150,
'2': ['a', 'b', 'c'] * 150
})
tvae = TVAESynthesizer(epochs=300)
tvae.fit(data, ['1', '2'])
sampled = tvae.sample(100)
correct = 0
for _, row in sampled.iterrows():
if row['1'] == row['2']:
correct += 1
assert correct >= 95
# TVAE tests that should be implemented in the future.
def test_continuous():
"""Test training the TVAE synthesizer on a small continuous dataset."""
# verify that the distribution of the samples is close to the distribution of the data
# using a kstest.
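    # One possible sketch of that check, kept as a comment; it assumes scipy is
    # available and uses a hypothetical column name 'col':
    #     from scipy.stats import ks_2samp
    #     tvae = TVAESynthesizer(epochs=300)
    #     tvae.fit(data)
    #     sampled = tvae.sample(len(data))
    #     statistic, p_value = ks_2samp(data['col'], sampled['col'])
    #     assert p_value > 0.05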
def test_categorical():
"""Test training the TVAE synthesizer on a small categorical dataset."""
# verify that the distribution of the samples is close to the distribution of the data
# using a cstest.
def test_mixed():
"""Test training the TVAE synthesizer on a small mixed-type dataset."""
# verify that the distribution of the samples is close to the distribution of the data
# using a kstest for continuous + a cstest for categorical.
def test_loss_function():
data = pd.DataFrame({
'1': [float(i) for i in range(1000)],
'2': [float(2 * i) for i in range(1000)]
})
tvae = TVAESynthesizer(epochs=300)
tvae.fit(data)
num_samples = 1000
sampled = tvae.sample(num_samples)
error = 0
for _, row in sampled.iterrows():
error += abs(2 * row['1'] - row['2'])
avg_error = error / num_samples
assert avg_error < 400
| 956 |
2,151 | /*
* Copyright 2016 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "gm.h"
// Draws big rects with clip (0, 0, 35, 35). The size of the rects is given by big.
static void draw_big_rect(SkCanvas* canvas, SkScalar big, const SkPaint& rectPaint) {
// Looks like this:
// +--+-+----+-+----+
// | | | | | |
// |--+-+----+-+----+
// |--+-+----+-+----+
// | | | | | |
// | | | +-+ |
// +--+-+--+ +--+
// +--+-+--+ +--+
// | | | +-+ |
// | | | | | |
// +--+-+----+-+----+
canvas->clipRect({0, 0, 35, 35});
// Align to pixel boundaries.
canvas->translate(0.5, 0.5);
SkRect horiz = SkRect::MakeLTRB(-big, 5, big, 10);
canvas->drawRect(horiz, rectPaint);
SkRect vert = SkRect::MakeLTRB(5, -big, 10, big);
canvas->drawRect(vert, rectPaint);
SkRect fromLeft = SkRect::MakeLTRB(-big, 20, 17, 25);
canvas->drawRect(fromLeft, rectPaint);
SkRect fromTop = SkRect::MakeLTRB(20, -big, 25, 17);
canvas->drawRect(fromTop, rectPaint);
SkRect fromRight = SkRect::MakeLTRB(28, 20, big, 25);
canvas->drawRect(fromRight, rectPaint);
SkRect fromBottom = SkRect::MakeLTRB(20, 28, 25, big);
canvas->drawRect(fromBottom, rectPaint);
SkRect leftBorder = SkRect::MakeLTRB(-2, -1, 0, 35);
canvas->drawRect(leftBorder, rectPaint);
SkRect topBorder = SkRect::MakeLTRB(-1, -2, 35, 0);
canvas->drawRect(topBorder, rectPaint);
SkRect rightBorder = SkRect::MakeLTRB(34, -1, 36, 35);
canvas->drawRect(rightBorder, rectPaint);
SkRect bottomBorder = SkRect::MakeLTRB(-1, 34, 35, 36);
canvas->drawRect(bottomBorder, rectPaint);
SkPaint outOfBoundsPaint;
outOfBoundsPaint.setColor(SK_ColorRED);
outOfBoundsPaint.setStyle(SkPaint::kStroke_Style);
outOfBoundsPaint.setStrokeWidth(0);
SkRect outOfBounds = SkRect::MakeLTRB(-1, -1, 35, 35);
canvas->drawRect(outOfBounds, outOfBoundsPaint);
}
DEF_SIMPLE_GM(bigrect, canvas, 325, 125) {
// Test with sizes:
// - reasonable size (for comparison),
// - outside the range of int32, and
// - outside the range of SkFixed.
static const SkScalar sizes[] = {SkIntToScalar(100), 5e10f, 1e6f};
for (int i = 0; i < 8; i++) {
for (int j = 0; j < 3; j++) {
canvas->save();
canvas->translate(SkIntToScalar(i*40+5), SkIntToScalar(j*40+5));
SkPaint paint;
paint.setColor(SK_ColorBLUE);
// These are the three parameters that affect the behavior of SkDraw::drawRect.
if (i & 1) {
paint.setStyle(SkPaint::kFill_Style);
} else {
paint.setStyle(SkPaint::kStroke_Style);
}
if (i & 2) {
paint.setStrokeWidth(1);
} else {
paint.setStrokeWidth(0);
}
if (i & 4) {
paint.setAntiAlias(true);
} else {
paint.setAntiAlias(false);
}
const SkScalar big = SkFloatToScalar(sizes[j]);
draw_big_rect(canvas, big, paint);
canvas->restore();
}
}
}
| 1,544 |
1,403 | <gh_stars>1000+
package org.dynmap.bukkit.helper.v116_2;
import org.bukkit.block.Biome;
import org.bukkit.craftbukkit.v1_16_R2.CraftWorld;
import java.io.IOException;
import java.util.Arrays;
import org.bukkit.ChunkSnapshot;
import org.bukkit.World;
import org.dynmap.DynmapChunk;
import org.dynmap.DynmapCore;
import org.dynmap.bukkit.helper.AbstractMapChunkCache;
import org.dynmap.bukkit.helper.BukkitVersionHelper;
import org.dynmap.bukkit.helper.SnapshotCache;
import org.dynmap.bukkit.helper.SnapshotCache.SnapshotRec;
import org.dynmap.renderer.DynmapBlockState;
import org.dynmap.utils.DynIntHashMap;
import org.dynmap.utils.VisibilityLimit;
import net.minecraft.server.v1_16_R2.Chunk;
import net.minecraft.server.v1_16_R2.ChunkCoordIntPair;
import net.minecraft.server.v1_16_R2.ChunkRegionLoader;
import net.minecraft.server.v1_16_R2.ChunkStatus;
import net.minecraft.server.v1_16_R2.DataBits;
import net.minecraft.server.v1_16_R2.DataBitsPacked;
import net.minecraft.server.v1_16_R2.NBTTagCompound;
import net.minecraft.server.v1_16_R2.NBTTagList;
/**
* Container for managing chunks - dependent upon using chunk snapshots, since rendering is off server thread
*/
public class MapChunkCache116_2 extends AbstractMapChunkCache {
public static class NBTSnapshot implements Snapshot {
private static interface Section {
public DynmapBlockState getBlockType(int x, int y, int z);
public int getBlockSkyLight(int x, int y, int z);
public int getBlockEmittedLight(int x, int y, int z);
public boolean isEmpty();
}
private final int x, z;
private final Section[] section;
private final int[] hmap; // Height map
private final int[] biome;
private final Object[] biomebase;
private final long captureFulltime;
private final int sectionCnt;
private final long inhabitedTicks;
private static final int BLOCKS_PER_SECTION = 16 * 16 * 16;
private static final int COLUMNS_PER_CHUNK = 16 * 16;
private static final int V1_15_BIOME_PER_CHUNK = 4 * 4 * 64;
private static final byte[] emptyData = new byte[BLOCKS_PER_SECTION / 2];
private static final byte[] fullData = new byte[BLOCKS_PER_SECTION / 2];
static
{
Arrays.fill(fullData, (byte)0xFF);
}
private static byte[] dataCopy(byte[] v) {
if (Arrays.equals(v, emptyData))
return emptyData;
else if (Arrays.equals(v, fullData))
return fullData;
else
return v.clone();
}
private static class EmptySection implements Section {
@Override
public DynmapBlockState getBlockType(int x, int y, int z) {
return DynmapBlockState.AIR;
}
@Override
public int getBlockSkyLight(int x, int y, int z) {
return 15;
}
@Override
public int getBlockEmittedLight(int x, int y, int z) {
return 0;
}
@Override
public boolean isEmpty() {
return true;
}
}
private static final EmptySection empty_section = new EmptySection();
private static class StdSection implements Section {
DynmapBlockState[] states;
byte[] skylight;
byte[] emitlight;
public StdSection() {
states = new DynmapBlockState[BLOCKS_PER_SECTION];
Arrays.fill(states, DynmapBlockState.AIR);
skylight = emptyData;
emitlight = emptyData;
}
@Override
public DynmapBlockState getBlockType(int x, int y, int z) {
return states[((y & 0xF) << 8) | (z << 4) | x];
}
@Override
public int getBlockSkyLight(int x, int y, int z) {
int off = ((y & 0xF) << 7) | (z << 3) | (x >> 1);
return (skylight[off] >> (4 * (x & 1))) & 0xF;
}
@Override
public int getBlockEmittedLight(int x, int y, int z)
{
int off = ((y & 0xF) << 7) | (z << 3) | (x >> 1);
return (emitlight[off] >> (4 * (x & 1))) & 0xF;
}
@Override
public boolean isEmpty() {
return false;
}
}
/**
* Construct empty chunk snapshot
*
* @param x
* @param z
*/
public NBTSnapshot(int worldheight, int x, int z, long captime, long inhabitedTime)
{
this.x = x;
this.z = z;
this.captureFulltime = captime;
this.biome = new int[COLUMNS_PER_CHUNK];
this.biomebase = new Object[COLUMNS_PER_CHUNK];
this.sectionCnt = worldheight / 16;
/* Allocate arrays indexed by section */
this.section = new Section[this.sectionCnt+1];
/* Fill with empty data */
for (int i = 0; i <= this.sectionCnt; i++) {
this.section[i] = empty_section;
}
/* Create empty height map */
this.hmap = new int[16 * 16];
this.inhabitedTicks = inhabitedTime;
}
public NBTSnapshot(NBTTagCompound nbt, int worldheight) {
this.x = nbt.getInt("xPos");
this.z = nbt.getInt("zPos");
this.captureFulltime = 0;
this.hmap = nbt.getIntArray("HeightMap");
this.sectionCnt = worldheight / 16;
if (nbt.hasKey("InhabitedTime")) {
this.inhabitedTicks = nbt.getLong("InhabitedTime");
}
else {
this.inhabitedTicks = 0;
}
/* Allocate arrays indexed by section */
this.section = new Section[this.sectionCnt+1];
/* Fill with empty data */
for (int i = 0; i <= this.sectionCnt; i++) {
this.section[i] = empty_section;
}
/* Get sections */
NBTTagList sect = nbt.getList("Sections", 10);
for (int i = 0; i < sect.size(); i++) {
NBTTagCompound sec = sect.getCompound(i);
int secnum = sec.getByte("Y");
if (secnum >= this.sectionCnt) {
//Log.info("Section " + (int) secnum + " above world height " + worldheight);
continue;
}
if (secnum < 0)
continue;
//System.out.println("section(" + secnum + ")=" + sec.asString());
// Create normal section to initialize
StdSection cursect = new StdSection();
this.section[secnum] = cursect;
DynmapBlockState[] states = cursect.states;
DynmapBlockState[] palette = null;
// If we've got palette and block states list, process non-empty section
if (sec.hasKeyOfType("Palette", 9) && sec.hasKeyOfType("BlockStates", 12)) {
NBTTagList plist = sec.getList("Palette", 10);
long[] statelist = sec.getLongArray("BlockStates");
palette = new DynmapBlockState[plist.size()];
for (int pi = 0; pi < plist.size(); pi++) {
NBTTagCompound tc = plist.getCompound(pi);
String pname = tc.getString("Name");
if (tc.hasKey("Properties")) {
StringBuilder statestr = new StringBuilder();
NBTTagCompound prop = tc.getCompound("Properties");
for (String pid : prop.getKeys()) {
if (statestr.length() > 0) statestr.append(',');
statestr.append(pid).append('=').append(prop.get(pid).asString());
}
palette[pi] = DynmapBlockState.getStateByNameAndState(pname, statestr.toString());
}
if (palette[pi] == null) {
palette[pi] = DynmapBlockState.getBaseStateByName(pname);
}
if (palette[pi] == null) {
palette[pi] = DynmapBlockState.AIR;
}
}
int recsperblock = (4096 + statelist.length - 1) / statelist.length;
int bitsperblock = 64 / recsperblock;
DataBits db = null;
DataBitsPacked dbp = null;
try {
db = new DataBits(bitsperblock, 4096, statelist);
} catch (Exception x) { // Handle legacy encoded
bitsperblock = (statelist.length * 64) / 4096;
dbp = new DataBitsPacked(bitsperblock, 4096, statelist);
}
if (bitsperblock > 8) { // Not palette
for (int j = 0; j < 4096; j++) {
int v = (db != null) ? db.a(j) : dbp.a(j);
states[j] = DynmapBlockState.getStateByGlobalIndex(v);
}
}
else {
for (int j = 0; j < 4096; j++) {
int v = (db != null) ? db.a(j) : dbp.a(j);
states[j] = (v < palette.length) ? palette[v] : DynmapBlockState.AIR;
}
}
}
if (sec.hasKey("BlockLight")) {
cursect.emitlight = dataCopy(sec.getByteArray("BlockLight"));
}
if (sec.hasKey("SkyLight")) {
cursect.skylight = dataCopy(sec.getByteArray("SkyLight"));
}
}
/* Get biome data */
this.biome = new int[COLUMNS_PER_CHUNK];
this.biomebase = new Object[COLUMNS_PER_CHUNK];
Object[] bbl = BukkitVersionHelper.helper.getBiomeBaseList();
if (nbt.hasKey("Biomes")) {
int[] bb = nbt.getIntArray("Biomes");
if (bb != null) {
// If v1.15+ format
if (bb.length > COLUMNS_PER_CHUNK) {
// For now, just pad the grid with the first 16
for (int i = 0; i < COLUMNS_PER_CHUNK; i++) {
int off = ((i >> 4) & 0xC) + ((i >> 2) & 0x3);
int bv = bb[off + 64]; // Offset to y=64
if (bv < 0) bv = 0;
this.biome[i] = bv;
this.biomebase[i] = bbl[bv];
}
}
else { // Else, older chunks
for (int i = 0; i < bb.length; i++) {
int bv = bb[i];
if (bv < 0) bv = 0;
this.biome[i] = bv;
this.biomebase[i] = bbl[bv];
}
}
}
}
}
public int getX()
{
return x;
}
public int getZ()
{
return z;
}
public DynmapBlockState getBlockType(int x, int y, int z)
{
return section[y >> 4].getBlockType(x, y, z);
}
public int getBlockSkyLight(int x, int y, int z)
{
return section[y >> 4].getBlockSkyLight(x, y, z);
}
public int getBlockEmittedLight(int x, int y, int z)
{
return section[y >> 4].getBlockEmittedLight(x, y, z);
}
public int getHighestBlockYAt(int x, int z)
{
return hmap[z << 4 | x];
}
public final long getCaptureFullTime()
{
return captureFulltime;
}
public boolean isSectionEmpty(int sy)
{
return section[sy].isEmpty();
}
public long getInhabitedTicks() {
return inhabitedTicks;
}
@Override
public Biome getBiome(int x, int z) {
return AbstractMapChunkCache.getBiomeByID(biome[z << 4 | x]);
}
@Override
public Object[] getBiomeBaseFromSnapshot() {
return this.biomebase;
}
}
private NBTTagCompound fetchLoadedChunkNBT(World w, int x, int z) {
CraftWorld cw = (CraftWorld) w;
NBTTagCompound nbt = null;
if (cw.isChunkLoaded(x, z)) {
Chunk c = cw.getHandle().getChunkAt(x, z);
if ((c != null) && c.loaded) {
nbt = ChunkRegionLoader.saveChunk(cw.getHandle(), c);
}
}
if (nbt != null) {
nbt = nbt.getCompound("Level");
if (nbt != null) {
String stat = nbt.getString("Status");
ChunkStatus cs = ChunkStatus.a(stat);
if ((stat == null) || (!cs.b(ChunkStatus.LIGHT))) {
nbt = null;
}
}
}
return nbt;
}
private NBTTagCompound loadChunkNBT(World w, int x, int z) {
CraftWorld cw = (CraftWorld) w;
NBTTagCompound nbt = null;
ChunkCoordIntPair cc = new ChunkCoordIntPair(x, z);
try {
nbt = cw.getHandle().getChunkProvider().playerChunkMap.read(cc);
} catch (IOException iox) {
}
if (nbt != null) {
nbt = nbt.getCompound("Level");
if (nbt != null) {
String stat = nbt.getString("Status");
if ((stat == null) || (stat.equals("full") == false)) {
nbt = null;
if ((stat == null) || stat.equals("") && DynmapCore.migrateChunks()) {
Chunk c = cw.getHandle().getChunkAt(x, z);
if (c != null) {
nbt = fetchLoadedChunkNBT(w, x, z);
cw.getHandle().unloadChunk(c);
}
}
}
}
}
return nbt;
}
@Override
public Snapshot wrapChunkSnapshot(ChunkSnapshot css) {
// TODO Auto-generated method stub
return null;
}
// Load chunk snapshots
@Override
public int loadChunks(int max_to_load) {
if(dw.isLoaded() == false)
return 0;
int cnt = 0;
if(iterator == null)
iterator = chunks.listIterator();
DynmapCore.setIgnoreChunkLoads(true);
// Load the required chunks.
while((cnt < max_to_load) && iterator.hasNext()) {
long startTime = System.nanoTime();
DynmapChunk chunk = iterator.next();
boolean vis = true;
if(visible_limits != null) {
vis = false;
for(VisibilityLimit limit : visible_limits) {
if (limit.doIntersectChunk(chunk.x, chunk.z)) {
vis = true;
break;
}
}
}
if(vis && (hidden_limits != null)) {
for(VisibilityLimit limit : hidden_limits) {
if (limit.doIntersectChunk(chunk.x, chunk.z)) {
vis = false;
break;
}
}
}
/* Check if cached chunk snapshot found */
Snapshot ss = null;
long inhabited_ticks = 0;
DynIntHashMap tileData = null;
int idx = (chunk.x-x_min) + (chunk.z - z_min)*x_dim;
SnapshotRec ssr = SnapshotCache.sscache.getSnapshot(dw.getName(), chunk.x, chunk.z, blockdata, biome, biomeraw, highesty);
if(ssr != null) {
inhabited_ticks = ssr.inhabitedTicks;
if(!vis) {
if(hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN)
ss = STONE;
else if(hidestyle == HiddenChunkStyle.FILL_OCEAN)
ss = OCEAN;
else
ss = EMPTY;
}
else {
ss = ssr.ss;
}
snaparray[idx] = ss;
snaptile[idx] = ssr.tileData;
inhabitedTicks[idx] = inhabited_ticks;
endChunkLoad(startTime, ChunkStats.CACHED_SNAPSHOT_HIT);
continue;
}
            // Fetch NBT for chunk if loaded
NBTTagCompound nbt = fetchLoadedChunkNBT(w, chunk.x, chunk.z);
boolean did_load = false;
if (nbt == null) {
                // Load NBT for chunk, if it exists
nbt = loadChunkNBT(w, chunk.x, chunk.z);
did_load = true;
}
if (nbt != null) {
NBTSnapshot nss = new NBTSnapshot(nbt, w.getMaxHeight());
ss = nss;
inhabited_ticks = nss.getInhabitedTicks();
if(!vis) {
if(hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN)
ss = STONE;
else if(hidestyle == HiddenChunkStyle.FILL_OCEAN)
ss = OCEAN;
else
ss = EMPTY;
}
}
else {
ss = EMPTY;
}
ssr = new SnapshotRec();
ssr.ss = ss;
ssr.inhabitedTicks = inhabited_ticks;
ssr.tileData = tileData;
SnapshotCache.sscache.putSnapshot(dw.getName(), chunk.x, chunk.z, ssr, blockdata, biome, biomeraw, highesty);
snaparray[idx] = ss;
snaptile[idx] = ssr.tileData;
inhabitedTicks[idx] = inhabited_ticks;
if (nbt == null)
endChunkLoad(startTime, ChunkStats.UNGENERATED_CHUNKS);
else if (did_load)
endChunkLoad(startTime, ChunkStats.UNLOADED_CHUNKS);
else
endChunkLoad(startTime, ChunkStats.LOADED_CHUNKS);
cnt++;
}
DynmapCore.setIgnoreChunkLoads(false);
if(iterator.hasNext() == false) { /* If we're done */
isempty = true;
/* Fill missing chunks with empty dummy chunk */
for(int i = 0; i < snaparray.length; i++) {
if(snaparray[i] == null)
snaparray[i] = EMPTY;
else if(snaparray[i] != EMPTY)
isempty = false;
}
}
return cnt;
}
}
| 9,593 |
389 | <filename>gosu-test/src/test/java/gw/specContrib/statements/usesStatement/MuhJavaClass.java
package gw.specContrib.statements.usesStatement;
/**
*/
public class MuhJavaClass<T>
{
public static <T> T staticFunc( T t ) {
return t;
}
}
| 95 |
3,084 | /*++
Copyright (C) Microsoft Corporation, All Rights Reserved
Module Name:
Internal.h
Abstract:
This module contains the local type definitions for the UMDF Skeleton
driver sample.
Environment:
Windows User-Mode Driver Framework (WUDF)
--*/
#pragma once
#ifndef ARRAY_SIZE
#define ARRAY_SIZE(x) (sizeof(x) / sizeof(x[0]))
#endif
//
// Include the WUDF DDI
//
#include "wudfddi.h"
//
// Use specstrings for in/out annotation of function parameters.
//
#include "specstrings.h"
//
// Forward definitions of classes in the other header files.
//
typedef class CMyDriver *PCMyDriver;
typedef class CMyDevice *PCMyDevice;
//
// Define the tracing flags.
//
// TODO: Choose a different trace control GUID
//
#define WPP_CONTROL_GUIDS \
WPP_DEFINE_CONTROL_GUID( \
MyDriverTraceControl, (e7541cdd,30e8,4b50,aeb0,51927330ae64), \
\
WPP_DEFINE_BIT(MYDRIVER_ALL_INFO) \
)
#define WPP_FLAG_LEVEL_LOGGER(flag, level) \
WPP_LEVEL_LOGGER(flag)
#define WPP_FLAG_LEVEL_ENABLED(flag, level) \
(WPP_LEVEL_ENABLED(flag) && \
WPP_CONTROL(WPP_BIT_ ## flag).Level >= level)
//
// This comment block is scanned by the trace preprocessor to define our
// Trace function.
//
// begin_wpp config
// FUNC Trace{FLAG=MYDRIVER_ALL_INFO}(LEVEL, MSG, ...);
// end_wpp
//
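// Illustrative call site (an assumption, not code from the original sample):
// after the WPP preprocessor expands the FUNC declaration above, a typical
// invocation looks like
//     Trace(TRACE_LEVEL_INFORMATION, "Device created: %p", device);
// where TRACE_LEVEL_INFORMATION comes from evntrace.h, "device" is a
// hypothetical local variable, and the flag is fixed to MYDRIVER_ALL_INFO.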
//
// Driver specific #defines
//
// TODO: Change these values to be appropriate for your driver.
//
#define MYDRIVER_TRACING_ID L"Microsoft\\UMDF\\Skeleton"
#define MYDRIVER_CLASS_ID { 0xd4112073, 0xd09b, 0x458f, { 0xa5, 0xaa, 0x35, 0xef, 0x21, 0xee, 0xf5, 0xde } }
//
// Include the type specific headers.
//
#include "comsup.h"
#include "driver.h"
#include "device.h"
| 1,091 |
732 | #include <stdio.h>
void reverse_int_array(int * data, unsigned int dataobjs)
{
int i, t;
for(i=0; i<dataobjs/2; i++)
{
t = *(data+i);
*(data+i) = *(data+dataobjs-1-i);
*(data+dataobjs-1-i) = t;
}
}
void reverse_int_ptr_array(int **ptrs, unsigned int ptrobjs)
{
int *t;
int i;
for(i=0; i<ptrobjs/2; i++)
{
t = *(ptrs+i);
*(ptrs+i) = *(ptrs+ptrobjs-1-i);
*(ptrs+ptrobjs-1-i) = t;
}
}
void
reverse_int_ptr_ptrtest(int **ptrs)
{
reverse_int_ptr_array(ptrs, 2);
reverse_int_array(*(ptrs+0), 4);
reverse_int_array(*(ptrs+1), 4);
}
| 334 |
17,703 | <reponame>dcillera/envoy
#pragma once
#include "envoy/upstream/retry.h"
#include "test/integration/test_host_predicate.h"
#include "gmock/gmock.h"
namespace Envoy {
class TestHostPredicateFactory : public Upstream::RetryHostPredicateFactory {
public:
std::string name() const override { return "envoy.test_host_predicate"; }
Upstream::RetryHostPredicateSharedPtr createHostPredicate(const Protobuf::Message&,
uint32_t) override {
return std::make_shared<testing::NiceMock<TestHostPredicate>>();
}
ProtobufTypes::MessagePtr createEmptyConfigProto() override {
// Using Struct instead of a custom per-filter empty config proto
// This is only allowed in tests.
return ProtobufTypes::MessagePtr{new Envoy::ProtobufWkt::Struct()};
}
};
} // namespace Envoy
| 320 |
884 | {
"documentVersion": "2.3",
"jsonSchemaSemanticVersion": "1.0.0",
"imports": [
{
"corpusPath": "/core/cdsConcepts.cdm.json"
},
{
"corpusPath": "/core/wellKnownCDSAttributeGroups.cdm.json"
},
{
"corpusPath": "/core/applicationCommon/foundationCommon/crmCommon/accelerators/nonProfit/nonProfitCore/Account.cdm.json",
"moniker": "base_Account"
},
{
"corpusPath": "/core/applicationCommon/foundationCommon/crmCommon/accelerators/nonProfit/nonProfitCore/Contact.cdm.json",
"moniker": "base_Contact"
},
{
"corpusPath": "/core/applicationCommon/foundationCommon/crmCommon/projectCommon/projectServiceAutomation/Project.cdm.json",
"moniker": "base_Project"
},
{
"corpusPath": "/core/applicationCommon/BusinessUnit.cdm.json"
},
{
"corpusPath": "/core/applicationCommon/foundationCommon/BookableResource.cdm.json"
},
{
"corpusPath": "/core/applicationCommon/Currency.cdm.json"
},
{
"corpusPath": "/core/applicationCommon/foundationCommon/crmCommon/accelerators/nonProfit/nonProfitCore/DeliveryFramework.cdm.json"
},
{
"corpusPath": "/core/applicationCommon/foundationCommon/crmCommon/projectCommon/projectServiceAutomation/ProjectTeam.cdm.json"
},
{
"corpusPath": "/core/applicationCommon/Team.cdm.json"
},
{
"corpusPath": "/core/applicationCommon/User.cdm.json"
},
{
"corpusPath": "Account.cdm.json"
},
{
"corpusPath": "Contact.cdm.json"
},
{
"corpusPath": "Project.cdm.json"
},
{
"corpusPath": "Schedule.cdm.json"
}
]
} | 732 |
1,031 | import typemap_template_parms.*;
public class typemap_template_parms_runme {
static {
try {
System.loadLibrary("typemap_template_parms");
} catch (UnsatisfiedLinkError e) {
System.err.println("Native code library failed to load. See the chapter on Dynamic Linking Problems in the SWIG Java documentation for help.\n" + e);
System.exit(1);
}
}
public static void main(String argv[]) {
Xint xint = new Xint();
int i = 0;
i = xint.bake();
i = xint.make();
i = xint.lake();
i = xint.rake();
i = xint.take();
}
}
| 228 |
32,544 | package com.baeldung.configurationproperties;
import org.springframework.core.env.Environment;
import org.springframework.beans.factory.annotation.*;
import org.springframework.context.annotation.*;
@Configuration
public class DatabaseConfig {
@Autowired private Environment env;
@Bean(name="dataSource")
public Database dataSource() {
Database dataSource = new Database();
dataSource.setUrl(env.getProperty("jdbc.url"));
dataSource.setUsername(env.getProperty("database.username"));
dataSource.setPassword(env.getProperty("database.password"));
return dataSource;
}
} | 210 |
1,144 | package de.metas.handlingunits.storage.impl;
/*
* #%L
* de.metas.handlingunits.base
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import java.math.BigDecimal;
import org.compiere.model.I_M_Transaction;
import org.compiere.model.X_M_Transaction;
import de.metas.handlingunits.storage.IProductStorage;
public class MTransactionProductStorageTest extends AbstractProductStorageTest
{
@Override
protected IProductStorage createStorage(final String qtyStr, final boolean reversal, final boolean outboundTrx)
{
final BigDecimal qty = new BigDecimal(qtyStr);
final String movementType;
if (outboundTrx)
{
movementType = X_M_Transaction.MOVEMENTTYPE_CustomerShipment;
}
else
{
movementType = X_M_Transaction.MOVEMENTTYPE_VendorReceipts;
}
final I_M_Transaction mtrx = helper.createMTransaction(movementType, product, qty);
final MTransactionProductStorage storage = new MTransactionProductStorage(mtrx);
return storage;
}
}
| 509 |
745 |
/*
/~` _ _ _|_. _ _ |_ | _
\_,(_)| | | || ||_|(_||_)|(/_
https://github.com/Naios/continuable
v4.1.0
Copyright(c) 2015 - 2020 <NAME> <denis.blank at outlook dot com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files(the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
**/
#ifndef CONTINUABLE_TRANSFORMS_FUTURE_HPP_INCLUDED
#define CONTINUABLE_TRANSFORMS_FUTURE_HPP_INCLUDED
#include <utility>
#include <continuable/detail/transforms/future.hpp>
namespace cti {
/// \ingroup Transforms
/// \{
namespace transforms {
/// Returns a transform that, when applied to a continuable, starts the
/// continuation chain and returns the asynchronous result as a
/// `std::future<...>`.
///
/// \returns Returns a `std::future<...>` which becomes ready as soon
///          as the continuation chain has finished.
/// The signature of the future depends on the result type:
/// | Continuation type | Return type |
/// | : ------------------------------- | : -------------------------------- |
/// | `continuable_base with <>` | `std::future<void>` |
/// | `continuable_base with <Arg>` | `std::future<Arg>` |
/// | `continuable_base with <Args...>` | `std::future<std::tuple<Args...>>` |
///
/// \attention If exceptions are used, thrown exceptions are forwarded
///            to the returned future. If exceptions are not supported,
/// you shall not pass any errors to the end of the asynchronous
/// call chain!
/// Otherwise this will yield a trap that causes application exit.
///
/// \since 2.0.0
inline auto to_future() {
return [](auto&& continuable) {
return detail::transforms::to_future(
std::forward<decltype(continuable)>(continuable));
};
}
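//
// A minimal usage sketch, added for illustration only (not part of the
// original header). It assumes a ready continuable created through
// `cti::make_ready_continuable` and applies the transform via `.apply()`;
// treat both spellings as assumptions rather than the authoritative API.
//
//   std::future<int> f =
//       cti::make_ready_continuable(42).apply(cti::transforms::to_future());
//   int value = f.get(); // value == 42
//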
} // namespace transforms
/// \}
} // namespace cti
#endif // CONTINUABLE_TRANSFORMS_FUTURE_HPP_INCLUDED
| 1,040 |
9,156 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.stats;
import com.google.common.collect.Maps;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
import javax.management.MBeanServer;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import org.apache.pulsar.broker.PulsarService;
import org.apache.pulsar.common.stats.Metrics;
public class MBeanStatsGenerator {
private MBeanServer mbs;
public static Collection<Metrics> generate(PulsarService pulsar) {
return new MBeanStatsGenerator(pulsar).generate();
}
// hide
MBeanStatsGenerator(PulsarService pulsar) {
this.mbs = ManagementFactory.getPlatformMBeanServer();
}
private Collection<Metrics> generate() {
List<Metrics> metricsCollection = new ArrayList<Metrics>();
@SuppressWarnings("unchecked")
Set<ObjectInstance> instances = mbs.queryMBeans(null, null);
for (ObjectInstance instance : instances) {
String beanName = instance.getObjectName().toString();
// skip GC MBean to avoid recursion
if (beanName.startsWith("java.lang:type=GarbageCollector")) {
continue;
}
Metrics metrics = convert(instance);
if (metrics != null) {
metricsCollection.add(metrics);
}
}
return metricsCollection;
}
private Metrics convert(ObjectInstance instance) {
ObjectName objName = instance.getObjectName();
MBeanInfo info = null;
try {
info = mbs.getMBeanInfo(objName);
} catch (Exception e) {
// [Bug 6158364] skipping MBean if access failed
return null;
}
Metrics metrics = null;
// create a metrics instance by MBean dimension
metrics = createMetricsByDimension(objName);
// get each of attribute,value from the MBean
for (MBeanAttributeInfo attr : info.getAttributes()) {
Object value;
try {
value = mbs.getAttribute(instance.getObjectName(), attr.getName());
metrics.put(attr.getName(), value);
} catch (Exception e) {
// skip
}
}
return metrics;
}
/**
     * Creates an MBean dimension key for metrics.
*
* @param objectName
* @return
*/
private Metrics createMetricsByDimension(ObjectName objectName) {
Map<String, String> dimensionMap = Maps.newHashMap();
dimensionMap.put("MBean", objectName.toString());
// create with current version
return Metrics.create(dimensionMap);
}
}
| 1,381 |
317 | #include "smack.h"
#include <assert.h>
// @expect verified
int main(void) {
int x = 0, y = 4;
while (1) {
x = x + y;
y = y + 4;
assert(x != 30);
}
return 0;
}
| 90 |
1,063 | <gh_stars>1000+
[
{
"category": "``s3``",
"description": "Update s3 client to latest version",
"type": "api-change"
},
{
"category": "``dms``",
"description": "Update dms client to latest version",
"type": "api-change"
},
{
"category": "``rds``",
"description": "Update rds client to latest version",
"type": "api-change"
},
{
"category": "``elbv2``",
"description": "Update elbv2 client to latest version",
"type": "api-change"
},
{
"category": "``application-autoscaling``",
"description": "Update application-autoscaling client to latest version",
"type": "api-change"
}
] | 270 |
4,200 | <filename>plugins/stub-plugin/src/main/java/com/dtolabs/rundeck/plugin/stub/StubNodeLog.java<gh_stars>1000+
package com.dtolabs.rundeck.plugin.stub;
/**
* @author greg
* @since 6/19/17
*/
public class StubNodeLog {
}
| 93 |
864 | /**********************************************************************************************************************
This file is part of the Control Toolbox (https://github.com/ethz-adrl/control-toolbox), copyright by ETH Zurich.
Licensed under the BSD-2 license (see LICENSE file in main directory)
**********************************************************************************************************************/
#pragma once
namespace ct {
namespace core {
namespace tpl {
//! A timer ("stop watch") to record elapsed time based on external time stamps
/*!
* Keeps track of time in a stop watch fashion.
*/
template <typename SCALAR = double>
class ExternallyDrivenTimer
{
public:
//! Default constructor
ExternallyDrivenTimer() { reset(); }
//! Trigger start.
/*!
* Starts the time measurement.
* Can be re-triggered without calling stop(). Simply overrides the start timestamp.
*/
inline void start(const SCALAR& time) { start_time = time; }
//! Trigger stop
/*!
* Stops the time measurement.
*/
inline void stop(const SCALAR& time) { stop_time = time; }
//! Get the elapsed time between calls to start() and stop()
/*!
* @return time
*/
SCALAR getElapsedTime() const { return stop_time - start_time; }
//! Resets the clock.
/*!
* Not needed to be called after start()/stop() calls.
*/
void reset()
{
start_time = (SCALAR)0.0;
stop_time = (SCALAR)0.0;
}
private:
SCALAR start_time; /*!< start time */
SCALAR stop_time; /*!< stop time */
};
}
typedef tpl::ExternallyDrivenTimer<double> ExternallyDrivenTimer;
}
}
| 516 |
1,059 | from utils import youtube_authenticate, get_video_id_by_url, get_channel_id_by_url
def get_comments(youtube, **kwargs):
return youtube.commentThreads().list(
part="snippet",
**kwargs
).execute()
if __name__ == "__main__":
# authenticate to YouTube API
youtube = youtube_authenticate()
# URL can be a channel or a video, to extract comments
url = "https://www.youtube.com/watch?v=jNQXAC9IVRw&ab_channel=jawed"
if "watch" in url:
# that's a video
video_id = get_video_id_by_url(url)
params = {
'videoId': video_id,
'maxResults': 2,
'order': 'relevance', # default is 'time' (newest)
}
else:
# should be a channel
channel_id = get_channel_id_by_url(url)
params = {
'allThreadsRelatedToChannelId': channel_id,
'maxResults': 2,
'order': 'relevance', # default is 'time' (newest)
}
# get the first 2 pages (2 API requests)
n_pages = 2
for i in range(n_pages):
# make API call to get all comments from the channel (including posts & videos)
response = get_comments(youtube, **params)
items = response.get("items")
# if items is empty, breakout of the loop
if not items:
break
for item in items:
comment = item["snippet"]["topLevelComment"]["snippet"]["textDisplay"]
updated_at = item["snippet"]["topLevelComment"]["snippet"]["updatedAt"]
like_count = item["snippet"]["topLevelComment"]["snippet"]["likeCount"]
comment_id = item["snippet"]["topLevelComment"]["id"]
print(f"""\
Comment: {comment}
Likes: {like_count}
Updated At: {updated_at}
==================================\
""")
if "nextPageToken" in response:
# if there is a next page
# add next page token to the params we pass to the function
params["pageToken"] = response["nextPageToken"]
else:
# must be end of comments!!!!
break
print("*"*70)
| 1,001 |
1,567 | #include "utils/mem_pool.h"
#include "gtest/gtest.h"
TEST(MemPoolTest, Basic) {
certain::MemPool pool(3, 5);
std::cout << pool.ToString() << std::endl;
ASSERT_TRUE(pool.Alloc(0) == NULL);
char* p1 = pool.Alloc(3);
char* p2 = pool.Alloc(4);
char* p3 = pool.Alloc(5);
char* p4 = pool.Alloc(3);
char* p5 = pool.Alloc(6);
ASSERT_EQ(pool.pool_alloc_cnt(), 3);
ASSERT_EQ(pool.os_alloc_cnt(), 2);
pool.Free(p1);
pool.Free(p2);
pool.Free(p4);
ASSERT_EQ(pool.pool_alloc_cnt(), 1);
ASSERT_EQ(pool.os_alloc_cnt(), 1);
pool.Free(p3);
pool.Free(p5);
pool.Alloc(3);
pool.Alloc(4);
pool.Alloc(5);
pool.Alloc(3);
pool.Alloc(6);
ASSERT_EQ(pool.pool_alloc_cnt(), 3);
ASSERT_EQ(pool.os_alloc_cnt(), 2);
}
TEST(MemPoolTest, Performance_10000k_Alloc_Free_1KB) {
// need ~1GB memory
int size = 1000000;
certain::MemPool pool(size, 1000);
std::vector<char*> vc;
for (int i = 0; i < size; ++i) {
vc.push_back(pool.Alloc(1000));
}
for (int i = 0; i < size; ++i) {
pool.Free(vc[i]);
}
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
| 538 |
5,169 | {
"name": "AdamPodTest",
"version": "0.1.7",
"summary": "This pod is to test if pods work for us.",
"homepage": "https://judgecardx.com",
"authors": {
"<NAME>": "<EMAIL>"
},
"platforms": {
"ios": "9.0"
},
"source": {
"http": "https://github.com/rajohns08/AdamPodFramework/archive/0.1.6.zip"
},
"vendored_frameworks": [
"AdamPodFramework-0.1.6/AdamPodTest.framework",
"AdamPodFramework-0.1.6/AdamCheckbox.framework"
],
"source_files": "AdamPodFramework-0.1.6/SourceButton.swift",
"license": {
"type": "Commercial",
"text": " Copyright 2012\n Permission is granted to...\n"
},
"pushed_with_swift_version": "3.1"
}
| 322 |
363 | package cn.sddman.download.mvp.v;
import java.util.List;
import cn.sddman.download.mvp.e.TorrentInfoEntity;
public interface TorrentInfoView {
void initTaskListView(List<TorrentInfoEntity> list);
void itemClick(int index);
void startTaskSuccess();
void startTaskFail(String msg);
boolean getIsDown();
void playerViedo(TorrentInfoEntity te);
}
| 130 |
335 | {
"word": "Only",
"definitions": [
"And no one or nothing more besides; solely.",
"No more than (implying that more was expected); merely.",
"No longer ago than.",
"Not until.",
"With the negative or unfortunate result that.",
"In an inevitable but undesirable way."
],
"parts-of-speech": "Adverb"
} | 143 |
821 | /*
* Copyright (C) 2018 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.caliper.worker;
import static com.google.caliper.worker.CaliperProxyActivity.TAG;
import android.util.Log;
import com.google.caliper.bridge.FailureLogMessage;
import com.google.caliper.bridge.KillVmRequest;
import com.google.caliper.bridge.RemoteClasspathMessage;
import com.google.caliper.bridge.StartVmRequest;
import com.google.caliper.bridge.StopProxyRequest;
import com.google.caliper.bridge.VmStoppedMessage;
import com.google.caliper.util.Uuids;
import com.google.caliper.worker.CaliperProxyModule.NativeLibraryDir;
import com.google.caliper.worker.connection.ClientConnectionService;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closer;
import com.google.common.util.concurrent.AbstractExecutionThreadService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Service.Listener;
import com.google.common.util.concurrent.Service.State;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.nio.channels.Channels;
import java.nio.channels.SocketChannel;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
* Proxy for the runner that handles starting up worker VMs for it and connecting those VMs back to
* the runner.
*/
@Singleton
final class CaliperProxy extends AbstractExecutionThreadService {
private final InetSocketAddress clientAddress;
private final ClientConnectionService clientConnection;
private final ExecutorService executor;
/**
* The local classpath on this device that should be used for workers. See {@link
* RemoteClasspathMessage}.
*/
private final String classpath;
/**
* The local path to native libraries on this device that should be used for workers. See {@link
* RemoteClasspathMessage}.
*/
private final String nativeLibraryPath;
/** Environment variables to add to the environment of VM processes we start. */
private final ImmutableMap<String, String> processEnv;
private final ConcurrentMap<UUID, ProcessHolder> processes = new ConcurrentHashMap<>();
@Inject
CaliperProxy(
InetSocketAddress clientAddress,
ClientConnectionService clientConnection,
ExecutorService executor,
String classpath,
@NativeLibraryDir String nativeLibraryPath,
ImmutableMap<String, String> processEnv) {
this.clientAddress = clientAddress;
this.clientConnection = clientConnection;
this.executor = executor;
this.classpath = classpath;
this.nativeLibraryPath = nativeLibraryPath;
this.processEnv = processEnv;
}
@Override
public void startUp() throws Exception {
clientConnection.startAsync();
addListener(
new Listener() {
@Override
public void failed(State from, Throwable e) {
notifyError(e);
}
},
MoreExecutors.directExecutor());
clientConnection.awaitRunning();
}
@Override
public void run() throws Exception {
clientConnection.send(RemoteClasspathMessage.create(classpath, nativeLibraryPath));
while (isRunning()) {
final Object request = clientConnection.receive();
if (request == null || request instanceof StopProxyRequest) {
return;
}
if (request instanceof StartVmRequest) {
executor.execute(
new Runnable() {
@Override
public void run() {
try {
startVm((StartVmRequest) request);
} catch (Throwable e) {
notifyError(e);
}
}
});
} else if (request instanceof KillVmRequest) {
executor.execute(
new Runnable() {
@Override
public void run() {
try {
killVm(((KillVmRequest) request).vmId());
} catch (Throwable e) {
notifyError(e);
}
}
});
}
}
}
@Override
public void shutDown() throws Exception {
try {
for (UUID vmId : processes.keySet()) {
killVm(vmId);
}
for (ProcessHolder holder : processes.values()) {
holder.awaitCompletion();
}
} finally {
executor.shutdown();
clientConnection.stopAsync();
}
}
private void startVm(final StartVmRequest request) throws IOException {
ProcessBuilder builder = new ProcessBuilder().command(request.command());
builder.environment().putAll(processEnv);
Process process = builder.start();
UUID vmId = request.vmId();
// Need threads for each of these things since there's sadly no non-blocking way of doing them.
ImmutableList<Future<?>> futures =
ImmutableList.of(
pipeProcessInputStream(request.stdoutId(), process.getInputStream()),
pipeProcessInputStream(request.stderrId(), process.getErrorStream()),
awaitExit(vmId, process));
processes.put(vmId, new ProcessHolder(process, futures));
}
private void killVm(UUID vmId) {
ProcessHolder holder = processes.get(vmId);
if (holder != null) {
      // Assuming the process is actually killed, the thread waiting for it to exit will observe
      // the exit, and whatever needs to happen when the process exits happens there.
holder.kill();
}
}
/**
* Opens a socket connection using the given ID and then copies the given {@code InputStream} to
* it, effectively piping the output from the process to the other end of the connection.
*/
private Future<?> pipeProcessInputStream(UUID streamId, final InputStream in) {
return executor.submit(
new Runnable() {
@Override
public void run() {
try {
Closer closer = Closer.create();
try {
SocketChannel channel = closer.register(SocketChannel.open(clientAddress));
Uuids.writeToChannel(streamId, channel);
ByteStreams.copy(Channels.newChannel(in), channel);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
} catch (IOException e) {
notifyError(e);
}
}
});
}
private Future<?> awaitExit(final UUID vmId, final Process process) {
return executor.submit(
new Runnable() {
@Override
public void run() {
try {
int exitCode = waitForUninterruptibly(process);
if (clientConnection.isRunning()) {
processes.remove(vmId);
clientConnection.send(VmStoppedMessage.create(vmId, exitCode));
}
} catch (IOException e) {
notifyError(e);
}
}
});
}
private void notifyError(Throwable e) {
Log.e(TAG, e.getMessage(), e);
if (clientConnection.isRunning()) {
try {
clientConnection.send(FailureLogMessage.create(e));
} catch (IOException ignore) {
}
}
}
private static int waitForUninterruptibly(Process process) {
boolean interrupted = false;
try {
while (true) {
try {
return process.waitFor();
} catch (InterruptedException e) {
interrupted = true;
}
}
} finally {
if (interrupted) {
Thread.currentThread().interrupt();
}
}
}
/** Holder for a process and futures representing threads associated with the process. */
private static class ProcessHolder {
private final Process process;
private final ImmutableList<Future<?>> futures;
ProcessHolder(Process process, ImmutableList<Future<?>> futures) {
this.process = process;
this.futures = futures;
}
void kill() {
process.destroy();
}
void awaitCompletion() {
for (Future<?> future : futures) {
try {
future.get();
} catch (Exception ignore) {
}
}
}
}
}
| 3,431 |
722 | <filename>inference/flow/api/java/com/huawei/noah/TaskFlow.java<gh_stars>100-1000
/**
* @file
* @brief Java TaskFlow Class Document
*
* @copyright
* @code
* Copyright (C) 2019. Huawei Technologies Co., Ltd. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
* @endcode
*/
package com.huawei.noah;
import java.util.ArrayList;
public final class TaskFlow {
static
{
loadLibrary("c++_shared", true);
loadLibrary("OpenCL", true);
loadLibrary("BoltModel", false);
loadLibrary("protobuf", false);
loadLibrary("flow", false);
}
public TaskFlow()
{
this.flowAddr = 0;
}
public TaskFlow(String graphPath,
DataType precision,
AffinityType affinityPolicy,
int cpuThreads,
boolean useGPU)
{
this.flowAddr = createFlow(
graphPath, precision.toString(), affinityPolicy.toString(), cpuThreads, useGPU);
}
public native int taskFlowRegisterFunction(String functionName, long function);
public long taskBuild(String graphPath,
int inputNumber,
int[] inputN,
int[] inputC,
int[] inputH,
int[] inputW,
String[] inputNames,
DataType[] inputDataType,
DataFormat[] inputDataFormat,
float[][] inputData,
int outputNumber,
int[] outputN,
int[] outputC,
int[] outputH,
int[] outputW,
String[] outputNames,
DataType[] outputDataType,
DataFormat[] outputDataFormat)
{
String[] inputDataTypeString = new String[inputNumber];
String[] inputDataFormatString = new String[inputNumber];
for (int i = 0; i < inputNumber; i++) {
inputDataTypeString[i] = inputDataType[i].toString();
inputDataFormatString[i] = inputDataFormat[i].toString();
}
String[] outputDataTypeString = new String[outputNumber];
String[] outputDataFormatString = new String[outputNumber];
for (int i = 0; i < outputNumber; i++) {
outputDataTypeString[i] = outputDataType[i].toString();
outputDataFormatString[i] = outputDataFormat[i].toString();
}
long task_addr = createTask(graphPath, inputNumber, inputN, inputC, inputH, inputW,
inputNames, inputDataTypeString, inputDataFormatString, inputData, outputNumber, outputN,
outputC, outputH, outputW, outputNames, outputDataTypeString, outputDataFormatString);
return task_addr;
}
public void enqueue(long task_addr)
{
taskEnqueue(this.flowAddr, task_addr);
this.tasksAddr.add(new Long(task_addr));
}
public long[] dequeue(boolean block)
{
long[] finished_tasks_addr = tasksDequeue(this.flowAddr, block);
for (int i = 0; i < finished_tasks_addr.length; i++) {
int index = this.tasksAddr.indexOf((new Long(finished_tasks_addr[i])));
if (index != -1) {
this.tasksAddr.remove(index);
}
}
return finished_tasks_addr;
}
public BoltResult getOutput(long task_addr, int outputNumber, String[] outputNames)
{
return getTaskResult(
task_addr, outputNumber, outputNames, BoltResult.class.getName().replace(".", "/"));
}
public void destructor()
{
if (this.flowAddr != 0) {
destroyFlow(this.flowAddr);
this.flowAddr = 0;
}
for (int i = 0; i < this.tasksAddr.size(); i++) {
destroyTask(this.tasksAddr.get(i).longValue());
}
this.tasksAddr.clear();
}
private long flowAddr;
private ArrayList<Long> tasksAddr = new ArrayList<Long>();
private native long createFlow(
String graphPath, String precision, String affinityPolicy, int cpuThreads, boolean useGPU);
private native long createTask(String graphPath,
int inputNumber,
int[] inputN,
int[] inputC,
int[] inputH,
int[] inputW,
String[] inputNames,
String[] inputDataType,
String[] inputDataFormat,
float[][] inputData,
int outputNumber,
int[] outputN,
int[] outputC,
int[] outputH,
int[] outputW,
String[] outputNames,
String[] outputDataType,
String[] outputDataFormat);
private native void taskEnqueue(long flow_addr, long task_addr);
private native long[] tasksDequeue(long flow_addr, boolean block);
private native BoltResult getTaskResult(
long task_addr, int outputNumber, String[] outputNames, String boltResultClassPath);
private native void destroyFlow(long flow_Addr);
private native void destroyTask(long task_addr);
private static void loadLibrary(String libraryName, boolean optional)
{
try {
System.loadLibrary(libraryName);
} catch (UnsatisfiedLinkError e) {
if (!optional) {
e.printStackTrace();
}
}
}
}
| 2,368 |
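A rough usage sketch for the TaskFlow wrapper above, pieced together only from the public methods it declares. The model path, tensor shapes, and the DataType/DataFormat/AffinityType constants (FP32, NCHW, CPU_HIGH_PERFORMANCE) are placeholders rather than verified names from the Bolt Java API, so treat everything outside the TaskFlow calls themselves as an assumption and check the shipped enums before using it.

// Sketch only: the path, shapes and enum constants below are illustrative placeholders.
// Assumes this class sits in (or imports from) the com.huawei.noah package shown above.
public class TaskFlowUsageSketch {
    public static void run() {
        String graphPath = "/data/local/tmp/flow_graph.prototxt"; // assumed location
        TaskFlow flow = new TaskFlow(graphPath, DataType.FP32,
                AffinityType.CPU_HIGH_PERFORMANCE, /* cpuThreads */ 2, /* useGPU */ false);

        String[] inputNames = {"input"};
        String[] outputNames = {"output"};
        float[][] inputData = {new float[3 * 224 * 224]};

        long task = flow.taskBuild(graphPath,
                1, new int[]{1}, new int[]{3}, new int[]{224}, new int[]{224},
                inputNames, new DataType[]{DataType.FP32}, new DataFormat[]{DataFormat.NCHW}, inputData,
                1, new int[]{1}, new int[]{1000}, new int[]{1}, new int[]{1},
                outputNames, new DataType[]{DataType.FP32}, new DataFormat[]{DataFormat.NCHW});

        flow.enqueue(task);
        for (long finished : flow.dequeue(/* block */ true)) {
            BoltResult result = flow.getOutput(finished, 1, outputNames); // BoltResult comes with the same API
            System.out.println(result);
        }
        flow.destructor();
    }
}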
348 | <reponame>chamberone/Leaflet.PixiOverlay<filename>docs/data/leg-t2/031/03108132.json
{"nom":"Cazaux-Layrisse","circ":"8ème circonscription","dpt":"Haute-Garonne","inscrits":65,"abs":14,"votants":51,"blancs":1,"nuls":1,"exp":49,"res":[{"nuance":"SOC","nom":"M. <NAME>","voix":40},{"nuance":"REM","nom":"M. <NAME>","voix":9}]} | 139 |
1,056 | <filename>java/java.source.base/test/unit/src/org/netbeans/modules/java/source/indexing/APTUtilsTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.java.source.indexing;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.File;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import javax.swing.event.ChangeListener;
import org.netbeans.api.annotations.common.NonNull;
import org.netbeans.api.java.classpath.ClassPath;
import org.netbeans.api.java.classpath.JavaClassPathConstants;
import org.netbeans.api.java.queries.AnnotationProcessingQuery;
import org.netbeans.api.java.queries.AnnotationProcessingQuery.Trigger;
import org.netbeans.junit.MockServices;
import org.netbeans.junit.NbTestCase;
import org.netbeans.modules.parsing.api.indexing.IndexingManager;
import org.netbeans.modules.parsing.impl.indexing.CacheFolder;
import org.netbeans.spi.java.classpath.ClassPathFactory;
import org.netbeans.spi.java.classpath.ClassPathImplementation;
import org.netbeans.spi.java.classpath.ClassPathProvider;
import org.netbeans.spi.java.classpath.PathResourceImplementation;
import org.netbeans.spi.java.classpath.support.ClassPathSupport;
import org.netbeans.spi.java.queries.AnnotationProcessingQueryImplementation;
import org.netbeans.spi.java.queries.SourceLevelQueryImplementation2;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.ChangeSupport;
import org.openide.util.Lookup;
/**
*
* @author <NAME>
*/
public class APTUtilsTest extends NbTestCase {
private FileObject root1;
private FileObject root2;
private FileObject root3;
private FileObject root4;
private MutableCP processorPath;
private MockHandler handler;
private Logger imLogger;
private Level imLoggerOrigLevel;
public APTUtilsTest(@NonNull final String name) {
super(name);
}
@Override
protected void setUp() throws Exception {
super.setUp();
clearWorkDir();
final FileObject cache = FileUtil.createFolder(
FileUtil.normalizeFile(
new File(getWorkDir(),"cache"))); //NOI18N
CacheFolder.setCacheFolder(cache);
root1 = FileUtil.createFolder(
FileUtil.normalizeFile(
new File(getWorkDir(),"src1"))); //NOI18N
assertNotNull(root1);
assertTrue(root1.isValid());
assertTrue(root1.isFolder());
root2 = FileUtil.createFolder(
FileUtil.normalizeFile(
new File(getWorkDir(),"src2"))); //NOI18N
assertNotNull(root2);
assertTrue(root2.isValid());
assertTrue(root2.isFolder());
root3 = FileUtil.createFolder(
FileUtil.normalizeFile(
new File(getWorkDir(),"src3"))); //NOI18N
assertNotNull(root3);
assertTrue(root3.isValid());
assertTrue(root3.isFolder());
root4 = FileUtil.createFolder(
FileUtil.normalizeFile(
new File(getWorkDir(),"src4"))); //NOI18N
assertNotNull(root4);
assertTrue(root4.isValid());
assertTrue(root4.isFolder());
processorPath = new MutableCP();
processorPath.add(root1);
CPP.cps = Collections.singletonMap(
JavaClassPathConstants.PROCESSOR_PATH,
Collections.singletonMap(root1, ClassPathFactory.createClassPath(processorPath)));
SLQ.result.setSourceLevel("1.5"); //NOI18N
APQ.result.setAnnotationProcessingEnabled(EnumSet.of(Trigger.ON_SCAN));
MockServices.setServices(CPP.class, SLQ.class, APQ.class);
MockHandler.currentRoot = root1;
handler = new MockHandler();
imLogger = Logger.getLogger(IndexingManager.class.getName());
imLoggerOrigLevel = imLogger.getLevel();
imLogger.setLevel(Level.FINEST);
imLogger.addHandler(handler);
}
@Override
protected void tearDown() throws Exception {
if (imLogger != null) {
imLogger.setLevel(imLoggerOrigLevel);
imLogger.removeHandler(handler);
}
super.tearDown();
}
public void testRefreshedIndexOnceAfterProcessorPathChange() throws InterruptedException {
final APTUtils au = APTUtils.get(root1);
assertNotNull(au);
handler.reset();
processorPath.add(root2);
processorPath.add(root3);
processorPath.add(root4);
assertEquals(1, handler.awaitEvents(10, 2500));
handler.reset();
processorPath.remove(root2);
processorPath.remove(root3);
processorPath.remove(root4);
assertEquals(1, handler.awaitEvents(10, 2500));
}
public void testRefreshedIndexOnceAfterSourceLevelChange() throws InterruptedException {
final APTUtils au = APTUtils.get(root1);
assertNotNull(au);
handler.reset();
SLQ.result.setSourceLevel("1.6"); //NOI18N
SLQ.result.setSourceLevel("1.7"); //NOI18N
SLQ.result.setSourceLevel("1.5"); //NOI18N
SLQ.result.setSourceLevel("1.6"); //NOI18N
assertEquals(1, handler.awaitEvents(10, 2500));
}
public void testRefreshedIndexOnceAfterAnnotationProcessingChange() throws InterruptedException {
final APTUtils au = APTUtils.get(root1);
assertNotNull(au);
handler.reset();
APQ.result.setAnnotationProcessingEnabled(EnumSet.of(Trigger.ON_SCAN, Trigger.IN_EDITOR));
APQ.result.setAnnotationProcessingEnabled(EnumSet.noneOf(Trigger.class));
APQ.result.setAnnotationProcessingEnabled(EnumSet.of(Trigger.ON_SCAN));
APQ.result.setAnnotationProcessingEnabled(EnumSet.of(Trigger.IN_EDITOR));
assertEquals(1, handler.awaitEvents(10, 2500));
}
private static final class MockHandler extends Handler {
static FileObject currentRoot;
//@GuardedBy("this")
private int numberOfEvents;
public synchronized void reset() {
numberOfEvents = 0;
}
@Override
public void publish(LogRecord record) {
if ("Request to add indexing job for root: {0}".equals(record.getMessage()) &&
record.getParameters().length == 1 &&
currentRoot.toURL().equals(record.getParameters()[0])) { //NOI18N
synchronized (this) {
numberOfEvents++;
notifyAll();
}
}
}
public int awaitEvents(int count, long deadline) throws InterruptedException {
long st = System.currentTimeMillis();
synchronized (this) {
while (count != numberOfEvents) {
wait(deadline);
long et = System.currentTimeMillis();
if (et-st > deadline) {
break;
}
}
return numberOfEvents;
}
}
@Override
public void flush() {
}
@Override
public void close() throws SecurityException {
}
}
private static final class MutableCP implements ClassPathImplementation {
private final PropertyChangeSupport listeners = new PropertyChangeSupport(this);
private final List<FileObject> roots = Collections.synchronizedList(new ArrayList<FileObject>());
void add(FileObject... fos) {
roots.addAll(Arrays.asList(fos));
listeners.firePropertyChange(PROP_RESOURCES,null,null);
}
void remove(FileObject... fos) {
roots.removeAll(Arrays.asList(fos));
listeners.firePropertyChange(PROP_RESOURCES,null,null);
}
@Override
public List<? extends PathResourceImplementation> getResources() {
final List<PathResourceImplementation> res = new ArrayList<PathResourceImplementation>();
synchronized (roots) {
for (FileObject root : roots) {
res.add(ClassPathSupport.createResource(root.toURL()));
}
}
return res;
}
@Override
public void addPropertyChangeListener(PropertyChangeListener listener) {
listeners.addPropertyChangeListener(listener);
}
@Override
public void removePropertyChangeListener(PropertyChangeListener listener) {
listeners.removePropertyChangeListener(listener);
}
}
public static final class CPP implements ClassPathProvider {
static volatile Map<String,Map<FileObject, ClassPath>> cps;
@Override
public ClassPath findClassPath(FileObject file, String type) {
final Map<String,Map<FileObject, ClassPath>> dc = cps;
if (dc == null) {
return null;
}
final Map<FileObject,ClassPath> root2cps = dc.get(type);
if (root2cps == null) {
return null;
}
return root2cps.get(file);
}
}
public static final class MSL implements SourceLevelQueryImplementation2.Result {
private final ChangeSupport support = new ChangeSupport(this);
private volatile String sl;
void setSourceLevel(String sourceLevel) {
sl = sourceLevel;
support.fireChange();
}
@Override
public String getSourceLevel() {
return sl;
}
@Override
public void addChangeListener(ChangeListener listener) {
support.addChangeListener(listener);
}
@Override
public void removeChangeListener(ChangeListener listener) {
support.removeChangeListener(listener);
}
}
public static final class SLQ implements SourceLevelQueryImplementation2 {
static final MSL result = new MSL();
@Override
public Result getSourceLevel(FileObject javaFile) {
return result;
}
}
public static final class MAP implements AnnotationProcessingQuery.Result {
private final ChangeSupport listeners = new ChangeSupport(this);
private volatile Set<? extends Trigger> mode = Collections.emptySet();
void setAnnotationProcessingEnabled(Set<? extends Trigger> newMode) {
this.mode = newMode;
listeners.fireChange();
}
@Override
public Set<? extends Trigger> annotationProcessingEnabled() {
return mode;
}
@Override
public Iterable<? extends String> annotationProcessorsToRun() {
return null;
}
@Override
public URL sourceOutputDirectory() {
return null;
}
@Override
public Map<? extends String, ? extends String> processorOptions() {
return Collections.emptyMap();
}
@Override
public void addChangeListener(ChangeListener l) {
listeners.addChangeListener(l);
}
@Override
public void removeChangeListener(ChangeListener l) {
listeners.removeChangeListener(l);
}
}
public static final class APQ implements AnnotationProcessingQueryImplementation {
static final MAP result = new MAP();
@Override
public AnnotationProcessingQuery.Result getAnnotationProcessingOptions(FileObject file) {
return result;
}
}
}
| 5,268 |
861 | /*******************************************************************************
* Copyright 2014 Univocity Software Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.univocity.parsers.common.processor;
import com.univocity.parsers.common.*;
import com.univocity.parsers.common.processor.core.*;
import com.univocity.parsers.conversions.*;
/**
*
* A {@link RowProcessor} implementation for associating rows extracted from any implementation of {@link AbstractParser} into {@link MasterDetailRecord} instances.
*
* <p> For each row processed, a call to {@link MasterDetailProcessor#isMasterRecord(String[], Context)} will be made to identify whether or not it is a master row.
* <p> The detail rows are automatically associated with the master record in an instance of {@link MasterDetailRecord}.
* <p> When the master record is fully processed (i.e. {@link MasterDetailRecord} contains a master row and all associated detail rows),
* it is sent to the user for processing in {@link MasterDetailProcessor#masterDetailRecordProcessed(MasterDetailRecord, Context)}.
*
* <p> <b>Note</b> this class extends {@link ObjectRowProcessor} and value conversions provided by {@link Conversion} instances are fully supported.
*
* @see MasterDetailRecord
* @see RowPlacement
* @see AbstractParser
* @see ObjectRowListProcessor
* @see RowProcessor
*
* @author Univocity Software Pty Ltd - <a href="mailto:<EMAIL>"><EMAIL></a>
*
*/
public abstract class MasterDetailProcessor extends AbstractMasterDetailProcessor<ParsingContext> {
/**
* Creates a MasterDetailProcessor
*
* @param rowPlacement indication whether the master records are placed in relation its detail records in the input.
*
* <hr><blockquote><pre>
*
* Master record (Totals) Master record (Totals)
* above detail records under detail records
*
* Totals | 100 Item | 60
* Item | 60 Item | 40
* Item | 40 Totals | 100
* </pre></blockquote><hr>
* @param detailProcessor the {@link ObjectRowListProcessor} that processes detail rows.
*/
public MasterDetailProcessor(RowPlacement rowPlacement, ObjectRowListProcessor detailProcessor) {
super(rowPlacement, detailProcessor);
}
public MasterDetailProcessor(ObjectRowListProcessor detailProcessor) {
super(RowPlacement.TOP, detailProcessor);
}
}
| 867 |
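A hedged sketch of how the class above is typically subclassed, matching the "Totals above detail records" layout from its javadoc. The two overridden hooks are the ones the javadoc names (isMasterRecord and masterDetailRecordProcessed); their exact signatures and modifiers live in AbstractMasterDetailProcessor, which is not shown here, so they are taken on trust. The wildcard imports simply mirror the packages the file above already imports.

import com.univocity.parsers.common.*;
import com.univocity.parsers.common.processor.*;
import com.univocity.parsers.common.processor.core.*;

import java.util.List;

public class MasterDetailSketch {

    // Collects one MasterDetailRecord per "Totals" master row and its detail rows.
    public static MasterDetailProcessor totalsAboveDetails(final List<MasterDetailRecord> sink) {
        ObjectRowListProcessor detailProcessor = new ObjectRowListProcessor();
        return new MasterDetailProcessor(RowPlacement.TOP, detailProcessor) {
            @Override
            protected boolean isMasterRecord(String[] row, ParsingContext context) {
                // master rows are the "Totals" rows from the javadoc example above
                return row.length > 0 && "Totals".equals(row[0]);
            }

            @Override
            protected void masterDetailRecordProcessed(MasterDetailRecord record, ParsingContext context) {
                sink.add(record); // master row plus all of its associated detail rows
            }
        };
    }
}

The returned processor would then be registered on the parser settings (setProcessor or setRowProcessor, depending on the library version) before the parse is run.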
4,551 | package org.robolectric.shadows;
import static android.app.DownloadManager.Request;
import static android.os.Build.VERSION_CODES.Q;
import static com.google.common.truth.Truth.assertThat;
import static org.robolectric.Shadows.shadowOf;
import static org.robolectric.shadows.ShadowDownloadManager.ShadowRequest;
import android.app.DownloadManager;
import android.database.Cursor;
import android.net.Uri;
import android.os.Environment;
import android.util.Pair;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
@RunWith(AndroidJUnit4.class)
public class ShadowDownloadManagerTest {
private final Uri uri = Uri.parse("http://example.com/foo.mp4");
private final Uri destination = Uri.parse("file:///storage/foo.mp4");
private final Request request = new Request(uri);
private final ShadowRequest shadow = shadowOf(request);
@Test
public void request_shouldGetUri() {
assertThat(shadow.getUri().toString()).isEqualTo("http://example.com/foo.mp4");
}
@Test
public void request_shouldGetDestinationUri() {
request.setDestinationUri(Uri.parse("/storage/media/foo.mp4"));
assertThat(shadow.getDestination().toString()).isEqualTo("/storage/media/foo.mp4");
}
@Test
public void request_shouldGetTitle() {
request.setTitle("Title");
assertThat(shadow.getTitle().toString()).isEqualTo("Title");
}
@Test
public void request_shouldGetDescription() {
request.setDescription("Description");
assertThat(shadow.getDescription().toString()).isEqualTo("Description");
}
@Test
public void request_shouldGetMimeType() {
request.setMimeType("application/json");
assertThat(shadow.getMimeType().toString()).isEqualTo("application/json");
}
@Test
public void request_shouldGetRequestHeaders() {
request.addRequestHeader("Authorization", "Bearer token");
List<Pair<String, String>> headers = shadow.getRequestHeaders();
assertThat(headers).hasSize(1);
assertThat(headers.get(0).first).isEqualTo("Authorization");
assertThat(headers.get(0).second).isEqualTo("Bearer token");
}
@Test
public void request_shouldGetNotificationVisibility() {
request.setNotificationVisibility(Request.VISIBILITY_VISIBLE);
assertThat(shadow.getNotificationVisibility()).isEqualTo(Request.VISIBILITY_VISIBLE);
}
@Test
public void request_shouldGetAllowedNetworkTypes() {
request.setAllowedNetworkTypes(Request.NETWORK_BLUETOOTH);
assertThat(shadow.getAllowedNetworkTypes()).isEqualTo(Request.NETWORK_BLUETOOTH);
}
@Test
public void request_shouldGetAllowedOverRoaming() {
request.setAllowedOverRoaming(true);
assertThat(shadow.getAllowedOverRoaming()).isTrue();
}
@Test
public void request_shouldGetAllowedOverMetered() {
request.setAllowedOverMetered(true);
assertThat(shadow.getAllowedOverMetered()).isTrue();
}
@Test
public void request_shouldGetVisibleInDownloadsUi() {
request.setVisibleInDownloadsUi(true);
assertThat(shadow.getVisibleInDownloadsUi()).isTrue();
}
@Test
public void enqueue_shouldAddRequest() {
ShadowDownloadManager manager = new ShadowDownloadManager();
long id = manager.enqueue(request);
assertThat(manager.getRequestCount()).isEqualTo(1);
assertThat(manager.getRequest(id)).isEqualTo(request);
}
@Test
public void query_shouldReturnCursor() {
ShadowDownloadManager manager = new ShadowDownloadManager();
long id = manager.enqueue(request);
Cursor cursor = manager.query(new DownloadManager.Query().setFilterById(id));
assertThat(cursor.getCount()).isEqualTo(1);
assertThat(cursor.moveToNext()).isTrue();
}
@Test
public void query_shouldReturnColumnIndexes() {
ShadowDownloadManager manager = new ShadowDownloadManager();
long id = manager.enqueue(request.setDestinationUri(destination));
Cursor cursor = manager.query(new DownloadManager.Query().setFilterById(id));
assertThat(cursor.getColumnIndex(DownloadManager.COLUMN_URI)).isAtLeast(0);
assertThat(cursor.getColumnIndex(DownloadManager.COLUMN_LOCAL_URI)).isAtLeast(0);
assertThat(cursor.getColumnIndex(DownloadManager.COLUMN_LOCAL_FILENAME)).isAtLeast(0);
assertThat(cursor.getColumnIndex(DownloadManager.COLUMN_DESCRIPTION)).isAtLeast(0);
assertThat(cursor.getColumnIndex(DownloadManager.COLUMN_REASON)).isAtLeast(0);
assertThat(cursor.getColumnIndex(DownloadManager.COLUMN_STATUS)).isAtLeast(0);
}
@Test
public void query_shouldReturnColumnValues() {
ShadowDownloadManager manager = new ShadowDownloadManager();
long id = manager.enqueue(request.setDestinationUri(destination));
Cursor cursor = manager.query(new DownloadManager.Query().setFilterById(id));
cursor.moveToNext();
assertThat(cursor.getString(cursor.getColumnIndex(DownloadManager.COLUMN_URI)))
.isEqualTo(uri.toString());
assertThat(cursor.getString(cursor.getColumnIndex(DownloadManager.COLUMN_LOCAL_URI)))
.isEqualTo(destination.toString());
}
@Test
public void query_shouldHandleEmptyIds() {
ShadowDownloadManager manager = new ShadowDownloadManager();
assertThat(manager.query(new DownloadManager.Query())).isNotNull();
}
@Test
public void query_shouldReturnAll() {
ShadowDownloadManager manager = new ShadowDownloadManager();
manager.enqueue(request.setDestinationUri(destination));
Uri secondUri = Uri.parse("http://example.com/foo2.mp4");
Uri secondDestination = Uri.parse("file:///storage/foo2.mp4");
Request secondRequest = new Request(secondUri);
manager.enqueue(secondRequest.setDestinationUri(secondDestination));
Cursor cursor = manager.query(new DownloadManager.Query());
cursor.moveToNext();
assertThat(cursor.getString(cursor.getColumnIndex(DownloadManager.COLUMN_URI)))
.isEqualTo(uri.toString());
assertThat(cursor.getString(cursor.getColumnIndex(DownloadManager.COLUMN_LOCAL_URI)))
.isEqualTo(destination.toString());
cursor.moveToNext();
assertThat(cursor.getString(cursor.getColumnIndex(DownloadManager.COLUMN_URI)))
.isEqualTo(secondUri.toString());
assertThat(cursor.getString(cursor.getColumnIndex(DownloadManager.COLUMN_LOCAL_URI)))
.isEqualTo(secondDestination.toString());
}
@Test
public void request_shouldSetDestinationInExternalPublicDir_publicDirectories() throws Exception {
shadow.setDestinationInExternalPublicDir(Environment.DIRECTORY_DOWNLOADS, "foo.mp4");
assertThat(shadow.getDestination().getLastPathSegment()).isEqualTo("foo.mp4");
}
@Config(minSdk = Q)
@Test(expected = IllegalStateException.class)
public void request_shouldNotSetDestinationInExternalPublicDir_privateDirectories()
throws Exception {
shadow.setDestinationInExternalPublicDir("bar", "foo.mp4");
}
}
| 2,298 |
634 | <reponame>halotroop2288/consulo<gh_stars>100-1000
/*
* Copyright 2013-2020 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consulo.ui.desktop.internal;
import consulo.disposer.Disposable;
import consulo.ui.FocusManager;
import consulo.ui.event.GlobalFocusListener;
import javax.annotation.Nonnull;
import java.awt.*;
import java.beans.PropertyChangeListener;
/**
* @author VISTALL
* @since 2020-11-21
*/
public class DesktopFocusManager implements FocusManager {
public static final DesktopFocusManager ourInstance = new DesktopFocusManager();
@Nonnull
@Override
public Disposable addListener(@Nonnull GlobalFocusListener focusListener) {
PropertyChangeListener listener = evt -> focusListener.focusChanged();
KeyboardFocusManager keyboardFocusManager = KeyboardFocusManager.getCurrentKeyboardFocusManager();
keyboardFocusManager.addPropertyChangeListener("focusOwner", listener);
return () -> keyboardFocusManager.removePropertyChangeListener("focusOwner", listener);
}
}
| 413 |
3,651 | <reponame>aberdev/orientdb
/*
*
* * Copyright 2010-2016 OrientDB LTD (http://orientdb.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://orientdb.com
*
*/
package com.orientechnologies.common.concur.lock;
import com.orientechnologies.common.types.OModifiableInteger;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.AbstractOwnableSynchronizer;
import java.util.concurrent.locks.LockSupport;
/**
* @author <NAME> (a.lomakin-at-orientdb.com)
* @since 8/18/14
*/
public class OReadersWriterSpinLock extends AbstractOwnableSynchronizer {
private static final long serialVersionUID = 7975120282194559960L;
private final transient LongAdder distributedCounter;
private final transient AtomicReference<WNode> tail = new AtomicReference<WNode>();
private final transient ThreadLocal<OModifiableInteger> lockHolds = new InitOModifiableInteger();
private final transient ThreadLocal<WNode> myNode = new InitWNode();
public OReadersWriterSpinLock() {
final WNode wNode = new WNode();
wNode.locked = false;
tail.set(wNode);
distributedCounter = new LongAdder();
}
  /**
   * Tries to acquire the read lock within the provided time interval, returning once the lock is
   * acquired or the interval has elapsed.
   *
   * @param timeout Maximum time, in nanoseconds, to wait for the read lock.
   * @return <code>true</code> if the read lock was acquired.
   */
public boolean tryAcquireReadLock(long timeout) {
final OModifiableInteger lHolds = lockHolds.get();
final int holds = lHolds.intValue();
if (holds > 0) {
// we have already acquire read lock
lHolds.increment();
return true;
} else if (holds < 0) {
// write lock is acquired before, do nothing
return true;
}
distributedCounter.increment();
WNode wNode = tail.get();
final long start = System.nanoTime();
while (wNode.locked) {
distributedCounter.decrement();
while (wNode.locked && wNode == tail.get()) {
wNode.waitingReaders.put(Thread.currentThread(), Boolean.TRUE);
if (wNode.locked && wNode == tail.get()) {
final long parkTimeout = timeout - (System.nanoTime() - start);
if (parkTimeout > 0) {
LockSupport.parkNanos(this, parkTimeout);
} else {
return false;
}
}
wNode = tail.get();
if (System.nanoTime() - start > timeout) {
return false;
}
}
distributedCounter.increment();
wNode = tail.get();
if (System.nanoTime() - start > timeout) {
distributedCounter.decrement();
return false;
}
}
lHolds.increment();
assert lHolds.intValue() == 1;
return true;
}
public void acquireReadLock() {
final OModifiableInteger lHolds = lockHolds.get();
final int holds = lHolds.intValue();
if (holds > 0) {
// we have already acquire read lock
lHolds.increment();
return;
} else if (holds < 0) {
// write lock is acquired before, do nothing
return;
}
distributedCounter.increment();
WNode wNode = tail.get();
while (wNode.locked) {
distributedCounter.decrement();
while (wNode.locked && wNode == tail.get()) {
wNode.waitingReaders.put(Thread.currentThread(), Boolean.TRUE);
if (wNode.locked && wNode == tail.get()) LockSupport.park(this);
wNode = tail.get();
}
distributedCounter.increment();
wNode = tail.get();
}
lHolds.increment();
assert lHolds.intValue() == 1;
}
public void releaseReadLock() {
final OModifiableInteger lHolds = lockHolds.get();
final int holds = lHolds.intValue();
if (holds > 1) {
lHolds.decrement();
return;
} else if (holds < 0) {
// write lock was acquired before, do nothing
return;
}
distributedCounter.decrement();
lHolds.decrement();
assert lHolds.intValue() == 0;
}
public void acquireWriteLock() {
final OModifiableInteger lHolds = lockHolds.get();
if (lHolds.intValue() < 0) {
lHolds.decrement();
return;
}
final WNode node = myNode.get();
node.locked = true;
final WNode pNode = tail.getAndSet(myNode.get());
while (pNode.locked) {
pNode.waitingWriter = Thread.currentThread();
if (pNode.locked) LockSupport.park(this);
}
pNode.waitingWriter = null;
while (distributedCounter.sum() != 0) {
Thread.yield();
}
setExclusiveOwnerThread(Thread.currentThread());
lHolds.decrement();
assert lHolds.intValue() == -1;
}
public void releaseWriteLock() {
final OModifiableInteger lHolds = lockHolds.get();
if (lHolds.intValue() < -1) {
lHolds.increment();
return;
}
setExclusiveOwnerThread(null);
final WNode node = myNode.get();
myNode.set(new WNode());
node.locked = false;
final Thread waitingWriter = node.waitingWriter;
if (waitingWriter != null) LockSupport.unpark(waitingWriter);
while (!node.waitingReaders.isEmpty()) {
final Set<Thread> readers = node.waitingReaders.keySet();
final Iterator<Thread> threadIterator = readers.iterator();
while (threadIterator.hasNext()) {
final Thread reader = threadIterator.next();
threadIterator.remove();
LockSupport.unpark(reader);
}
}
lHolds.increment();
assert lHolds.intValue() == 0;
}
private static final class InitWNode extends ThreadLocal<WNode> {
@Override
protected WNode initialValue() {
return new WNode();
}
}
private static final class InitOModifiableInteger extends ThreadLocal<OModifiableInteger> {
@Override
protected OModifiableInteger initialValue() {
return new OModifiableInteger();
}
}
private static final class WNode {
private final ConcurrentHashMap<Thread, Boolean> waitingReaders =
new ConcurrentHashMap<Thread, Boolean>();
private volatile boolean locked = true;
private volatile Thread waitingWriter;
}
}
| 2,495 |
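A small usage sketch for the spin lock above, using only the acquire/release methods it declares. The try/finally pairing is the caller's responsibility (nothing in the class enforces it), and the re-entrancy shown is what the lockHolds counters above imply.

import com.orientechnologies.common.concur.lock.OReadersWriterSpinLock;

public class SpinLockUsageSketch {
    private final OReadersWriterSpinLock rwLock = new OReadersWriterSpinLock();
    private long sharedValue;

    long read() {
        rwLock.acquireReadLock();          // re-entrant for the same thread, per lockHolds above
        try {
            return sharedValue;
        } finally {
            rwLock.releaseReadLock();
        }
    }

    void write(long v) {
        rwLock.acquireWriteLock();         // waits for the reader counter to drain to zero
        try {
            sharedValue = v;
        } finally {
            rwLock.releaseWriteLock();
        }
    }

    Long readWithTimeout() {
        // timeout is in nanoseconds, matching the System.nanoTime() arithmetic above
        if (!rwLock.tryAcquireReadLock(1_000_000L)) {
            return null;                   // could not get the read lock within ~1 ms
        }
        try {
            return sharedValue;
        } finally {
            rwLock.releaseReadLock();
        }
    }
}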
665 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.valuesemantics;
import javax.inject.Named;
import org.springframework.stereotype.Component;
import org.apache.isis.applib.adapters.ValueSemanticsAbstract;
import org.apache.isis.applib.adapters.DefaultsProvider;
import org.apache.isis.applib.adapters.EncoderDecoder;
import org.apache.isis.applib.adapters.Parser;
import org.apache.isis.applib.adapters.Renderer;
import org.apache.isis.schema.common.v2.ValueType;
import lombok.val;
/**
* due to auto-boxing also handles the primitive variant
*/
@Component
@Named("isis.val.DoubleValueSemantics")
public class DoubleValueSemantics
extends ValueSemanticsAbstract<Double>
implements
DefaultsProvider<Double>,
EncoderDecoder<Double>,
Parser<Double>,
Renderer<Double> {
@Override
public Class<Double> getCorrespondingClass() {
return Double.class;
}
@Override
public ValueType getSchemaValueType() {
return ValueType.DOUBLE;
}
@Override
public Double getDefaultValue() {
return 0.;
}
// -- ENCODER DECODER
@Override
public String toEncodedString(final Double object) {
return object.toString();
}
@Override
public Double fromEncodedString(final String data) {
return Double.valueOf(data);
}
// -- RENDERER
@Override
public String simpleTextRepresentation(final Context context, final Double value) {
return render(value, getNumberFormat(context)::format);
}
// -- PARSER
@Override
public String parseableTextRepresentation(final Context context, final Double value) {
return value==null
? null
: getNumberFormat(context)
.format(value);
}
@Override
public Double parseTextRepresentation(final Context context, final String text) {
//TODO at least overflow should be detected
val bigDec = super.parseDecimal(context, text);
return bigDec!=null
? bigDec.doubleValue() // simply ignoring loss of precision or overflow
: null;
}
@Override
public int typicalLength() {
//TODO research - legacy value, what motivates this number?
return 10;
}
@Override
public int maxLength() {
//TODO research - legacy value, what motivates this number?
return 25;
}
}
| 1,123 |
2,959 | from unittest.case import TestCase
from unittest.mock import patch
from click import ClickException
from samcli.lib.utils import configuration
class TestConfiguration(TestCase):
def test_config_is_read(self):
self.assertIsInstance(configuration.config, dict)
self.assertIn("app_template_repo_commit", configuration.config)
@patch("samcli.lib.utils.configuration.config")
def test_get_app_template_repo_commit_return_correct_value(self, config_mock):
config_mock.get.return_value = "some_commit_hash"
commit_hash = configuration.get_app_template_repo_commit()
config_mock.get.assert_called_once_with("app_template_repo_commit", None)
self.assertEqual(commit_hash, "some_commit_hash")
@patch("samcli.lib.utils.configuration.config")
def test_get_app_template_repo_commit_error(self, config_mock):
config_mock.get.return_value = None
with self.assertRaises(ClickException):
configuration.get_app_template_repo_commit()
config_mock.get.assert_called_once_with("app_template_repo_commit", None)
| 415 |
361 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alipay.sofa.registry.server.session.listener;
/**
*
* @author shangyu.wh
* @version $Id: PushTaskSender.java, v 0.1 2018-12-18 19:31 shangyu.wh Exp $
*/
import com.alipay.sofa.registry.task.listener.TaskEvent;
/**
*
* @author shangyu.wh
* @version $Id: PushTask.java, v 0.1 2018-12-11 14:22 shangyu.wh Exp $
*/
public interface PushTaskSender {
enum PushDataType {
RECEIVE_DATA, USER_ELEMENT, USER_ELEMENT_MULTI
}
void executePushAsync(TaskEvent event);
PushDataType getPushDataType();
} | 411 |
625 | package java.nio.internal;
public class MemoryBlock {
}
| 18 |
357 | /*
* Copyright © 2018 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an “AS IS” BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#include "includes.h"
DWORD
VmDirSortedLinkedListCreate(
PFN_SORTED_LINKEDLIST_INSERT_COMPARE pCompareFunc,
PVDIR_SORTED_LINKED_LIST* ppSortedLinkedList
)
{
DWORD dwError = 0;
PVDIR_SORTED_LINKED_LIST pSortedLinkedList = NULL;
if (!pCompareFunc || !ppSortedLinkedList)
{
BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_INVALID_PARAMETER);
}
dwError = VmDirAllocateMemory(sizeof(VDIR_SORTED_LINKED_LIST), (PVOID*)&pSortedLinkedList);
BAIL_ON_VMDIR_ERROR(dwError);
dwError = VmDirLinkedListCreate(&pSortedLinkedList->pList);
BAIL_ON_VMDIR_ERROR(dwError);
pSortedLinkedList->pCompareFunc = pCompareFunc;
*ppSortedLinkedList = pSortedLinkedList;
cleanup:
return dwError;
error:
VmDirFreeSortedLinkedList(pSortedLinkedList);
VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "failed, error (%d)", dwError);
goto cleanup;
}
VOID
VmDirFreeSortedLinkedList(
PVDIR_SORTED_LINKED_LIST pSortedList
)
{
if (pSortedList)
{
VmDirFreeLinkedList(pSortedList->pList);
VMDIR_SAFE_FREE_MEMORY(pSortedList);
}
}
DWORD
VmDirSortedLinkedListInsert(
PVDIR_SORTED_LINKED_LIST pSortedList,
PVOID pElement
)
{
DWORD dwError = 0;
PVDIR_LINKED_LIST pList = NULL;
PVDIR_LINKED_LIST_NODE pCurrNode = NULL;
PVDIR_LINKED_LIST_NODE pNewNode = NULL;
if (!pSortedList || !pElement)
{
BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_INVALID_PARAMETER);
}
pList = pSortedList->pList;
if (pList->pHead == NULL || pSortedList->pCompareFunc(pElement, pList->pHead->pElement))
{
dwError = VmDirLinkedListInsertHead(pList, pElement, NULL);
BAIL_ON_VMDIR_ERROR(dwError);
}
else
{
pCurrNode = pList->pHead;
while (pCurrNode->pNext && pSortedList->pCompareFunc(pCurrNode->pNext->pElement, pElement))
{
pCurrNode = pCurrNode->pNext;
}
if (pCurrNode->pNext == NULL)
{
dwError = VmDirLinkedListInsertTail(pList, pElement, NULL);
BAIL_ON_VMDIR_ERROR(dwError);
}
else
{
dwError = VmDirAllocateMemory(sizeof(VDIR_LINKED_LIST_NODE), (PVOID*)&pNewNode);
BAIL_ON_VMDIR_ERROR(dwError);
pNewNode->pElement = pElement;
//Manipulate pNext and pPrev
pNewNode->pNext = pCurrNode->pNext;
pNewNode->pPrev = pCurrNode;
pNewNode->pNext->pPrev = pNewNode;
pCurrNode->pNext = pNewNode;
}
}
cleanup:
return dwError;
error:
VMDIR_SAFE_FREE_MEMORY(pNewNode);
VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "failed, error (%d)", dwError);
goto cleanup;
}
| 1,637 |
1,350 | <reponame>ppartarr/azure-sdk-for-java
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.cognitiveservices.vision.faceapi.models;
import org.joda.time.DateTime;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Training status object.
*/
public class TrainingStatus {
/**
* Training status: notstarted, running, succeeded, failed. If the training
* process is waiting to perform, the status is notstarted. If the training
* is ongoing, the status is running. Status succeed means this person
* group is ready for Face - Identify. Status failed is often caused by no
* person or no persisted face exist in the person group. Possible values
* include: 'nonstarted', 'running', 'succeeded', 'failed'.
*/
@JsonProperty(value = "status", required = true)
private TrainingStatusType status;
/**
* A combined UTC date and time string that describes person group created
* time.
*/
@JsonProperty(value = "createdDateTime", required = true)
private DateTime created;
/**
   * Person group last modify time in UTC; may be null when the
   * person group has not been successfully trained.
*/
@JsonProperty(value = "lastActionDateTime")
private DateTime lastAction;
/**
* Show failure message when training failed (omitted when training
* succeed).
*/
@JsonProperty(value = "message")
private String message;
/**
* Get the status value.
*
* @return the status value
*/
public TrainingStatusType status() {
return this.status;
}
/**
* Set the status value.
*
* @param status the status value to set
* @return the TrainingStatus object itself.
*/
public TrainingStatus withStatus(TrainingStatusType status) {
this.status = status;
return this;
}
/**
* Get the created value.
*
* @return the created value
*/
public DateTime created() {
return this.created;
}
/**
* Set the created value.
*
* @param created the created value to set
* @return the TrainingStatus object itself.
*/
public TrainingStatus withCreated(DateTime created) {
this.created = created;
return this;
}
/**
* Get the lastAction value.
*
* @return the lastAction value
*/
public DateTime lastAction() {
return this.lastAction;
}
/**
* Set the lastAction value.
*
* @param lastAction the lastAction value to set
* @return the TrainingStatus object itself.
*/
public TrainingStatus withLastAction(DateTime lastAction) {
this.lastAction = lastAction;
return this;
}
/**
* Get the message value.
*
* @return the message value
*/
public String message() {
return this.message;
}
/**
* Set the message value.
*
* @param message the message value to set
* @return the TrainingStatus object itself.
*/
public TrainingStatus withMessage(String message) {
this.message = message;
return this;
}
}
| 1,192 |
340 | <filename>ccron/test/include/ticks_generator_mocks.hpp<gh_stars>100-1000
// Concord
//
// Copyright (c) 2021 VMware, Inc. All Rights Reserved.
//
// This product is licensed to you under the Apache 2.0 license (the
// "License"). You may not use this product except in compliance with the
// Apache 2.0 License.
//
// This product may include a number of subcomponents with separate copyright
// notices and license terms. Your use of these subcomponents is subject to the
// terms and conditions of the subcomponent's license, as noted in the LICENSE
// file.
#pragma once
#include "ccron/ticks_generator.hpp"
#include "IncomingMsgsStorage.hpp"
#include "InternalBFTClient.hpp"
#include "IPendingRequest.hpp"
#include <iterator>
#include <memory>
#include <string>
#include <vector>
namespace concord::cron::test {
struct IncomingMsgsStorageMock : public bftEngine::impl::IncomingMsgsStorage {
void start() override{};
void stop() override{};
bool isRunning() const override { return true; };
bool pushExternalMsg(std::unique_ptr<MessageBase> msg) override { return true; }
bool pushExternalMsg(std::unique_ptr<MessageBase> msg, Callback onMsgPopped) override { return true; }
bool pushExternalMsgRaw(char* msg, size_t size) override { return true; }
bool pushExternalMsgRaw(char* msg, size_t size, Callback onMsgPopped) override { return true; }
void pushInternalMsg(InternalMessage&& msg) override { internal_msgs_.emplace_back(std::move(msg)); }
std::vector<InternalMessage> internal_msgs_;
};
struct InternalBFTClientMock : public IInternalBFTClient {
NodeIdType getClientId() const override { return 42; }
uint64_t sendRequest(uint64_t flags, uint32_t size, const char* request, const std::string& cid) override {
return sendRequest(flags, size, request, cid, std::function<void()>{});
}
// Returns the sent client request sequence number.
uint64_t sendRequest(
uint64_t flags, uint32_t size, const char* request, const std::string& cid, std::function<void()>) override {
    uint64_t seq_num = 0;
if (!requests_.empty()) {
seq_num = std::prev(requests_.cend())->first + 1;
}
auto* p = reinterpret_cast<const uint8_t*>(request);
requests_[seq_num] = Request{flags, std::vector<uint8_t>{p, p + size}, cid};
return seq_num;
}
uint32_t numOfConnectedReplicas(uint32_t clusterSize) override { return clusterSize; }
bool isReplicaConnected(uint16_t repId) const override { return true; }
bool isUdp() const override { return false; }
struct Request {
uint64_t flags{0};
std::vector<uint8_t> contents;
std::string cid;
bool operator==(const Request& r) const { return (flags == r.flags && contents == r.contents && cid == r.cid); }
};
// sequence number -> request
std::map<uint64_t, Request> requests_;
};
struct PendingRequestMock : public IPendingRequest {
struct PendingRequest {
NodeIdType client_id_{0};
ReqId req_seq_num_{0};
bool operator==(const PendingRequest& r) const {
return (client_id_ == r.client_id_ && req_seq_num_ == r.req_seq_num_);
}
};
bool isPending(NodeIdType client_id, ReqId req_seq_num) const override {
for (const auto& r : pending_) {
if (r == PendingRequest{client_id, req_seq_num}) {
return true;
}
}
return false;
}
void addPending(NodeIdType client_id, ReqId req_seq_num) {
pending_.push_back(PendingRequest{client_id, req_seq_num});
}
void removePending(NodeIdType client_id, ReqId req_seq_num) {
pending_.erase(std::remove(pending_.begin(), pending_.end(), PendingRequest{client_id, req_seq_num}),
pending_.end());
}
std::vector<PendingRequest> pending_;
};
struct TicksGeneratorForTest : public TicksGenerator {
static std::shared_ptr<TicksGeneratorForTest> create(
const std::shared_ptr<bftEngine::impl::IInternalBFTClient>& bft_client,
const IPendingRequest& pending_req,
const std::shared_ptr<IncomingMsgsStorage>& msgs_storage) {
return std::shared_ptr<TicksGeneratorForTest>{new TicksGeneratorForTest{bft_client, pending_req, msgs_storage}};
}
void evaluateTimers(const std::chrono::steady_clock::time_point& now) { TicksGenerator::evaluateTimers(now); }
private:
TicksGeneratorForTest(const std::shared_ptr<bftEngine::impl::IInternalBFTClient>& bft_client,
const IPendingRequest& pending_req,
const std::shared_ptr<IncomingMsgsStorage>& msgs_storage)
: TicksGenerator{bft_client, pending_req, msgs_storage, TicksGenerator::DoNotStartThread{}} {}
};
} // namespace concord::cron::test
| 1,626 |
634 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.formatting;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
public enum ForceBraces {
Never(CommonCodeStyleSettings.DO_NOT_FORCE),
IfMultiline(CommonCodeStyleSettings.FORCE_BRACES_IF_MULTILINE),
Always(CommonCodeStyleSettings.FORCE_BRACES_ALWAYS);
private final int myIntValue;
ForceBraces(int value) {
myIntValue = value;
}
public int intValue() {
return myIntValue;
}
public static ForceBraces fromInt(int value) {
for (ForceBraces mode : values()) {
if (mode.intValue() == value) {
return mode;
}
}
throw new InvalidDataException("Unknown brace style integer value " + value);
}
}
| 300 |
777 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/extensions/api/easy_unlock_private/easy_unlock_private_api.h"
#include <memory>
#include <string>
#include <utility>
#include "base/bind.h"
#include "base/command_line.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/values.h"
#include "chrome/browser/extensions/extension_api_unittest.h"
#include "chrome/browser/extensions/extension_function_test_utils.h"
#include "chrome/browser/extensions/extension_system_factory.h"
#include "chrome/browser/extensions/test_extension_prefs.h"
#include "chrome/browser/extensions/test_extension_system.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/signin/easy_unlock_app_manager.h"
#include "chrome/browser/signin/easy_unlock_service_factory.h"
#include "chrome/browser/signin/easy_unlock_service_regular.h"
#include "chrome/common/extensions/api/easy_unlock_private.h"
#include "chrome/common/extensions/extension_constants.h"
#include "chromeos/dbus/dbus_thread_manager.h"
#include "chromeos/dbus/fake_easy_unlock_client.h"
#include "components/cryptauth/proto/cryptauth_api.pb.h"
#include "components/proximity_auth/switches.h"
#include "device/bluetooth/dbus/bluez_dbus_manager.h"
#include "extensions/browser/api_test_utils.h"
#include "extensions/browser/event_router.h"
#include "extensions/browser/event_router_factory.h"
#include "extensions/browser/extension_prefs.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/cros_system_api/dbus/service_constants.h"
namespace {
namespace api = extensions::api::easy_unlock_private;
using extensions::EasyUnlockPrivateGenerateEcP256KeyPairFunction;
using extensions::EasyUnlockPrivatePerformECDHKeyAgreementFunction;
using extensions::EasyUnlockPrivateCreateSecureMessageFunction;
using extensions::EasyUnlockPrivateUnwrapSecureMessageFunction;
using extensions::EasyUnlockPrivateSetAutoPairingResultFunction;
class TestableGetRemoteDevicesFunction
: public extensions::EasyUnlockPrivateGetRemoteDevicesFunction {
public:
TestableGetRemoteDevicesFunction() {}
// EasyUnlockPrivateGetRemoteDevicesFunction:
std::string GetUserPrivateKey() override { return "user private key"; }
std::vector<cryptauth::ExternalDeviceInfo> GetUnlockKeys() override {
cryptauth::ExternalDeviceInfo unlock_key1;
unlock_key1.set_friendly_device_name("test phone 1");
unlock_key1.set_public_key("public key 1");
unlock_key1.set_bluetooth_address("<KEY>");
unlock_key1.set_unlock_key(true);
cryptauth::ExternalDeviceInfo unlock_key2;
unlock_key2.set_friendly_device_name("test phone 2");
unlock_key2.set_public_key("public key 2");
unlock_key2.set_bluetooth_address("FF:EE:DD:CC:BB:AA");
unlock_key2.set_unlock_key(true);
std::vector<cryptauth::ExternalDeviceInfo> unlock_keys;
unlock_keys.push_back(unlock_key1);
unlock_keys.push_back(unlock_key2);
return unlock_keys;
}
private:
~TestableGetRemoteDevicesFunction() override {}
DISALLOW_COPY_AND_ASSIGN(TestableGetRemoteDevicesFunction);
};
class TestableGetPermitAccessFunction
: public extensions::EasyUnlockPrivateGetPermitAccessFunction {
public:
TestableGetPermitAccessFunction() {}
// EasyUnlockPrivateGetPermitAccessFunction:
void GetKeyPairForExperiment(std::string* user_public_key,
std::string* user_private_key) override {
*user_public_key = "user public key";
*user_private_key = "user private key";
}
private:
~TestableGetPermitAccessFunction() override {}
DISALLOW_COPY_AND_ASSIGN(TestableGetPermitAccessFunction);
};
// Converts a string to a base::BinaryValue value whose buffer contains the
// string data without the trailing '\0'.
std::unique_ptr<base::BinaryValue> StringToBinaryValue(
const std::string& value) {
return base::BinaryValue::CreateWithCopiedBuffer(value.data(),
value.length());
}
// Copies |private_key_source| and |public_key_source| to |private_key_target|
// and |public_key_target|. It is used as a callback for
// |EasyUnlockClient::GenerateEcP256KeyPair| to save the values returned by the
// method.
void CopyKeyPair(std::string* private_key_target,
std::string* public_key_target,
const std::string& private_key_source,
const std::string& public_key_source) {
*private_key_target = private_key_source;
*public_key_target = public_key_source;
}
// Copies |data_source| to |data_target|. Used as a callback to EasyUnlockClient
// methods to save the data returned by the method.
void CopyData(std::string* data_target, const std::string& data_source) {
*data_target = data_source;
}
class EasyUnlockPrivateApiTest : public extensions::ExtensionApiUnittest {
public:
EasyUnlockPrivateApiTest() {}
~EasyUnlockPrivateApiTest() override {}
protected:
void SetUp() override {
chromeos::DBusThreadManager::Initialize();
bluez::BluezDBusManager::Initialize(
chromeos::DBusThreadManager::Get()->GetSystemBus(),
chromeos::DBusThreadManager::Get()->IsUsingFakes());
client_ = chromeos::DBusThreadManager::Get()->GetEasyUnlockClient();
extensions::ExtensionApiUnittest::SetUp();
}
void TearDown() override {
extensions::ExtensionApiUnittest::TearDown();
bluez::BluezDBusManager::Shutdown();
chromeos::DBusThreadManager::Shutdown();
}
// Extracts a single binary value result from a run extension function
// |function| and converts it to string.
// It will fail if the extension doesn't have exactly one binary value result.
// On failure, an empty string is returned.
std::string GetSingleBinaryResultAsString(
UIThreadExtensionFunction* function) {
const base::ListValue* result_list = function->GetResultList();
if (!result_list) {
LOG(ERROR) << "Function has no result list.";
return "";
}
if (result_list->GetSize() != 1u) {
LOG(ERROR) << "Invalid number of results.";
return "";
}
const base::BinaryValue* result_binary_value;
if (!result_list->GetBinary(0, &result_binary_value) ||
!result_binary_value) {
LOG(ERROR) << "Result not a binary value.";
return "";
}
return std::string(result_binary_value->GetBuffer(),
result_binary_value->GetSize());
}
chromeos::EasyUnlockClient* client_;
};
TEST_F(EasyUnlockPrivateApiTest, GenerateEcP256KeyPair) {
scoped_refptr<EasyUnlockPrivateGenerateEcP256KeyPairFunction> function(
new EasyUnlockPrivateGenerateEcP256KeyPairFunction());
function->set_has_callback(true);
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(),
"[]",
browser(),
extension_function_test_utils::NONE));
const base::ListValue* result_list = function->GetResultList();
ASSERT_TRUE(result_list);
ASSERT_EQ(2u, result_list->GetSize());
const base::BinaryValue* public_key;
ASSERT_TRUE(result_list->GetBinary(0, &public_key));
ASSERT_TRUE(public_key);
const base::BinaryValue* private_key;
ASSERT_TRUE(result_list->GetBinary(1, &private_key));
ASSERT_TRUE(private_key);
EXPECT_TRUE(chromeos::FakeEasyUnlockClient::IsEcP256KeyPair(
std::string(private_key->GetBuffer(), private_key->GetSize()),
std::string(public_key->GetBuffer(), public_key->GetSize())));
}
TEST_F(EasyUnlockPrivateApiTest, PerformECDHKeyAgreement) {
scoped_refptr<EasyUnlockPrivatePerformECDHKeyAgreementFunction> function(
new EasyUnlockPrivatePerformECDHKeyAgreementFunction());
function->set_has_callback(true);
std::string private_key_1;
std::string public_key_1_unused;
client_->GenerateEcP256KeyPair(
base::Bind(&CopyKeyPair, &private_key_1, &public_key_1_unused));
std::string private_key_2_unused;
std::string public_key_2;
client_->GenerateEcP256KeyPair(
base::Bind(&CopyKeyPair, &private_key_2_unused, &public_key_2));
std::string expected_result;
client_->PerformECDHKeyAgreement(
private_key_1,
public_key_2,
base::Bind(&CopyData, &expected_result));
ASSERT_GT(expected_result.length(), 0u);
std::unique_ptr<base::ListValue> args(new base::ListValue);
args->Append(StringToBinaryValue(private_key_1));
args->Append(StringToBinaryValue(public_key_2));
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(), std::move(args), browser(),
extension_function_test_utils::NONE));
EXPECT_EQ(expected_result, GetSingleBinaryResultAsString(function.get()));
}
TEST_F(EasyUnlockPrivateApiTest, CreateSecureMessage) {
scoped_refptr<EasyUnlockPrivateCreateSecureMessageFunction> function(
new EasyUnlockPrivateCreateSecureMessageFunction());
function->set_has_callback(true);
chromeos::EasyUnlockClient::CreateSecureMessageOptions create_options;
create_options.key = "KEY";
create_options.associated_data = "ASSOCIATED_DATA";
create_options.public_metadata = "PUBLIC_METADATA";
create_options.verification_key_id = "VERIFICATION_KEY_ID";
create_options.decryption_key_id = "DECRYPTION_KEY_ID";
create_options.encryption_type = easy_unlock::kEncryptionTypeAES256CBC;
create_options.signature_type = easy_unlock::kSignatureTypeHMACSHA256;
std::string expected_result;
client_->CreateSecureMessage(
"PAYLOAD",
create_options,
base::Bind(&CopyData, &expected_result));
ASSERT_GT(expected_result.length(), 0u);
std::unique_ptr<base::ListValue> args(new base::ListValue);
args->Append(StringToBinaryValue("PAYLOAD"));
args->Append(StringToBinaryValue("KEY"));
auto options = base::MakeUnique<base::DictionaryValue>();
options->Set("associatedData", StringToBinaryValue("ASSOCIATED_DATA"));
options->Set("publicMetadata", StringToBinaryValue("PUBLIC_METADATA"));
options->Set("verificationKeyId",
StringToBinaryValue("VERIFICATION_KEY_ID"));
options->Set("decryptionKeyId",
StringToBinaryValue("DECRYPTION_KEY_ID"));
options->SetString(
"encryptType",
api::ToString(api::ENCRYPTION_TYPE_AES_256_CBC));
options->SetString(
"signType",
api::ToString(api::SIGNATURE_TYPE_HMAC_SHA256));
args->Append(std::move(options));
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(), std::move(args), browser(),
extension_function_test_utils::NONE));
EXPECT_EQ(expected_result, GetSingleBinaryResultAsString(function.get()));
}
TEST_F(EasyUnlockPrivateApiTest, CreateSecureMessage_EmptyOptions) {
scoped_refptr<EasyUnlockPrivateCreateSecureMessageFunction> function(
new EasyUnlockPrivateCreateSecureMessageFunction());
function->set_has_callback(true);
chromeos::EasyUnlockClient::CreateSecureMessageOptions create_options;
create_options.key = "KEY";
create_options.encryption_type = easy_unlock::kEncryptionTypeNone;
create_options.signature_type = easy_unlock::kSignatureTypeHMACSHA256;
std::string expected_result;
client_->CreateSecureMessage(
"PAYLOAD",
create_options,
base::Bind(&CopyData, &expected_result));
ASSERT_GT(expected_result.length(), 0u);
std::unique_ptr<base::ListValue> args(new base::ListValue);
args->Append(StringToBinaryValue("PAYLOAD"));
args->Append(StringToBinaryValue("KEY"));
auto options = base::MakeUnique<base::DictionaryValue>();
args->Append(std::move(options));
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(), std::move(args), browser(),
extension_function_test_utils::NONE));
EXPECT_EQ(expected_result, GetSingleBinaryResultAsString(function.get()));
}
TEST_F(EasyUnlockPrivateApiTest, CreateSecureMessage_AsymmetricSign) {
scoped_refptr<EasyUnlockPrivateCreateSecureMessageFunction> function(
new EasyUnlockPrivateCreateSecureMessageFunction());
function->set_has_callback(true);
chromeos::EasyUnlockClient::CreateSecureMessageOptions create_options;
create_options.key = "KEY";
create_options.associated_data = "ASSOCIATED_DATA";
create_options.verification_key_id = "VERIFICATION_KEY_ID";
create_options.encryption_type = easy_unlock::kEncryptionTypeNone;
create_options.signature_type = easy_unlock::kSignatureTypeECDSAP256SHA256;
std::string expected_result;
client_->CreateSecureMessage(
"PAYLOAD",
create_options,
base::Bind(&CopyData, &expected_result));
ASSERT_GT(expected_result.length(), 0u);
std::unique_ptr<base::ListValue> args(new base::ListValue);
args->Append(StringToBinaryValue("PAYLOAD"));
args->Append(StringToBinaryValue("KEY"));
auto options = base::MakeUnique<base::DictionaryValue>();
options->Set("associatedData",
StringToBinaryValue("ASSOCIATED_DATA"));
options->Set("verificationKeyId",
StringToBinaryValue("VERIFICATION_KEY_ID"));
options->SetString(
"signType",
api::ToString(api::SIGNATURE_TYPE_ECDSA_P256_SHA256));
args->Append(std::move(options));
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(), std::move(args), browser(),
extension_function_test_utils::NONE));
EXPECT_EQ(expected_result, GetSingleBinaryResultAsString(function.get()));
}
TEST_F(EasyUnlockPrivateApiTest, UnwrapSecureMessage) {
scoped_refptr<EasyUnlockPrivateUnwrapSecureMessageFunction> function(
new EasyUnlockPrivateUnwrapSecureMessageFunction());
function->set_has_callback(true);
chromeos::EasyUnlockClient::UnwrapSecureMessageOptions unwrap_options;
unwrap_options.key = "KEY";
unwrap_options.associated_data = "ASSOCIATED_DATA";
unwrap_options.encryption_type = easy_unlock::kEncryptionTypeAES256CBC;
unwrap_options.signature_type = easy_unlock::kSignatureTypeHMACSHA256;
std::string expected_result;
client_->UnwrapSecureMessage(
"MESSAGE",
unwrap_options,
base::Bind(&CopyData, &expected_result));
ASSERT_GT(expected_result.length(), 0u);
std::unique_ptr<base::ListValue> args(new base::ListValue);
args->Append(StringToBinaryValue("MESSAGE"));
args->Append(StringToBinaryValue("KEY"));
auto options = base::MakeUnique<base::DictionaryValue>();
options->Set("associatedData", StringToBinaryValue("ASSOCIATED_DATA"));
options->SetString(
"encryptType",
api::ToString(api::ENCRYPTION_TYPE_AES_256_CBC));
options->SetString(
"signType",
api::ToString(api::SIGNATURE_TYPE_HMAC_SHA256));
args->Append(std::move(options));
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(), std::move(args), browser(),
extension_function_test_utils::NONE));
EXPECT_EQ(expected_result, GetSingleBinaryResultAsString(function.get()));
}
TEST_F(EasyUnlockPrivateApiTest, UnwrapSecureMessage_EmptyOptions) {
scoped_refptr<EasyUnlockPrivateUnwrapSecureMessageFunction> function(
new EasyUnlockPrivateUnwrapSecureMessageFunction());
function->set_has_callback(true);
chromeos::EasyUnlockClient::UnwrapSecureMessageOptions unwrap_options;
unwrap_options.key = "KEY";
unwrap_options.encryption_type = easy_unlock::kEncryptionTypeNone;
unwrap_options.signature_type = easy_unlock::kSignatureTypeHMACSHA256;
std::string expected_result;
client_->UnwrapSecureMessage(
"MESSAGE",
unwrap_options,
base::Bind(&CopyData, &expected_result));
ASSERT_GT(expected_result.length(), 0u);
std::unique_ptr<base::ListValue> args(new base::ListValue);
args->Append(StringToBinaryValue("MESSAGE"));
args->Append(StringToBinaryValue("KEY"));
auto options = base::MakeUnique<base::DictionaryValue>();
args->Append(std::move(options));
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(), std::move(args), browser(),
extension_function_test_utils::NONE));
EXPECT_EQ(expected_result, GetSingleBinaryResultAsString(function.get()));
}
TEST_F(EasyUnlockPrivateApiTest, UnwrapSecureMessage_AsymmetricSign) {
scoped_refptr<EasyUnlockPrivateUnwrapSecureMessageFunction> function(
new EasyUnlockPrivateUnwrapSecureMessageFunction());
function->set_has_callback(true);
chromeos::EasyUnlockClient::UnwrapSecureMessageOptions unwrap_options;
unwrap_options.key = "KEY";
unwrap_options.associated_data = "ASSOCIATED_DATA";
unwrap_options.encryption_type = easy_unlock::kEncryptionTypeNone;
unwrap_options.signature_type = easy_unlock::kSignatureTypeECDSAP256SHA256;
std::string expected_result;
client_->UnwrapSecureMessage(
"MESSAGE",
unwrap_options,
base::Bind(&CopyData, &expected_result));
ASSERT_GT(expected_result.length(), 0u);
std::unique_ptr<base::ListValue> args(new base::ListValue);
args->Append(StringToBinaryValue("MESSAGE"));
args->Append(StringToBinaryValue("KEY"));
auto options = base::MakeUnique<base::DictionaryValue>();
options->Set("associatedData",
StringToBinaryValue("ASSOCIATED_DATA"));
options->SetString(
"signType",
api::ToString(api::SIGNATURE_TYPE_ECDSA_P256_SHA256));
args->Append(std::move(options));
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(), std::move(args), browser(),
extension_function_test_utils::NONE));
EXPECT_EQ(expected_result, GetSingleBinaryResultAsString(function.get()));
}
struct AutoPairingResult {
AutoPairingResult() : success(false) {}
void SetResult(bool success, const std::string& error) {
this->success = success;
this->error = error;
}
bool success;
std::string error;
};
// Test factory to register EasyUnlockService.
std::unique_ptr<KeyedService> BuildTestEasyUnlockService(
content::BrowserContext* context) {
std::unique_ptr<EasyUnlockServiceRegular> service(
new EasyUnlockServiceRegular(Profile::FromBrowserContext(context)));
service->Initialize(
EasyUnlockAppManager::Create(extensions::ExtensionSystem::Get(context),
-1 /* manifest id */, base::FilePath()));
return std::move(service);
}
// A fake EventRouter that logs events it dispatches, for testing.
class FakeEventRouter : public extensions::EventRouter {
public:
FakeEventRouter(
Profile* profile,
std::unique_ptr<extensions::TestExtensionPrefs> extension_prefs)
: EventRouter(profile, extension_prefs->prefs()),
extension_prefs_(std::move(extension_prefs)),
event_count_(0) {}
void DispatchEventToExtension(
const std::string& extension_id,
std::unique_ptr<extensions::Event> event) override {
++event_count_;
last_extension_id_ = extension_id;
last_event_name_ = event ? event->event_name : std::string();
}
int event_count() const { return event_count_; }
const std::string& last_extension_id() const { return last_extension_id_; }
const std::string& last_event_name() const { return last_event_name_; }
private:
std::unique_ptr<extensions::TestExtensionPrefs> extension_prefs_;
int event_count_;
std::string last_extension_id_;
std::string last_event_name_;
};
// FakeEventRouter factory function
std::unique_ptr<KeyedService> FakeEventRouterFactoryFunction(
content::BrowserContext* profile) {
std::unique_ptr<extensions::TestExtensionPrefs> extension_prefs(
new extensions::TestExtensionPrefs(base::ThreadTaskRunnerHandle::Get()));
return base::MakeUnique<FakeEventRouter>(static_cast<Profile*>(profile),
std::move(extension_prefs));
}
TEST_F(EasyUnlockPrivateApiTest, AutoPairing) {
FakeEventRouter* event_router = static_cast<FakeEventRouter*>(
extensions::EventRouterFactory::GetInstance()->SetTestingFactoryAndUse(
profile(), &FakeEventRouterFactoryFunction));
EasyUnlockServiceFactory::GetInstance()->SetTestingFactoryAndUse(
profile(), &BuildTestEasyUnlockService);
AutoPairingResult result;
// Dispatch OnStartAutoPairing event on EasyUnlockService::StartAutoPairing.
EasyUnlockService* service = EasyUnlockService::Get(profile());
service->StartAutoPairing(base::Bind(&AutoPairingResult::SetResult,
base::Unretained(&result)));
EXPECT_EQ(1, event_router->event_count());
EXPECT_EQ(extension_misc::kEasyUnlockAppId,
event_router->last_extension_id());
EXPECT_EQ(
extensions::api::easy_unlock_private::OnStartAutoPairing::kEventName,
event_router->last_event_name());
// Test SetAutoPairingResult call with failure.
scoped_refptr<EasyUnlockPrivateSetAutoPairingResultFunction> function(
new EasyUnlockPrivateSetAutoPairingResultFunction());
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(),
"[{\"success\":false, \"errorMessage\":\"fake_error\"}]",
browser(),
extension_function_test_utils::NONE));
EXPECT_FALSE(result.success);
EXPECT_EQ("fake_error", result.error);
// Test SetAutoPairingResult call with success.
service->StartAutoPairing(base::Bind(&AutoPairingResult::SetResult,
base::Unretained(&result)));
function = new EasyUnlockPrivateSetAutoPairingResultFunction();
ASSERT_TRUE(extension_function_test_utils::RunFunction(
function.get(),
"[{\"success\":true}]",
browser(),
extension_function_test_utils::NONE));
EXPECT_TRUE(result.success);
EXPECT_TRUE(result.error.empty());
}
// Checks that the chrome.easyUnlockPrivate.getRemoteDevices API returns the
// natively synced devices if the kEnableBluetoothLowEnergyDiscovery switch is
// set.
TEST_F(EasyUnlockPrivateApiTest, GetRemoteDevicesExperimental) {
base::CommandLine::ForCurrentProcess()->AppendSwitch(
proximity_auth::switches::kEnableBluetoothLowEnergyDiscovery);
EasyUnlockServiceFactory::GetInstance()->SetTestingFactoryAndUse(
profile(), &BuildTestEasyUnlockService);
scoped_refptr<TestableGetRemoteDevicesFunction> function(
new TestableGetRemoteDevicesFunction());
std::unique_ptr<base::Value> value(
extensions::api_test_utils::RunFunctionAndReturnSingleResult(
function.get(), "[]", profile()));
ASSERT_TRUE(value.get());
ASSERT_EQ(base::Value::Type::LIST, value->GetType());
base::ListValue* list_value = static_cast<base::ListValue*>(value.get());
EXPECT_EQ(2u, list_value->GetSize());
base::Value* remote_device1;
base::Value* remote_device2;
ASSERT_TRUE(list_value->Get(0, &remote_device1));
ASSERT_TRUE(list_value->Get(1, &remote_device2));
EXPECT_EQ(base::Value::Type::DICTIONARY, remote_device1->GetType());
EXPECT_EQ(base::Value::Type::DICTIONARY, remote_device2->GetType());
std::string name1, name2;
EXPECT_TRUE(static_cast<base::DictionaryValue*>(remote_device1)
->GetString("name", &name1));
EXPECT_TRUE(static_cast<base::DictionaryValue*>(remote_device2)
->GetString("name", &name2));
EXPECT_EQ("test phone 1", name1);
EXPECT_EQ("test phone 2", name2);
}
// Checks that the chrome.easyUnlockPrivate.getRemoteDevices API returns the
// stored value if the kEnableBluetoothLowEnergyDiscovery switch is not set.
TEST_F(EasyUnlockPrivateApiTest, GetRemoteDevicesNonExperimental) {
EasyUnlockServiceFactory::GetInstance()->SetTestingFactoryAndUse(
profile(), &BuildTestEasyUnlockService);
scoped_refptr<TestableGetRemoteDevicesFunction> function(
new TestableGetRemoteDevicesFunction());
std::unique_ptr<base::Value> value(
extensions::api_test_utils::RunFunctionAndReturnSingleResult(
function.get(), "[]", profile()));
ASSERT_TRUE(value.get());
ASSERT_EQ(base::Value::Type::LIST, value->GetType());
base::ListValue* list_value = static_cast<base::ListValue*>(value.get());
EXPECT_EQ(0u, list_value->GetSize());
}
// Checks that the chrome.easyUnlockPrivate.getPermitAccess API returns the
// native permit access if the kEnableBluetoothLowEnergyDiscovery switch is
// set.
TEST_F(EasyUnlockPrivateApiTest, GetPermitAccessExperimental) {
base::CommandLine::ForCurrentProcess()->AppendSwitch(
proximity_auth::switches::kEnableBluetoothLowEnergyDiscovery);
EasyUnlockServiceFactory::GetInstance()->SetTestingFactoryAndUse(
profile(), &BuildTestEasyUnlockService);
scoped_refptr<TestableGetPermitAccessFunction> function(
new TestableGetPermitAccessFunction());
std::unique_ptr<base::Value> value(
extensions::api_test_utils::RunFunctionAndReturnSingleResult(
function.get(), "[]", profile()));
ASSERT_TRUE(value);
ASSERT_EQ(base::Value::Type::DICTIONARY, value->GetType());
base::DictionaryValue* permit_access =
static_cast<base::DictionaryValue*>(value.get());
std::string user_public_key, user_private_key;
EXPECT_TRUE(permit_access->GetString("id", &user_public_key));
EXPECT_TRUE(permit_access->GetString("data", &user_private_key));
EXPECT_EQ("user public key", user_public_key);
EXPECT_EQ("user private key", user_private_key);
}
// Checks that the chrome.easyUnlockPrivate.getPermitAccess API returns the
// stored value if the kEnableBluetoothLowEnergyDiscovery switch is not set.
TEST_F(EasyUnlockPrivateApiTest, GetPermitAccessNonExperimental) {
EasyUnlockServiceFactory::GetInstance()->SetTestingFactoryAndUse(
profile(), &BuildTestEasyUnlockService);
scoped_refptr<TestableGetPermitAccessFunction> function(
new TestableGetPermitAccessFunction());
std::unique_ptr<base::Value> value(
extensions::api_test_utils::RunFunctionAndReturnSingleResult(
function.get(), "[]", profile()));
EXPECT_FALSE(value);
}
} // namespace
| 9,108 |
837 | package me.saket.dank.ui.submission;
import static io.reactivex.android.schedulers.AndroidSchedulers.mainThread;
import static io.reactivex.schedulers.Schedulers.io;
import static me.saket.dank.utils.Views.executeOnMeasure;
import android.graphics.Bitmap;
import android.graphics.drawable.Animatable;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.support.annotation.CheckResult;
import android.support.annotation.Nullable;
import android.util.Size;
import android.view.Gravity;
import android.view.View;
import com.bumptech.glide.Priority;
import com.bumptech.glide.load.resource.gif.GifDrawable;
import com.bumptech.glide.request.RequestOptions;
import com.jakewharton.rxrelay2.PublishRelay;
import com.jakewharton.rxrelay2.Relay;
import net.dean.jraw.models.SubmissionPreview;
import javax.inject.Inject;
import butterknife.BindView;
import butterknife.ButterKnife;
import dagger.Lazy;
import io.reactivex.Completable;
import io.reactivex.Observable;
import io.reactivex.functions.Consumer;
import me.saket.dank.R;
import me.saket.dank.ui.UiEvent;
import me.saket.dank.ui.submission.events.SubmissionImageLoadStarted;
import me.saket.dank.ui.submission.events.SubmissionImageLoadSucceeded;
import me.saket.dank.urlparser.MediaLink;
import me.saket.dank.utils.Animations;
import me.saket.dank.utils.Optional;
import me.saket.dank.utils.Views;
import me.saket.dank.widgets.InboxUI.SimpleExpandablePageStateChangeCallbacks;
import me.saket.dank.widgets.ScrollingRecyclerViewSheet;
import me.saket.dank.widgets.ZoomableImageView;
/**
 * Manages showing of the content image in {@link SubmissionPageLayout}. Only supports showing a single image right now.
*/
public class SubmissionImageHolder {
@BindView(R.id.submission_image_scroll_hint) View imageScrollHintView;
@BindView(R.id.submission_image) ZoomableImageView imageView;
@BindView(R.id.submission_comment_list_parent_sheet) ScrollingRecyclerViewSheet commentListParentSheet;
private final Lazy<SubmissionImageLoader> imageLoader;
private Relay<UiEvent> uiEvents;
private SubmissionPageLifecycleStreams lifecycle;
private SubmissionPageLayout submissionPageLayout;
private Size deviceDisplaySize;
private Relay<Drawable> imageStream = PublishRelay.create();
private ZoomableImageView.OnPanChangeListener imagePanListener;
@Inject
public SubmissionImageHolder(Lazy<SubmissionImageLoader> imageLoader) {
this.imageLoader = imageLoader;
}
/**
* God knows why (if he/she exists), ButterKnife is failing to bind
* <var>contentLoadProgressView</var>, so it's being manually supplied.
*/
public void setup(
Relay<UiEvent> uiEvents,
SubmissionPageLifecycleStreams lifecycleStreams,
View submissionLayout,
SubmissionPageLayout submissionPageLayout,
Size deviceDisplaySize)
{
this.uiEvents = uiEvents;
this.lifecycle = lifecycleStreams;
this.submissionPageLayout = submissionPageLayout;
this.deviceDisplaySize = deviceDisplaySize;
ButterKnife.bind(this, submissionLayout);
imageView.setGravity(Gravity.TOP);
// Reset everything when the page is collapsed.
lifecycleStreams.onPageCollapseOrDestroy()
.subscribe(resetViews());
}
@CheckResult
public Observable<Optional<Bitmap>> streamImageBitmaps() {
return imageStream.map(SubmissionImageHolder::bitmapFromDrawable);
}
private Consumer<Object> resetViews() {
return o -> {
if (imagePanListener != null) {
imageView.removeOnImagePanChangeListener(imagePanListener);
}
imageView.resetState();
imageScrollHintView.setVisibility(View.GONE);
};
}
@CheckResult
public Completable load(MediaLink mediaLink, @Nullable SubmissionPreview redditSuppliedThumbnails) {
uiEvents.accept(SubmissionImageLoadStarted.create());
RequestOptions imageLoadOptions = RequestOptions.priorityOf(Priority.IMMEDIATE);
return imageLoader.get().load(imageView.getContext(), mediaLink, redditSuppliedThumbnails, io(), imageLoadOptions)
.observeOn(mainThread())
.doOnSuccess(drawable -> {
imageView.setImageDrawable(drawable);
if (drawable instanceof Animatable) {
((Animatable) drawable).start();
}
})
.doOnSuccess(o -> uiEvents.accept(SubmissionImageLoadSucceeded.create()))
.flatMap(drawable -> Views.rxWaitTillMeasured(imageView.view()).toSingleDefault(drawable))
.doOnSuccess(drawable -> {
float widthResizeFactor = deviceDisplaySize.getWidth() / (float) drawable.getIntrinsicWidth();
float imageHeight = drawable.getIntrinsicHeight() * widthResizeFactor;
float visibleImageHeight = Math.min(imageHeight, imageView.getHeight());
commentListParentSheet.post(() -> {
int imageHeightMinusToolbar = (int) (visibleImageHeight - commentListParentSheet.getTop());
commentListParentSheet.setScrollingEnabled(true);
commentListParentSheet.setMaxScrollY(imageHeightMinusToolbar);
if (submissionPageLayout.shouldExpandMediaSmoothly()) {
if (submissionPageLayout.isExpanded()) {
commentListParentSheet.smoothScrollTo(imageHeightMinusToolbar);
} else {
lifecycle.onPageExpand()
.take(1)
.takeUntil(lifecycle.onPageCollapseOrDestroy())
.subscribe(o -> commentListParentSheet.smoothScrollTo(imageHeightMinusToolbar));
}
} else {
commentListParentSheet.scrollTo(imageHeightMinusToolbar);
}
if (imageHeight > visibleImageHeight) {
// Image is scrollable. Let the user know about this.
showImageScrollHint(imageHeight, visibleImageHeight);
}
});
})
.doOnSuccess(imageStream)
.toCompletable();
}
/**
* Show a tooltip at the bottom of the image, hinting the user that the image is long and can be scrolled.
*/
private void showImageScrollHint(float imageHeight, float visibleImageHeight) {
imageScrollHintView.setVisibility(View.VISIBLE);
imageScrollHintView.setAlpha(0f);
// Postpone till measure because we need the height.
executeOnMeasure(imageScrollHintView, () -> {
Runnable hintEntryAnimationRunnable = () -> {
imageScrollHintView.setTranslationY(imageScrollHintView.getHeight() / 2);
imageScrollHintView.animate()
.alpha(1f)
.translationY(0f)
.setDuration(300)
.setInterpolator(Animations.INTERPOLATOR)
.start();
};
// Show the hint only when the page has expanded.
if (submissionPageLayout.isExpanded()) {
hintEntryAnimationRunnable.run();
} else {
submissionPageLayout.addStateChangeCallbacks(new SimpleExpandablePageStateChangeCallbacks() {
@Override
public void onPageExpanded() {
hintEntryAnimationRunnable.run();
submissionPageLayout.removeStateChangeCallbacks(this);
}
});
}
});
// Hide the tooltip once the user starts scrolling the image.
imagePanListener = new ZoomableImageView.OnPanChangeListener() {
private boolean hidden = false;
@Override
public void onPanChange(float scrollY) {
if (hidden) {
return;
}
float distanceScrolledY = Math.abs(scrollY);
float distanceScrollableY = imageHeight - visibleImageHeight;
float scrolledPercentage = distanceScrolledY / distanceScrollableY;
// Hide it after the image has been scrolled 10% of its height.
if (scrolledPercentage > 0.1f) {
hidden = true;
imageScrollHintView.animate()
.alpha(0f)
.translationY(-imageScrollHintView.getHeight() / 2)
.setDuration(300)
.setInterpolator(Animations.INTERPOLATOR)
.withEndAction(() -> imageScrollHintView.setVisibility(View.GONE))
.start();
}
}
};
imageView.addOnImagePanChangeListener(imagePanListener);
}
private static Optional<Bitmap> bitmapFromDrawable(Drawable drawable) {
if (drawable instanceof BitmapDrawable) {
return Optional.of(((BitmapDrawable) drawable).getBitmap());
} else if (drawable instanceof GifDrawable) {
// First frame of a GIF can be null if it has already started playing
// and the first frame has been thrown away.
return Optional.ofNullable(((GifDrawable) drawable).getFirstFrame());
} else {
throw new IllegalStateException("Unknown Drawable: " + drawable);
}
}
}
| 3,330 |
6,663 | <filename>Cython/Utility/FunctionArguments.c
//////////////////// ArgTypeTest.proto ////////////////////
#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact) \
((likely(__Pyx_IS_TYPE(obj, type) | (none_allowed && (obj == Py_None)))) ? 1 : \
__Pyx__ArgTypeTest(obj, type, name, exact))
static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); /*proto*/
//////////////////// ArgTypeTest ////////////////////
static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact)
{
__Pyx_TypeName type_name;
__Pyx_TypeName obj_type_name;
if (unlikely(!type)) {
PyErr_SetString(PyExc_SystemError, "Missing type object");
return 0;
}
else if (exact) {
#if PY_MAJOR_VERSION == 2
if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1;
#endif
}
else {
if (likely(__Pyx_TypeCheck(obj, type))) return 1;
}
type_name = __Pyx_PyType_GetName(type);
obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj));
PyErr_Format(PyExc_TypeError,
"Argument '%.200s' has incorrect type (expected " __Pyx_FMT_TYPENAME
", got " __Pyx_FMT_TYPENAME ")", name, type_name, obj_type_name);
__Pyx_DECREF_TypeName(type_name);
__Pyx_DECREF_TypeName(obj_type_name);
return 0;
}
//////////////////// RaiseArgTupleInvalid.proto ////////////////////
static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/
//////////////////// RaiseArgTupleInvalid ////////////////////
// __Pyx_RaiseArgtupleInvalid raises the correct exception when too
// many or too few positional arguments were found. This handles
// Py_ssize_t formatting correctly.
static void __Pyx_RaiseArgtupleInvalid(
const char* func_name,
int exact,
Py_ssize_t num_min,
Py_ssize_t num_max,
Py_ssize_t num_found)
{
Py_ssize_t num_expected;
const char *more_or_less;
if (num_found < num_min) {
num_expected = num_min;
more_or_less = "at least";
} else {
num_expected = num_max;
more_or_less = "at most";
}
if (exact) {
more_or_less = "exactly";
}
PyErr_Format(PyExc_TypeError,
"%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
func_name, more_or_less, num_expected,
(num_expected == 1) ? "" : "s", num_found);
}
//////////////////// RaiseKeywordRequired.proto ////////////////////
static void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name); /*proto*/
//////////////////// RaiseKeywordRequired ////////////////////
static void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name) {
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION >= 3
"%s() needs keyword-only argument %U", func_name, kw_name);
#else
"%s() needs keyword-only argument %s", func_name,
PyString_AS_STRING(kw_name));
#endif
}
//////////////////// RaiseDoubleKeywords.proto ////////////////////
static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /*proto*/
//////////////////// RaiseDoubleKeywords ////////////////////
static void __Pyx_RaiseDoubleKeywordsError(
const char* func_name,
PyObject* kw_name)
{
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION >= 3
"%s() got multiple values for keyword argument '%U'", func_name, kw_name);
#else
"%s() got multiple values for keyword argument '%s'", func_name,
PyString_AsString(kw_name));
#endif
}
//////////////////// RaiseMappingExpected.proto ////////////////////
static void __Pyx_RaiseMappingExpectedError(PyObject* arg); /*proto*/
//////////////////// RaiseMappingExpected ////////////////////
static void __Pyx_RaiseMappingExpectedError(PyObject* arg) {
__Pyx_TypeName arg_type_name = __Pyx_PyType_GetName(Py_TYPE(arg));
PyErr_Format(PyExc_TypeError,
"'" __Pyx_FMT_TYPENAME "' object is not a mapping", arg_type_name);
__Pyx_DECREF_TypeName(arg_type_name);
}
//////////////////// KeywordStringCheck.proto ////////////////////
static int __Pyx_CheckKeywordStrings(PyObject *kw, const char* function_name, int kw_allowed); /*proto*/
//////////////////// KeywordStringCheck ////////////////////
// __Pyx_CheckKeywordStrings raises an error if non-string keywords
// were passed to a function, or if any keywords were passed to a
// function that does not accept them.
//
// The "kw" argument is either a dict (for METH_VARARGS) or a tuple
// (for METH_FASTCALL).
static int __Pyx_CheckKeywordStrings(
PyObject *kw,
const char* function_name,
int kw_allowed)
{
PyObject* key = 0;
Py_ssize_t pos = 0;
#if CYTHON_COMPILING_IN_PYPY
/* PyPy appears to check keywords at call time, not at unpacking time => not much to do here */
if (!kw_allowed && PyDict_Next(kw, &pos, &key, 0))
goto invalid_keyword;
return 1;
#else
if (CYTHON_METH_FASTCALL && likely(PyTuple_Check(kw))) {
if (unlikely(PyTuple_GET_SIZE(kw) == 0))
return 1;
if (!kw_allowed) {
key = PyTuple_GET_ITEM(kw, 0);
goto invalid_keyword;
}
#if PY_VERSION_HEX < 0x03090000
// On CPython >= 3.9, the FASTCALL protocol guarantees that keyword
// names are strings (see https://bugs.python.org/issue37540)
for (pos = 0; pos < PyTuple_GET_SIZE(kw); pos++) {
key = PyTuple_GET_ITEM(kw, pos);
if (unlikely(!PyUnicode_Check(key)))
goto invalid_keyword_type;
}
#endif
return 1;
}
while (PyDict_Next(kw, &pos, &key, 0)) {
#if PY_MAJOR_VERSION < 3
if (unlikely(!PyString_Check(key)))
#endif
if (unlikely(!PyUnicode_Check(key)))
goto invalid_keyword_type;
}
if (!kw_allowed && unlikely(key))
goto invalid_keyword;
return 1;
invalid_keyword_type:
PyErr_Format(PyExc_TypeError,
"%.200s() keywords must be strings", function_name);
return 0;
#endif
invalid_keyword:
#if PY_MAJOR_VERSION < 3
PyErr_Format(PyExc_TypeError,
"%.200s() got an unexpected keyword argument '%.200s'",
function_name, PyString_AsString(key));
#else
PyErr_Format(PyExc_TypeError,
"%s() got an unexpected keyword argument '%U'",
function_name, key);
#endif
return 0;
}
//////////////////// ParseKeywords.proto ////////////////////
static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject *const *kwvalues,
PyObject **argnames[],
PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,
const char* function_name); /*proto*/
//////////////////// ParseKeywords ////////////////////
//@requires: RaiseDoubleKeywords
// __Pyx_ParseOptionalKeywords copies the optional/unknown keyword
// arguments from kwds into the dict kwds2. If kwds2 is NULL, unknown
// keywords will raise an invalid keyword error.
//
// When not using METH_FASTCALL, kwds is a dict and kwvalues is NULL.
// Otherwise, kwds is a tuple with keyword names and kwvalues is a C
// array with the corresponding values.
//
// Three kinds of errors are checked: 1) non-string keywords, 2)
// unexpected keywords and 3) overlap with positional arguments.
//
// If num_pos_args is greater than 0, it denotes the number of positional
// arguments that were passed and that must therefore not appear
// amongst the keywords as well.
//
// This method does not check for required keyword arguments.
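//
// A rough call-site sketch (illustrative only, not verbatim generated code;
// the interned name objects __pyx_n_s_alpha/__pyx_n_s_beta and the argument
// counts are assumptions):
//
//     static PyObject **argnames[] = {&__pyx_n_s_alpha, &__pyx_n_s_beta, 0};
//     PyObject *values[2] = {0, 0};
//     if (unlikely(__Pyx_ParseOptionalKeywords(kwds, kwvalues, argnames, NULL,
//                                              values, nargs, "f") < 0)) goto bad;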
static int __Pyx_ParseOptionalKeywords(
PyObject *kwds,
PyObject *const *kwvalues,
PyObject **argnames[],
PyObject *kwds2,
PyObject *values[],
Py_ssize_t num_pos_args,
const char* function_name)
{
PyObject *key = 0, *value = 0;
Py_ssize_t pos = 0;
PyObject*** name;
PyObject*** first_kw_arg = argnames + num_pos_args;
int kwds_is_tuple = CYTHON_METH_FASTCALL && likely(PyTuple_Check(kwds));
while (1) {
if (kwds_is_tuple) {
if (pos >= PyTuple_GET_SIZE(kwds)) break;
key = PyTuple_GET_ITEM(kwds, pos);
value = kwvalues[pos];
pos++;
}
else
{
if (!PyDict_Next(kwds, &pos, &key, &value)) break;
}
name = first_kw_arg;
while (*name && (**name != key)) name++;
if (*name) {
values[name-argnames] = value;
continue;
}
name = first_kw_arg;
#if PY_MAJOR_VERSION < 3
if (likely(PyString_Check(key))) {
while (*name) {
if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
&& _PyString_Eq(**name, key)) {
values[name-argnames] = value;
break;
}
name++;
}
if (*name) continue;
else {
// not found after positional args, check for duplicate
PyObject*** argname = argnames;
while (argname != first_kw_arg) {
if ((**argname == key) || (
(CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
&& _PyString_Eq(**argname, key))) {
goto arg_passed_twice;
}
argname++;
}
}
} else
#endif
if (likely(PyUnicode_Check(key))) {
while (*name) {
int cmp = (
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
// In Py2, we may need to convert the argument name from str to unicode for comparison.
PyUnicode_Compare(**name, key)
);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
if (cmp == 0) {
values[name-argnames] = value;
break;
}
name++;
}
if (*name) continue;
else {
// not found after positional args, check for duplicate
PyObject*** argname = argnames;
while (argname != first_kw_arg) {
int cmp = (**argname == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
// need to convert argument name from bytes to unicode for comparison
PyUnicode_Compare(**argname, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
if (cmp == 0) goto arg_passed_twice;
argname++;
}
}
} else
goto invalid_keyword_type;
if (kwds2) {
if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
} else {
goto invalid_keyword;
}
}
return 0;
arg_passed_twice:
__Pyx_RaiseDoubleKeywordsError(function_name, key);
goto bad;
invalid_keyword_type:
PyErr_Format(PyExc_TypeError,
"%.200s() keywords must be strings", function_name);
goto bad;
invalid_keyword:
#if PY_MAJOR_VERSION < 3
PyErr_Format(PyExc_TypeError,
"%.200s() got an unexpected keyword argument '%.200s'",
function_name, PyString_AsString(key));
#else
PyErr_Format(PyExc_TypeError,
"%s() got an unexpected keyword argument '%U'",
function_name, key);
#endif
bad:
return -1;
}
//////////////////// MergeKeywords.proto ////////////////////
static int __Pyx_MergeKeywords(PyObject *kwdict, PyObject *source_mapping); /*proto*/
//////////////////// MergeKeywords ////////////////////
//@requires: RaiseDoubleKeywords
//@requires: Optimize.c::dict_iter
static int __Pyx_MergeKeywords(PyObject *kwdict, PyObject *source_mapping) {
PyObject *iter, *key = NULL, *value = NULL;
int source_is_dict, result;
Py_ssize_t orig_length, ppos = 0;
iter = __Pyx_dict_iterator(source_mapping, 0, PYIDENT("items"), &orig_length, &source_is_dict);
if (unlikely(!iter)) {
// slow fallback: try converting to dict, then iterate
PyObject *args;
if (unlikely(!PyErr_ExceptionMatches(PyExc_AttributeError))) goto bad;
PyErr_Clear();
args = PyTuple_Pack(1, source_mapping);
if (likely(args)) {
PyObject *fallback = PyObject_Call((PyObject*)&PyDict_Type, args, NULL);
Py_DECREF(args);
if (likely(fallback)) {
iter = __Pyx_dict_iterator(fallback, 1, PYIDENT("items"), &orig_length, &source_is_dict);
Py_DECREF(fallback);
}
}
if (unlikely(!iter)) goto bad;
}
while (1) {
result = __Pyx_dict_iter_next(iter, orig_length, &ppos, &key, &value, NULL, source_is_dict);
if (unlikely(result < 0)) goto bad;
if (!result) break;
if (unlikely(PyDict_Contains(kwdict, key))) {
__Pyx_RaiseDoubleKeywordsError("function", key);
result = -1;
} else {
result = PyDict_SetItem(kwdict, key, value);
}
Py_DECREF(key);
Py_DECREF(value);
if (unlikely(result < 0)) goto bad;
}
Py_XDECREF(iter);
return 0;
bad:
Py_XDECREF(iter);
return -1;
}
/////////////// fastcall.proto ///////////////
// We define various functions and macros with two variants:
//..._FASTCALL and ..._VARARGS
// The first is used when METH_FASTCALL is enabled and the second is used
// otherwise. If the Python implementation does not support METH_FASTCALL
// (because it's an old version of CPython or it's not CPython at all),
// then the ..._FASTCALL macros simply alias ..._VARARGS
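//
// Illustrative example (an assumption about usage, not verbatim generated
// code): code written against the FASTCALL spelling, e.g.
//     PyObject *first = __Pyx_Arg_FASTCALL(args, 0);
// still compiles on builds without METH_FASTCALL support, because there the
// macro falls back to the tuple-based __Pyx_Arg_VARARGS implementation.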
#define __Pyx_Arg_VARARGS(args, i) PyTuple_GET_ITEM(args, i)
#define __Pyx_NumKwargs_VARARGS(kwds) PyDict_Size(kwds)
#define __Pyx_KwValues_VARARGS(args, nargs) NULL
#define __Pyx_GetKwValue_VARARGS(kw, kwvalues, s) __Pyx_PyDict_GetItemStrWithError(kw, s)
#define __Pyx_KwargsAsDict_VARARGS(kw, kwvalues) PyDict_Copy(kw)
#if CYTHON_METH_FASTCALL
#define __Pyx_Arg_FASTCALL(args, i) args[i]
#define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds)
#define __Pyx_KwValues_FASTCALL(args, nargs) (&args[nargs])
static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s);
#define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw)
#else
#define __Pyx_Arg_FASTCALL __Pyx_Arg_VARARGS
#define __Pyx_NumKwargs_FASTCALL __Pyx_NumKwargs_VARARGS
#define __Pyx_KwValues_FASTCALL __Pyx_KwValues_VARARGS
#define __Pyx_GetKwValue_FASTCALL __Pyx_GetKwValue_VARARGS
#define __Pyx_KwargsAsDict_FASTCALL __Pyx_KwargsAsDict_VARARGS
#endif
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_ArgsSlice_VARARGS(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_VARARGS(args, start), stop - start)
#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) __Pyx_PyTuple_FromArray(&__Pyx_Arg_FASTCALL(args, start), stop - start)
#else
/* Not CPython, so certainly no METH_FASTCALL support */
#define __Pyx_ArgsSlice_VARARGS(args, start, stop) PyTuple_GetSlice(args, start, stop)
#define __Pyx_ArgsSlice_FASTCALL(args, start, stop) PyTuple_GetSlice(args, start, stop)
#endif
/////////////// fastcall ///////////////
//@requires: ObjectHandling.c::TupleAndListFromArray
//@requires: StringTools.c::UnicodeEquals
#if CYTHON_METH_FASTCALL
// kwnames: tuple with names of keyword arguments
// kwvalues: C array with values of keyword arguments
// s: str with the keyword name to look for
static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s)
{
// Search the kwnames array for s and return the corresponding value.
// We do two loops: a first one to compare pointers (which will find a
// match if the name in kwnames is interned, given that s is interned
// by Cython). A second loop compares the actual strings.
Py_ssize_t i, n = PyTuple_GET_SIZE(kwnames);
for (i = 0; i < n; i++)
{
if (s == PyTuple_GET_ITEM(kwnames, i)) return kwvalues[i];
}
for (i = 0; i < n; i++)
{
int eq = __Pyx_PyUnicode_Equals(s, PyTuple_GET_ITEM(kwnames, i), Py_EQ);
if (unlikely(eq != 0)) {
if (unlikely(eq < 0)) return NULL; // error
return kwvalues[i];
}
}
return NULL; // not found (no exception set)
}
#endif
| 7,632 |
319 | <filename>machine-learning/machine-learning/src/main/java/org/openimaj/ml/timeseries/TimeSeries.java
/**
* Copyright (c) 2011, The University of Southampton and the individual contributors.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of the University of Southampton nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.openimaj.ml.timeseries;
import org.openimaj.ml.timeseries.converter.TimeSeriesConverter;
import org.openimaj.ml.timeseries.processor.TimeSeriesProcessor;
import org.openimaj.ml.timeseries.processor.interpolation.TimeSeriesInterpolation;
import org.openimaj.util.pair.IndependentPair;
/**
* A time series defines data at discrete points in time. The time series has
* the ability to return data at a specific point in time, return neighbours
* within some window, closest neighbours or n neighbours before and after a
* time.
* <p>
* These values can be used by a {@link TimeSeriesInterpolation} to get specific
* moments in time
*
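 * <p>
 * A minimal usage sketch. {@code DoubleTimeSeries} below stands in for any
 * concrete subclass whose DATA type is {@code double[]}; the class name and
 * its no-argument constructor are assumptions made purely for illustration:
 *
 * <pre>
 * DoubleTimeSeries ts = new DoubleTimeSeries();
 * ts.set(new long[] { 0L, 10L, 20L }, new double[] { 1, 2, 3 });
 *
 * // the sample at t=10 plus one neighbour either side
 * DoubleTimeSeries window = ts.get(10L, 1, 1);
 * long[] times = window.getTimes(); // {0, 10, 20}
 * </pre>
 *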
* @author <NAME> (<EMAIL>)
*
* @param <DATA>
* the type of the data at each point in time
* @param <SINGLE_TYPE>
* the type of the an element at a single point in time
* @param <RETURNTYPE>
* the time series returned by the get
*
*/
public abstract class TimeSeries<DATA, SINGLE_TYPE, RETURNTYPE extends TimeSeries<DATA, SINGLE_TYPE, RETURNTYPE>>
implements
Iterable<IndependentPair<Long, SINGLE_TYPE>>
{
/**
* Same as calling {@link #get(long, int, int)} with spans as 0
*
* @param time
* @return the requested data or null.
*/
public RETURNTYPE get(long time) {
return get(time, 0, 0);
}
/**
	 * returns the DATA at a specific point in time and those before and after,
	 * up to the number requested. This method may not return data for the
	 * specific requested time if it does not exist. If the time series is
	 * completely empty this function may return an empty array; however, if at
	 * least 1 data point exists and either nbefore or nafter is bigger than 1,
	 * then at least 1 datapoint will be returned.
* <p>
* Data should be returned in order
*
* @param time
* @param nbefore
* @param nafter
* @return all data found with these parameters
*/
public abstract RETURNTYPE get(long time, int nbefore, int nafter);
/**
	 * Same as {@link #get(long, int, int)} but instead of creating the output
	 * DATA instance, an existing data instance is handed in and filled. For
	 * convenience this output is also returned.
* <p>
* Data should be returned in order
*
* @param time
* @param nbefore
* @param nafter
* @param output
* @return all data found with these parameters
*/
public abstract RETURNTYPE get(long time, int nbefore, int nafter, RETURNTYPE output);
/**
* returns the RETURNTYPE at a specific point in time and those before and
* after within the specified thresholds. This method may not return data
	 * for the specific requested time if it does not exist. Similarly this
* method may return an empty array if no time data is available within the
* window specified.
* <p>
* Data should be returned in order
*
* @param time
* @param threshbefore
* @param threshafter
* @return all data found with these parameters
*/
public abstract RETURNTYPE get(long time, long threshbefore, long threshafter);
/**
* returns the RETURNTYPE between the specified time periods. This method
* may not return data for the specific requested time if it does not
	 * exist. Similarly this method may return an empty array if no time data
* is available within the window specified.
* <p>
* Data should be returned in order
*
* @param start
* @param end
* @return all data found with these parameters
*/
public abstract RETURNTYPE get(long start, long end);
/**
* Set the data associated with each time. This function explicitly assumes
* that time.length == data.length and there exists a single data instance
* per time instance
*
* @param time
* instances of time
* @param data
* instances of data
* @throws TimeSeriesSetException
*/
public abstract void set(long[] time, DATA data) throws TimeSeriesSetException;
/**
* @return all times
*/
public abstract long[] getTimes();
/**
* @return all data
*/
public abstract DATA getData();
/**
* @return an empty new instance of this timeseries type
*/
public abstract RETURNTYPE newInstance();
/**
* @return the number of valid time steps in this timeseries
*/
public abstract int size();
/**
* @param interpolate
* assign this timeseries to the internal one, efforts should be
* made to copy the data, not simply assign it
*/
public abstract void internalAssign(RETURNTYPE interpolate);
/**
* @param times
* @param data
*/
public void internalAssign(long[] times, DATA data) {
try {
this.set(times, data);
} catch (final TimeSeriesSetException e) {
}
}
/**
* @return clone this time series
*/
@SuppressWarnings("unchecked")
public RETURNTYPE copy() {
final RETURNTYPE t = newInstance();
t.internalAssign((RETURNTYPE) this);
return t;
}
/**
	 * Process using the provided processor and return a new, processed instance.
*
* @param tsp
* @return a new instance processed
*/
public RETURNTYPE process(TimeSeriesProcessor<DATA, SINGLE_TYPE, RETURNTYPE> tsp) {
final RETURNTYPE copy = copy();
tsp.process(copy);
return copy;
}
@SuppressWarnings("unchecked")
private RETURNTYPE self() {
return (RETURNTYPE) this;
}
/**
* Process using the provided processor
*
* @param tsp
* @return this object processed inplace
*/
public RETURNTYPE processInplace(TimeSeriesProcessor<DATA, SINGLE_TYPE, RETURNTYPE> tsp) {
tsp.process(self());
return self();
}
/**
* Convert a {@link TimeSeries}
*
* @param <OUTDATA>
* @param <OUTSING>
* @param <OUTRET>
* @param converter
* the converter
* @return the converted timeseries
*/
public <OUTDATA, OUTSING, OUTRET extends TimeSeries<OUTDATA, OUTSING, OUTRET>> OUTRET convert(
TimeSeriesConverter<DATA, SINGLE_TYPE, RETURNTYPE, OUTDATA, OUTSING, OUTRET> converter)
{
return converter.convert(self());
}
/**
* Convert a {@link TimeSeries}
*
* @param <OUTDATA>
* @param <OUTSING>
* @param <OUTRET>
* @param converter
* the converter
* @param tsp
* the processor
* @return the converted timeseries
*/
public <OUTDATA, OUTSING, OUTRET extends TimeSeries<OUTDATA, OUTSING, OUTRET>> OUTRET convert(
TimeSeriesConverter<DATA, SINGLE_TYPE, RETURNTYPE, OUTDATA, OUTSING, OUTRET> converter,
TimeSeriesProcessor<OUTDATA, OUTSING, OUTRET> tsp)
{
return converter.convert(self(), tsp);
}
@Override
public abstract String toString();
}
| 2,532 |
356 | /*
* Copyright 2015-2018 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.igormaznitsa.mindmap.swing.services;
import com.igormaznitsa.mindmap.plugins.api.HasOptions;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.ButtonGroup;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JComboBox;
import javax.swing.JEditorPane;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPasswordField;
import javax.swing.JPopupMenu;
import javax.swing.JRadioButton;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.JSlider;
import javax.swing.JSpinner;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.JToggleButton;
import javax.swing.JToolBar;
public interface UIComponentFactory {
@Nonnull
JPanel makePanel();
@Nonnull
JPanel makePanelWithOptions(@Nonnull HasOptions optionsProcessor);
@Nonnull
JComboBox makeComboBox();
@Nonnull
JSpinner makeSpinner();
@Nonnull
JButton makeButton();
@Nonnull
JToggleButton makeToggleButton();
@Nonnull
JRadioButton makeRadioButton();
@Nonnull
JToolBar makeToolBar();
@Nonnull
JScrollPane makeScrollPane();
@Nonnull
JCheckBox makeCheckBox();
@Nonnull
JLabel makeLabel();
@Nonnull
JPopupMenu makePopupMenu();
@Nonnull
JTextArea makeTextArea();
@Nonnull
JPasswordField makePasswordField();
@Nonnull
JTextField makeTextField();
@Nonnull
JEditorPane makeEditorPane();
@Nonnull
JMenuItem makeMenuItem(@Nonnull String text, @Nullable Icon icon);
@Nonnull
JRadioButtonMenuItem makeRadioButtonMenuItem(@Nonnull String text, @Nullable Icon icon, boolean selected);
@Nonnull
JCheckBoxMenuItem makeCheckboxMenuItem(@Nonnull String text, @Nullable Icon icon, boolean selected);
@Nonnull
ButtonGroup makeButtonGroup();
@Nonnull
JSeparator makeMenuSeparator();
@Nonnull
JMenu makeMenu(@Nonnull String text);
@Nonnull
JSlider makeSlider();
}
| 905 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#include "precompiled_svtools.hxx"
#include "tabbargeometry.hxx"
#include <basegfx/range/b2drange.hxx>
#include <basegfx/matrix/b2dhommatrix.hxx>
#include <basegfx/numeric/ftools.hxx>
#include <vcl/window.hxx>
#include <algorithm>
// the width (or height, depending on alignment) of the scroll buttons
#define BUTTON_FLOW_WIDTH 20
// the space between the scroll buttons and the items
#define BUTTON_FLOW_SPACE 2
// outer space to apply between the tab bar borders and any content. Note that those refer to a "normalized" geometry,
// i.e. if the tab bar were aligned at the top
#define OUTER_SPACE_LEFT 2
#define OUTER_SPACE_TOP 4
#define OUTER_SPACE_RIGHT 4
#define OUTER_SPACE_BOTTOM 2
// outer space to apply between the area for the items, and the actual items. They refer to a normalized geometry.
#define ITEMS_INSET_LEFT 4
#define ITEMS_INSET_TOP 3
#define ITEMS_INSET_RIGHT 4
#define ITEMS_INSET_BOTTOM 0
//......................................................................................................................
namespace svt
{
//......................................................................................................................
//==================================================================================================================
//= helper
//==================================================================================================================
namespace
{
//--------------------------------------------------------------------------------------------------------------
static void lcl_transform( Rectangle& io_rRect, const ::basegfx::B2DHomMatrix& i_rTransformation )
{
::basegfx::B2DRange aRect( io_rRect.Left(), io_rRect.Top(), io_rRect.Right(), io_rRect.Bottom() );
aRect.transform( i_rTransformation );
io_rRect.Left() = long( aRect.getMinX() );
io_rRect.Top() = long( aRect.getMinY() );
io_rRect.Right() = long( aRect.getMaxX() );
io_rRect.Bottom() = long( aRect.getMaxY() );
}
//--------------------------------------------------------------------------------------------------------------
        /** rotates the given, possibly rotated playground by 90 degrees, relative to the given reference rectangle
        */
void lcl_rotate( const Rectangle& i_rReference, Rectangle& io_rArea, const bool i_bRight )
{
// step 1: move the to-be-upper-left corner (left/bottom) of the rectangle to (0,0)
::basegfx::B2DHomMatrix aTransformation;
aTransformation.translate(
i_bRight ? -i_rReference.Left() : -i_rReference.Right(),
i_bRight ? -i_rReference.Bottom() : -i_rReference.Top()
);
// step 2: rotate by -90 degrees
aTransformation.rotate( i_bRight ? +F_PI2 : -F_PI2 );
// note:
// on the screen, the ordinate goes top-down, while basegfx calculates in a system where the
// ordinate goes bottom-up; thus the "wrong" sign before F_PI2 here
// step 3: move back to original coordinates
aTransformation.translate( i_rReference.Left(), i_rReference.Top() );
// apply transformation
lcl_transform( io_rArea, aTransformation );
}
}
//------------------------------------------------------------------------------------------------------------------
void lcl_mirrorHorizontally( const Rectangle& i_rReferenceArea, Rectangle& io_rArea )
{
io_rArea.Left() = i_rReferenceArea.Left() + i_rReferenceArea.Right() - io_rArea.Left();
io_rArea.Right() = i_rReferenceArea.Left() + i_rReferenceArea.Right() - io_rArea.Right();
::std::swap( io_rArea.Left(), io_rArea.Right() );
}
//------------------------------------------------------------------------------------------------------------------
void lcl_mirrorVertically( const Rectangle& i_rReferenceArea, Rectangle& io_rArea )
{
io_rArea.Top() = i_rReferenceArea.Top() + i_rReferenceArea.Bottom() - io_rArea.Top();
io_rArea.Bottom() = i_rReferenceArea.Top() + i_rReferenceArea.Bottom() - io_rArea.Bottom();
::std::swap( io_rArea.Top(), io_rArea.Bottom() );
}
//==================================================================================================================
//= NormalizedArea
//==================================================================================================================
//------------------------------------------------------------------------------------------------------------------
NormalizedArea::NormalizedArea()
:m_aReference()
{
}
//------------------------------------------------------------------------------------------------------------------
NormalizedArea::NormalizedArea( const Rectangle& i_rReference, const bool i_bIsVertical )
:m_aReference( i_bIsVertical ? Rectangle( i_rReference.TopLeft(), Size( i_rReference.GetHeight(), i_rReference.GetWidth() ) ) : i_rReference )
{
}
//------------------------------------------------------------------------------------------------------------------
Rectangle NormalizedArea::getTransformed( const Rectangle& i_rArea, const TabAlignment i_eTargetAlignment ) const
{
Rectangle aResult( i_rArea );
if ( ( i_eTargetAlignment == TABS_RIGHT )
|| ( i_eTargetAlignment == TABS_LEFT )
)
{
lcl_rotate( m_aReference, aResult, true );
if ( i_eTargetAlignment == TABS_LEFT )
{
Rectangle aReference( m_aReference );
aReference.Transpose();
lcl_mirrorHorizontally( aReference, aResult );
}
}
else
if ( i_eTargetAlignment == TABS_BOTTOM )
{
lcl_mirrorVertically( m_aReference, aResult );
}
return aResult;
}
//------------------------------------------------------------------------------------------------------------------
Rectangle NormalizedArea::getNormalized( const Rectangle& i_rArea, const TabAlignment i_eTargetAlignment ) const
{
Rectangle aResult( i_rArea );
if ( ( i_eTargetAlignment == TABS_RIGHT )
|| ( i_eTargetAlignment == TABS_LEFT )
)
{
Rectangle aReference( m_aReference );
lcl_rotate( m_aReference, aReference, true );
if ( i_eTargetAlignment == TABS_LEFT )
{
lcl_mirrorHorizontally( aReference, aResult );
}
lcl_rotate( aReference, aResult, false );
}
else
if ( i_eTargetAlignment == TABS_BOTTOM )
{
lcl_mirrorVertically( m_aReference, aResult );
}
return aResult;
}
//==================================================================================================================
//= TabBarGeometry
//==================================================================================================================
//------------------------------------------------------------------------------------------------------------------
TabBarGeometry::TabBarGeometry( const TabItemContent i_eItemContent )
:m_eTabItemContent( i_eItemContent )
,m_aItemsInset()
,m_aButtonBackRect()
,m_aItemsRect()
,m_aButtonForwardRect()
{
m_aItemsInset.Left() = ITEMS_INSET_LEFT;
m_aItemsInset.Top() = ITEMS_INSET_TOP;
m_aItemsInset.Right() = ITEMS_INSET_RIGHT;
m_aItemsInset.Bottom() = ITEMS_INSET_BOTTOM;
}
//------------------------------------------------------------------------------------------------------------------
TabBarGeometry::~TabBarGeometry()
{
}
//------------------------------------------------------------------------------------------------------------------
bool TabBarGeometry::impl_fitItems( ItemDescriptors& io_rItems ) const
{
if ( io_rItems.empty() )
// nothing to do, "no items" perfectly fit into any space we have ...
return true;
// the available size
Size aOutputSize( getItemsRect().GetSize() );
// shrunk by the outer space
aOutputSize.Width() -= m_aItemsInset.Right();
aOutputSize.Height() -= m_aItemsInset.Bottom();
const Rectangle aFitInto( Point( 0, 0 ), aOutputSize );
TabItemContent eItemContent( getItemContent() );
if ( eItemContent == TABITEM_AUTO )
{
// the "content modes" to try
TabItemContent eTryThis[] =
{
TABITEM_IMAGE_ONLY, // assumed to have the smallest rects
TABITEM_TEXT_ONLY,
TABITEM_IMAGE_AND_TEXT // assumed to have the largest rects
};
// determine which of the different version fits
eItemContent = eTryThis[0];
size_t nTryIndex = 2;
while ( nTryIndex > 0 )
{
const Point aBottomRight( io_rItems.rbegin()->GetRect( eTryThis[ nTryIndex ] ).BottomRight() );
if ( aFitInto.IsInside( aBottomRight ) )
{
eItemContent = eTryThis[ nTryIndex ];
break;
}
--nTryIndex;
}
}
// propagate to the items
for ( ItemDescriptors::iterator item = io_rItems.begin();
item != io_rItems.end();
++item
)
{
item->eContent = eItemContent;
}
const ItemDescriptor& rLastItem( *io_rItems.rbegin() );
const Point aLastItemBottomRight( rLastItem.GetCurrentRect().BottomRight() );
return aFitInto.Left() <= aLastItemBottomRight.X()
&& aFitInto.Right() >= aLastItemBottomRight.X();
}
//------------------------------------------------------------------------------------------------------------------
Size TabBarGeometry::getOptimalSize( ItemDescriptors& io_rItems, const bool i_bMinimalSize ) const
{
if ( io_rItems.empty() )
return Size(
m_aItemsInset.Left() + m_aItemsInset.Right(),
m_aItemsInset.Top() + m_aItemsInset.Bottom()
);
// the rect of the last item
const Rectangle& rLastItemRect( i_bMinimalSize ? io_rItems.rbegin()->aIconOnlyArea : io_rItems.rbegin()->aCompleteArea );
return Size(
rLastItemRect.Left() + 1 + m_aItemsInset.Right(),
rLastItemRect.Top() + 1 + rLastItemRect.Bottom() + m_aItemsInset.Bottom()
);
}
//------------------------------------------------------------------------------------------------------------------
void TabBarGeometry::relayout( const Size& i_rActualOutputSize, ItemDescriptors& io_rItems )
{
// assume all items fit
Point aButtonBackPos( OUTER_SPACE_LEFT, OUTER_SPACE_TOP );
m_aButtonBackRect = Rectangle( aButtonBackPos, Size( 1, 1 ) );
m_aButtonBackRect.SetEmpty();
Point aButtonForwardPos( i_rActualOutputSize.Width(), OUTER_SPACE_TOP );
m_aButtonForwardRect = Rectangle( aButtonForwardPos, Size( 1, 1 ) );
m_aButtonForwardRect.SetEmpty();
Point aItemsPos( OUTER_SPACE_LEFT, 0 );
Size aItemsSize( i_rActualOutputSize.Width() - OUTER_SPACE_LEFT - OUTER_SPACE_RIGHT, i_rActualOutputSize.Height() );
m_aItemsRect = Rectangle( aItemsPos, aItemsSize );
if ( !impl_fitItems( io_rItems ) )
{
// assumption was wrong, the items do not fit => calculate rects for the scroll buttons
const Size aButtonSize( BUTTON_FLOW_WIDTH, i_rActualOutputSize.Height() - OUTER_SPACE_TOP - OUTER_SPACE_BOTTOM );
aButtonBackPos = Point( OUTER_SPACE_LEFT, OUTER_SPACE_TOP );
m_aButtonBackRect = Rectangle( aButtonBackPos, aButtonSize );
aButtonForwardPos = Point( i_rActualOutputSize.Width() - BUTTON_FLOW_WIDTH - OUTER_SPACE_RIGHT, OUTER_SPACE_TOP );
m_aButtonForwardRect = Rectangle( aButtonForwardPos, aButtonSize );
aItemsPos.X() = aButtonBackPos.X() + aButtonSize.Width() + BUTTON_FLOW_SPACE;
aItemsSize.Width() = aButtonForwardPos.X() - BUTTON_FLOW_SPACE - aItemsPos.X();
m_aItemsRect = Rectangle( aItemsPos, aItemsSize );
// fit items, again. In the TABITEM_AUTO case, the smaller playground for the items might lead to another
// item content.
impl_fitItems( io_rItems );
}
}
//------------------------------------------------------------------------------------------------------------------
Point TabBarGeometry::getFirstItemPosition() const
{
return Point( m_aItemsInset.Left(), m_aItemsInset.Top() );
}
//......................................................................................................................
} // namespace svt
//......................................................................................................................
| 5,245 |
1,738 | <reponame>jeikabu/lumberyard
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.
#ifndef CRYINCLUDE_TOOLS_SPHERICALHARMONICS_PRT_IMAGEFACTORY_H
#define CRYINCLUDE_TOOLS_SPHERICALHARMONICS_PRT_IMAGEFACTORY_H
#pragma once
#if defined(OFFLINE_COMPUTATION)
#include <exception>
#include "IImageReader.h"
namespace NImage
{
class CHDRImageReader;
#if defined(USE_D3DX)
class CCommonImageReader;
#endif
//!< singleton to abstract image factory
class CImageFactory
{
public:
typedef enum EImageType
{
IMAGE_TYPE_HDR24, //!< HDR map with 3xfloat 32
#if defined(USE_D3DX)
IMAGE_TYPE_COMMON, //!< normal format supported by DirectX
#endif
        IMAGE_TYPE_TIFF, //!< tiff format (not supported by DirectX)
}EImageType;
//!< singleton stuff
static CImageFactory* Instance();
const NSH::CSmartPtr<const IImageReader, CSHAllocator<> > GetImageReader(const EImageType cImageType);
private:
//!< singleton stuff
CImageFactory(){}
CImageFactory(const CImageFactory&);
CImageFactory& operator= (const CImageFactory&);
};
}
#endif
#endif // CRYINCLUDE_TOOLS_SPHERICALHARMONICS_PRT_IMAGEFACTORY_H
| 702 |
887 | <gh_stars>100-1000
/*
* Copyright (C) 2015 Open Source Robotics Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef _GAZEBO_LOGICAL_CAMERAVISUAL_HH_
#define _GAZEBO_LOGICAL_CAMERAVISUAL_HH_
#include <string>
#include "gazebo/msgs/MessageTypes.hh"
#include "gazebo/rendering/Visual.hh"
#include "gazebo/util/system.hh"
namespace gazebo
{
namespace rendering
{
/// \addtogroup gazebo_rendering Rendering
/// \{
/// \brief Logical camera visualization
///
/// This class is used to visualize a logical camera generated from
/// a LogicalCameraSensor. The sensor's frustum is drawn in the 3D
/// environment.
class GZ_RENDERING_VISIBLE LogicalCameraVisual : public Visual
{
/// \brief Constructor
/// \param[in] _name Name of the Visual
/// \param[in] _vis Pointer to the parent Visual
public: LogicalCameraVisual(const std::string &_name, VisualPtr _vis);
/// \brief Destructor
public: virtual ~LogicalCameraVisual();
/// \brief Load the Visual
/// \param[in] _msg Message describing the camera sensor.
public: void Load(const msgs::LogicalCameraSensor &_msg);
using Visual::Load;
// Documentation inherited
protected: virtual void Fini();
/// \brief Update the visual
private: void Update();
};
/// \}
}
}
#endif
| 635 |
386 | /*********************************************************************************
* *
* The MIT License (MIT) *
* *
* Copyright (c) 2015-2021 aoju.org and other contributors. *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy *
* of this software and associated documentation files (the "Software"), to deal *
* in the Software without restriction, including without limitation the rights *
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell *
* copies of the Software, and to permit persons to whom the Software is *
* furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, *
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN *
* THE SOFTWARE. *
* *
********************************************************************************/
package org.aoju.bus.image.galaxy;
import org.aoju.bus.logger.Logger;
import java.net.InetAddress;
import java.util.Arrays;
/**
* @author <NAME>
* @version 6.3.2
* @since JDK 1.8+
*/
public class Capacity<V> implements Cloneable, java.io.Serializable {
static final boolean DISABLED = isFalse(Capacity.class.getName());
private static final int DEFAULT_CAPACITY = 32;
private static final int MINIMUM_CAPACITY = 4;
private static final int MAXIMUM_CAPACITY = 1 << 30;
private static final byte FREE = 0;
private static final byte FULL = 1;
private static final byte REMOVED = -1;
private transient int[] keys;
private transient Object[] values;
private transient byte[] states;
private transient int free;
private transient int size;
public Capacity() {
init(DEFAULT_CAPACITY);
}
public Capacity(int expectedMaxSize) {
if (expectedMaxSize < 0)
throw new IllegalArgumentException(
"expectedMaxSize is negative: " + expectedMaxSize);
init(capacity(expectedMaxSize));
}
private static boolean isFalse(String name) {
try {
String s = System.getProperty(name);
return ((null != s) && s.equalsIgnoreCase("false"));
} catch (IllegalArgumentException | NullPointerException e) {
}
return false;
}
public static String hostNameOf(InetAddress inetAddress) {
if (DISABLED)
return inetAddress.getHostAddress();
String hostAddress = inetAddress.getHostAddress();
Logger.debug("rDNS {} -> ...", hostAddress);
long start = System.nanoTime();
String hostName = inetAddress.getHostName();
long end = System.nanoTime();
Logger.debug("rDNS {} -> {} in {} ms", hostAddress, hostName, (end - start) / 1000);
return hostName;
}
private int capacity(int expectedMaxSize) {
int minCapacity = expectedMaxSize << 1;
if (minCapacity > MAXIMUM_CAPACITY)
return MAXIMUM_CAPACITY;
int capacity = MINIMUM_CAPACITY;
while (capacity < minCapacity)
capacity <<= 1;
return capacity;
}
private void init(int initCapacity) {
keys = new int[initCapacity];
values = new Object[initCapacity];
states = new byte[initCapacity];
free = initCapacity >>> 1;
}
public int size() {
return size;
}
public boolean isEmpty() {
return size == 0;
}
public V get(int key) {
byte[] states = this.states;
int[] keys = this.keys;
int mask = keys.length - 1;
int i = key & mask;
while (states[i] != FREE) {
if (keys[i] == key)
return (V) values[i];
i = (i + 1) & mask;
}
return null;
}
public boolean containsKey(int key) {
byte[] states = this.states;
int[] keys = this.keys;
int mask = keys.length - 1;
int i = key & mask;
while (states[i] != FREE) {
if (keys[i] == key)
return states[i] > FREE;
i = (i + 1) & mask;
}
return false;
}
public V put(int key, V value) {
byte[] states = this.states;
int[] keys = this.keys;
int mask = keys.length - 1;
int i = key & mask;
while (states[i] > FREE) {
if (keys[i] == key) {
V oldValue = (V) values[i];
values[i] = value;
return oldValue;
}
i = (i + 1) & mask;
}
byte oldState = states[i];
states[i] = FULL;
keys[i] = key;
values[i] = value;
++size;
if (oldState == FREE && --free < 0)
resize(Math.max(capacity(size), keys.length));
return null;
}
public void trimToSize() {
resize(capacity(size));
}
public void rehash() {
resize(keys.length);
}
private void resize(int newLength) {
if (newLength > MAXIMUM_CAPACITY)
throw new IllegalStateException("Capacity exhausted.");
int[] oldKeys = keys;
Object[] oldValues = values;
byte[] oldStates = states;
int[] newKeys = new int[newLength];
Object[] newValues = new Object[newLength];
byte[] newStates = new byte[newLength];
int mask = newLength - 1;
for (int j = 0; j < oldKeys.length; j++) {
if (oldStates[j] > 0) {
int key = oldKeys[j];
int i = key & mask;
while (newStates[i] != FREE)
i = (i + 1) & mask;
newStates[i] = FULL;
newKeys[i] = key;
newValues[i] = oldValues[j];
oldValues[j] = null;
}
}
keys = newKeys;
values = newValues;
states = newStates;
free = (newLength >>> 1) - size;
}
public V remove(int key) {
byte[] states = this.states;
int[] keys = this.keys;
int mask = keys.length - 1;
int i = key & mask;
while (states[i] != FREE) {
if (keys[i] == key) {
if (states[i] < FREE)
return null;
states[i] = REMOVED;
V oldValue = (V) values[i];
values[i] = null;
size--;
return oldValue;
}
i = (i + 1) & mask;
}
return null;
}
public void clear() {
Arrays.fill(values, null);
Arrays.fill(states, FREE);
size = 0;
free = keys.length >>> 1;
}
public Object clone() {
try {
Capacity<V> m = (Capacity<V>) super.clone();
m.states = states.clone();
m.keys = keys.clone();
m.values = values.clone();
return m;
} catch (CloneNotSupportedException e) {
throw new InternalError();
}
}
public boolean accept(Visitor<V> visitor) {
for (int i = 0; i < states.length; i++)
if (states[i] > FREE)
if (!visitor.visit(keys[i], (V) values[i]))
return false;
return true;
}
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
s.defaultWriteObject();
byte[] states = this.states;
int[] keys = this.keys;
Object[] values = this.values;
s.writeInt(size);
for (int i = 0; i < states.length; i++) {
if (states[i] > FREE) {
s.writeInt(keys[i]);
s.writeObject(values[i]);
}
}
}
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
int count = s.readInt();
init(capacity(count));
size = count;
free -= count;
byte[] states = this.states;
int[] keys = this.keys;
Object[] values = this.values;
int mask = keys.length - 1;
while (count-- > 0) {
int key = s.readInt();
int i = key & mask;
while (states[i] != FREE)
i = (i + 1) & mask;
states[i] = FULL;
keys[i] = key;
values[i] = s.readObject();
}
}
public interface Visitor<V> {
boolean visit(int key, V value);
}
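    /*
     * Minimal usage sketch (illustrative, not part of the original source): the
     * class behaves as an open-addressing hash map with primitive int keys.
     *
     *   Capacity<String> map = new Capacity<>();
     *   map.put(7, "seven");
     *   String v = map.get(7);                    // "seven"
     *   map.accept((key, value) -> true);         // visit every stored entry
     *
     * All identifiers above are hypothetical example values.
     */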
}
| 4,698 |
2,504 | <reponame>nefeithu/behaviac
// ---------------------------------------------------------------------
// THIS FILE IS AUTO-GENERATED BY BEHAVIAC DESIGNER, SO PLEASE DON'T MODIFY IT BY YOURSELF!
// ---------------------------------------------------------------------
#ifndef _BEHAVIAC_MEMBER_VISITOR_H_
#define _BEHAVIAC_MEMBER_VISITOR_H_
#include "behaviac_agent_headers.h"
// Agent property and method handlers
struct PROPERTY_TYPE_FirstAgent_p1 { };
template<> inline FirstStruct& FirstAgent::_Get_Property_<PROPERTY_TYPE_FirstAgent_p1>()
{
return this->p1;
}
struct METHOD_TYPE_FirstAgent_GetP1s1 { };
template<> inline int FirstAgent::_Execute_Method_<METHOD_TYPE_FirstAgent_GetP1s1>()
{
return this->FirstAgent::GetP1s1();
}
#endif // _BEHAVIAC_MEMBER_VISITOR_H_
| 265 |
884 | <reponame>slyoldfox/blaze-persistence
/*
* Copyright 2014 - 2021 Blazebit.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.blazebit.persistence.view.impl.proxy;
import java.lang.reflect.Field;
//CHECKSTYLE:OFF: IllegalImport
import sun.misc.Unsafe;
//CHECKSTYLE:ON: IllegalImport
/**
*
* @author <NAME>
* @since 1.0.6
*/
public final class UnsafeHelper {
private static final Unsafe UNSAFE;
private UnsafeHelper() {
}
static {
Field f;
try {
f = Unsafe.class.getDeclaredField("theUnsafe");
f.setAccessible(true);
UNSAFE = (Unsafe) f.get(null);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static Class<?> define(String name, byte[] bytes, final Class<?> declaringClass) {
try {
ClassLoader newLoader = declaringClass.getClassLoader();
return UNSAFE.defineClass(name, bytes, 0, bytes.length, newLoader, null);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
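    /*
     * Usage sketch (illustrative assumption, not from the original source):
     *
     *   byte[] bytecode = ...; // class-file bytes produced elsewhere
     *   Class<?> proxyClass = UnsafeHelper.define("com.example.FooProxy", bytecode, Foo.class);
     *
     * "com.example.FooProxy" and Foo are hypothetical example names; the class is
     * defined in the class loader of the declaring class passed as the third argument.
     */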
}
| 588 |
3,013 | <reponame>swt2c/wave<gh_stars>1000+
# Plot / Events / Routing
# Handle #events on a #plot card using routing.
# ---
from h2o_wave import main, app, on, handle_on, Q, ui, data
@on('pricing.select_marks')
async def show_selected_marks(q: Q, marks: any):
q.page['details'].content = f'You selected {marks}'
await q.page.save()
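

# Note (added for illustration): the plot card created in serve() below is named
# 'pricing' and declares events=['select_marks'], so a mark selection reaches the
# app as q.events.pricing.select_marks; handle_on(q) then routes that payload to
# the @on('pricing.select_marks') handler above.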
@app('/demo')
async def serve(q: Q):
if not q.client.initialized:
q.client.initialized = True
q.page['pricing'] = ui.plot_card(
box='1 1 4 5',
title='Interval',
data=data(fields='product price', rows=[
['spam', 1.49],
['eggs', 2.49],
['ham', 1.99],
], pack=True),
plot=ui.plot([ui.mark(type='interval', x='=product', y='=price', y_min=0)]),
events=['select_marks']
)
q.page['details'] = ui.markdown_card(
box='1 6 4 2',
title='Selected Product',
content='Nothing selected.',
)
await q.page.save()
else:
await handle_on(q)
| 540 |
3,294 | // This is a part of the Microsoft Foundation Classes C++ library.
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// This source code is only intended as a supplement to the
// Microsoft Foundation Classes Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Microsoft Foundation Classes product.
#include "stdafx.h"
#include "OutlookDemo.h"
#include "PreviewPane.h"
#ifdef _DEBUG
#define new DEBUG_NEW
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#endif
const int nHorzMargin = 2;
const int nVertMargin = 2;
/////////////////////////////////////////////////////////////////////////////
// CPreviewPane
IMPLEMENT_DYNCREATE(CPreviewPane, CView)
CPreviewPane::CPreviewPane()
{
}
CPreviewPane::~CPreviewPane()
{
}
BEGIN_MESSAGE_MAP(CPreviewPane, CView)
ON_WM_ERASEBKGND()
END_MESSAGE_MAP()
/////////////////////////////////////////////////////////////////////////////
// CPreviewPane drawing
void CPreviewPane::OnDraw(CDC* pDC)
{
CRect rectClient;
GetClientRect(rectClient);
CRect rectCaption = rectClient;
rectCaption.bottom = rectCaption.top + afxGlobalData.GetTextHeight() + 4;
CRect rectMessage = rectClient;
rectMessage.top = rectCaption.bottom;
pDC->FillRect(rectCaption, &afxGlobalData.brBarFace);
pDC->FillRect(rectMessage, &afxGlobalData.brWindow);
int nOldBkMode = pDC->SetBkMode(TRANSPARENT);
COLORREF clrOldText = pDC->SetTextColor(afxGlobalData.clrBarText);
CFont* pOldFont = (CFont*) pDC->SelectStockObject(DEFAULT_GUI_FONT);
CRect rectText = rectCaption;
rectText.left += 2 * nHorzMargin;
rectText.right -= 2 * nHorzMargin + rectText.Height();
CString strCaption = _T("Preview area...");
pDC->DrawText(strCaption, rectText, DT_END_ELLIPSIS | DT_SINGLELINE | DT_VCENTER);
pDC->SetTextColor(afxGlobalData.clrWindowText);
rectText = rectMessage;
rectText.DeflateRect(nHorzMargin, nVertMargin);
CString strText = _T("Message body");
pDC->DrawText(strText, rectText, DT_WORDBREAK | DT_END_ELLIPSIS);
pDC->SelectObject(pOldFont);
pDC->SetTextColor(clrOldText);
pDC->SetBkMode(nOldBkMode);
}
/////////////////////////////////////////////////////////////////////////////
// CPreviewPane diagnostics
#ifdef _DEBUG
void CPreviewPane::AssertValid() const
{
CView::AssertValid();
}
void CPreviewPane::Dump(CDumpContext& dc) const
{
CView::Dump(dc);
}
#endif //_DEBUG
/////////////////////////////////////////////////////////////////////////////
// CPreviewPane message handlers
BOOL CPreviewPane::OnEraseBkgnd(CDC* /*pDC*/)
{
return TRUE;
}
| 863 |
531 | /*
* Copyright (c) 2015-2021, www.dibo.ltd (<EMAIL>).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
* <p>
* https://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package diboot.core.test.binder.service.impl;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import diboot.core.test.binder.entity.TestUploadFile;
import diboot.core.test.binder.mapper.TestUploadFileMapper;
import diboot.core.test.binder.service.TestUploadFileService;
import org.springframework.stereotype.Service;
/**
* TestUploadFile相关Service
* @author <EMAIL>
* @version 2021/08/27
*/
@Service
public class TestUploadFileServiceImpl extends ServiceImpl<TestUploadFileMapper, TestUploadFile> implements TestUploadFileService {
}
| 356 |
1,362 | package org.mitre.synthea.engine;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.mitre.synthea.engine.ExpressedSymptom.SymptomInfo;
import org.mitre.synthea.engine.ExpressedSymptom.SymptomSource;
import org.mitre.synthea.world.agents.Person;
public class ExpressedConditionRecord implements Cloneable, Serializable {
private static final long serialVersionUID = 4322116644425686900L;
  // this class contains basic info regarding an expressed condition,
  // such as the onset time and end time
public class ConditionPeriod implements Cloneable, Serializable {
private static final long serialVersionUID = 4322116644425686901L;
private Long onsetTime;
private Long endTime;
public ConditionPeriod(Long onsetTime) {
this.onsetTime = onsetTime;
this.endTime = null;
}
public ConditionPeriod(Long onsetTime, Long endTime) {
this.onsetTime = onsetTime;
this.endTime = endTime;
}
public ConditionPeriod clone() {
return new ConditionPeriod(this.onsetTime, this.endTime);
}
public Long getEndTime() {
return endTime;
}
public void setEndTime(Long endTime) {
this.endTime = endTime;
}
public Long getOnsetTime() {
return onsetTime;
}
}
/**
* A condition with a set of onset and end time entries.
*/
public class OnsetCondition implements Cloneable, Serializable {
private static final long serialVersionUID = 4322116644425686902L;
// name of the condition
private String name;
private List<ConditionPeriod> timeInfos;
public OnsetCondition(String name) {
this.name = name;
timeInfos = new LinkedList<ConditionPeriod>();
}
/**
* Create a shallow copy of this object.
*/
public OnsetCondition clone() {
OnsetCondition data = new OnsetCondition(this.name);
data.timeInfos.addAll(this.timeInfos);
return data;
}
public String getName() {
return name;
}
public List<ConditionPeriod> getTimeInfos() {
return timeInfos;
}
/**
* Get the last recorded onset time.
*/
public Long getLastOnsetTime() {
if (timeInfos.isEmpty()) {
return null;
} else {
int size = timeInfos.size();
return timeInfos.get(size - 1).getOnsetTime();
}
}
/**
* Get the last recorded end time.
*/
public Long getLastEndTime() {
if (timeInfos.isEmpty()) {
return null;
} else {
int size = timeInfos.size();
return timeInfos.get(size - 1).getEndTime();
}
}
public void addNewEntry(long onsetTime) {
ConditionPeriod entry = new ConditionPeriod(Long.valueOf(onsetTime), null);
timeInfos.add(entry);
}
/**
* Set the end time the last entry.
*/
public void endLastEntry(long time) {
int size = timeInfos.size();
if (size > 0) {
timeInfos.get(size - 1).setEndTime(Long.valueOf(time));
}
}
}
/**
* Used to record condition onset by modules.
*/
public class ModuleConditions implements Cloneable, Serializable {
private static final long serialVersionUID = 4322116644425686903L;
// source from which the conditions are onset
private String source;
/** Data structure for storing onset conditions (init_time, end_time).*/
private Map<String, OnsetCondition> onsetConditions;
    /** Data structure for storing the mapping from state to condition names.
     * This is useful when handling the ConditionEnd.conditionOnSet attribute. */
private Map<String, String> state2conditionMapping;
/**
* Create new instance for the specified module name.
*/
public ModuleConditions(String source) {
this.source = source;
onsetConditions = new ConcurrentHashMap<String, OnsetCondition>();
state2conditionMapping = new ConcurrentHashMap<String, String>();
}
/**
* Create a shallow copy of this instance.
*/
public ModuleConditions clone() {
ModuleConditions data = new ModuleConditions(this.source);
data.state2conditionMapping.putAll(this.state2conditionMapping);
data.onsetConditions.putAll(this.onsetConditions);
return data;
}
/**
* Record the onset of a condition.
*/
public void onsetCondition(String condition, String state, long time) {
if (!onsetConditions.containsKey(condition)) {
onsetConditions.put(condition, new OnsetCondition(condition));
}
OnsetCondition onsetCondition = onsetConditions.get(condition);
onsetCondition.addNewEntry(time);
state2conditionMapping.put(state, condition);
}
/**
* Record the end of a condition.
*/
public void endCondition(String condition, long time) {
if (onsetConditions.containsKey(condition)) {
onsetConditions.get(condition).endLastEntry(time);
}
}
/**
* Get the last recorded onset time.
*/
public Long getConditionLastOnsetTime(String condition) {
if (onsetConditions.containsKey(condition)) {
return onsetConditions.get(condition).getLastOnsetTime();
}
return null;
}
/**
* Get the last recorded end time.
*/
public Long getConditionLastEndTime(String condition) {
if (onsetConditions.containsKey(condition)) {
return onsetConditions.get(condition).getLastEndTime();
}
return null;
}
/**
* Get the condition for the supplied state.
*/
public String getConditionFromState(String state) {
if (state2conditionMapping.containsKey(state)) {
return state2conditionMapping.get(state);
}
return null;
}
/**
* Get the recorded conditions and onset/end information.
* @return a map of condition name to onset/end records.
*/
public Map<String, OnsetCondition> getOnsetConditions() {
return onsetConditions;
}
}
// this class represents a condition with its associated symptoms
public class ConditionWithSymptoms implements Cloneable, Serializable {
private static final long serialVersionUID = 4322116644425686904L;
private String conditionName;
private Long onsetTime;
private Long endTime;
// Data structure for storing symptoms and associated values during the condition
private Map<String, List<Integer>> symptoms;
/**
* Create a new instance for the supplied condition name, onset and end times.
*/
public ConditionWithSymptoms(String name, Long onsetTime, Long endTime) {
this.conditionName = name;
this.onsetTime = onsetTime;
this.endTime = endTime;
this.symptoms = new ConcurrentHashMap<String, List<Integer>>();
}
/**
* Create a shallow copy of this instance.
*/
public ConditionWithSymptoms clone() {
ConditionWithSymptoms data = new ConditionWithSymptoms(conditionName, onsetTime, endTime);
data.symptoms.putAll(this.symptoms);
return data;
}
/**
* Record a symptom for the supplied module.
* @param name symptom name.
* @param symptomSource module origin of the symptom.
*/
public void addSymptoms(String name, SymptomSource symptomSource) {
Map<Long, SymptomInfo> timedTypedSymptoms = symptomSource.getTimeInfos();
      // get the values that correspond to all times belonging
      // to the interval [begin, end] of the condition, if any.
List<Long> allTimes = new ArrayList<Long>();
for (Long time : timedTypedSymptoms.keySet()) {
boolean greatThanBegin = time >= onsetTime;
boolean lowThanEnd = (endTime != null && time <= endTime) || (endTime == null);
if (greatThanBegin && lowThanEnd) {
allTimes.add(time);
}
}
if (allTimes.size() > 0) {
Collections.sort(allTimes);
if (!symptoms.containsKey(name)) {
symptoms.put(name, new ArrayList<Integer>());
}
for (Long time : allTimes) {
Integer value = timedTypedSymptoms.get(time).getValue();
symptoms.get(name).add(value);
}
}
}
public Long getOnsetTime() {
return onsetTime;
}
public Long getEndTime() {
return endTime;
}
public String getConditionName() {
return conditionName;
}
public Map<String, List<Integer>> getSymptoms() {
return symptoms;
}
}
// a map: module.name -> Conditions
private Map<String, ModuleConditions> sources;
Person person;
public ExpressedConditionRecord(Person person) {
this.person = person;
sources = new ConcurrentHashMap<String, ModuleConditions>();
}
/**
* Create a shallow clone of this instance.
*/
public ExpressedConditionRecord clone() {
ExpressedConditionRecord data = new ExpressedConditionRecord(this.person);
data.sources.putAll(this.sources);
return data;
}
public Map<String, ModuleConditions> getSources() {
return sources;
}
/**
* Method that is used to update the onsetConditions field when
* a ConditionOnset state is processed.
*/
public void onConditionOnset(String module, String state, String condition, long time) {
if (!sources.containsKey(module)) {
sources.put(module, new ModuleConditions(module));
}
ModuleConditions moduleConditions = sources.get(module);
moduleConditions.onsetCondition(condition, state, time);
}
/**
* Method that is used to retrieve the last time a condition
* has been onset from a given module.
*/
public Long getConditionLastOnsetTimeFromModule(String module, String condition) {
Long result = null;
if (sources.containsKey(module)) {
ModuleConditions moduleConditions = sources.get(module);
result = moduleConditions.getConditionLastOnsetTime(condition);
}
return result;
}
/**
* Method that is used to retrieve the last time a ConditionEnd state
* has been processed for a given condition from a given module.
*/
public Long getConditionLastEndTimeFromModule(String module, String condition) {
Long result = null;
if (sources.containsKey(module)) {
ModuleConditions moduleConditions = sources.get(module);
result = moduleConditions.getConditionLastEndTime(condition);
}
return result;
}
/**
* Method for retrieving the condition name from a state name.
* Useful when dealing with ConditionEnd.conditionOnSet attribute.
*/
public String getConditionFromState(String module, String state) {
String result = null;
boolean isModulePresent = sources.containsKey(module);
if (isModulePresent) {
result = sources.get(module).getConditionFromState(state);
}
return result;
}
/**
* Method that is used to update the onsetConditions field when
* a ConditionEnd state is processed.
*/
public void onConditionEnd(String module, String condition, long time) {
boolean isModulePresent = sources.containsKey(module);
if (isModulePresent) {
sources.get(module).endCondition(condition, time);
}
}
/**
   * Get the symptoms that were expressed as part of
   * the conditions the person suffers from.
   * The returned data is a map of [time: List of ConditionWithSymptoms].
* It captures the conditions a person has suffered from together
* with the related symptoms at different age/time.
*/
public Map<Long, List<ConditionWithSymptoms>> getConditionSymptoms() {
Map<String, ExpressedSymptom> symptoms = person.getExpressedSymptoms();
Map<Long, List<ConditionWithSymptoms>> result;
result = new ConcurrentHashMap<Long, List<ConditionWithSymptoms>>();
for (String module : sources.keySet()) {
ModuleConditions moduleConditions = sources.get(module);
for (String condition : moduleConditions.getOnsetConditions().keySet()) {
List<ConditionPeriod> infos = moduleConditions.getOnsetConditions().get(
condition).getTimeInfos();
for (ConditionPeriod entry : infos) {
Long begin = entry.getOnsetTime();
Long end = entry.getEndTime();
if (!result.containsKey(begin)) {
result.put(begin, new LinkedList<ConditionWithSymptoms>());
}
ConditionWithSymptoms conditionWithSymptoms = new ConditionWithSymptoms(
condition, begin, end
);
for (String type : symptoms.keySet()) {
ExpressedSymptom expressedSymptom = symptoms.get(type);
if (expressedSymptom.getSources().containsKey(module)) {
SymptomSource symptomSource = expressedSymptom.getSources().get(module);
conditionWithSymptoms.addSymptoms(type, symptomSource);
}
}
result.get(begin).add(conditionWithSymptoms);
}
}
}
return result;
}
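  /*
   * Illustrative sketch (assumption, not part of the original source): consuming
   * the structure returned by getConditionSymptoms().
   *
   *   for (Map.Entry<Long, List<ConditionWithSymptoms>> e : record.getConditionSymptoms().entrySet()) {
   *     Long onsetTime = e.getKey();
   *     for (ConditionWithSymptoms c : e.getValue()) {
   *       Map<String, List<Integer>> symptomValues = c.getSymptoms();
   *     }
   *   }
   *
   * "record" stands for a hypothetical ExpressedConditionRecord instance.
   */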
}
| 4,608 |
703 | #include <ToolsFoundation/ToolsFoundationPCH.h>
#include <ToolsFoundation/Object/DocumentObjectManager.h>
#include <ToolsFoundation/Object/DocumentObjectVisitor.h>
ezDocumentObjectVisitor::ezDocumentObjectVisitor(
const ezDocumentObjectManager* pManager, const char* szChildrenProperty /*= "Children"*/, const char* szRootProperty /*= "Children"*/)
: m_pManager(pManager)
, m_sChildrenProperty(szChildrenProperty)
, m_sRootProperty(szRootProperty)
{
const ezAbstractProperty* pRootProp = m_pManager->GetRootObject()->GetType()->FindPropertyByName(szRootProperty);
EZ_ASSERT_DEV(pRootProp, "Given root property '{0}' does not exist on root object", szRootProperty);
EZ_ASSERT_DEV(pRootProp->GetCategory() == ezPropertyCategory::Set || pRootProp->GetCategory() == ezPropertyCategory::Array,
"Traverser only works on arrays and sets.");
// const ezAbstractProperty* pChildProp = pRootProp->GetSpecificType()->FindPropertyByName(szChildrenProperty);
// EZ_ASSERT_DEV(pChildProp, "Given child property '{0}' does not exist", szChildrenProperty);
// EZ_ASSERT_DEV(pChildProp->GetCategory() == ezPropertyCategory::Set || pRootProp->GetCategory() == ezPropertyCategory::Array, "Traverser
// only works on arrays and sets.");
}
void ezDocumentObjectVisitor::Visit(const ezDocumentObject* pObject, bool bVisitStart, VisitorFunction function)
{
const char* szProperty = m_sChildrenProperty;
if (pObject == nullptr || pObject == m_pManager->GetRootObject())
{
pObject = m_pManager->GetRootObject();
szProperty = m_sRootProperty;
}
if (!bVisitStart || function(pObject))
{
TraverseChildren(pObject, szProperty, function);
}
}
void ezDocumentObjectVisitor::TraverseChildren(const ezDocumentObject* pObject, const char* szProperty, VisitorFunction& function)
{
const ezInt32 iChildren = pObject->GetTypeAccessor().GetCount(szProperty);
for (ezInt32 i = 0; i < iChildren; i++)
{
ezVariant obj = pObject->GetTypeAccessor().GetValue(szProperty, i);
EZ_ASSERT_DEBUG(obj.IsValid() && obj.IsA<ezUuid>(), "null obj found during traversal.");
const ezDocumentObject* pChild = m_pManager->GetObject(obj.Get<ezUuid>());
if (function(pChild))
{
TraverseChildren(pChild, m_sChildrenProperty, function);
}
}
}
| 764 |
402 | <reponame>mortensen/flow
package com.vaadin.flow.uitest.ui;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.Assert;
import org.junit.Test;
import org.openqa.selenium.JavascriptExecutor;
import com.vaadin.flow.component.html.testbench.AnchorElement;
import com.vaadin.flow.component.html.testbench.DivElement;
import com.vaadin.flow.component.html.testbench.NativeButtonElement;
import com.vaadin.flow.testutil.ChromeBrowserTest;
import com.vaadin.testbench.TestBenchElement;
public class InertComponentIT extends ChromeBrowserTest {
@Test
public void modalComponentAdded_inertButtonClicked_noNewComponentAdded() {
open();
final long initialBoxCount = getBoxCount();
Optional<NativeButtonElement> newModalBoxButton = getNewModalBoxButton();
newModalBoxButton.ifPresent(NativeButtonElement::click);
validateBoxCount(initialBoxCount + 1, "Expected a new modal box.");
newModalBoxButton.ifPresent(NativeButtonElement::click);
validateBoxCount(initialBoxCount + 1,
"Expected no new boxes as the button is now inert.");
List<NativeButtonElement> removeButtons = getAll(
NativeButtonElement.class, InertComponentView.REMOVE)
.collect(Collectors.toList());
removeButtons.get(removeButtons.size() - 1).click();
validateBoxCount(initialBoxCount,
"Expected the modal box was removed.");
newModalBoxButton.ifPresent(NativeButtonElement::click);
validateBoxCount(initialBoxCount + 1,
"Expected a new modal box when button no longer inert.");
}
@Test
public void modalComponentAdded_removedFromDom_othersStillInert() {
open();
final long initialBoxCount = getBoxCount();
Optional<NativeButtonElement> newModalBoxButton = getNewModalBoxButton();
newModalBoxButton.ifPresent(NativeButtonElement::click);
validateBoxCount(initialBoxCount + 1, "Expected a new modal box.");
// Remove the modal box from DOM
((JavascriptExecutor) getDriver())
.executeScript("document.body.removeChild("
+ "((v = document.querySelectorAll('[id^=\""
+ InertComponentView.BOX
+ "-\"]')) => v[v.length - 1])());");
validateBoxCount(initialBoxCount,
"Expected the modal box was removed from DOM.");
newModalBoxButton.ifPresent(NativeButtonElement::click);
validateBoxCount(initialBoxCount,
"Expected no new box as UI still inert.");
}
@Test
public void modalComponentAdded_routerLinkClicked_noNavigation() {
open();
final long initialBoxCount = getBoxCount();
Optional<AnchorElement> linkToAnotherPage = getAll(AnchorElement.class,
InertComponentView.LINK).findFirst();
Assert.assertTrue(linkToAnotherPage.isPresent());
getNewModalBoxButton().ifPresent(NativeButtonElement::click);
validateBoxCount(initialBoxCount + 1, "Expected a new modal box.");
linkToAnotherPage.get().click();
validateBoxCount(initialBoxCount + 1,
"Expected to stay on the same page.");
}
private Optional<NativeButtonElement> getNewModalBoxButton() {
return getAll(NativeButtonElement.class,
InertComponentView.NEW_MODAL_BOX).findFirst();
}
private long getBoxCount() {
return getAll(DivElement.class, InertComponentView.BOX).count();
}
private <T extends TestBenchElement> Stream<T> getAll(Class<T> elementClass,
String idPrefix) {
return $(elementClass).all().stream()
.filter(e -> e.getAttribute("id").startsWith(idPrefix));
}
private void validateBoxCount(long initialBoxCount, String message) {
Assert.assertEquals(message, initialBoxCount, getBoxCount());
}
}
| 1,584 |
14,668 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/omnibox/browser/history_index_restore_observer.h"
HistoryIndexRestoreObserver::HistoryIndexRestoreObserver(base::OnceClosure task)
: task_(std::move(task)), succeeded_(false) {}
HistoryIndexRestoreObserver::~HistoryIndexRestoreObserver() {}
void HistoryIndexRestoreObserver::OnCacheRestoreFinished(bool success) {
succeeded_ = success;
std::move(task_).Run();
}
| 170 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_filter.hxx"
#include <com/sun/star/task/XStatusIndicator.hpp>
#include <unotools/ucbstreamhelper.hxx>
#define CGM_BREAK_ACTION 0xffffffff
#include <osl/endian.h>
#include <vcl/virdev.hxx>
#include <vcl/graph.hxx>
#include <tools/stream.hxx>
#include <chart.hxx>
#include <main.hxx>
#include <elements.hxx>
#include <outact.hxx>
using namespace ::com::sun::star;
// ---------------------------------------------------------------
void CGM::ImplCGMInit()
{
mbIsFinished = mbPicture = mbMetaFile = mbPictureBody = sal_False;
mnActCount = 0;
mnOutdx = 28000;
mnOutdy = 21000;
mpBuf = NULL;
mpChart = NULL;
mpBitmapInUse = NULL;
pCopyOfE = new CGMElements( *this );
pElement = new CGMElements( *this );
}
// ---------------------------------------------------------------
#ifdef CGM_EXPORT_IMPRESS
CGM::CGM( sal_uInt32 nMode, uno::Reference< frame::XModel > & rModel ) :
mpGraphic ( NULL ),
mpCommentOut ( NULL ),
mbStatus ( sal_True ),
mpOutAct ( new CGMImpressOutAct( *this, rModel ) ),
mnMode ( nMode )
{
mnMode |= CGM_EXPORT_IMPRESS;
ImplCGMInit();
}
#endif
// ---------------------------------------------------------------
void CGM::ImplComment( sal_uInt32 Level, const char* Description )
{
if ( mpCommentOut )
{
if ( Level == CGM_DESCRIPTION )
{
*mpCommentOut << " " << Description << "\n";
}
else
{
sal_Int8 nFirst, nSecond, i, nCount = 0;
if ( mnActCount < 10000 )
nCount++;
if ( mnActCount < 1000 )
nCount++;
if ( mnActCount < 100 )
nCount++;
if ( mnActCount < 10 )
nCount++;
for ( i = 0; i <= nCount; i++ )
*mpCommentOut << " ";
mpCommentOut->WriteNumber( mnActCount );
switch( Level & 0xff )
{
case CGM_UNKNOWN_LEVEL :
*mpCommentOut << " L?";
break;
case CGM_UNKNOWN_COMMAND :
*mpCommentOut << " UNKNOWN COMMAND";
break;
case CGM_GDSF_ONLY :
*mpCommentOut << " LI";
break;
default:
*mpCommentOut << " L";
mpCommentOut->WriteNumber( Level & 0xff );
break;
}
*mpCommentOut << " C";
mpCommentOut->WriteNumber( mnElementClass );
*mpCommentOut << " ID-0x";
nFirst = ( mnElementID > 0x9F ) ? (sal_Int8)( mnElementID >> 4 ) + 'A' - 10: (sal_Int8)( mnElementID >> 4 ) + '0';
nSecond = ( ( mnElementID & 15 ) > 9 ) ? (sal_Int8)( mnElementID & 15 ) + 'A' - 10 : (sal_Int8)( mnElementID & 15 ) + '0';
*mpCommentOut << nFirst << nSecond;
*mpCommentOut << " Size";
nCount = 1;
if ( mnElementSize < 1000000 )
nCount++;
if ( mnElementSize < 100000 )
nCount++;
if ( mnElementSize < 10000 )
nCount++;
if ( mnElementSize < 1000 )
nCount++;
if ( mnElementSize < 100 )
nCount++;
if ( mnElementSize < 10 )
nCount++;
for ( i = 0; i < nCount; i++ )
*mpCommentOut << " ";
mpCommentOut->WriteNumber( mnElementSize );
*mpCommentOut << " " << Description << "\n";
}
}
}
// ---------------------------------------------------------------
CGM::~CGM()
{
#ifdef CGM_EXPORT_META
if ( mpGraphic )
{
mpGDIMetaFile->Stop();
mpGDIMetaFile->SetPrefMapMode( MapMode() );
mpGDIMetaFile->SetPrefSize( Size( static_cast< long >( mnOutdx ), static_cast< long >( mnOutdy ) ) );
delete mpVirDev;
*mpGraphic = Graphic( *mpGDIMetaFile );
}
#endif
sal_Int8* pBuf = (sal_Int8*)maDefRepList.First();
while( pBuf )
{
delete pBuf;
pBuf = (sal_Int8*)maDefRepList.Next();
}
maDefRepList.Clear();
delete mpBitmapInUse;
delete mpCommentOut;
delete mpChart;
delete mpOutAct;
delete pCopyOfE;
delete pElement;
delete [] mpBuf;
};
// ---------------------------------------------------------------
sal_uInt32 CGM::GetBackGroundColor()
{
return ( pElement ) ? pElement->aColorTable[ 0 ] : 0;
}
// ---------------------------------------------------------------
sal_uInt32 CGM::ImplGetUI16( sal_uInt32 /*nAlign*/ )
{
sal_uInt8* pSource = mpSource + mnParaSize;
mnParaSize += 2;
return ( pSource[ 0 ] << 8 ) + pSource[ 1 ];
};
// ---------------------------------------------------------------
sal_uInt8 CGM::ImplGetByte( sal_uInt32 nSource, sal_uInt32 nPrecision )
{
return (sal_uInt8)( nSource >> ( ( nPrecision - 1 ) << 3 ) );
};
// ---------------------------------------------------------------
long CGM::ImplGetI( sal_uInt32 nPrecision )
{
sal_uInt8* pSource = mpSource + mnParaSize;
mnParaSize += nPrecision;
switch( nPrecision )
{
case 1 :
{
return (char)*pSource;
}
case 2 :
{
return (sal_Int16)( ( pSource[ 0 ] << 8 ) | pSource[ 1 ] );
}
case 3 :
{
return ( ( pSource[ 0 ] << 24 ) | ( pSource[ 1 ] << 16 ) | pSource[ 2 ] << 8 ) >> 8;
}
case 4:
{
return (sal_Int32)( ( pSource[ 0 ] << 24 ) | ( pSource[ 1 ] << 16 ) | ( pSource[ 2 ] << 8 ) | ( pSource[ 3 ] ) );
}
default:
mbStatus = sal_False;
return 0;
}
}
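// Illustrative note (added, not from the original sources): in the 3-byte case above
// the three bytes are packed into the upper 24 bits and then shifted back down so the
// sign propagates; e.g. the bytes FF FF FE assemble to 0xFFFFFE00, and the right shift
// by 8 yields -2 (assuming the usual sign-preserving shift for signed integers).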
// ---------------------------------------------------------------
sal_uInt32 CGM::ImplGetUI( sal_uInt32 nPrecision )
{
sal_uInt8* pSource = mpSource + mnParaSize;
mnParaSize += nPrecision;
switch( nPrecision )
{
case 1 :
return (sal_Int8)*pSource;
case 2 :
{
return (sal_uInt16)( ( pSource[ 0 ] << 8 ) | pSource[ 1 ] );
}
case 3 :
{
return ( pSource[ 0 ] << 16 ) | ( pSource[ 1 ] << 8 ) | pSource[ 2 ];
}
case 4:
{
return (sal_uInt32)( ( pSource[ 0 ] << 24 ) | ( pSource[ 1 ] << 16 ) | ( pSource[ 2 ] << 8 ) | ( pSource[ 3 ] ) );
}
default:
mbStatus = sal_False;
return 0;
}
}
// ---------------------------------------------------------------
void CGM::ImplGetSwitch4( sal_uInt8* pSource, sal_uInt8* pDest )
{
for ( int i = 0; i < 4; i++ )
{
pDest[ i ] = pSource[ i ^ 3 ]; // Little Endian <-> Big Endian switch
}
}
// ---------------------------------------------------------------
void CGM::ImplGetSwitch8( sal_uInt8* pSource, sal_uInt8* pDest )
{
for ( int i = 0; i < 8; i++ )
{
pDest[ i ] = pSource[ i ^ 7 ]; // Little Endian <-> Big Endian switch
}
}
// ---------------------------------------------------------------
double CGM::ImplGetFloat( RealPrecision eRealPrecision, sal_uInt32 nRealSize )
{
void* pPtr;
sal_uInt8 aBuf[8];
sal_Bool bCompatible;
double nRetValue;
double fDoubleBuf;
float fFloatBuf;
#ifdef OSL_BIGENDIAN
bCompatible = sal_True;
#else
bCompatible = sal_False;
#endif
if ( bCompatible )
pPtr = mpSource + mnParaSize;
else
{
if ( nRealSize == 4 )
ImplGetSwitch4( mpSource + mnParaSize, &aBuf[0] );
else
ImplGetSwitch8( mpSource + mnParaSize, &aBuf[0] );
pPtr = &aBuf;
}
if ( eRealPrecision == RP_FLOAT )
{
if ( nRealSize == 4 )
{
memcpy( (void*)&fFloatBuf, pPtr, 4 );
nRetValue = (double)fFloatBuf;
}
else
{
memcpy( (void*)&fDoubleBuf, pPtr, 8 );
nRetValue = fDoubleBuf;
}
}
else // ->RP_FIXED
{
long nVal;
int nSwitch = ( bCompatible ) ? 0 : 1 ;
if ( nRealSize == 4 )
{
sal_uInt16* pShort = (sal_uInt16*)pPtr;
nVal = pShort[ nSwitch ];
nVal <<= 16;
nVal |= pShort[ nSwitch ^ 1 ];
nRetValue = (double)nVal;
nRetValue /= 65536;
}
else
{
long* pLong = (long*)pPtr;
nRetValue = (double)abs( pLong[ nSwitch ] );
nRetValue *= 65536;
nVal = (sal_uInt32)( pLong[ nSwitch ^ 1 ] );
nVal >>= 16;
nRetValue += (double)nVal;
if ( pLong[ nSwitch ] < 0 )
{
                nRetValue = -nRetValue;     // apply the sign of the integer part
}
nRetValue /= 65536;
}
}
mnParaSize += nRealSize;
return nRetValue;
}
// ---------------------------------------------------------------
sal_uInt32 CGM::ImplGetPointSize()
{
if ( pElement->eVDCType == VDC_INTEGER )
return pElement->nVDCIntegerPrecision << 1;
else
return pElement->nVDCRealSize << 1;
}
// ---------------------------------------------------------------
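// ImplGetIX/IY and ImplGetFX/FY read one raw VDC coordinate (integer or real, depending on
// the current VDC type) and shift/scale it with mnVDCXadd/mnVDCXmul and mnVDCYadd/mnVDCYmul,
// which ImplSetMapMode() derives from the current VDC extent.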
inline double CGM::ImplGetIX()
{
return ( ( ImplGetI( pElement->nVDCIntegerPrecision ) + mnVDCXadd ) * mnVDCXmul );
}
// ---------------------------------------------------------------
inline double CGM::ImplGetFX()
{
return ( ( ImplGetFloat( pElement->eVDCRealPrecision, pElement->nVDCRealSize ) + mnVDCXadd ) * mnVDCXmul );
}
// ---------------------------------------------------------------
inline double CGM::ImplGetIY()
{
return ( ( ImplGetI( pElement->nVDCIntegerPrecision ) + mnVDCYadd ) * mnVDCYmul );
}
// ---------------------------------------------------------------
inline double CGM::ImplGetFY()
{
return ( ( ImplGetFloat( pElement->eVDCRealPrecision, pElement->nVDCRealSize ) + mnVDCYadd ) * mnVDCYmul );
}
// ---------------------------------------------------------------
void CGM::ImplGetPoint( FloatPoint& rFloatPoint, sal_Bool bMap )
{
if ( pElement->eVDCType == VDC_INTEGER )
{
rFloatPoint.X = ImplGetIX();
rFloatPoint.Y = ImplGetIY();
}
else // ->floating points
{
rFloatPoint.X = ImplGetFX();
rFloatPoint.Y = ImplGetFY();
}
if ( bMap )
ImplMapPoint( rFloatPoint );
}
// ---------------------------------------------------------------
void CGM::ImplGetRectangle( FloatRect& rFloatRect, sal_Bool bMap )
{
if ( pElement->eVDCType == VDC_INTEGER )
{
rFloatRect.Left = ImplGetIX();
rFloatRect.Bottom = ImplGetIY();
rFloatRect.Right = ImplGetIX();
rFloatRect.Top = ImplGetIY();
}
else // ->floating points
{
rFloatRect.Left = ImplGetFX();
rFloatRect.Bottom = ImplGetFY();
rFloatRect.Right = ImplGetFX();
rFloatRect.Top = ImplGetFY();
}
if ( bMap )
{
ImplMapX( rFloatRect.Left );
ImplMapX( rFloatRect.Right );
ImplMapY( rFloatRect.Top );
ImplMapY( rFloatRect.Bottom );
rFloatRect.Justify();
}
}
// ---------------------------------------------------------------
void CGM::ImplGetRectangleNS( FloatRect& rFloatRect )
{
if ( pElement->eVDCType == VDC_INTEGER )
{
rFloatRect.Left = ImplGetI( pElement->nVDCIntegerPrecision );
rFloatRect.Bottom = ImplGetI( pElement->nVDCIntegerPrecision );
rFloatRect.Right = ImplGetI( pElement->nVDCIntegerPrecision );
rFloatRect.Top = ImplGetI( pElement->nVDCIntegerPrecision );
}
else // ->floating points
{
rFloatRect.Left = ImplGetFloat( pElement->eVDCRealPrecision, pElement->nVDCRealSize );
rFloatRect.Bottom = ImplGetFloat( pElement->eVDCRealPrecision, pElement->nVDCRealSize );
rFloatRect.Right = ImplGetFloat( pElement->eVDCRealPrecision, pElement->nVDCRealSize );
rFloatRect.Top = ImplGetFloat( pElement->eVDCRealPrecision, pElement->nVDCRealSize );
}
}
// ---------------------------------------------------------------
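// Returns a color as 0x00RRGGBB. In direct color mode each component is rescaled from the
// color value extent to the 0..255 range; in indexed mode the value is looked up in the
// element's color table.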
sal_uInt32 CGM::ImplGetBitmapColor( sal_Bool bDirect )
{
// the background color is always a direct color
sal_uInt32 nTmp;
if ( ( pElement->eColorSelectionMode == CSM_DIRECT ) || bDirect )
{
sal_uInt32 nColor = ImplGetByte( ImplGetUI( pElement->nColorPrecision ), 1 );
sal_uInt32 nDiff = pElement->nColorValueExtent[ 3 ] - pElement->nColorValueExtent[ 0 ] + 1;
if ( !nDiff )
nDiff++;
nColor = ( ( nColor - pElement->nColorValueExtent[ 0 ] ) << 8 ) / nDiff;
nTmp = nColor << 16 & 0xff0000;
nColor = ImplGetByte( ImplGetUI( pElement->nColorPrecision ), 1 );
nDiff = pElement->nColorValueExtent[ 4 ] - pElement->nColorValueExtent[ 1 ] + 1;
if ( !nDiff )
nDiff++;
nColor = ( ( nColor - pElement->nColorValueExtent[ 1 ] ) << 8 ) / nDiff;
nTmp |= nColor << 8 & 0xff00;
nColor = ImplGetByte( ImplGetUI( pElement->nColorPrecision ), 1 );
nDiff = pElement->nColorValueExtent[ 5 ] - pElement->nColorValueExtent[ 2 ] + 1;
if ( !nDiff )
nDiff++;
nColor = ( ( nColor - pElement->nColorValueExtent[ 2 ] ) << 8 ) / nDiff;
nTmp |= (sal_uInt8)nColor;
}
else
{
sal_uInt32 nIndex = ImplGetUI( pElement->nColorIndexPrecision );
nTmp = pElement->aColorTable[ (sal_uInt8)( nIndex ) ] ;
}
return nTmp;
}
// ---------------------------------------------------------------
// call this function each time after the mapmode settings have been changed
void CGM::ImplSetMapMode()
{
int nAngReverse = 1;
mnVDCdx = pElement->aVDCExtent.Right - pElement->aVDCExtent.Left;
mnVDCXadd = -pElement->aVDCExtent.Left;
mnVDCXmul = 1;
if ( mnVDCdx < 0 )
{
nAngReverse ^= 1;
mnVDCdx = -mnVDCdx;
mnVDCXmul = -1;
}
mnVDCdy = pElement->aVDCExtent.Bottom - pElement->aVDCExtent.Top;
mnVDCYadd = -pElement->aVDCExtent.Top;
mnVDCYmul = 1;
if ( mnVDCdy < 0 )
{
nAngReverse ^= 1;
mnVDCdy = -mnVDCdy;
mnVDCYmul = -1;
}
if ( nAngReverse )
mbAngReverse = sal_True;
else
mbAngReverse = sal_False;
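    // scale the VDC extent uniformly into the output size, preserving the aspect ratio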
double fQuo1 = mnVDCdx / mnVDCdy;
double fQuo2 = mnOutdx / mnOutdy;
if ( fQuo2 < fQuo1 )
{
mnXFraction = mnOutdx / mnVDCdx;
mnYFraction = mnOutdy * ( fQuo2 / fQuo1 ) / mnVDCdy;
}
else
{
mnXFraction = mnOutdx * ( fQuo1 / fQuo2 ) / mnVDCdx;
mnYFraction = mnOutdy / mnVDCdy;
}
}
// ---------------------------------------------------------------
void CGM::ImplMapDouble( double& nNumb )
{
if ( pElement->eDeviceViewPortMap == DVPM_FORCED )
{
// point is 1mm * ScalingFactor
switch ( pElement->eDeviceViewPortMode )
{
case DVPM_FRACTION :
{
nNumb *= ( mnXFraction + mnYFraction ) / 2;
}
break;
case DVPM_METRIC :
{
// nNumb *= ( 100 * pElement->nDeviceViewPortScale );
nNumb *= ( mnXFraction + mnYFraction ) / 2;
if ( pElement->nDeviceViewPortScale < 0 )
nNumb = -nNumb;
}
break;
case DVPM_DEVICE :
{
}
break;
default:
break;
}
}
else
{
}
}
// ---------------------------------------------------------------
void CGM::ImplMapX( double& nNumb )
{
if ( pElement->eDeviceViewPortMap == DVPM_FORCED )
{
// point is 1mm * ScalingFactor
switch ( pElement->eDeviceViewPortMode )
{
case DVPM_FRACTION :
{
nNumb *= mnXFraction;
}
break;
case DVPM_METRIC :
{
// nNumb *= ( 100 * pElement->nDeviceViewPortScale );
nNumb *= mnXFraction;
if ( pElement->nDeviceViewPortScale < 0 )
nNumb = -nNumb;
}
break;
case DVPM_DEVICE :
{
}
break;
default:
break;
}
}
else
{
}
}
// ---------------------------------------------------------------
void CGM::ImplMapY( double& nNumb )
{
if ( pElement->eDeviceViewPortMap == DVPM_FORCED )
{
// point is 1mm * ScalingFactor
switch ( pElement->eDeviceViewPortMode )
{
case DVPM_FRACTION :
{
nNumb *= mnYFraction;
}
break;
case DVPM_METRIC :
{
// nNumb *= ( 100 * pElement->nDeviceViewPortScale );
nNumb *= mnYFraction;
if ( pElement->nDeviceViewPortScale < 0 )
nNumb = -nNumb;
}
break;
case DVPM_DEVICE :
{
}
break;
default:
break;
}
}
else
{
}
}
// ---------------------------------------------------------------
// convert a point to the current VC mapmode (1/100TH mm)
void CGM::ImplMapPoint( FloatPoint& rFloatPoint )
{
if ( pElement->eDeviceViewPortMap == DVPM_FORCED )
{
// point is 1mm * ScalingFactor
switch ( pElement->eDeviceViewPortMode )
{
case DVPM_FRACTION :
{
rFloatPoint.X *= mnXFraction;
rFloatPoint.Y *= mnYFraction;
}
break;
case DVPM_METRIC :
{
rFloatPoint.X *= mnXFraction;
rFloatPoint.Y *= mnYFraction;
if ( pElement->nDeviceViewPortScale < 0 )
{
rFloatPoint.X = -rFloatPoint.X;
rFloatPoint.Y = -rFloatPoint.Y;
}
}
break;
case DVPM_DEVICE :
{
}
break;
default:
break;
}
}
else
{
}
}
// ---------------------------------------------------------------
void CGM::ImplDoClass()
{
#ifdef CGM_USER_BREAKPOINT
#ifdef WNT
if ( mnActCount == CGM_BREAK_ACTION )
_asm int 0x3;
#endif
#endif
switch ( mnElementClass )
{
case 0 : ImplDoClass0(); break;
case 1 : ImplDoClass1(); break;
case 2 : ImplDoClass2(); break;
case 3 : ImplDoClass3(); break;
case 4 :
{
ImplDoClass4();
mnAct4PostReset = 0;
}
break;
case 5 : ImplDoClass5(); break;
case 6 : ImplDoClass6(); break;
case 7 : ImplDoClass7(); break;
case 8 : ImplDoClass8(); break;
case 9 : ImplDoClass9(); break;
case 15 :ImplDoClass15(); break;
default : ComOut( CGM_UNKNOWN_COMMAND, "" ); break;
}
mnActCount++;
};
// ---------------------------------------------------------------
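// Replays the element data recorded by the METAFILE DEFAULTS REPLACEMENT element (class 1,
// id 0xc): every buffered block is parsed again through ImplDoClass() while the current
// element header state is saved and restored around the replay.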
void CGM::ImplDefaultReplacement()
{
sal_uInt8* pBuf = (sal_uInt8*)maDefRepList.First();
if ( pBuf )
{
sal_uInt32 nElementSize = (sal_uInt32)(sal_uIntPtr)maDefRepSizeList.First();
sal_uInt32 nOldEscape = mnEscape;
sal_uInt32 nOldElementClass = mnElementClass;
sal_uInt32 nOldElementID = mnElementID;
sal_uInt32 nOldElementSize = mnElementSize;
sal_uInt8* pOldBuf = mpSource;
while( pBuf )
{
sal_uInt32 nCount = 0;
while ( mbStatus && ( nCount < nElementSize ) )
{
mpSource = pBuf + nCount;
mnParaSize = 0;
mnEscape = ImplGetUI16();
mnElementClass = mnEscape >> 12;
mnElementID = ( mnEscape & 0x0fe0 ) >> 5;
mnElementSize = mnEscape & 0x1f;
if ( mnElementSize == 31 )
{
mnElementSize = ImplGetUI16();
}
nCount += mnParaSize;
mnParaSize = 0;
mpSource = pBuf + nCount;
if ( mnElementSize & 1 )
nCount++;
nCount += mnElementSize;
                if ( ( mnElementClass != 1 ) || ( mnElementID != 0xc ) ) // recursion is not possible here!!
ImplDoClass();
}
nElementSize = (sal_uInt32)(sal_uIntPtr)maDefRepSizeList.Next();
pBuf = (sal_uInt8*)maDefRepList.Next();
}
mnEscape = nOldEscape;
mnElementClass = nOldElementClass;
mnElementID = nOldElementID;
mnParaSize = mnElementSize = nOldElementSize;
mpSource = pOldBuf;
}
}
// ---------------------------------------------------------------
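// Despite its name, this method reads one CGM element from the stream: it parses the
// two-byte element header (plus the extended length word for long-form elements), loads the
// element data into mpBuf and dispatches it through ImplDoClass().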
sal_Bool CGM::Write( SvStream& rIStm )
{
if ( !mpBuf )
mpBuf = new sal_uInt8[ 0xffff ];
mnParaSize = 0;
mpSource = mpBuf;
rIStm.Read( mpSource, 2 );
mnEscape = ImplGetUI16();
mnElementClass = mnEscape >> 12;
mnElementID = ( mnEscape & 0x0fe0 ) >> 5;
mnElementSize = mnEscape & 0x1f;
if ( mnElementSize == 31 )
{
rIStm.Read( mpSource + mnParaSize, 2 );
mnElementSize = ImplGetUI16();
}
mnParaSize = 0;
if ( mnElementSize )
rIStm.Read( mpSource + mnParaSize, mnElementSize );
if ( mnElementSize & 1 )
rIStm.SeekRel( 1 );
ImplDoClass();
#ifdef CGM_USER_BREAKPOINT
#ifdef WNT
if ( !mbStatus || mnParaSize && ( mnElementSize != mnParaSize ) )
_asm int 0x3;
#endif
#endif
return mbStatus;
};
// ---------------------------------------------------------------
SvStream& operator>>( SvStream& rOStm, CGM& /*rCGM*/ )
{
return rOStm;
};
// ---------------------------------------------------------------
//================== GraphicImport - the exported function ================
extern "C" sal_uInt32 __LOADONCALLAPI ImportCGM( String& rFileName, uno::Reference< frame::XModel > & rXModel, sal_uInt32 nMode, void* pProgressBar )
{
sal_uInt32 nStatus = 0; // retvalue == 0 -> ERROR
// == 0xffrrggbb -> background color in the lower 24 bits
sal_Bool bProgressBar = sal_False;
if( rXModel.is() )
{
CGM* pCGM= NULL;
try
{
pCGM = new CGM( nMode, rXModel );
if ( pCGM && pCGM->IsValid() )
{
if ( nMode & CGM_IMPORT_CGM )
{
SvStream* pIn = ::utl::UcbStreamHelper::CreateStream( rFileName, STREAM_READ );
if ( pIn )
{
pIn->SetNumberFormatInt( NUMBERFORMAT_INT_BIGENDIAN );
pIn->Seek( STREAM_SEEK_TO_END );
sal_uInt32 nInSize = pIn->Tell();
pIn->Seek( 0 );
#ifdef CGM_EXPORT_IMPRESS
uno::Reference< task::XStatusIndicator > aXStatInd;
sal_uInt32 nNext = 0;
sal_uInt32 nAdd = nInSize / 20;
if ( pProgressBar )
aXStatInd = *(uno::Reference< task::XStatusIndicator > *)pProgressBar;
bProgressBar = aXStatInd.is();
if ( bProgressBar )
aXStatInd->start( rtl::OUString::createFromAscii("CGM Import"), nInSize );
#endif
while ( pCGM->IsValid() && ( pIn->Tell() < nInSize ) && !pCGM->IsFinished() )
{
#ifdef CGM_EXPORT_IMPRESS
if ( bProgressBar )
{
sal_uInt32 nCurrentPos = pIn->Tell();
if ( nCurrentPos >= nNext )
{
aXStatInd->setValue( nCurrentPos );
nNext = nCurrentPos + nAdd;
}
}
#endif
if ( pCGM->Write( *pIn ) == sal_False )
break;
}
if ( pCGM->IsValid() )
{
nStatus = pCGM->GetBackGroundColor() | 0xff000000;
}
#ifdef CGM_EXPORT_IMPRESS
if ( bProgressBar )
aXStatInd->end();
#endif
delete pIn;
}
}
}
}
catch( ::com::sun::star::uno::Exception& )
{
nStatus = 0;
}
delete pCGM;
}
return nStatus;
}
| 8,966 |
372 | <reponame>kbore/pbis-open
/* Editor Settings: expandtabs and use 4 spaces for indentation
* ex: set softtabstop=4 tabstop=8 expandtab shiftwidth=4: *
*/
/*
* Copyright © BeyondTrust Software 2004 - 2019
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* BEYONDTRUST MAKES THIS SOFTWARE AVAILABLE UNDER OTHER LICENSING TERMS AS
* WELL. IF YOU HAVE ENTERED INTO A SEPARATE LICENSE AGREEMENT WITH
* BEYONDTRUST, THEN YOU MAY ELECT TO USE THE SOFTWARE UNDER THE TERMS OF THAT
* SOFTWARE LICENSE AGREEMENT INSTEAD OF THE TERMS OF THE APACHE LICENSE,
* NOTWITHSTANDING THE ABOVE NOTICE. IF YOU HAVE QUESTIONS, OR WISH TO REQUEST
* A COPY OF THE ALTERNATE LICENSING TERMS OFFERED BY BEYONDTRUST, PLEASE CONTACT
* BEYONDTRUST AT beyondtrust.com/contact
*/
/*
* Copyright (C) BeyondTrust Software. All rights reserved.
*
* Module Name:
*
* externs.h
*
* Abstract:
*
* BeyondTrust Security and Authentication Subsystem (LSASS)
*
* Active Directory Authentication Provider
*
* Global Variables
*
* Authors: <NAME> (<EMAIL>)
* <NAME> (<EMAIL>)
*/
#include "adprovider.h"
pthread_rwlock_t gADGlobalDataLock;
pthread_mutex_t gADDefaultDomainLock;
PCSTR gpszADProviderName = LSA_PROVIDER_TAG_AD;
LSA_PROVIDER_FUNCTION_TABLE gADProviderAPITable =
{
.pfnFindObjects = AD_FindObjects,
.pfnOpenEnumObjects = AD_OpenEnumObjects,
.pfnEnumObjects = AD_EnumObjects,
.pfnOpenEnumGroupMembers = AD_OpenEnumMembers,
.pfnEnumGroupMembers = AD_EnumMembers,
.pfnCloseEnum = AD_CloseEnum,
.pfnQueryMemberOf = AD_QueryMemberOf,
.pfnGetSmartCardUserObject = AD_GetSmartCardUserObject,
.pfnGetMachineAccountInfoA = AD_GetMachineAccountInfoA,
.pfnGetMachineAccountInfoW = AD_GetMachineAccountInfoW,
.pfnGetMachinePasswordInfoA = AD_GetMachinePasswordInfoA,
.pfnGetMachinePasswordInfoW = AD_GetMachinePasswordInfoW,
.pfnShutdownProvider = AD_ShutdownProvider,
.pfnOpenHandle = AD_OpenHandle,
.pfnCloseHandle = AD_CloseHandle,
.pfnServicesDomain = AD_ServicesDomain,
.pfnAuthenticateUserPam = AD_AuthenticateUserPam,
.pfnAuthenticateUserEx = AD_AuthenticateUserEx,
.pfnValidateUser = AD_ValidateUser,
.pfnCheckUserInList = AD_CheckUserInList,
.pfnChangePassword = AD_ChangePassword,
.pfnSetPassword = AD_SetPassword,
.pfnAddUser = AD_AddUser,
.pfnModifyUser = AD_ModifyUser,
.pfnAddGroup = AD_AddGroup,
.pfnModifyGroup = AD_ModifyGroup,
.pfnDeleteObject = AD_DeleteObject,
.pfnOpenSession = AD_OpenSession,
.pfnCloseSession = AD_CloseSession,
.pfnLookupNSSArtefactByKey = AD_FindNSSArtefactByKey,
.pfnBeginEnumNSSArtefacts = AD_BeginEnumNSSArtefacts,
.pfnEnumNSSArtefacts = AD_EnumNSSArtefacts,
.pfnEndEnumNSSArtefacts = AD_EndEnumNSSArtefacts,
.pfnGetStatus = AD_GetStatus,
.pfnFreeStatus = AD_FreeStatus,
.pfnRefreshConfiguration = AD_RefreshConfiguration,
.pfnProviderIoControl = AD_ProviderIoControl
};
// please put all new globals in the LSA_AD_PROVIDER_STATE
// structures which are stored in the following list:
LSA_LIST_LINKS gLsaAdProviderStateList;
ADCACHE_PROVIDER_FUNCTION_TABLE ADCacheTable;
PADCACHE_PROVIDER_FUNCTION_TABLE gpCacheProvider = &ADCacheTable;
BOOLEAN gbMultiTenancyEnabled = FALSE;
/*
local variables:
mode: c
c-basic-offset: 4
indent-tabs-mode: nil
tab-width: 4
end:
*/
| 1,425 |
1,350 | <gh_stars>1000+
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.timeseriesinsights.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
/** Properties required to create any resource tracked by Azure Resource Manager. */
@Fluent
public class CreateOrUpdateTrackedResourceProperties {
@JsonIgnore private final ClientLogger logger = new ClientLogger(CreateOrUpdateTrackedResourceProperties.class);
/*
* The location of the resource.
*/
@JsonProperty(value = "location", required = true)
private String location;
/*
* Key-value pairs of additional properties for the resource.
*/
@JsonProperty(value = "tags")
private Map<String, String> tags;
/**
* Get the location property: The location of the resource.
*
* @return the location value.
*/
public String location() {
return this.location;
}
/**
* Set the location property: The location of the resource.
*
* @param location the location value to set.
* @return the CreateOrUpdateTrackedResourceProperties object itself.
*/
public CreateOrUpdateTrackedResourceProperties withLocation(String location) {
this.location = location;
return this;
}
/**
* Get the tags property: Key-value pairs of additional properties for the resource.
*
* @return the tags value.
*/
public Map<String, String> tags() {
return this.tags;
}
/**
* Set the tags property: Key-value pairs of additional properties for the resource.
*
* @param tags the tags value to set.
* @return the CreateOrUpdateTrackedResourceProperties object itself.
*/
public CreateOrUpdateTrackedResourceProperties withTags(Map<String, String> tags) {
this.tags = tags;
return this;
}
/**
* Validates the instance.
*
* @throws IllegalArgumentException thrown if the instance is not valid.
*/
public void validate() {
if (location() == null) {
throw logger
.logExceptionAsError(
new IllegalArgumentException(
"Missing required property location in model CreateOrUpdateTrackedResourceProperties"));
}
}
}
| 905 |
2,327 | <filename>10_pipeline/kubeflow/wip/dlc_archive/code/inference.py
import json
import subprocess
import sys
subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'tensorflow==2.1.0'])
subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'transformers==2.8.0'])
import tensorflow as tf
from transformers import DistilBertTokenizer
classes=[1, 2, 3, 4, 5]
max_seq_length=64
tokenizer = DistilBertTokenizer.from_pretrained('distilbert-base-uncased')
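# The two handlers below appear to follow the SageMaker TensorFlow Serving container
# convention: input_handler turns the raw request bytes into a TF-Serving "instances" JSON
# payload, and output_handler post-processes the returned predictions.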
def input_handler(data, context):
transformed_instances = []
print('DATA {}'.format(data))
for instance in data:
data_str = instance.decode('utf-8')
print('DATA_STR {}'.format(data_str))
tokens = tokenizer.tokenize(data_str)
print('TOKENS {}'.format(tokens))
encode_plus_tokens = tokenizer.encode_plus(data_str,
pad_to_max_length=True,
max_length=max_seq_length,
# truncation=True
)
# Convert the text-based tokens to ids from the pre-trained BERT vocabulary
input_ids = encode_plus_tokens['input_ids']
# Specifies which tokens BERT should pay attention to (0 or 1)
input_mask = encode_plus_tokens['attention_mask']
# Segment Ids are always 0 for single-sequence tasks (or 1 if two-sequence tasks)
segment_ids = [0] * max_seq_length
transformed_instance = {
"input_ids": input_ids,
"input_mask": input_mask,
"segment_ids": segment_ids
}
transformed_instances.append(transformed_instance)
print(transformed_instances)
transformed_data = {"instances": transformed_instances}
print(transformed_data)
transformed_data_json = json.dumps(transformed_data)
print(transformed_data_json)
return transformed_data_json
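# output_handler: softmax over each log-probability vector, argmax to a class index, and map
# that index to the classes list defined at the top of the file.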
def output_handler(response, context):
response_json = response.json()
print('response_json: {}'.format(response_json))
log_probabilities = response_json["predictions"]
predicted_classes = []
for log_probability in log_probabilities:
softmax = tf.nn.softmax(log_probability)
predicted_class_idx = tf.argmax(softmax, axis=-1, output_type=tf.int32)
predicted_class = classes[predicted_class_idx]
predicted_classes.append(predicted_class)
predicted_classes_json = json.dumps(predicted_classes)
print(predicted_classes_json)
response_content_type = context.accept_header
return predicted_classes_json, response_content_type
| 1,283 |
409 | /*
This file is a part of libcds - Concurrent Data Structures library
(C) Copyright <NAME> (<EMAIL>) 2006-2016
Source code repo: http://github.com/khizmax/libcds/
Download: http://sourceforge.net/projects/libcds/files/
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef CDSSTRESS_MICHAEL_ALLOC_H
#define CDSSTRESS_MICHAEL_ALLOC_H
#include <cds/memory/michael/allocator.h>
#include <memory>
namespace memory {
typedef cds::memory::michael::Heap<
cds::memory::michael::opt::check_bounds< cds::memory::michael::debug_bound_checking >
> michael_heap;
extern michael_heap s_MichaelHeap;
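    // MichaelAllocator is a minimal std::allocator-compatible adapter over the shared
    // Michael heap above, so STL containers used by the stress tests allocate through it,
    // e.g. (illustrative only) std::vector<int, memory::MichaelAllocator<int>>.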
template <class T>
class MichaelAllocator
{
typedef std::allocator<T> std_allocator;
public:
// Declare typedefs from std::allocator
typedef typename std_allocator::const_pointer const_pointer;
typedef typename std_allocator::pointer pointer;
typedef typename std_allocator::const_reference const_reference;
typedef typename std_allocator::reference reference;
typedef typename std_allocator::difference_type difference_type;
typedef typename std_allocator::size_type size_type;
typedef typename std_allocator::value_type value_type;
// Allocation function
pointer allocate( size_type _Count, const void* /*_Hint*/ = nullptr )
{
return reinterpret_cast<pointer>( s_MichaelHeap.alloc( sizeof(T) * _Count ));
}
// Deallocation function
void deallocate( pointer _Ptr, size_type /*_Count*/ )
{
s_MichaelHeap.free( _Ptr );
}
pointer address( reference r ) const
{
return &r;
}
const_pointer address( const_reference r ) const
{
return &r;
}
void construct( pointer p, const T& val )
{
return new( p ) T( val );
}
void destroy( pointer p )
{
p->T::~T();
}
// Rebinding allocator to other type
template <class Other>
struct rebind {
typedef MichaelAllocator<Other> other;
};
};
} // namespace memory
#endif // #ifndef CDSSTRESS_MICHAEL_ALLOC_H
| 1,395 |
431 | <gh_stars>100-1000
{
"name":"permissions",
"label":"Permissions",
"description":"Management of permissions and authorisation.",
"version":"0.3.0",
"homepage":"http://calip.so",
"repository":{
"type":"git",
"url":"git://github.com/cliftonc/calipso.git"
},
"author":"<NAME> <<EMAIL>> (<EMAIL>)"
}
| 150 |
369 | <gh_stars>100-1000
/*
* Copyright © 2014-2019 <NAME>, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.cdap.data2.dataset2.lib.table.inmemory;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Longs;
import io.cdap.cdap.api.common.Bytes;
import io.cdap.cdap.data2.dataset2.lib.table.IncrementValue;
import io.cdap.cdap.data2.dataset2.lib.table.PutValue;
import io.cdap.cdap.data2.dataset2.lib.table.Update;
import io.cdap.cdap.data2.dataset2.lib.table.Updates;
import org.apache.tephra.Transaction;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.SortedMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import javax.annotation.Nullable;
/**
* Holds all in-memory tables for {@link InMemoryTable}.
*/
// todo: use locks instead of synchronize
// todo: consider using SortedMap instead of NavigableMap in APIs
public class InMemoryTableService {
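  // Data layout: table name -> (row key -> (column key -> (version timestamp -> Update))).
  // byte[] keys are ordered with Bytes.BYTES_COMPARATOR; the per-column version maps handed
  // back to callers are copies sorted newest-first (see VERSIONED_VALUE_MAP_COMPARATOR).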
private static Map<String, ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>>> tables =
Maps.newHashMap();
public static synchronized boolean exists(String tableName) {
return tables.containsKey(tableName);
}
public static synchronized void create(String tableName) {
if (!tables.containsKey(tableName)) {
tables.put(tableName, new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR));
}
}
public static synchronized void truncate(String tableName) {
tables.get(tableName).clear();
}
public static synchronized void drop(String tableName) {
tables.remove(tableName);
}
public static synchronized void reset() {
tables.clear();
}
// no nulls
public static synchronized void merge(String tableName,
SortedMap<byte[], ? extends SortedMap<byte[], ? extends Update>> changes,
long version) {
// todo: handle nulls
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table = tables.get(tableName);
SortedMap<byte[], ? extends SortedMap<byte[], Update>> changesCopy = deepCopyUpdates(changes);
for (Map.Entry<byte[], ? extends SortedMap<byte[], Update>> change : changesCopy.entrySet()) {
merge(table, change.getKey(), change.getValue(), version);
}
}
private static void merge(ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table,
byte[] row, Map<byte[], Update> changes, long version) {
// get the correct row from the table, create it if it doesn't exist
NavigableMap<byte[], NavigableMap<Long, Update>> rowMap = table.get(row);
if (rowMap == null) {
rowMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
table.put(row, rowMap);
}
// now merge the changes into the row, one by one
for (Map.Entry<byte[], Update> keyVal : changes.entrySet()) {
// create the column in the row if it does not exist
NavigableMap<Long, Update> colMap = rowMap.get(keyVal.getKey());
if (colMap == null) {
colMap = Maps.newTreeMap();
rowMap.put(keyVal.getKey(), colMap);
}
// put into the column with given version
Update merged = Updates.mergeUpdates(colMap.get(version), keyVal.getValue());
colMap.put(version, merged);
}
}
// todo: remove it from here: only used by "system" metrics table, which should be revised
@Deprecated
public static synchronized Map<byte[], Long> increment(String tableName, byte[] row, Map<byte[], Long> increments) {
Map<byte[], Long> resultMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table = tables.get(tableName);
// get the correct row from the table, create it if it doesn't exist
NavigableMap<byte[], NavigableMap<Long, Update>> rowMap = table.get(row);
if (rowMap == null) {
rowMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
table.put(row, rowMap);
}
// now increment each column, one by one
long versionForWrite = System.currentTimeMillis();
for (Map.Entry<byte[], Long> inc : increments.entrySet()) {
IncrementValue increment = new IncrementValue(inc.getValue());
// create the column in the row if it does not exist
NavigableMap<Long, Update> colMap = rowMap.get(inc.getKey());
Update last = null;
if (colMap == null) {
colMap = Maps.newTreeMap();
rowMap.put(inc.getKey(), colMap);
} else {
last = colMap.lastEntry().getValue();
}
Update merged = Updates.mergeUpdates(last, increment);
// put into the column with given version
long newValue = Bytes.toLong(merged.getBytes());
resultMap.put(inc.getKey(), newValue);
colMap.put(versionForWrite, merged);
}
return resultMap;
}
public static synchronized boolean swap(String tableName, byte[] row, byte[] column,
byte[] oldValue, byte[] newValue) {
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table = tables.get(tableName);
// get the correct row from the table, create it if it doesn't exist
NavigableMap<byte[], NavigableMap<Long, Update>> rowMap = table.get(row);
Update existingValue = null;
if (rowMap != null) {
NavigableMap<Long, Update> columnMap = rowMap.get(column);
if (columnMap != null) {
existingValue = columnMap.lastEntry().getValue();
}
}
// verify existing value matches
if (oldValue == null && existingValue != null) {
return false;
}
if (oldValue != null && (existingValue == null || !Bytes.equals(oldValue, existingValue.getBytes()))) {
return false;
}
// write new value
if (newValue == null) {
if (rowMap != null) {
rowMap.remove(column);
}
} else {
if (rowMap == null) {
rowMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
table.put(row, rowMap);
}
NavigableMap<Long, Update> columnMap = rowMap.get(column);
if (columnMap == null) {
columnMap = Maps.newTreeMap();
rowMap.put(column, columnMap);
}
PutValue newPut = new PutValue(newValue);
columnMap.put(System.currentTimeMillis(), newPut);
}
return true;
}
public static synchronized void undo(String tableName,
NavigableMap<byte[], NavigableMap<byte[], Update>> changes,
long version) {
// todo: handle nulls
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table = tables.get(tableName);
for (Map.Entry<byte[], NavigableMap<byte[], Update>> change : changes.entrySet()) {
byte[] row = change.getKey();
NavigableMap<byte[], NavigableMap<Long, Update>> rowMap = table.get(row);
if (rowMap != null) {
for (byte[] column : change.getValue().keySet()) {
NavigableMap<Long, Update> values = rowMap.get(column);
values.remove(version);
}
}
}
}
public static synchronized void delete(String tableName, Iterable<byte[]> rows) {
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table = tables.get(tableName);
for (byte[] row : rows) {
table.remove(row);
}
}
public static synchronized void deleteColumns(String tableName, byte[] row, byte[] column) {
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table = tables.get(tableName);
NavigableMap<byte[], NavigableMap<Long, Update>> columnValues = table.get(row);
columnValues.remove(column);
}
public static synchronized void delete(String tableName, byte[] rowPrefix) {
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table = tables.get(tableName);
if (rowPrefix.length == 0) {
table.clear();
} else {
byte[] rowAfter = rowAfterPrefix(rowPrefix);
if (rowAfter == null) {
table.tailMap(rowPrefix).clear();
} else {
table.subMap(rowPrefix, rowAfter).clear();
}
}
}
/**
* Given a key prefix, return the smallest key that is greater than all keys starting with that prefix.
*/
static byte[] rowAfterPrefix(byte[] prefix) {
Preconditions.checkNotNull(prefix, "prefix must not be null");
for (int i = prefix.length - 1; i >= 0; i--) {
if (prefix[i] != (byte) 0xff) {
// i is at the position of the last byte that is not xFF and thus can be incremented
byte[] after = Arrays.copyOf(prefix, i + 1);
++after[i];
return after;
}
}
// all bytes are xFF -> there is no upper bound
return null;
}
public static synchronized NavigableMap<byte[], NavigableMap<Long, byte[]>> get(String tableName,
byte[] row,
@Nullable Transaction tx) {
// todo: handle nulls
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> table = tables.get(tableName);
Preconditions.checkArgument(table != null, "table not found: " + tableName);
NavigableMap<byte[], NavigableMap<Long, Update>> rowMap = table.get(row);
return deepCopy(Updates.rowToBytes(getVisible(rowMap, tx)));
}
public static synchronized NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>
getRowRange(String tableName,
byte[] startRow,
byte[] stopRow,
@Nullable Transaction tx) {
// todo: handle nulls
ConcurrentNavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> tableData = tables.get(tableName);
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> rows;
if (startRow == null && stopRow == null) {
rows = tableData;
} else if (startRow == null) {
rows = tableData.headMap(stopRow, false);
} else if (stopRow == null) {
rows = tableData.tailMap(startRow, true);
} else {
rows = tableData.subMap(startRow, true, stopRow, false);
}
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> result =
Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, Update>>> rowMap : rows.entrySet()) {
NavigableMap<byte[], NavigableMap<Long, Update>> columns =
tx == null ? rowMap.getValue() : getVisible(rowMap.getValue(), tx);
result.put(copy(rowMap.getKey()), deepCopy(Updates.rowToBytes(columns)));
}
return result;
}
public static synchronized Collection<String> list() {
return ImmutableList.copyOf(tables.keySet());
}
private static NavigableMap<byte[], NavigableMap<Long, Update>> getVisible(
NavigableMap<byte[], NavigableMap<Long, Update>> rowMap, final Transaction tx) {
if (rowMap == null) {
return null;
}
NavigableMap<byte[], NavigableMap<Long, Update>> result = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], NavigableMap<Long, Update>> column : rowMap.entrySet()) {
      SortedMap<Long, Update> visibleValues = column.getValue();
      if (tx != null) {
        visibleValues = Maps.filterKeys(visibleValues, new Predicate<Long>() {
          @Override
          public boolean apply(Long version) {
            return tx.isVisible(version);
          }
        });
      }
      if (visibleValues.size() > 0) {
        NavigableMap<Long, Update> colMap = createVersionedValuesMap(visibleValues);
result.put(column.getKey(), colMap);
}
}
return result;
}
private static NavigableMap<Long, Update> createVersionedValuesMap(SortedMap<Long, Update> copy) {
NavigableMap<Long, Update> map = Maps.newTreeMap(VERSIONED_VALUE_MAP_COMPARATOR);
map.putAll(copy);
return map;
}
private static SortedMap<byte[], SortedMap<byte[], Update>> deepCopyUpdates(
SortedMap<byte[], ? extends SortedMap<byte[], ? extends Update>> src) {
SortedMap<byte[], SortedMap<byte[], Update>> copy = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], ? extends SortedMap<byte[], ? extends Update>> entry : src.entrySet()) {
byte[] key = copy(entry.getKey());
SortedMap<byte[], Update> columnUpdates = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
copy.put(key, columnUpdates);
for (Map.Entry<byte[], ? extends Update> updateEntry : entry.getValue().entrySet()) {
byte[] col = copy(updateEntry.getKey());
columnUpdates.put(col, updateEntry.getValue().deepCopy());
}
}
return copy;
}
@Nullable
private static NavigableMap<byte[], NavigableMap<Long, byte[]>> deepCopy(
@Nullable NavigableMap<byte[], NavigableMap<Long, byte[]>> src) {
if (src == null) {
return null;
}
NavigableMap<byte[], NavigableMap<Long, byte[]>> copy = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], NavigableMap<Long, byte[]>> entry : src.entrySet()) {
byte[] key = copy(entry.getKey());
NavigableMap<Long, byte[]> columnValues = Maps.newTreeMap(VERSIONED_VALUE_MAP_COMPARATOR);
copy.put(key, columnValues);
for (Map.Entry<Long, byte[]> valueEntry : entry.getValue().entrySet()) {
columnValues.put(valueEntry.getKey(), copy(valueEntry.getValue()));
}
}
return copy;
}
@Nullable
private static byte[] copy(@Nullable byte[] src) {
return src == null ? null : Arrays.copyOf(src, src.length);
}
  // This is a descending Long comparator (newest version first)
public static final Comparator<Long> VERSIONED_VALUE_MAP_COMPARATOR = new Ordering<Long>() {
@Override
public int compare(@Nullable Long left, @Nullable Long right) {
// NOTE: versions never null
assert left != null && right != null;
return Longs.compare(right, left);
}
};
}
| 5,799 |
713 | <reponame>franz1981/infinispan
package org.infinispan.persistence.jpa.impl;
import org.infinispan.configuration.global.GlobalConfiguration;
import org.infinispan.factories.GlobalComponentRegistry;
import org.infinispan.factories.annotations.InfinispanModule;
import org.infinispan.lifecycle.ModuleLifecycle;
@InfinispanModule(name = "cachestore-jpa", requiredModules = "core")
public class JpaStoreLifecycleManager implements ModuleLifecycle {
@Override
public void cacheManagerStarting(GlobalComponentRegistry gcr, GlobalConfiguration globalConfiguration) {
gcr.registerComponent(new EntityManagerFactoryRegistry(), EntityManagerFactoryRegistry.class);
}
@Override
public void cacheManagerStopping(GlobalComponentRegistry gcr) {
gcr.getComponent(EntityManagerFactoryRegistry.class).closeAll();
}
}
| 252 |
1,996 | /**
* UDP implementation
*/
#ifndef AMC_UDP_H
#define AMC_UDP_H
#include "util.h"
#include <arpa/inet.h>
#include <assert.h>
#include <ctype.h>
#include <fcntl.h>
#include <limits.h>
#include <list>
#include <netdb.h>
#include <netinet/in.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <string>
#include <sstream>
#include <sys/errno.h>
#include <sys/file.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/select.h>
#include <unistd.h>
#include <vector>
#include <map>
#include <set>
#include <queue>
#include <sys/time.h>
#include <time.h>
#include <unistd.h>
#include <semaphore.h>
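/*
 * Minimal reliable-transfer layer on top of UDP: a sender opens a flow with SYN, streams
 * DATA packets through a fixed window of CONST_WINDOW in-flight packets, and closes with
 * FIN; every non-ACK packet is retransmitted on timeout, up to CONST_MAXRETRANS attempts.
 * Payloads larger than CONST_MAXPKTSIZE are split into packets and reassembled per
 * sessionid on the receiving side before recvEvent(EVENT_DATA, ...) is delivered.
 */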
class UDP {
protected:
typedef enum {
EVENT_ERROR, EVENT_TIMEOUT, EVENT_CLOSED, EVENT_DATA, EVENT_SESSION_FINISH
} Event;
virtual int recvEvent(Event event, struct sockaddr_in *addr, uint64_t sessionid, uint8_t *data, int len) = 0;
private:
const static int CONST_MAXRETRANS = 5;
const static int CONST_WINDOW = 5;
// const static int CONST_DATAQUEUE_SIZE = 10;
const static int CONST_VERSION = 1;
const static int CONST_MAXPKTSIZE = 1000;
typedef enum {
SYN_SENT, OPENING, OPEN, FIN_SENT
} Status;
static const char* getStatus(Status s) {
if (s == SYN_SENT) return "SYN_SENT";
if (s == OPENING) return "OPENING";
if (s == OPEN) return "OPEN";
if (s == FIN_SENT) return "FIN_SENT";
return "ERR_STATE";
}
const static int PACKAGE_DATA = 1;
const static int PACKAGE_ACK = 2;
const static int PACKAGE_SYN = 4;
const static int PACKAGE_FIN = 5;
static const char* packageTypeStr(unsigned char t) {
if (t == PACKAGE_DATA) return "DATA";
if (t == PACKAGE_ACK) return "ACK";
if (t == PACKAGE_SYN) return "SYN";
if (t == PACKAGE_FIN) return "FIN";
return "ERR_TYPE";
}
typedef struct UdpPkg {
uint8_t version;
uint8_t type;
uint16_t datalen;
uint32_t datacrc;
uint32_t totallen;
uint32_t offset;
uint32_t seq;
int64_t sessionid;
uint8_t data[CONST_MAXPKTSIZE];
UdpPkg() {
version = 0;
type = 0;
datalen = 0;
datacrc = 0;
totallen = 0;
offset = 0;
seq = 0;
sessionid = 0;
}
int pkgLen() {
return datalen + sizeof(UdpPkg) - sizeof(data);
}
std::string toString() {
char buf[512];
sprintf(buf, "P[%x v:%d t:%d,%s l:%d dl:%d c:0x%x tl:%d of:%d s:%d sid:%d]", TOINT(this) , version, type, packageTypeStr(type),
pkgLen(), datalen, datacrc, totallen, offset, seq, TOINT(sessionid));
return (std::string) buf;
}
}__attribute__ ((packed)) UdpPkg;
typedef struct TimeoutEvent {
int64_t timeout;
int tryCount;
struct sockaddr_in toaddr;
UdpPkg *udpPkg;
TimeoutEvent() {
timeout = 0;
tryCount = 0;
udpPkg = NULL;
memset(&toaddr, 0, sizeof(toaddr));
}
std::string toString() {
char buf[512];
sprintf(buf, "T[%x %s to:%d tc:%d %s]", TOINT(this) , sockaddrToString(toaddr).c_str(), TOINT(timeout - getMillisecond()),
tryCount,
udpPkg ? udpPkg->toString().c_str() : "pkg:NULL");
return (std::string) buf;
}
} TimeoutEvent;
typedef struct Sender {
struct sockaddr_in toaddr;
Status status;
uint32_t seq;
TimeoutEvent finTimeoutEvent;
TimeoutEvent synTimeoutEvent;
TimeoutEvent dataWindow[CONST_WINDOW];
std::queue<UdpPkg *> dataQueue;
std::string toString() {
char buf[512];
sprintf(buf, "S[%x %s %s s:%d q:%d syn:%s fin:%s]", TOINT(this) , sockaddrToString(toaddr).c_str(), getStatus(status), seq,
(int)dataQueue.size(), synTimeoutEvent.toString().c_str(), finTimeoutEvent.toString().c_str());
return (std::string) buf;
}
} Sender;
typedef struct Receiver {
struct sockaddr_in fromaddr;
Status status;
uint32_t expectedSeq;
std::map<int64_t, std::pair<uint32_t, uint8_t *> *> sessionMap;
std::string toString() {
char buf[512];
sprintf(buf, "R[%x %s %s s:%d m:%d]", TOINT(this), sockaddrToString(fromaddr).c_str(), getStatus(status), expectedSeq,
(int)sessionMap.size());
return (std::string) buf;
}
} Receiver;
typedef struct SendPack {
struct sockaddr_in toAddr;
uint64_t sessionid;
uint8_t *data;
uint32_t len;
SendPack() {
memset(&toAddr, 0, sizeof(toAddr));
sessionid = 0;
data = NULL;
len = 0;
}
} SendPack;
int m_fd;
int m_fdCloseRequested;
int m_timeoutms ;
int m_pipefd[2];
pthread_mutex_t m_mutexSendQueue;
rqueue<SendPack *, 10> sendQueue;
std::map<int64_t, Sender *> m_mapSender;
std::map<int64_t, Receiver *> m_mapReceiver;
std::list<TimeoutEvent *> m_eventTimerList;
int deleteTimeoutEvent(TimeoutEvent *timeoutEvent) {
for (std::list<TimeoutEvent *>::iterator it = m_eventTimerList.begin(); it != m_eventTimerList.end(); it++) {
if (*it == timeoutEvent) {
m_eventTimerList.erase(it);
return 0;
}
}
perr("event delete failed arg:%s", timeoutEvent->toString().c_str());
return -1;
}
int addTimeoutEvent(TimeoutEvent *timeoutEvent) {
std::list<TimeoutEvent *>::iterator it;
for (it = m_eventTimerList.begin(); it != m_eventTimerList.end(); it++) {
if (timeoutEvent->timeout < (*it)->timeout) {
break;
}
}
m_eventTimerList.insert(it, timeoutEvent);
return 0;
}
int processTimeout(TimeoutEvent *timeoutEvent) {
const int64_t iSender = sockaddrToint64(timeoutEvent->toaddr);
Sender *const ss = m_mapSender.count(iSender) ? m_mapSender[iSender] : NULL;
const std::string strAddr = sockaddrToString(timeoutEvent->toaddr);
if (!ss) {
perr("Invalid socket. Not in the list.strAddr:%s ", strAddr.c_str());
return -1;
}
if (timeoutEvent->udpPkg->type == PACKAGE_SYN && timeoutEvent != &ss->synTimeoutEvent) {
perr("timeoutEvent:%s != &ss->synTimeoutEvent:%s", timeoutEvent->toString().c_str(), ss->synTimeoutEvent.toString().c_str());
return -1;
}
if (timeoutEvent->udpPkg->type == PACKAGE_FIN && timeoutEvent != &ss->finTimeoutEvent) {
perr("timeoutEvent:%s != &ss->finTimeoutEvent:%s", timeoutEvent->toString().c_str(), ss->finTimeoutEvent.toString().c_str());
return -1;
}
if (timeoutEvent->udpPkg->type == PACKAGE_DATA) {
TimeoutEvent *tv = getWindowBySeq(ss->dataWindow, timeoutEvent->udpPkg->seq);
if (timeoutEvent != tv) {
perr("TIME_Win_not_found %s %s win:%s", timeoutEvent->toString().c_str(), ss->toString().c_str(),
getWindowSeqList(ss->dataWindow).c_str());
return -1;
}
}
pwrn("TIMEOUT: %s %s", timeoutEvent->toString().c_str(), timeoutEvent->tryCount >= CONST_MAXRETRANS ? "!MAXRETRANS!" : "");
if (timeoutEvent->tryCount >= CONST_MAXRETRANS) {
recvEvent(EVENT_TIMEOUT, &timeoutEvent->toaddr, timeoutEvent->udpPkg->sessionid, NULL, 0);
return 0;
}
timeoutEvent->tryCount++;
return sendPackage(&timeoutEvent->toaddr, timeoutEvent->udpPkg->type, timeoutEvent->udpPkg->seq, timeoutEvent->udpPkg);
}
int cleanReceiver(Receiver *rs) {
if (!rs) {
return 0;
}
m_mapReceiver.erase(sockaddrToint64(rs->fromaddr));
for (std::map<int64_t, std::pair<uint32_t, uint8_t *> *>::iterator it = rs->sessionMap.begin(); it != rs->sessionMap.end(); it++) {
std::pair<uint32_t, uint8_t *> *p = (*it).second;
delete[] p->second;
delete p;
}
delete rs;
return 1;
}
// #lizard forgives
int processReceive() {
struct sockaddr_in sender;
socklen_t sender_length = sizeof(struct sockaddr_in);
UdpPkg udpPkg;
const int recvRet = (int) recvfrom(m_fd, &udpPkg, sizeof(UdpPkg), 0, (struct sockaddr *) &sender, &sender_length);
const std::string strAddr = sockaddrToString(sender);
pdbg("RECV ret:%d from:%s fd:%d %s", recvRet, strAddr.c_str(), m_fd, udpPkg.toString().c_str());
const uint32_t crc = udpPkg.datacrc;
udpPkg.datacrc = 0;
if (recvRet <= 0 || getCrc32((uint8_t *) &udpPkg, udpPkg.pkgLen()) != crc) {
perr("IGNORERecv FAULTPkg ret:%d datalen:%d datacrc:0x%x", recvRet, udpPkg.datalen, crc);
return 0;
}
const int64_t iSender = sockaddrToint64(sender);
Receiver *rs = m_mapReceiver.count(iSender) ? m_mapReceiver[iSender] : NULL;
if (udpPkg.type == PACKAGE_SYN) {
            if (rs && rs->status != OPENING) { // probably a new connection; drop the stale receiver state
pwrn("R_CLEANNewSync %s RS:%s", udpPkg.toString().c_str(), rs ? rs->toString().c_str() : "NUL_RS");
cleanReceiver(rs);
rs = NULL;
}
if (!rs) {
rs = new Receiver();
rs->fromaddr = sender;
m_mapReceiver[sockaddrToint64(sender)] = rs;
pwrn("R_NEW RECEIVER:%s", rs->toString().c_str());
}
rs->status = OPENING;
rs->expectedSeq = udpPkg.seq + 1;
udpPkg.datalen = 0;
return sendPackage(&sender, PACKAGE_ACK, udpPkg.seq, &udpPkg);
}
if (udpPkg.type == PACKAGE_DATA) {
if ((!rs) || (rs->status != OPENING && rs->status != OPEN)) {
pwrn("R_IGNORERecvStat %s RS:%s", udpPkg.toString().c_str(), rs ? rs->toString().c_str() : "NUL_RS");
return 0;
}
if ((udpPkg.seq < (rs->expectedSeq - CONST_WINDOW)) || (udpPkg.seq > rs->expectedSeq)) { //check in window
pwrn("R_IGNORERecvSEQ [%d,%d] %s RS:%s", udpPkg.seq, rs->expectedSeq, udpPkg.toString().c_str(), rs->toString().c_str());
return 0;
}
if (udpPkg.seq == rs->expectedSeq) {
if (rs->status == OPENING) rs->status = OPEN;
rs->expectedSeq = udpPkg.seq + 1;
dataRecv(&udpPkg, rs);
}
            udpPkg.offset += udpPkg.datalen; // tell the sender this packet has been received
udpPkg.datalen = 0;
return sendPackage(&sender, PACKAGE_ACK, udpPkg.seq, &udpPkg);
}
if (udpPkg.type == PACKAGE_FIN) {
if (!rs) {
pwrn("R_IGNORERecvRSNull: %s maybe retrans fin pkg, ack it anyway.", udpPkg.toString().c_str());
return sendPackage(&sender, PACKAGE_ACK, udpPkg.seq, &udpPkg);
}
if (udpPkg.seq != rs->expectedSeq) {
pwrn("R_IGNORERecvSEQ [%d,%d] %s RS:%s", udpPkg.seq, rs->expectedSeq, udpPkg.toString().c_str(), rs->toString().c_str());
return 0;
}
udpPkg.datalen = 0;
int ret = sendPackage(&sender, PACKAGE_ACK, udpPkg.seq, &udpPkg);
pwrn("R_CLOSE RECEIVER:%s", rs->toString().c_str());
cleanReceiver(rs);
return ret;
}
Sender *const ss = m_mapSender.count(iSender) ? m_mapSender[iSender] : NULL;
if (udpPkg.type != PACKAGE_ACK || ss == NULL) {
pwrn("S_IGNORERecvStat %s SS:%s", udpPkg.toString().c_str(), ss ? ss->toString().c_str() : "NUL_SS");
return 0;
}
if (ss->status == FIN_SENT) {
if (udpPkg.seq != ss->seq) {
pwrn("S_IGNORERecvSEQ [%d,%d] %s SS:%s", udpPkg.seq, ss->seq, udpPkg.toString().c_str(), ss->toString().c_str());
return 0;
}
pwrn("S_CLOSE SENDER:%s", ss->toString().c_str());
deleteTimeoutEvent(&ss->finTimeoutEvent);
delete ss->finTimeoutEvent.udpPkg;
m_mapSender.erase(iSender);
return 0;
}
if (ss->status == SYN_SENT) {
if (ss->synTimeoutEvent.udpPkg == NULL || udpPkg.seq != ss->synTimeoutEvent.udpPkg->seq) {
pwrn("S_IGNORERecvSEQ [%d,%d] %s SS:%s", udpPkg.seq, ss->synTimeoutEvent.udpPkg == NULL ? -1 : ss->synTimeoutEvent.udpPkg->seq,
udpPkg.toString().c_str(), ss->toString().c_str());
return 0;
}
deleteTimeoutEvent(&ss->synTimeoutEvent);
delete ss->synTimeoutEvent.udpPkg;
ss->status = OPEN;
return 0;
}
if (ss->status == OPEN) {
TimeoutEvent *timeoutEvent = getWindowBySeq(ss->dataWindow, udpPkg.seq);
if (!timeoutEvent) {
pwrn("S_IGNORERecvWIN %s SS:%s win:%s", udpPkg.toString().c_str(), ss->toString().c_str(),
getWindowSeqList(ss->dataWindow).c_str());
return 0;
}
deleteTimeoutEvent(timeoutEvent);
delete timeoutEvent->udpPkg;
timeoutEvent->udpPkg = NULL;
if (udpPkg.totallen == udpPkg.offset) {
recvEvent(EVENT_SESSION_FINISH, &sender, udpPkg.sessionid, NULL, 0);
}
return 0;
}
perr("NEVER shuold be here :%s", udpPkg.toString().c_str());
return 0;
}
std::string getWindowSeqList(TimeoutEvent *window) {
std::stringstream s;
for (int i = 0; i < CONST_WINDOW; i++) {
s << (window[i].udpPkg ? window[i].udpPkg->seq : -1);
s << ";";
}
return s.str();
}
TimeoutEvent* getWindowBySeq(TimeoutEvent *window, uint32_t seq) {
for (uint32_t i = 0; i < CONST_WINDOW; i++) {
if (window[i].udpPkg && window[i].udpPkg->seq == seq) {
return &window[i];
}
}
return NULL;
}
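    // Returns a free window slot, but only if using it keeps every in-flight sequence number
    // within CONST_WINDOW of the oldest unacknowledged one; otherwise returns NULL, which
    // stalls the sender until an ACK frees the oldest slot.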
TimeoutEvent* checkEmptyWindow(TimeoutEvent *window, int expectSeq) {
uint32_t minSeq = INT_MAX;
int emptyIndex = -1;
for (int i = 0; i < CONST_WINDOW; i++) {
if (!window[i].udpPkg) {
emptyIndex = i;
} else if (minSeq > window[i].udpPkg->seq) {
minSeq = window[i].udpPkg->seq;
}
}
if (emptyIndex == -1) {
return NULL;
}
if ((minSeq != INT_MAX) && (static_cast<int>(minSeq + CONST_WINDOW - 1) < expectSeq)) {
return NULL;
}
return &window[emptyIndex];
}
int windowAllEmpty(TimeoutEvent *window) {
for (uint32_t i = 0; i < CONST_WINDOW; i++) {
if (window[i].udpPkg) {
return 0;
}
}
return 1;
}
int sendPackage(struct sockaddr_in *addr, uint8_t type, u_int32_t seq, UdpPkg *udpPkg) {
if ((!udpPkg) || (!addr)) {
perr("sendpackage pkg:%d || addr:%d", (uint32_t) ((uint64_t)udpPkg & 0xffffffffL) , (int) ((uint64_t)addr & 0xffffffffL) );
return -1;
}
udpPkg->version = CONST_VERSION;
udpPkg->type = type;
udpPkg->seq = seq;
udpPkg->datacrc = 0;
udpPkg->datacrc = getCrc32((uint8_t *) udpPkg, udpPkg->pkgLen());
pdbg("SEND type:%s:%d seq:%d datalen:%d datacrc:0x%x ", packageTypeStr(type), type, seq, udpPkg->datalen, udpPkg->datacrc);
const int sendRet = (int) sendto(m_fd, udpPkg, udpPkg->pkgLen(), 0, (struct sockaddr *) addr, sizeof(struct sockaddr));
const std::string strAddr = sockaddrToString(*addr);
pdbg("SEND type:%s seq:%d addr:%s fd:%d len:%d sendRet:%d %s", packageTypeStr(udpPkg->type), udpPkg->seq,
strAddr.c_str(), m_fd, udpPkg->pkgLen(), sendRet, "");
if (udpPkg->type == PACKAGE_ACK) {
            return sendRet; // ACKs are never retransmitted, so no timeout event is needed
}
const int64_t iSender = sockaddrToint64(*addr);
Sender *const ss = m_mapSender.count(iSender) ? m_mapSender[iSender] : NULL;
if (!ss) {
perr("Invalid socket. Not in the list. addr:%s fd:%d", strAddr.c_str(), m_fd);
return -1;
}
TimeoutEvent *pEvent = NULL;
if (udpPkg->type == PACKAGE_SYN) {
pEvent = &ss->synTimeoutEvent;
} else if (udpPkg->type == PACKAGE_FIN) {
pEvent = &ss->finTimeoutEvent;
} else if (udpPkg->type == PACKAGE_DATA) {
pEvent = getWindowBySeq(ss->dataWindow, udpPkg->seq);
if (!pEvent) {
perr("DataPkgNotFountInWin :%s win:%s", udpPkg->toString().c_str(), getWindowSeqList(ss->dataWindow).c_str());
return -2;
}
} else {
perr("Invalid sock:%d type:%d", m_fd, udpPkg->type);
return -3;
}
memcpy(&(pEvent->toaddr), addr, sizeof(struct sockaddr_in));
pEvent->timeout = getMillisecond() + m_timeoutms;
addTimeoutEvent(pEvent);
return 0;
}
Sender* getSender(sockaddr_in addr) {
const int64_t iSender = sockaddrToint64(addr);
Sender *ss = m_mapSender.count(iSender) ? m_mapSender[iSender] : NULL;
if (ss) {
return ss;
}
ss = new Sender();
m_mapSender[iSender] = ss;
ss->status = SYN_SENT;
ss->toaddr = addr;
ss->seq = 100000;
ss->synTimeoutEvent.udpPkg = new UdpPkg();
sendPackage(&addr, PACKAGE_SYN, ss->seq, ss->synTimeoutEvent.udpPkg);
pwrn("NEW SENDER %s", ss->toString().c_str());
return ss;
}
int dataRecv(UdpPkg *udpPkg, Receiver *rs) {
std::pair<uint32_t, uint8_t *> *sessionPair = rs->sessionMap.count(udpPkg->sessionid) ? rs->sessionMap[udpPkg->sessionid] : NULL;
if (sessionPair == NULL && udpPkg->offset != 0) {
perr("Not session:%d sessionPair:%d first package offset:%u", TOINT(udpPkg->sessionid), (int) ((uint64_t)sessionPair & 0xffffffffL) , udpPkg->offset);
return 0;
}
if (sessionPair && udpPkg->offset != sessionPair->first) {
perr("session:%d offset error:%u local:%u", TOINT(udpPkg->sessionid), udpPkg->offset, sessionPair->first);
return 0;
}
if (!sessionPair) {
sessionPair = new std::pair<uint32_t, uint8_t *>(0, new uint8_t[udpPkg->totallen]);
rs->sessionMap[udpPkg->sessionid] = sessionPair;
}
memcpy(sessionPair->second + sessionPair->first, udpPkg->data, udpPkg->datalen);
sessionPair->first = udpPkg->offset + udpPkg->datalen;
if (sessionPair->first == udpPkg->totallen) {
recvEvent(EVENT_DATA, &rs->fromaddr, udpPkg->sessionid, sessionPair->second, sessionPair->first);
delete[] sessionPair->second;
delete sessionPair;
rs->sessionMap.erase(udpPkg->sessionid);
}
return 0;
}
int checkSendQueue() {
SendPack *sendPack = sendQueue.front();
if (sendPack) {
if (getSender(sendPack->toAddr)->dataQueue.empty()) {
sendQueue.pop();
} else {
sendPack = NULL;
}
}
int ret = 0;
if (sendPack) {
ret = 1;
uint32_t offset = 0;
while (offset < sendPack->len) {
UdpPkg *udpPkg = new UdpPkg();
udpPkg->sessionid = sendPack->sessionid;
udpPkg->totallen = sendPack->len;
udpPkg->offset = offset;
udpPkg->datalen = (sendPack->len - offset < CONST_MAXPKTSIZE) ? sendPack->len - offset : CONST_MAXPKTSIZE;
memcpy(udpPkg->data, sendPack->data + offset, udpPkg->datalen);
offset += udpPkg->datalen;
getSender(sendPack->toAddr)->dataQueue.push(udpPkg);
}
delete[] sendPack->data;
delete sendPack;
}
for (std::map<int64_t, Sender *>::iterator it = m_mapSender.begin(); it != m_mapSender.end(); it++) {
Sender *ss = it->second;
while (ss->dataQueue.size()) {
TimeoutEvent *timeoutEvent = checkEmptyWindow(ss->dataWindow, ss->seq + 1);
if (timeoutEvent == NULL) {
break;
}
ss->seq += 1;
timeoutEvent->tryCount = 0;
timeoutEvent->udpPkg = ss->dataQueue.front();
ss->dataQueue.pop();
sendPackage(&ss->toaddr, PACKAGE_DATA, ss->seq, timeoutEvent->udpPkg);
}
}
return ret;
}
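    // Main event loop: fires the earliest pending timeout, then select()s on the UDP socket
    // and the wakeup pipe (written by send()) so queued outgoing data and incoming packets
    // are both handled; runs until the socket is closed and all timers have drained.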
int loop() {
while (m_fd > 0 || m_eventTimerList.size()) {
const int64_t start = getMillisecond();
const int checkCount = checkSendQueue();
const int64_t timeout = m_eventTimerList.size() ? (*m_eventTimerList.begin())->timeout - start : 0;
pdbg("startLoop ,timeout:%d timer:%d sender:%d checkcnt:[sq:%d %d run:%d] ",
TOINT(timeout), (int)m_eventTimerList.size(), (int)m_mapSender.size(), (int)sendQueue.size(), checkCount, TOINT(getMillisecond() - start));
if (m_eventTimerList.size() && timeout <= 0) { //timeout
const std::list<TimeoutEvent *>::iterator it = m_eventTimerList.begin();
int ret = processTimeout(*it);
pdbg("processTimeout: timeout:%d ret:%d run:%d %s", TOINT(timeout), ret, TOINT(getMillisecond() - start), (*it)->toString().c_str());
m_eventTimerList.erase(it);
if (ret < 0) {
return -1;
}
continue;
}
fd_set fdset;
FD_ZERO(&fdset);
FD_SET(m_fd, &fdset);
FD_SET(m_pipefd[0], &fdset);
struct timeval diff, *pdiff = NULL;
if (m_eventTimerList.size() && timeout > 0) {
diff.tv_sec = timeout / 1000;
diff.tv_usec = (timeout % 1000) * 1000;
pdiff = &diff;
}
const int selectRet = select(std::max(m_fd, m_pipefd[0]) + 1, &fdset, NULL, NULL, pdiff);
pdbg("SelectRet:%d err:%d time:%d realTime:%d diff[%d %d]", selectRet, errno, TOINT(timeout), TOINT(getMillisecond() - start),
TOINT(diff.tv_sec), TOINT(diff.tv_usec));
if (selectRet == -1 && errno != EINTR) {
pdbg("eventloop: select errno:%d", errno);
continue;
}
if (FD_ISSET(m_fd, &fdset)) {
int ret = processReceive();
pdbg("eventloop: processReceive ret:%d run:%d", ret, TOINT(getMillisecond() - start));
if (ret < 0) {
return -1;
}
}
if (FD_ISSET(m_pipefd[0], &fdset)) {
int i = 0;
int ret = read(m_pipefd[0], &i, sizeof(int));
pdbg("read pipe ret:%d run:%d ", ret, TOINT(getMillisecond() - start));
}
}
pdbg("end loop , fd:%d timer:%d", m_fd, (int)m_eventTimerList.size());
return 0;
}
public:
virtual ~UDP(){}
UDP() {
m_fd = 0;
m_fdCloseRequested = 0;
m_timeoutms = 0;
m_pipefd[0] = 0;
m_pipefd[1] = 0;
m_mutexSendQueue = PTHREAD_MUTEX_INITIALIZER;
}
int createSocket(const char *ip, int port, int timeoutms) {
if (m_fd > 0) {
return m_fd;
}
this->m_timeoutms = timeoutms;
srand(time(NULL));
pthread_mutex_init(&m_mutexSendQueue, NULL);
int ret = pipe(m_pipefd);
if (ret < 0) {
perr("create pipe ret:%d", ret);
return -1;
}
const int fd = socket(AF_INET, SOCK_DGRAM, 0);
if (fd < 0) {
perr("create socket fd:%d", fd);
return -1;
}
struct sockaddr_in address;
memset(&address, 0, sizeof(address));
address.sin_family = AF_INET;
address.sin_port = htons(port);
if (ip == NULL || strlen(ip) == 0) {
address.sin_addr.s_addr = htonl(INADDR_ANY);
}
else {
inet_pton(AF_INET, ip, &(address.sin_addr));
}
const int bindRet = ::bind(fd, (struct sockaddr *) &address, (socklen_t)sizeof(address));
if (bindRet < 0) {
perr("create socket bindRet:%d fd:%d %s:%d ", bindRet, fd, ip, port);
close(fd);
return -2;
}
m_fd = fd;
m_fdCloseRequested = false;
pwrn("Create socket succ. fd:%d %s:%d pipe:[%d,%d]", fd, ip, port, m_pipefd[0], m_pipefd[1]);
return fd;
}
int startLoop() {
return loop();
}
int64_t send(struct sockaddr_in addr, uint8_t *data, int len) {
SendPack *sendPack = new SendPack();
struct timeval now;
gettimeofday(&now, NULL);
sendPack->sessionid = (((int64_t) now.tv_sec) * 1000 * 1000) + now.tv_usec;
sendPack->toAddr = addr;
sendPack->len = len;
sendPack->data = new uint8_t[len];
memcpy(sendPack->data, data, len);
        pthread_mutex_lock(&m_mutexSendQueue);   // sendQueue is shared with the loop thread
        sendQueue.push(sendPack);
        pthread_mutex_unlock(&m_mutexSendQueue);
write(m_pipefd[1], &m_pipefd[1], sizeof(int));
return sendPack->sessionid;
}
};
#endif //AMC_UDP_H
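// Usage sketch (not part of the original header): the class exposes
// createSocket(), startLoop() and send(); a typical caller might look like the
// following, assuming startLoop() runs on its own thread and the peer
// sockaddr_in is filled in by the caller:
//
//   UDP udp;
//   if (udp.createSocket("0.0.0.0", 9000, /*timeoutms=*/3000) < 0) return -1;
//   std::thread loopThread([&udp] { udp.startLoop(); });    // select()/timer loop
//   struct sockaddr_in peer = {};                           // ... fill in peer address
//   uint8_t payload[] = {0x01, 0x02};
//   int64_t session = udp.send(peer, payload, sizeof(payload));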
| 13,478 |
988 | /*
* Copyright 2018-2021 Redis Labs Ltd. and Contributors
*
* This file is available under the Redis Labs Source Available License Agreement
*/
#include "RG.h"
#include "rg_matrix.h"
GrB_Info RG_eWiseAdd // C = A + B
(
RG_Matrix C, // input/output matrix for results
const GrB_Semiring semiring, // defines '+' for T=A+B
const RG_Matrix A, // first input: matrix A
const RG_Matrix B // second input: matrix B
) {
ASSERT(A != NULL);
ASSERT(B != NULL);
ASSERT(C != NULL);
ASSERT(semiring != NULL);
GrB_Info info;
GrB_Index nrows;
GrB_Index ncols;
GrB_Index DM_nvals;
GrB_Index DP_nvals;
GrB_Matrix _A = NULL;
GrB_Matrix _B = NULL;
GrB_Matrix _C = RG_MATRIX_M(C);
GrB_Matrix AM = RG_MATRIX_M(A);
GrB_Matrix BM = RG_MATRIX_M(B);
GrB_Matrix ADP = RG_MATRIX_DELTA_PLUS(A);
GrB_Matrix ADM = RG_MATRIX_DELTA_MINUS(A);
GrB_Matrix BDP = RG_MATRIX_DELTA_PLUS(B);
GrB_Matrix BDM = RG_MATRIX_DELTA_MINUS(B);
// TODO: check A, B and C are compatible
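	// an RG_Matrix keeps pending changes in its delta-plus / delta-minus
	// matrices: if either delta of A (or B) holds entries, that matrix is
	// exported (flattened) into a temporary GrB_Matrix before the eWiseAdd,
	// otherwise its main 'M' matrix is used directly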
GrB_Matrix_nvals(&DM_nvals, ADM);
GrB_Matrix_nvals(&DP_nvals, ADP);
if(DM_nvals > 0 || DP_nvals > 0) {
info = RG_Matrix_export(&_A, A);
ASSERT(info == GrB_SUCCESS);
} else {
_A = AM;
}
GrB_Matrix_nvals(&DM_nvals, BDM);
GrB_Matrix_nvals(&DP_nvals, BDP);
if(DM_nvals > 0 || DP_nvals > 0) {
info = RG_Matrix_export(&_B, B);
ASSERT(info == GrB_SUCCESS);
} else {
_B = BM;
}
//--------------------------------------------------------------------------
// C = A + B
//--------------------------------------------------------------------------
info = GrB_Matrix_eWiseAdd_Semiring(_C, NULL, NULL, semiring, _A, _B, NULL);
ASSERT(info == GrB_SUCCESS);
if(_A != AM) GrB_free(&_A);
if(_B != BM) GrB_free(&_B);
return info;
}
| 905 |
965 | <filename>docs/mfc/reference/codesnippet/CPP/cwnd-class_30.cpp
// From message map for CMdiView, a CView-derived class
ON_NOTIFY_EX_RANGE(TTN_NEEDTEXTW, 0, 0xFFFF, &CMdiView::OnToolTipNotify)
ON_NOTIFY_EX_RANGE(TTN_NEEDTEXTA, 0, 0xFFFF, &CMdiView::OnToolTipNotify) | 118 |
1,210 | import unittest
from pycoin.key.subpaths import subpaths_for_path_range
from pycoin.symbols.btc import network
class KeychainTest(unittest.TestCase):
def test_keychain(self):
keychain = network.keychain()
bip32_list = [network.keys.bip32_seed(_) for _ in [b"foo", b"bar"]]
for bip32 in bip32_list:
keychain.add_key_paths(bip32.public_copy(), subpaths_for_path_range("0-1/0-10"))
keychain.add_secrets(bip32_list)
for bip32 in bip32_list:
for path in ["0/5", "1/2", "0/9"]:
subkey = bip32.subkey_for_path("0/5")
v = keychain.get(subkey.hash160())
self.assertEqual(v[0], subkey.secret_exponent())
v = keychain.get(b'0' * 32)
self.assertEqual(v, None)
| 390 |
413 | <gh_stars>100-1000
__author__ = 'Robert'
"""
from:
http://stackoverflow.com/questions/20317314/python-function-in-a-while-loop-ruining-it-for-me
"""
import sys
sys.path.append('..')
import easygui as eg
def get_user_input(target_value, dice_rolls):
operator_choices = ['+', '-', '*', '/']
operator_choices.extend(['OK', 'Del', 'Reroll'])
dice_choices = [str(r) for r in dice_rolls]
dice_choices.extend(['Del', 'Reroll'])
raw_user_input = list()
mode = 'tick'
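    # the dialog alternates between two modes: 'tick' offers the remaining dice
    # values, 'tock' offers the operators (plus OK/Del/Reroll), so the user
    # builds an expression as value, operator, value, ...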
while True:
if mode == 'tick':
choices = dice_choices
else:
choices = operator_choices
var = eg.indexbox(''.join(raw_user_input), "Target value: {}".format(target_value), choices)
if var is None:
raise ValueError("Dialog closed with invalid entry")
choice = choices[var]
if choice == 'OK':
return ''.join(raw_user_input)
if choice == 'Del':
raw_user_input = list()
dice_choices = [str(r) for r in dice_rolls]
dice_choices.extend(['Del', 'Reroll'])
mode = 'tick'
continue
if choice == 'Reroll':
return None
raw_user_input.append(choice)
if mode == 'tick': # Remove the dice from the list of dice
del dice_choices[dice_choices.index(choices[var])]
if mode == 'tick':
mode = 'tock'
else:
mode = 'tick'
br_value = 12 # Sample value
dice_value_list = [4, 2, 1, 1, 5] # Sample rolls
try:
user_input = get_user_input(br_value, dice_value_list)
except ValueError:
print("Your data entry was cancelled by the user")
exit()
if user_input is None:
print("Exiting program because you cancelled the dialog")
exit()
print("User entered: {}".format(user_input))
####
# Now, put code here to process your user_input
# ....
#
#### | 832 |
348 | {"nom":"Saint-Claude","circ":"4ème circonscription","dpt":"Guadeloupe","inscrits":8101,"abs":5967,"votants":2134,"blancs":83,"nuls":65,"exp":1986,"res":[{"nuance":"SOC","nom":"Mme <NAME>","voix":799},{"nuance":"LR","nom":"Mme <NAME>","voix":237},{"nuance":"DVD","nom":"<NAME>","voix":204},{"nuance":"FI","nom":"<NAME>","voix":195},{"nuance":"DVG","nom":"Mme <NAME>","voix":162},{"nuance":"DVD","nom":"Mme <NAME>","voix":98},{"nuance":"DVG","nom":"<NAME>","voix":60},{"nuance":"FN","nom":"Mme <NAME>","voix":57},{"nuance":"EXG","nom":"<NAME>","voix":36},{"nuance":"DVD","nom":"M. <NAME>","voix":29},{"nuance":"DIV","nom":"M. <NAME>","voix":29},{"nuance":"DVD","nom":"M. <NAME>","voix":23},{"nuance":"DVD","nom":"Mme <NAME>","voix":17},{"nuance":"DIV","nom":"M. <NAME>","voix":15},{"nuance":"COM","nom":"M. <NAME>","voix":14},{"nuance":"UDI","nom":"Mme <NAME>","voix":6},{"nuance":"DIV","nom":"Mme <NAME>","voix":5},{"nuance":"REG","nom":"Mme <NAME>","voix":0}]} | 383 |
1,302 | #pragma once
#include <webrtc/api/peer_connection_interface.h>
// IWYU pragma: no_include "src/enums/macros/impls.h"
#define ICE_TRANSPORTS_TYPE webrtc::PeerConnectionInterface::IceTransportsType
#define ICE_TRANSPORTS_TYPE_NAME "RTCIceTransportPolicy"
#define ICE_TRANSPORTS_TYPE_LIST \
ENUM_SUPPORTED(ICE_TRANSPORTS_TYPE::kAll, "all") \
ENUM_SUPPORTED(ICE_TRANSPORTS_TYPE::kRelay, "relay") \
ENUM_UNSUPPORTED(ICE_TRANSPORTS_TYPE::kNoHost, "no-host", "\"no-host\" is not a valid RTCIceTransportPolicy") \
ENUM_UNSUPPORTED(ICE_TRANSPORTS_TYPE::kNone, "none", "\"none\" is not a valid RTCIceTransportPolicy")
#define ENUM(X) ICE_TRANSPORTS_TYPE ## X
#include "src/enums/macros/decls.h"
#undef ENUM
| 292 |
412 | #include <assert.h>
#include <stdlib.h>
#ifdef _MSC_VER
# define _Static_assert(x, m) static_assert(x, m)
#endif
struct list;
typedef struct list list_nodet;
list_nodet fill_node(signed int depth_tag_list);
struct list
{
int datum;
struct list *next;
};
int max_depth = 2;
list_nodet *build_node(int depth)
{
if(max_depth < depth)
return ((list_nodet *)NULL);
else
{
_Static_assert(sizeof(list_nodet) == 16, "");
list_nodet *result = malloc(16);
if(result)
{
*result = fill_node(depth + 1);
}
return result;
}
}
list_nodet fill_node(int depth)
{
list_nodet result;
result.datum = depth;
result.next = build_node(depth);
return result;
}
int main()
{
list_nodet *node = build_node(0);
int i = 0;
list_nodet *list_walker = node;
for(; list_walker; list_walker = list_walker->next)
{
i = i + 1;
}
assert(i == 3);
return 0;
}
| 399 |
915 | <filename>chi_annotator/task_center/cmds.py<gh_stars>100-1000
import datetime
from chi_annotator.task_center.common import Command
from chi_annotator.task_center.common import DBLinker
from chi_annotator.algo_factory.common import Message, TrainingData
from chi_annotator.task_center.model import Trainer, Interpreter
class BatchTrainCmd(Command):
def __init__(self, db_config, task_config):
super(BatchTrainCmd, self).__init__(db_config)
self.db_config = db_config
self.task_config = task_config
self.uid = self.task_config.get("user_uuid")
self.dataset_id = self.task_config.get("dataset_uuid")
if "model_version" in task_config:
# override timestamp
self.timestamp = task_config["model_version"]
def __create_insert(self):
return {
"user_uuid": self.uid,
"dataset_uuid": self.dataset_id,
"model_type": self.task_config["model_type"],
"model_version": self.timestamp,
"is_full_train": False,
"status": Command.STATUS_RUNNING,
"start_timestamp": datetime.datetime.now(),
"end_timestamp": None
}
def __create_update(self, status):
return {"model_version": self.timestamp}, {"status": status, "end_timestamp": datetime.datetime.now()}
def exec(self):
# mark train status in db, self.timestamp = task id
self.linker.action(DBLinker.INSERT_SINGLE, **{"table_name": DBLinker.TRAIN_STATUS_TABLE,
"item": self.__create_insert()})
# get batch data
batch_exec_args = {"condition": self.task_config["condition"],
"table_name": DBLinker.ANNO_DATA_TABLE,
"sort_limit": self.task_config.get("sort_limit", None)}
batch_result = self.linker.action(DBLinker.BATCH_FETCH, **batch_exec_args)
# train process
self._train_batch(batch_result)
# mark train done in db
condition, item = self.__create_update(Command.STATUS_DONE)
self.linker.action(DBLinker.UPDATE,
**{"table_name": DBLinker.TRAIN_STATUS_TABLE, "item": item, "condition": condition})
def _train_batch(self, batch_result):
# from result to train_data, create train data
msg = []
for item in batch_result:
msg.append(Message(item["text"], {"label": item["label"]}))
train_data = TrainingData(msg)
# create interpreter
trainer = Trainer(self.task_config)
trainer.train(train_data)
# save model meta for config
trainer.persist(self.task_config.get_save_path_prefix())
return True
class BatchNoDbPredictCmd(Command):
"""
    Predict from JSON data supplied in the task config rather than from the DB.
"""
def __init__(self, db_config, task_config):
super(BatchNoDbPredictCmd, self).__init__(db_config)
self.db_config = db_config
self.task_config = task_config
self.uid = self.task_config.get("user_uuid")
self.dataset_id = self.task_config.get("dataset_uuid")
if "model_version" in task_config:
self.timestamp = task_config["model_version"]
def exec(self):
# from result to train_data, create train data
# load interpreter # todo model can be load from cache later.
filter_condition = {'user_uuid': self.uid,
"dataset_uuid": self.dataset_id,
"model_type": self.task_config["model_type"],
"status": Command.STATUS_DONE}
batch_exec_args = {"condition": filter_condition,
"table_name": DBLinker.TRAIN_STATUS_TABLE,
"sort_limit": ([("end_timestamp", -1)], 1)}
status_result = self.linker.action(DBLinker.BATCH_FETCH, **batch_exec_args)
# print(status_result)
if len(status_result) < 1:
print("no model trained now, please train model first or wait model train done.")
return None
model_version = str(status_result[0]["model_version"])
print("now model version is : ", model_version)
# get newest model version according user_id, dataset_id, and model_type.
# only need task config to generate saved path
# Interpreter can load model meta by itself
interpreter = Interpreter.load(self.task_config.get_save_path_prefix(), model_version)
preds = []
items = self.task_config["data"]
for item in items:
pred = interpreter.parse(item["text"])
preds.append(pred)
return preds
class BatchPredictCmd(Command):
"""
    Batch prediction command: uses the given ${model_version} to predict
    ${batch_num} samples that have not been labeled yet.
"""
def __init__(self, db_config, task_config):
super(BatchPredictCmd, self).__init__(db_config)
self.db_config = db_config
self.task_config = task_config
self.uid = self.task_config.get("user_uuid")
self.dataset_id = self.task_config.get("dataset_uuid")
if "model_version" in task_config:
# override timestamp
self.timestamp = task_config["model_version"]
def exec(self):
# get batch data
batch_exec_args = {"condition": self.task_config["condition"],
"table_name": DBLinker.RAW_DATA_TABLE,
"limit": self.task_config.get("batch_num", 100)}
batch_result = self.linker.action(DBLinker.LIMIT_BATCH_FETCH, **batch_exec_args)
# predict
return self._predict_batch(batch_result)
def _predict_batch(self, batch_result):
# from result to train_data, create train data
# load interpreter # todo model can be load from cache later.
filter_condition = {'user_uuid': self.uid,
"dataset_uuid": self.dataset_id,
"model_type": self.task_config["model_type"],
"status": Command.STATUS_DONE}
batch_exec_args = {"condition": filter_condition,
"table_name": DBLinker.TRAIN_STATUS_TABLE,
"sort_limit": ([("end_timestamp", -1)], 1)}
status_result = self.linker.action(DBLinker.BATCH_FETCH, **batch_exec_args)
# print(status_result)
if len(status_result) < 1:
print("no model trained now, please train model first or wait model train done.")
return None
model_version = str(status_result[0]["model_version"])
print("now model version is : ", model_version)
# get newest model version according user_id, dataset_id, and model_type.
interpreter = Interpreter.load(self.task_config.get_save_path_prefix(), model_version)
preds = []
for item in batch_result:
pred = interpreter.parse(item["text"])
preds.append(pred)
return preds
class StatusCmd(Command):
"""
    Query task status by user id, dataset id and task type.
"""
def __init__(self, db_config, task_config):
super(StatusCmd, self).__init__(db_config)
self.db_config = db_config
self.task_config = task_config
self.uid = self.task_config.get("user_uuid")
self.dataset_id = self.task_config.get("dataset_uuid")
if "model_version" in task_config:
# override timestamp
self.timestamp = task_config["model_version"]
def exec(self):
filter_condition = {'user_uuid': self.uid,
"dataset_uuid": self.dataset_id,
"model_type": self.task_config["model_type"]}
batch_exec_args = {"condition": filter_condition,
"table_name": DBLinker.TRAIN_STATUS_TABLE,
"sort_limit": ([("start_timestamp", -1)], 1)}
batch_result = self.linker.action(DBLinker.BATCH_FETCH, **batch_exec_args)
# predict
if len(batch_result) == 1:
return batch_result[0]["status"], batch_result[0]["end_timestamp"]
else:
return "not found!", None
| 3,758 |
788 | /*
* Copyright (C) 2019 Trinity. All rights reserved.
* Copyright (C) 2019 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created by wlanjie on 2019/4/20.
//
#ifndef TRINITY_MUSIC_DECODER_H
#define TRINITY_MUSIC_DECODER_H
#include "packet_pool.h"
extern "C" {
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswresample/swresample.h"
};
namespace trinity {
class MusicDecoder {
public:
MusicDecoder();
~MusicDecoder();
virtual int Init(const char* path, int packet_buffer_size);
virtual int Init(const char* path);
virtual void SetPacketBufferSize(int packet_buffer_size);
virtual AudioPacket* DecodePacket();
virtual void SeekFrame();
virtual void Destroy();
virtual int GetSampleRate();
void SetSeekReq(bool seek_req);
bool HasSeekReq();
bool HasSeekResp();
    /** Set the playback position, in seconds; the 3 decimal places give millisecond precision. **/
void SetPosition(float seconds);
float GetActualSeekPosition();
private:
int ReadSamples(short* samples, int size);
int ReadFrame();
int ReceiveFrame();
bool AudioCodecIsSupported();
private:
bool seek_req_;
bool seek_resp_;
float seek_seconds_;
float actual_seek_position_;
AVFormatContext* format_context_;
AVCodecContext* codec_context_;
int stream_index_;
float time_base_;
AVFrame* audio_frame_;
AVPacket packet_;
char* path_;
bool seek_success_read_frame_success_;
int packet_buffer_size_;
short* audio_buffer_;
float position_;
int audio_buffer_cursor_;
int audio_buffer_size_;
float duration_;
bool need_first_frame_correct_flag_;
float first_frame_correction_in_secs_;
SwrContext* swr_context_;
void* swr_buffer_;
int swr_buffer_size_;
};
} // namespace trinity
#endif // TRINITY_MUSIC_DECODER_H
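// Usage sketch (not part of the original header) — assumes Init() returns a
// negative value on failure and DecodePacket() returns null once no more
// packets are available:
//
//   trinity::MusicDecoder decoder;
//   if (decoder.Init("/path/to/music.mp3", 2048) >= 0) {
//       while (AudioPacket* packet = decoder.DecodePacket()) {
//           // ... consume / queue the packet
//       }
//   }
//   decoder.Destroy();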
| 914 |