max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
348 | {"nom":"Cristot","circ":"1ère circonscription","dpt":"Calvados","inscrits":162,"abs":99,"votants":63,"blancs":1,"nuls":10,"exp":52,"res":[{"nuance":"REM","nom":"<NAME>","voix":26},{"nuance":"UDI","nom":"<NAME>","voix":26}]} | 89 |
355 | /*
* This file is part of helper, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <<EMAIL>>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.helper.bucket;
import me.lucko.helper.bucket.partitioning.PartitioningStrategy;
import java.util.List;
import java.util.Set;
import javax.annotation.Nonnull;
/**
* A bucket is an extension of {@link Set}, which allows contained elements
* to be separated into parts by a {@link PartitioningStrategy}.
*
* <p>The performance of {@link Bucket} should be largely similar to the performance
* of the underlying {@link Set}. Elements are stored twice - once in a set
* containing all elements in the bucket, and again in a set representing each partition.</p>
*
* @param <E> the element type
*/
public interface Bucket<E> extends Set<E> {
/**
* Gets the number of partitions used to form this bucket.
*
* @return the number of partitions in this bucket
*/
int getPartitionCount();
/**
* Gets the partition with the given index value
*
* @param i the partition index
* @return the partition
* @throws IndexOutOfBoundsException if the index is out of range
* (<tt>index < 0 || index >= getPartitionCount()</tt>)
*/
@Nonnull
BucketPartition<E> getPartition(int i);
/**
* Gets the partitions which form this bucket.
*
* @return the partitions within the bucket
*/
@Nonnull
List<BucketPartition<E>> getPartitions();
/**
* Returns a cycle instance unique to this bucket.
*
* <p>This method is provided as a utility for operating deterministically on
* all elements within the bucket over a period of time.</p>
*
* <p>The same cycle instance is returned for each bucket.</p>
*
* @return a cycle of partitions
*/
@Nonnull
Cycle<BucketPartition<E>> asCycle();
}
| 933 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-4qv2-grq4-x33q",
"modified": "2022-04-30T18:17:22Z",
"published": "2022-04-30T18:17:22Z",
"aliases": [
"CVE-2001-1116"
],
"details": "Identix BioLogon 2.03 and earlier does not lock secondary displays on a multi-monitor system running Windows 98 or ME, which allows an attacker with physical access to the system to bypass authentication through a secondary display.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2001-1116"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/6948"
},
{
"type": "WEB",
"url": "http://ntbugtraq.ntadvice.com/default.asp?pid=36&sid=1&A2=IND0108&L=NTBUGTRAQ&F=P&S=&P=71"
},
{
"type": "WEB",
"url": "http://ntbugtraq.ntadvice.com/default.asp?pid=36&sid=1&A2=ind0108&L=ntbugtraq&F=P&S=&P=724"
},
{
"type": "WEB",
"url": "http://www.osvdb.org/5453"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/3140"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 603 |
3,358 | /* -*- mode: c++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/*
Copyright (C) 2005 <NAME>
Copyright (C) 2007 StatPro Italia srl
This file is part of QuantLib, a free-software/open-source library
for financial quantitative analysts and developers - http://quantlib.org/
QuantLib is free software: you can redistribute it and/or modify it
under the terms of the QuantLib license. You should have received a
copy of the license along with this program; if not, please email
<<EMAIL>>. The license is also available online at
<http://quantlib.org/license.shtml>.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the license for more details.
*/
#include <ql/pricingengines/vanilla/batesengine.hpp>
#include <ql/instruments/payoffs.hpp>
namespace QuantLib {
BatesEngine::BatesEngine(const ext::shared_ptr<BatesModel> & model,
Size integrationOrder)
: AnalyticHestonEngine(model, integrationOrder) { }
BatesEngine::BatesEngine(const ext::shared_ptr<BatesModel>& model,
Real relTolerance, Size maxEvaluations)
: AnalyticHestonEngine(model, relTolerance, maxEvaluations) { }
std::complex<Real> BatesEngine::addOnTerm(
Real phi, Time t, Size j) const {
ext::shared_ptr<BatesModel> batesModel =
ext::dynamic_pointer_cast<BatesModel>(*model_);
const Real nu_ = batesModel->nu();
const Real delta2_ = 0.5*batesModel->delta()*batesModel->delta();
const Real lambda_ = batesModel->lambda();
const Real i = (j == 1)? 1.0 : 0.0;
const std::complex<Real> g(i, phi);
//it can throw: to be fixed
return t*lambda_*(std::exp(nu_*g + delta2_*g*g) - 1.0
-g*(std::exp(nu_+delta2_) - 1.0));
}
BatesDetJumpEngine::BatesDetJumpEngine(
const ext::shared_ptr<BatesDetJumpModel>& model,
Size integrationOrder)
: BatesEngine(model, integrationOrder) { }
BatesDetJumpEngine::BatesDetJumpEngine(
const ext::shared_ptr<BatesDetJumpModel>& model,
Real relTolerance, Size maxEvaluations)
: BatesEngine(model, relTolerance, maxEvaluations) { }
std::complex<Real> BatesDetJumpEngine::addOnTerm(
Real phi, Time t, Size j) const {
const std::complex<Real> l =
BatesEngine::addOnTerm(phi, t, j);
ext::shared_ptr<BatesDetJumpModel> batesDetJumpModel =
ext::dynamic_pointer_cast<BatesDetJumpModel>(*model_);
const Real lambda = batesDetJumpModel->lambda();
const Real kappaLambda = batesDetJumpModel->kappaLambda();
const Real thetaLambda = batesDetJumpModel->thetaLambda();
return (kappaLambda*t - 1.0 + std::exp(-kappaLambda*t))
* thetaLambda*l/(kappaLambda*t*lambda)
+ (1.0 - std::exp(-kappaLambda*t))*l/(kappaLambda*t);
}
BatesDoubleExpEngine::BatesDoubleExpEngine(
const ext::shared_ptr<BatesDoubleExpModel> & model,
Size integrationOrder)
: AnalyticHestonEngine(model, integrationOrder) { }
BatesDoubleExpEngine::BatesDoubleExpEngine(
const ext::shared_ptr<BatesDoubleExpModel>& model,
Real relTolerance, Size maxEvaluations)
: AnalyticHestonEngine(model, relTolerance, maxEvaluations) { }
std::complex<Real> BatesDoubleExpEngine::addOnTerm(
Real phi, Time t, Size j) const {
ext::shared_ptr<BatesDoubleExpModel> batesDoubleExpModel =
ext::dynamic_pointer_cast<BatesDoubleExpModel>(*model_);
const Real p_ = batesDoubleExpModel->p();
const Real q_ = 1.0-p_;
const Real nuDown_= batesDoubleExpModel->nuDown();
const Real nuUp_ = batesDoubleExpModel->nuUp();
const Real lambda_= batesDoubleExpModel->lambda();
const Real i = (j == 1)? 1.0 : 0.0;
const std::complex<Real> g(i, phi);
return t*lambda_*(p_/(1.0-g*nuUp_) + q_/(1.0+g*nuDown_) - 1.0
- g*(p_/(1-nuUp_) + q_/(1+nuDown_)-1));
}
BatesDoubleExpDetJumpEngine::BatesDoubleExpDetJumpEngine(
const ext::shared_ptr<BatesDoubleExpDetJumpModel> & model,
Size integrationOrder)
: BatesDoubleExpEngine(model, integrationOrder) { }
BatesDoubleExpDetJumpEngine::BatesDoubleExpDetJumpEngine(
const ext::shared_ptr<BatesDoubleExpDetJumpModel>& model,
Real relTolerance, Size maxEvaluations)
: BatesDoubleExpEngine(model, relTolerance, maxEvaluations) { }
std::complex<Real> BatesDoubleExpDetJumpEngine::addOnTerm(
Real phi, Time t, Size j) const {
const std::complex<Real> l =
BatesDoubleExpEngine::addOnTerm(phi, t, j);
ext::shared_ptr<BatesDoubleExpDetJumpModel> doubleExpDetJumpModel
= ext::dynamic_pointer_cast<BatesDoubleExpDetJumpModel>(*model_);
const Real lambda = doubleExpDetJumpModel->lambda();
const Real kappaLambda = doubleExpDetJumpModel->kappaLambda();
const Real thetaLambda = doubleExpDetJumpModel->thetaLambda();
return (kappaLambda*t - 1.0 + std::exp(-kappaLambda*t))
* thetaLambda*l/(kappaLambda*t*lambda)
+ (1.0 - std::exp(-kappaLambda*t))*l/(kappaLambda*t);
}
}
| 2,309 |
1,551 | <reponame>skirdey/FARM
import pytest
import math
import numpy as np
from farm.evaluation.metrics import compute_metrics
from farm.evaluation.semantic_answer_similarity_evaluation import semantic_answer_similarity
def test_compute_metrics_basic():
# check we get some exception, may not always be the AssertionError we get now
with pytest.raises(Exception):
compute_metrics("acc", ["x"] * 10, [""] * 11)
ret = compute_metrics("acc", [], [])
assert isinstance(ret, dict)
assert "acc" in ret
assert math.isnan(ret["acc"])
with pytest.raises(Exception):
compute_metrics("asdfasdf", ["a"], ["b"])
ls = (["a"] * 5)
ls.extend(["b"] * 5)
ps = ["a"] * 10
ret = compute_metrics("acc", ps, ls)
assert ret["acc"] == 0.5
ret = compute_metrics("acc", ls, ps)
assert ret["acc"] == 0.5
ret = compute_metrics("f1_macro", ps, ls)
assert ret["f1_macro"] == 1/3
ret = compute_metrics("f1_macro", ls, ps)
assert ret["f1_macro"] == 1 / 3
ret = compute_metrics(["f1_macro", "acc"], ps, ls)
assert isinstance(ret, dict)
assert len(ret) == 2
assert "acc" in ret
assert "f1_macro" in ret
assert ret["f1_macro"] == 1/3
assert ret["acc"] == 0.5
ret = compute_metrics(["f1_macro", "acc", "acc"], ps, ls)
assert isinstance(ret, dict)
assert len(ret) == 2
assert "acc" in ret
assert "f1_macro" in ret
assert ret["f1_macro"] == 1/3
assert ret["acc"] == 0.5
ret = compute_metrics(["f1_macro", ["acc"]], ps, ls)
assert isinstance(ret, dict)
assert len(ret) == 2
assert "acc" in ret
assert "f1_macro" in ret
assert ret["f1_macro"] == 1/3
assert ret["acc"] == 0.5
def test_semantic_answer_similarity(bert_base_squad2):
bert_base_squad2.model.prediction_heads[0].n_best = 2
result = bert_base_squad2.inference_from_file(file="samples/qa/eval-sample.json",return_json=False)
top1_sim, topn_sim, r, d = semantic_answer_similarity(result=result,
sts_model_path_or_string="paraphrase-MiniLM-L6-v2",
debug=True)
assert np.isclose(top1_sim, 0.7405298)
assert np.isclose(topn_sim, 0.7405298)
assert len(d) == 1
assert "semantic_answer_score" in r[0].prediction[0].meta
| 1,054 |
8,629 | #pragma once
#include <Core/NamesAndTypes.h>
namespace DB
{
NamesAndTypesList getVirtualsForStorage(const NamesAndTypesList & storage_columns_, const NamesAndTypesList & default_virtuals_);
}
| 64 |
631 | <reponame>tradingsecret/beam_wallet
#include "../common.h"
#include "../app_common_impl.h"
#include "contract.h"
#define Voting_manager_create(macro)
#define Voting_manager_view(macro)
#define Voting_manager_destroy(macro) macro(ContractID, cid)
#define Voting_manager_proposals_view_all(macro) macro(ContractID, cid)
#define Voting_manager_proposal_view(macro) \
macro(ContractID, cid) \
macro(HashValue, pid)
#define Voting_manager_proposal_open(macro) \
macro(ContractID, cid) \
macro(HashValue, pid) \
macro(Height, hMin) \
macro(Height, hMax) \
macro(AssetID, aid) \
macro(uint32_t, num_variants)
#define VotingRole_manager(macro) \
macro(manager, create) \
macro(manager, destroy) \
macro(manager, view) \
macro(manager, proposals_view_all) \
macro(manager, proposal_view) \
macro(manager, proposal_open)
#define Voting_my_account_view_staking(macro) macro(ContractID, cid)
#define Voting_my_account_proposal_view(macro) \
macro(ContractID, cid) \
macro(HashValue, pid)
#define Voting_my_account_proposal_vote(macro) \
macro(ContractID, cid) \
macro(HashValue, pid) \
macro(Amount, amount) \
macro(uint32_t, variant)
#define Voting_my_account_proposal_withdraw(macro) \
macro(ContractID, cid) \
macro(HashValue, pid) \
macro(Amount, amount)
#define VotingRole_my_account(macro) \
macro(my_account, view_staking) \
macro(my_account, proposal_view) \
macro(my_account, proposal_vote) \
macro(my_account, proposal_withdraw)
#define VotingRoles_All(macro) \
macro(manager) \
macro(my_account)
BEAM_EXPORT void Method_0()
{
// scheme
Env::DocGroup root("");
{ Env::DocGroup gr("roles");
#define THE_FIELD(type, name) Env::DocAddText(#name, #type);
#define THE_METHOD(role, name) { Env::DocGroup grMethod(#name); Voting_##role##_##name(THE_FIELD) }
#define THE_ROLE(name) { Env::DocGroup grRole(#name); VotingRole_##name(THE_METHOD) }
VotingRoles_All(THE_ROLE)
#undef THE_ROLE
#undef THE_METHOD
#undef THE_FIELD
}
}
#define THE_FIELD(type, name) const type& name,
#define ON_METHOD(role, name) void On_##role##_##name(Voting_##role##_##name(THE_FIELD) int unused = 0)
void OnError(const char* sz)
{
Env::DocAddText("error", sz);
}
ON_METHOD(manager, view)
{
EnumAndDumpContracts(Voting::s_SID);
}
ON_METHOD(manager, create)
{
Env::GenerateKernel(nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, "create Voting contract", 0);
}
ON_METHOD(manager, destroy)
{
Env::GenerateKernel(&cid, 1, nullptr, 0, nullptr, 0, nullptr, 0, "destroy Voting contract", 0);
}
template <typename T>
void PrintDec(char* sz, T x)
{
uint32_t nDigs = 1;
for (T y = x; ; nDigs++)
if (!(y /= 10))
break;
sz[nDigs] = 0;
while (true)
{
sz[--nDigs] = '0' + (x % 10);
if (!nDigs)
break;
x /= 10;
}
}
typedef Env::Key_T<Voting::Proposal::ID> KeyProposal;
typedef Env::Key_T<Voting::UserKey> KeyUser;
struct ProposalWrap
{
Height m_Height;
ProposalWrap()
{
m_Height = Env::get_Height() + 1;
}
KeyProposal m_Key;
Voting::Proposal_MaxVars m_Proposal;
uint32_t m_Variants;
Env::VarReaderEx<true> m_Reader;
void EnumAll(const ContractID& cid)
{
KeyProposal k0, k1;
_POD_(k0.m_Prefix.m_Cid) = cid;
_POD_(k0.m_KeyInContract).SetZero();
_POD_(k1.m_Prefix.m_Cid) = cid;
_POD_(k1.m_KeyInContract).SetObject(0xff);
m_Reader.Enum_T(k0, k1);
}
bool MoveNext()
{
while (true)
{
uint32_t nKey = sizeof(m_Key), nVal = sizeof(m_Proposal);
if (!m_Reader.MoveNext(&m_Key, nKey, &m_Proposal, nVal, 0))
break;
if ((sizeof(m_Key) == nKey) && (nVal >= sizeof(Voting::Proposal)))
{
m_Variants = (nVal - sizeof(Voting::Proposal)) / sizeof(m_Proposal.m_pAmount[0]);
return true;
}
}
return false;
}
bool Read(const ContractID& cid, const Voting::Proposal::ID& pid)
{
KeyProposal k;
_POD_(k.m_Prefix.m_Cid) = cid;
_POD_(k.m_KeyInContract) = pid;
m_Reader.Enum_T(k, k);
if (MoveNext())
return true;
OnError("no such a proposal");
return false;
}
bool IsStarted() const {
return m_Height >= m_Proposal.m_Params.m_hMin;
}
bool IsFinished() const {
return m_Height > m_Proposal.m_Params.m_hMax;
}
void Print() const
{
Env::DocAddNum("Variants", m_Variants);
Env::DocAddNum("hMin", m_Proposal.m_Params.m_hMin);
Env::DocAddNum("hMax", m_Proposal.m_Params.m_hMax);
Env::DocAddNum("Aid", m_Proposal.m_Params.m_Aid);
Env::DocAddText("Status", IsFinished() ? "finished" : IsStarted() ? "in_progress" : "published");
Env::DocGroup grVotes("votes");
for (uint32_t i = 0; i < m_Variants; i++)
{
Amount val = m_Proposal.m_pAmount[i];
if (val)
{
char sz[10];
PrintDec(sz, i);
Env::DocAddNum(sz, val);
}
}
}
};
ON_METHOD(manager, proposals_view_all)
{
Env::DocArray gr("proposals");
ProposalWrap pw;
pw.EnumAll(cid);
while (true)
{
if (!pw.MoveNext())
break;
Env::DocGroup gr("");
Env::DocAddBlob_T("ID", pw.m_Key.m_KeyInContract);
pw.Print();
}
}
ON_METHOD(manager, proposal_view)
{
ProposalWrap pw;
if (!pw.Read(cid, pid))
return;
pw.Print();
Env::DocArray gr("funds_locked");
KeyUser k0, k1;
_POD_(k0.m_Prefix.m_Cid) = cid;
_POD_(k0.m_KeyInContract.m_ID) = pid;
_POD_(k0.m_KeyInContract.m_Pk).SetZero();
_POD_(k1) = k0;
_POD_(k1.m_KeyInContract.m_Pk).SetObject(0xff);
Env::VarReader r(k0, k1);
while (true)
{
KeyUser key;
Amount val;
if (!r.MoveNext_T(key, val))
break;
Env::DocGroup gr("");
Env::DocAddBlob_T("Pk", key.m_KeyInContract.m_Pk);
Env::DocAddNum("Amount", val);
}
}
ON_METHOD(manager, proposal_open)
{
Voting::OpenProposal arg;
arg.m_Params.m_hMin = hMin;
arg.m_Params.m_hMax = hMax;
arg.m_Params.m_Aid = aid;
arg.m_Variants = num_variants;
_POD_(arg.m_ID) = pid;
Env::GenerateKernel(&cid, arg.s_iMethod, &arg, sizeof(arg), nullptr, 0, nullptr, 0, "open proposal", 0);
}
#pragma pack (push, 1)
struct MyPkMaterial
{
ContractID m_Cid;
Voting::Proposal::ID m_Pid;
void Set(const Voting::Proposal::ID& pid, const ContractID& cid)
{
_POD_(m_Cid) = cid;
_POD_(m_Pid) = pid;
}
void Set(const KeyUser& uk)
{
Set(uk.m_KeyInContract.m_ID, uk.m_Prefix.m_Cid);
}
void Get(PubKey& pk)
{
Env::DerivePk(pk, this, sizeof(*this));
}
static void SetGet(KeyUser& uk)
{
MyPkMaterial x;
x.Set(uk);
x.Get(uk.m_KeyInContract.m_Pk);
}
};
#pragma pack (pop)
ON_METHOD(my_account, view_staking)
{
Amount totalLocked = 0, totalAvail = 0;
KeyUser uk;
_POD_(uk.m_Prefix.m_Cid) = cid;
{
Env::DocArray gr0("pids");
ProposalWrap pw;
pw.EnumAll(cid);
while (true)
{
if (!pw.MoveNext())
break;
if (!pw.IsStarted())
continue;
bool bFinished = pw.IsFinished();
_POD_(uk.m_KeyInContract.m_ID) = pw.m_Key.m_KeyInContract;
MyPkMaterial::SetGet(uk);
Amount amount;
if (Env::VarReader::Read_T(uk, amount))
{
Env::DocGroup gr("");
Env::DocAddBlob_T("pid", uk.m_KeyInContract.m_ID);
Env::DocAddNum("Amount", amount);
Env::DocAddText("Status", bFinished ? "available" : "locked");
(bFinished ? totalAvail : totalLocked) += amount;
}
}
}
Env::DocAddNum("total_locked", totalLocked);
Env::DocAddNum("total_available", totalAvail);
}
ON_METHOD(my_account, proposal_view)
{
bool bIsFinished;
{
ProposalWrap pw;
if (!pw.Read(cid, pid))
return;
pw.Print();
bIsFinished = pw.IsFinished();
}
KeyUser uk;
_POD_(uk.m_Prefix.m_Cid) = cid;
_POD_(uk.m_KeyInContract.m_ID) = pid;
MyPkMaterial::SetGet(uk);
Amount amount;
if (Env::VarReader::Read_T(uk, amount))
{
Env::DocAddNum("My_Amount", amount);
Env::DocAddText("Status", bIsFinished ? "available" : "locked");
}
}
void VoteOrWithdraw(const ContractID& cid, const Voting::Proposal::ID& pid, Amount amount, const uint32_t* pVote)
{
FundsChange fc;
fc.m_Amount = amount;
{
ProposalWrap pw;
if (!pw.Read(cid, pid))
return;
fc.m_Aid = pw.m_Proposal.m_Params.m_Aid;
}
Voting::Vote arg;
arg.m_Amount = amount;
_POD_(arg.m_ID) = pid;
MyPkMaterial pkMat;
pkMat.Set(pid, cid);
pkMat.Get(arg.m_Pk);
if (pVote)
{
arg.m_Variant = *pVote;
fc.m_Consume = 1;
Env::GenerateKernel(&cid, arg.s_iMethod, &arg, sizeof(arg), &fc, 1, nullptr, 0, "cast the vote", 0);
}
else
{
fc.m_Consume = 0;
SigRequest sig;
sig.m_pID = &pkMat;
sig.m_nID = sizeof(pkMat);
auto& arg_ = Cast::Down<Voting::UserRequest>(arg);
static_assert(sizeof(arg_) == sizeof(Voting::Withdraw));
Env::GenerateKernel(&cid, Voting::Withdraw::s_iMethod, &arg_, sizeof(arg_), &fc, 1, &sig, 1, "Withdraw after vote", 0);
}
}
ON_METHOD(my_account, proposal_vote)
{
VoteOrWithdraw(cid, pid, amount, &variant);
}
ON_METHOD(my_account, proposal_withdraw)
{
VoteOrWithdraw(cid, pid, amount, nullptr);
}
#undef ON_METHOD
#undef THE_FIELD
BEAM_EXPORT void Method_1()
{
Env::DocGroup root("");
char szRole[0x20], szAction[0x20];
if (!Env::DocGetText("role", szRole, sizeof(szRole)))
return OnError("Role not specified");
if (!Env::DocGetText("action", szAction, sizeof(szAction)))
return OnError("Action not specified");
#define PAR_READ(type, name) type arg_##name; Env::DocGet(#name, arg_##name);
#define PAR_PASS(type, name) arg_##name,
#define THE_METHOD(role, name) \
if (!Env::Strcmp(szAction, #name)) { \
Voting_##role##_##name(PAR_READ) \
On_##role##_##name(Voting_##role##_##name(PAR_PASS) 0); \
return; \
}
#define THE_ROLE(name) \
if (!Env::Strcmp(szRole, #name)) { \
VotingRole_##name(THE_METHOD) \
return OnError("invalid Action"); \
}
VotingRoles_All(THE_ROLE)
#undef THE_ROLE
#undef THE_METHOD
#undef PAR_PASS
#undef PAR_READ
OnError("unknown Role");
}
| 5,475 |
344 | <reponame>chengxingyao/webrtc
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef VIDEO_ALIGNMENT_ADJUSTER_H_
#define VIDEO_ALIGNMENT_ADJUSTER_H_
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_config.h"
namespace webrtc {
class AlignmentAdjuster {
public:
// Returns the resolution alignment requested by the encoder (i.e
// |EncoderInfo::requested_resolution_alignment| which ensures that delivered
// frames to the encoder are divisible by this alignment).
//
// If |EncoderInfo::apply_alignment_to_all_simulcast_layers| is enabled, the
// alignment will be adjusted to ensure that each simulcast layer also is
// divisible by |requested_resolution_alignment|. The configured scale factors
// |scale_resolution_down_by| may be adjusted to a common multiple to limit
// the alignment value to avoid largely cropped frames and possibly with an
// aspect ratio far from the original.
// Note: |max_layers| currently only taken into account when using default
// scale factors.
static int GetAlignmentAndMaybeAdjustScaleFactors(
const VideoEncoder::EncoderInfo& info,
VideoEncoderConfig* config,
absl::optional<size_t> max_layers);
};
} // namespace webrtc
#endif // VIDEO_ALIGNMENT_ADJUSTER_H_
| 499 |
326 | {
"Download": "Download",
"Path": "Pad",
"Created at": "Gemaakt op",
"Size": "Grootte",
"No backups present": "Geen back-ups beschikbaar",
"Delete backup": "Back-up verwijderen",
"Are you sure you want to delete the backup created at :date ?": "Weet u zeker dat u de back-up gemaakt op :date wilt verwijderen?",
"Disk": "Schijf",
"Healthy": "Gezond",
"Amount of backups": "Aantal back-ups",
"Newest backup": "Nieuwste back-up",
"Used Storage": "Gebruikte opslagruimte",
"Backups": "Back-ups",
"Create Backup": "Maak een back-up",
"Create database backup": "Maak een databaseback-up",
"Create files backup": "Maak een bestandsback-up",
"Creating a new backup in the background...": "Nieuwe back-up op de achtergrond maken..."
}
| 316 |
892 | <filename>advisories/unreviewed/2022/05/GHSA-fp4q-4rpr-rprc/GHSA-fp4q-4rpr-rprc.json
{
"schema_version": "1.2.0",
"id": "GHSA-fp4q-4rpr-rprc",
"modified": "2022-05-13T01:01:12Z",
"published": "2022-05-13T01:01:12Z",
"aliases": [
"CVE-2017-2917"
],
"details": "An exploitable vulnerability exists in the notifications functionality of Circle with Disney running firmware 2.0.1. Specially crafted network packets can cause an OS command injection. An attacker can send an HTTP request to trigger this vulnerability.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.0/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2017-2917"
},
{
"type": "WEB",
"url": "https://www.talosintelligence.com/vulnerability_reports/TALOS-2017-0424"
}
],
"database_specific": {
"cwe_ids": [
"CWE-78"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 478 |
335 | <reponame>Safal08/Hacktoberfest-1<filename>F/Fiasco_noun.json
{
"word": "Fiasco",
"definitions": [
"A complete failure, especially a ludicrous or humiliating one."
],
"parts-of-speech": "Noun"
} | 92 |
648 | package com.ywl5320.wlmedia.enums;
/**
* Created by ywl5320 on 2018-3-16.
*/
public enum WlPlayModel {
PLAYMODEL_AUDIO_VIDEO("PLAYMODEL_AUDIO_VIDEO", 0),
PLAYMODEL_ONLY_AUDIO("PLAYMODEL_ONLY_AUDIO", 1),
PLAYMODEL_ONLY_VIDEO("PLAYMODEL_ONLY_VIDEO", 2);
private String playModel;
private int value;
WlPlayModel(String playModel, int value)
{
this.playModel = playModel;
this.value = value;
}
public String getPlayModel() {
return playModel;
}
public void setPlayModel(String playModel) {
this.playModel = playModel;
}
public int getValue() {
return value;
}
public void setValue(int value) {
this.value = value;
}
}
| 315 |
398 | package com.ruiyun.jvppeteer.transport;
import com.ruiyun.jvppeteer.transport.factory.WebSocketTransportFactory;
import com.ruiyun.jvppeteer.util.ValidateUtil;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.drafts.Draft;
import org.java_websocket.handshake.ServerHandshake;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
import java.util.Map;
import java.util.function.Consumer;
/**
* websocket client
* @author fff
*
*/
public class WebSocketTransport extends WebSocketClient implements ConnectionTransport {
private static final Logger LOGGER = LoggerFactory.getLogger(WebSocketTransport.class);
private Consumer<String> messageConsumer = null;
private Connection connection = null;
public WebSocketTransport(URI serverUri, Draft draft) {
super(serverUri, draft);
}
public WebSocketTransport(URI serverURI) {
super( serverURI );
}
public WebSocketTransport( URI serverUri, Map<String, String> httpHeaders) {
super(serverUri, httpHeaders);
}
public static WebSocketTransport create(String browserWSEndpoint) throws InterruptedException {
return WebSocketTransportFactory.create(browserWSEndpoint);
}
@Override
public void onMessage(String message) {
ValidateUtil.notNull(this.messageConsumer,"MessageConsumer must be initialized");
this.messageConsumer.accept(message);
}
@Override
public void onClose() {
this.close();
}
@Override
public void onClose( int code, String reason, boolean remote ) {
LOGGER.info("Connection closed by " + ( remote ? "remote peer" : "us" ) + " Code: " + code + " Reason: " + reason );
// The codecodes are documented in class org.java_websocket.framing.CloseFrame
this.onClose();
this.connection.dispose();
}
@Override
public void onError(Exception e) {
LOGGER.error("Websocket error:",e);
}
@Override
public void onOpen(ServerHandshake serverHandshake) {
LOGGER.info("Websocket serverHandshake status: "+serverHandshake.getHttpStatus());
}
public void addMessageConsumer(Consumer<String> consumer) {
this.messageConsumer = consumer;
}
public void addConnection(Connection connection) {
this.connection = connection;
}
}
| 704 |
335 | <filename>I/Implement_verb.json
{
"word": "Implement",
"definitions": [
"Put (a decision, plan, agreement, etc.) into effect."
],
"parts-of-speech": "Verb"
} | 78 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.maven.customizer;
import java.awt.Component;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import javax.lang.model.element.TypeElement;
import javax.swing.DefaultListCellRenderer;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.ListSelectionModel;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import org.netbeans.api.java.source.ElementHandle;
import org.netbeans.api.java.source.SourceUtils;
import org.openide.awt.Mnemonics;
import org.openide.awt.MouseUtils;
import org.openide.filesystems.FileObject;
import org.openide.util.RequestProcessor;
import static org.netbeans.modules.maven.customizer.Bundle.*;
import org.openide.util.NbBundle.Messages;
/** Browses and allows to choose a project's main class.
*
* @author <NAME>
*/
public class MainClassChooser extends JPanel {
private ChangeListener changeListener;
private String dialogSubtitle = null;
private Collection<ElementHandle<TypeElement>> possibleMainClasses;
/** Creates new form MainClassChooser */
public MainClassChooser (FileObject[] sourcesRoots) {
this (sourcesRoots, null);
}
public MainClassChooser (FileObject[] sourcesRoots, String subtitle) {
dialogSubtitle = subtitle;
initComponents();
jMainClassList.setCellRenderer(new MainClassRenderer());
initClassesView (sourcesRoots);
}
@Messages("LBL_ChooseMainClass_NO_CLASSES_NODE=<No main classes found>")
private void initClassesView (final FileObject[] sourcesRoots) {
possibleMainClasses = null;
jMainClassList.setSelectionMode (ListSelectionModel.SINGLE_SELECTION);
jMainClassList.setListData (getWarmupList ());
jMainClassList.addListSelectionListener (new ListSelectionListener () {
@Override
public void valueChanged (ListSelectionEvent evt) {
if (changeListener != null) {
changeListener.stateChanged (new ChangeEvent (evt));
}
}
});
// support for double click to finish dialog with selected class
jMainClassList.addMouseListener (new MouseListener () {
@Override
public void mouseClicked (MouseEvent e) {
if (MouseUtils.isDoubleClick (e)) {
if (getSelectedMainClass () != null) {
if (changeListener != null) {
changeListener.stateChanged (new ChangeEvent (e));
}
}
}
}
@Override
public void mousePressed (MouseEvent e) {}
@Override
public void mouseReleased (MouseEvent e) {}
@Override
public void mouseEntered (MouseEvent e) {}
@Override
public void mouseExited (MouseEvent e) {}
});
RequestProcessor.getDefault ().post (new Runnable () {
@Override
public void run () {
possibleMainClasses = SourceUtils.getMainClasses(sourcesRoots);
if (possibleMainClasses.isEmpty ()) {
SwingUtilities.invokeLater( new Runnable () {
@Override
public void run () {
jMainClassList.setListData (new String[] { LBL_ChooseMainClass_NO_CLASSES_NODE () } ); // NOI18N
}
});
} else {
final ElementHandle<TypeElement>[] arr = possibleMainClasses.toArray(new ElementHandle[possibleMainClasses.size()]);
// #46861, sort name of classes
Arrays.sort (arr, new MainClassComparator());
SwingUtilities.invokeLater(new Runnable () {
@Override
public void run () {
jMainClassList.setListData (arr);
jMainClassList.setSelectedIndex (0);
}
});
}
}
});
if (dialogSubtitle != null) {
Mnemonics.setLocalizedText (jLabel1, dialogSubtitle);
}
}
@Messages("LBL_ChooseMainClass_WARMUP_MESSAGE=Initializing view, please wait ...")
private Object[] getWarmupList () {
return new Object[] {LBL_ChooseMainClass_WARMUP_MESSAGE ()}; //NOI18N
}
private boolean isValidMainClassName (Object value) {
return (possibleMainClasses != null) && (possibleMainClasses.contains (value));
}
/** Returns the selected main class.
*
* @return name of class or null if no class with the main method is selected
*/
public String getSelectedMainClass () {
if (isValidMainClassName (jMainClassList.getSelectedValue ())) {
return ((ElementHandle)jMainClassList.getSelectedValue()).getQualifiedName();
} else {
return null;
}
}
public void addChangeListener (ChangeListener l) {
changeListener = l;
}
public void removeChangeListener (ChangeListener l) {
changeListener = null;
}
/** Checks if given file object contains the main method.
 *
 * @param fo file object represents java
 * @return false if parameter is null or doesn't contain SourceCookie
 * or SourceCookie doesn't contain the main method
 */
public static boolean hasMainMethod (FileObject fo) {
    // A null file trivially has no main method (the original author noted
    // an IAE might arguably be more appropriate here).
    return fo != null && !SourceUtils.getMainClasses(fo).isEmpty();
}
/** This method is called from within the constructor to
 * initialize the form.
 * WARNING: Do NOT modify this code. The content of this method is
 * always regenerated by the Form Editor.
 */
private void initComponents() {//GEN-BEGIN:initComponents
    java.awt.GridBagConstraints gridBagConstraints;
    jLabel1 = new javax.swing.JLabel();
    jScrollPane1 = new javax.swing.JScrollPane();
    jMainClassList = new javax.swing.JList();
    setLayout(new java.awt.GridBagLayout());
    setPreferredSize(new java.awt.Dimension(380, 300));
    getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getBundle(MainClassChooser.class).getString("AD_MainClassChooser"));
    jLabel1.setLabelFor(jMainClassList);
    org.openide.awt.Mnemonics.setLocalizedText(jLabel1, org.openide.util.NbBundle.getBundle(MainClassChooser.class).getString("CTL_AvaialableMainClasses"));
    // Row 0: label spanning the full width, anchored top-left.
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(12, 12, 2, 12);
    add(jLabel1, gridBagConstraints);
    jScrollPane1.setMinimumSize(new java.awt.Dimension(100, 200));
    jScrollPane1.setViewportView(jMainClassList);
    jMainClassList.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getBundle(MainClassChooser.class).getString("AD_jMainClassList"));
    // Row 1: scrollable class list fills all remaining space.
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER;
    gridBagConstraints.gridheight = java.awt.GridBagConstraints.REMAINDER;
    gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(0, 12, 0, 12);
    add(jScrollPane1, gridBagConstraints);
}//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel jLabel1;
private javax.swing.JList jMainClassList;
private javax.swing.JScrollPane jScrollPane1;
// End of variables declaration//GEN-END:variables
/**
 * Renders list entries: {@link ElementHandle} items display their fully
 * qualified class name, anything else falls back to {@code toString()}.
 */
private static final class MainClassRenderer extends DefaultListCellRenderer {
    @Override
    public Component getListCellRendererComponent (JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
        final String displayName;
        if (value instanceof ElementHandle) {
            displayName = ((ElementHandle) value).getQualifiedName();
        } else if (value == null) {
            // Guard: some look&feels/models may hand the renderer a null
            // value; the original would NPE on value.toString().
            displayName = ""; //NOI18N
        } else {
            displayName = value.toString();
        }
        return super.getListCellRendererComponent(list, displayName, index, isSelected, cellHasFocus);
    }
}
/** Orders {@link ElementHandle}s alphabetically by their qualified name (see #46861). */
private static class MainClassComparator implements Comparator<ElementHandle> {
    @Override
    public int compare(ElementHandle arg0, ElementHandle arg1) {
        final String leftName = arg0.getQualifiedName();
        final String rightName = arg1.getQualifiedName();
        return leftName.compareTo(rightName);
    }
}
}
| 4,453 |
898 | package com.spotify.heroic.aggregation;
import static org.junit.Assert.assertEquals;
import com.spotify.heroic.common.DateRange;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
/**
 * Verifies how {@code BucketStrategy.START} and {@code BucketStrategy.END}
 * map timestamps to bucket index ranges, including under- and overflow.
 */
public class BucketStrategyTest {
    @Test
    public void testStart() {
        // START buckets include their left edge: ts 10 falls in bucket 0,
        // ts 30 already overflows.
        final BucketStrategy.Mapping mapping =
            BucketStrategy.START.setup(new DateRange(10, 30), 10, 10);
        final Map<Long, StartEnd> fromTo = new HashMap<>();
        putRange(fromTo, 0L, 9L, new StartEnd(0, 0));   // underflow
        putRange(fromTo, 10L, 19L, new StartEnd(0, 1)); // first bucket
        putRange(fromTo, 20L, 29L, new StartEnd(1, 2)); // second bucket
        putRange(fromTo, 30L, 39L, new StartEnd(2, 2)); // overflow
        assertMappings(mapping, fromTo);
    }

    @Test
    public void testEnd() {
        // END buckets include their right edge: ts 10 is still underflow,
        // ts 30 falls in the second bucket.
        final BucketStrategy.Mapping mapping =
            BucketStrategy.END.setup(new DateRange(10, 30), 10, 10);
        final Map<Long, StartEnd> fromTo = new HashMap<>();
        putRange(fromTo, 0L, 10L, new StartEnd(0, 0));  // underflow
        putRange(fromTo, 11L, 20L, new StartEnd(0, 1)); // first bucket
        putRange(fromTo, 21L, 30L, new StartEnd(1, 2)); // second bucket
        putRange(fromTo, 31L, 40L, new StartEnd(2, 2)); // overflow
        assertMappings(mapping, fromTo);
    }

    /** Registers {@code expected} for every timestamp in the inclusive range [first, last]. */
    private static void putRange(
        final Map<Long, StartEnd> fromTo, final long first, final long last,
        final StartEnd expected) {
        for (long ts = first; ts <= last; ts++) {
            fromTo.put(ts, expected);
        }
    }

    /** Asserts the mapping yields the expected bucket range for every registered timestamp. */
    private static void assertMappings(
        final BucketStrategy.Mapping mapping, final Map<Long, StartEnd> fromTo) {
        for (final Map.Entry<Long, StartEnd> e : fromTo.entrySet()) {
            assertEquals("Expected same mapping for timestamp " + e.getKey(), e.getValue(),
                mapping.map(e.getKey()));
        }
    }
}
| 1,035 |
2,636 | <reponame>ezhes/honggfuzz<gh_stars>1000+
#ifdef __cplusplus
extern "C" {
#endif
#include <fcntl.h>
#include <libhfuzz/libhfuzz.h>
#include <setjmp.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include "magic.h"
/*
* Compile as:
* honggfuzz/hfuzz_cc/hfuzz-clang -I ./file-5.37/ honggfuzz/examples/file/persistent-file.c -o
* persistent-file ./file-5.37/src/.libs/libmagic.a -lz
*/
magic_t ms = NULL;
/*
 * One-time fuzzer setup: open a libmagic handle and load the magic
 * database into the global `ms`. Called by the honggfuzz driver before
 * any input is processed; aborts on failure so that a misconfigured
 * environment is detected immediately. Removed the `return 1;`
 * statements after abort() — abort() never returns, so they were dead
 * code.
 */
int LLVMFuzzerInitialize(int* argc, char*** argv) {
    ms = magic_open(MAGIC_CONTINUE | MAGIC_CHECK | MAGIC_COMPRESS);
    if (ms == NULL) {
        fprintf(stderr, "magic_open() failed\n");
        abort();
    }

    /* Default database path; overridable via the first CLI argument. */
    const char* magic_file = "/usr/share/misc/magic.mgc";
    if (*argc > 1) {
        magic_file = (*argv)[1];
    }

    if (magic_load(ms, magic_file) == -1) {
        fprintf(stderr, "magic_load() failed: %s\n", magic_error(ms));
        magic_close(ms);
        abort();
    }
    return 0;
}
/* Fuzz entry point: ask libmagic to classify the input buffer and print
 * the detected type (or the error when detection fails). */
int LLVMFuzzerTestOneInput(const uint8_t* buf, size_t len) {
    const char* type = magic_buffer(ms, buf, len);
    if (type != NULL) {
        printf("Type: '%s'\n", type);
    } else {
        printf("Type: [unknown]: %s\n", magic_error(ms));
    }
    return 0;
}
#ifdef __cplusplus
}
#endif
| 643 |
5,659 | <gh_stars>1000+
from cms.menu_bases import CMSAttachMenu
from menus.base import NavigationNode
from menus.menu_pool import menu_pool
class TestMenu(CMSAttachMenu):
    """Fixture menu used by the CMS tests; serves a small static tree."""

    name = "test menu"

    def get_nodes(self, request):
        # Four fixed navigation nodes; the profile node (id 4) is a child
        # of the account node (id 3).
        root = NavigationNode('sample root page', "/", 1)
        settings_page = NavigationNode('sample settings page', "/bye/", 2)
        account = NavigationNode('sample account page', "/hello/", 3)
        profile = NavigationNode('sample my profile page', "/hello/world/", 4, 3)
        return [root, settings_page, account, profile]
menu_pool.register_menu(TestMenu)
| 270 |
3,428 | {"id":"00935","group":"spam-2","checksum":{"type":"MD5","value":"64a85d481bc17b3b61da7861f9a4d0a3"},"text":"Received: from [192.168.127.12] ([67.32.39.130])\n\tby linux.midrange.com (8.11.6/8.11.6) with SMTP id g6N0Yee05828;\n\tMon, 22 Jul 2002 19:34:46 -0500\nReceived: from mail.midtennmortgage.com by [67.32.39.130]\n via smtpd (for 207-224-38-113.cust.chcg.qwest.net [20172.16.31.1013]) with SMTP; Mon, 22 Jul 2002 19:34:40 -0500\nReceived: from cavalryfw.cavalrybanking.com ([192.168.1.3]) by cbex012599.cavb.com with Microsoft SMTPSVC(5.0.2195.4905);\n\t Mon, 22 Jul 2002 19:34:36 -0500\nReceived: from [193.80.199.68] by cavalryfw.cavalrybanking.com\n via smtpd (for mail.midtennmortgage.com [192.168.1.17]) with SMTP; Mon, 22 Jul 2002 19:34:28 -0500\nMessage-ID: <0000188039a8$00007dda$0000342f@>\nTo: <<EMAIL>>\nFrom: \"<NAME>\" <ionicb<EMAIL>>\nSubject: why does your vehicle make that noise? AAK\nDate: Mon, 22 Jul 2002 17:46:45 -1900\nMIME-Version: 1.0\nContent-Type: text/html;\n\tcharset=\"iso-8859-1\"\nContent-Transfer-Encoding: quoted-printable\nX-Priority: 3\nX-Mailer: Microsoft Outlook Express 5.50.4807.1700\nX-Msmail-Priority: Normal\nX-OriginalArrivalTime: 23 Jul 2002 00:34:37.0803 (UTC) FILETIME=[B9B753B0:01C231E0]\nX-Status: \nX-Keywords: \n\n<HTML><HEAD></HEAD><BODY><center><FONT SIZE=3D4 COLOR=3D#FF3300><B>Protect =\nyour financial well-being.<BR>Purchase an Extended Auto Warranty for your =\nVehicle TODAY.</B></FONT><BR><BR><BR><FONT FACE=3DArial SIZE=3D4 COLOR=3D#=\nCC0000><B><U><A HREF=3Dhttp://www.qz932.com/auto/>Click Here for your free=\n, Fast, no BS Rates NOW!!</A></U></B></FONT><BR><br><FONT><B></center>Car =\ntroubles and expensive repair bills always seem to happen at the worst pos=\nsible time Dont they?. Protect yourself and your family with an Extended<b=\nr>Warranty for your car, truck, or SUV, so that a large expense cannot hit=\n you all at once. 
We cover most vehicles with less than 150,000 miles.</B>=\n</FONT><br><br><br><FONT SIZE=3D4 COLOR=3D#FF3333><B><I>Our warranties are=\n the same as the dealer offers but instead<BR>you are purchasing them dire=\nct!!!</I></B></FONT><BR><BR><BR><FONT SIZE=3D3><B><I>We offer fair prices =\nand prompt, toll-free claims service. Get an Extended Warranty on your veh=\nicle today.</I></B></FONT><br><br><A HREF=3Dhttp://www.qz932.com/auto/>Cli=\nck here today and we will include at no extra cost:</A><br><Br><UL TYPE=3D=\nCIRCLE><BR><LI>24-Hour Roadside Assistance.<LI>Car Rental Benefits.<LI>Tri=\np Interruption Benefits.<LI>Extended Towing Benefits.</UL><A HREF=3Dhttp:/=\n/www.qz932.com/auto/>Click Here for your free, Fast, no BS Rates NOW!!</A>=\n<br><br>Save now, don't wait until it is TOO LATE!<br><br><br><br><br><br>=\n<FONT SIZE=3D1 COLOR=3D#FFFF99>We search for the best offering's for<br>yo=\nu; we do the research and you get only The superior results<br>this email =\nis brought to you by; KBR . To abnegate<br>all future notices, <FONT COLOR=\n=3D#FFFF66><A HREF=3Dhttp://www.qz932.com/index2.html>Enter here</A></FONT=\n></FONT></BODY></HTML>\n\n\n\n"} | 1,267 |
666 | <filename>ExampleListActivity/src/com/jeremyfeinstein/slidingmenu/example/anim/CustomScaleAnimation.java<gh_stars>100-1000
package com.jeremyfeinstein.slidingmenu.example.anim;
import android.graphics.Canvas;
import com.jeremyfeinstein.slidingmenu.example.R;
import com.jeremyfeinstein.slidingmenu.example.R.string;
import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu.CanvasTransformer;
/**
 * Menu animation that horizontally scales the canvas by the open
 * percentage (fully closed = zero width, fully open = full width).
 */
public class CustomScaleAnimation extends CustomAnimation {
    public CustomScaleAnimation() {
        super(R.string.anim_scale, newScaleTransformer());
    }

    /** Builds the transformer applying the horizontal scale. */
    private static CanvasTransformer newScaleTransformer() {
        return new CanvasTransformer() {
            @Override
            public void transformCanvas(Canvas canvas, float percentOpen) {
                canvas.scale(percentOpen, 1, 0, 0);
            }
        };
    }
}
| 254 |
5,169 | <filename>Specs/6/7/1/TGShakeToFeedback/0.1.3/TGShakeToFeedback.podspec.json
{
"name": "TGShakeToFeedback",
"version": "0.1.3",
"summary": "Library to have shake to feedback feature.",
  "description": "It lets the user shake the phone, and a mail composer will be presented with the user's current screen.",
"homepage": "https://github.com/imthegiga/TGShakeToFeedback",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/imthegiga/TGShakeToFeedback.git",
"tag": "0.1.3"
},
"platforms": {
"ios": "8.0"
},
"source_files": "TGShakeToFeedback/Library/TGShakeToFeedback.swift",
"pushed_with_swift_version": "3.0"
}
| 298 |
530 | {
"manufacturer": "Shenzhen Heiman Technology Co., Ltd.",
"manufacturerId": "0x0260",
"label": "HM-723ESY-Z",
"description": "Smart CO Alarm",
"devices": [
{
"productType": "0x8012",
"productId": "0x1000",
"zwaveAllianceId": 3165
}
],
"firmwareVersion": {
"min": "0.0",
"max": "255.255"
},
"metadata": {
"inclusion": "- Click [Add] icon in Z-Wave Controller.\n- Press the Net_Button 3 times within 1.5s, Green LED is Blinking 3 times within 1 second.\n- If Inclusion Process is successful, Green LED will turn off.",
"exclusion": "- Click [Remove] icon in Z-Wave Controller.\n- Press the Net_Button 3 times within 1.5s \n- If Exclusion Process is successful, Green LED is Blinking 6 times, then turn off.",
"reset": "Long press Net_Button at least 10 seconds in the product. \n-Device Reset Locally notification is Transmitted\n-The green LED stays on for 2 seconds and then goes off.\nPlease use this procedure only when the network primary controller is missing or otherwise inoperable.",
"manual": "https://products.z-wavealliance.org/ProductManual/File?folder=&filename=MarketCertificationFiles/3165/HM-723ESY-Z-S2%20documentation.pdf"
}
}
| 402 |
# -*- coding: utf-8 -*-
# Settings variant: re-uses the shared test settings, but flips
# BACKGROUND_TASK_RUN_ASYNC on — presumably consumed by the
# background-task runner to execute tasks asynchronously (confirm
# against the library's settings docs).
from .test_settings import *
BACKGROUND_TASK_RUN_ASYNC = True
| 38 |
6,270 | [
{
"type": "feature",
"category": "StorageGateway",
"description": "The CloudWatchLogGroupARN parameter of the UpdateGatewayInformation API allows for configuring the gateway to use a CloudWatch log-group where Storage Gateway health events will be logged. "
}
] | 94 |
495 | <reponame>ScriptBox99/azure-counterfit
from PIL import Image
import tqdm
import numpy as np
from counterfit.core.config import Config
# Used for typing function arguments
from counterfit.core.frameworks import Framework
from counterfit.core.attacks import CFAttack
from counterfit.core.targets import Target
from counterfit.report.report_generator import get_target_data_type_obj
def cross_entropy(predictions, targets):
    """Cross-entropy between predicted and target distributions,
    averaged over the first axis.

    Parameters
    ----------
    predictions : np.ndarray
        Predicted probabilities; assumed shape (n_samples, ...) — the
        first axis is the sample axis.
    targets : np.ndarray
        Target probabilities (or one-hot labels), broadcastable against
        ``predictions``.

    Returns
    -------
    float
        Mean cross-entropy. Always finite: predictions are clipped away
        from 0 so that ``log(0)`` cannot produce -inf/NaN (the original
        returned NaN/inf for zero-probability entries).
    """
    # Clip into (0, 1] before the log; eps is the smallest float64 step.
    eps = np.finfo(np.float64).eps
    clipped = np.clip(predictions, eps, 1.0)
    N = predictions.shape[0]
    return -np.sum(targets * np.log(clipped)) / N
class AuglyAttack(object):
    """Black-box evasion attack built on AugLy image augmentations.

    For each input sample, applies ``attack_class`` (an AugLy transform)
    up to ``query_budget`` times and keeps the augmented variant whose
    prediction diverges most from the original prediction, measured by
    cross-entropy.
    """

    def __init__(self, classifier, attack_class, query_budget=5):
        # classifier: object exposing predict(sample) -> prediction array.
        # attack_class: callable applied to a PIL image.
        # query_budget: augmentations tried per sample.
        self.classifier = classifier
        self.attack_class = attack_class
        self.results = []
        self.query_budget = query_budget

    def generate(self, x: np.ndarray, **kwargs):
        """Generate adversarial candidates for a batch of images.

        Parameters
        ----------
        x : np.ndarray
            Batch of images; each sample must be float32 in [0, 1] or
            uint8 in [0, 255].

        Returns
        -------
        tuple
            (best augmented sample per input, mean score per input
            across the query budget).
        """
        query_budget = self.query_budget
        results = []
        all_scores = []
        for sample in x:
            sample = np.squeeze(sample)
            # Prediction on the unmodified sample is the reference point.
            orig_pred = self.classifier.predict(sample)
            best_score = 0
            best_sample = None
            for _ in tqdm.tqdm(range(query_budget)):
                if isinstance(sample, np.ndarray) and sample.dtype == np.float32:
                    # convert to uint [0,255], apply transform, and convert back to float [0,1]
                    im = Image.fromarray((sample * 255).astype(np.uint8))
                    aug_im = self.attack_class(im)
                    aug = np.array(aug_im, dtype=np.float32) / 255.0
                elif isinstance(sample, np.ndarray) and sample.dtype == np.uint8:
                    im = Image.fromarray(sample)
                    aug_im = self.attack_class(im)
                    aug = np.array(aug_im, dtype=np.uint8)
                else:
                    raise Exception(
                        "Expecting X to be numpy array of np.uint8 [0,255] or np.float32 [0,1]")
                new_pred = self.classifier.predict(aug)
                self.results.append(new_pred)
                # score the sample using log loss (maximize loss)
                score = cross_entropy(new_pred, orig_pred)
                if score > best_score:
                    best_score = score
                    best_sample = aug
                all_scores.append(score)
            # NOTE(review): if every augmentation scores 0, best_sample
            # stays None and a None entry is appended — confirm intended.
            results.append(best_sample)
        # return the "best" augmentation as the adversarial example
        return np.array(results, dtype=x.dtype), np.array(all_scores).reshape((x.shape[0], query_budget)).mean(axis=-1)

    def check_succes(self):
        """Return the index of the highest recorded prediction value.

        Fixed to actually return the value — the original computed
        ``np.argmax`` and discarded it, always returning None. The
        misspelled name is kept for backward compatibility.
        """
        return np.argmax(self.results)
class AuglyFramework(Framework):
    """Counterfit framework adapter that wires AugLy augmentations in as
    attacks and reports on their results."""

    def __init__(self):
        super().__init__()

    def load(self):
        """Load this framework's attack definitions from its JSON config."""
        config_path = f"{Config.frameworks_path}/augly/config.json"
        self.load_from_config(config_path)

    def build(self, target: Target, attack: object):
        """Instantiate an AuglyAttack pairing the target with the
        (freshly constructed) augmentation class."""
        new_attack = AuglyAttack(target, attack())
        return new_attack

    def run(self, cfattack: CFAttack):
        """Run the attack on the prepared samples.

        The per-sample mean scores returned by generate() are discarded;
        only the augmented samples are returned.
        """
        results, avscores = cfattack.attack.generate(cfattack.samples)
        return results

    def post_attack_processing(self, cfattack: CFAttack):
        """Print a run summary appropriate to the target's data type."""
        current_datatype = cfattack.target.target_data_type
        current_dt_report_gen = get_target_data_type_obj(current_datatype)
        summary = current_dt_report_gen.get_run_summary(cfattack)
        current_dt_report_gen.print_run_summary(summary)

    @staticmethod
    def fix_grayscale(x):
        """Drop the trailing channel axis of a grayscale image array.

        Declared @staticmethod: the original took no ``self`` parameter,
        so calling it on an instance raised a TypeError.
        """
        return np.squeeze(x, axis=2)

    def check_success(self, cfattack: CFAttack):
        """Flag samples whose final label differs from the initial label."""
        final_outputs, final_labels = cfattack.target.get_sample_labels(
            cfattack.results)
        cfattack.final_labels = final_labels
        cfattack.final_outputs = final_outputs
        cfattack.initial_labels = final_labels
        # NOTE(review): initial_labels is overwritten with final_labels
        # immediately before the comparison, so `success` is always
        # all-False — this looks like a bug, but the behavior is
        # preserved here; confirm against the upstream CFAttack contract.
        success = cfattack.final_labels != np.array(cfattack.initial_labels)
        return success
| 1,807 |
305 | <filename>llvm-project/clang/test/Frontend/fixed_point_crash.c<gh_stars>100-1000
// RUN: %clang_cc1 -verify -ffixed-point %s
union a {
_Accum x;
int i;
};
/* Stores a fixed-point _Accum value into the union, then reads the
 * overlapping int member (type punning); no diagnostics expected. */
int fn1() {
    union a m;
    m.x = 5.6k;
    return m.i;
}
/* Comma-operator expressions with fixed-point operands: the discarded
 * results must still trigger the unused-value warning (checked by the
 * expected-warning annotations below, which must stay in place). */
int fn2() {
    union a m;
    m.x = 7, 5.6k; // expected-warning {{expression result unused}}
    return m.x, m.i; // expected-warning {{expression result unused}}
}
_Accum acc = (0.5r, 6.9k); // expected-warning {{expression result unused}}
| 189 |
732 | /* Copyright 2014 Adobe Systems Incorporated (http://www.adobe.com/). All Rights Reserved.
This software is licensed as OpenSource, under the Apache License, Version 2.0.
This license is available at: http://opensource.org/licenses/Apache-2.0. */
/* Mac OS Arabic aggregate Unicode initializer.
Element values are UVs. UV_UNDEF is 0xFFFF. Index by code, get UV.
Source: ftp://ftp.unicode.org/Public/MAPPINGS/VENDORS/APPLE/ARABIC.TXT
as of 9/9/99. */
UV_UNDEF, /* 00 */
UV_UNDEF, /* 01 */
UV_UNDEF, /* 02 */
UV_UNDEF, /* 03 */
UV_UNDEF, /* 04 */
UV_UNDEF, /* 05 */
UV_UNDEF, /* 06 */
UV_UNDEF, /* 07 */
UV_UNDEF, /* 08 */
UV_UNDEF, /* 09 */
UV_UNDEF, /* 0A */
UV_UNDEF, /* 0B */
UV_UNDEF, /* 0C */
UV_UNDEF, /* 0D */
UV_UNDEF, /* 0E */
UV_UNDEF, /* 0F */
UV_UNDEF, /* 10 */
UV_UNDEF, /* 11 */
UV_UNDEF, /* 12 */
UV_UNDEF, /* 13 */
UV_UNDEF, /* 14 */
UV_UNDEF, /* 15 */
UV_UNDEF, /* 16 */
UV_UNDEF, /* 17 */
UV_UNDEF, /* 18 */
UV_UNDEF, /* 19 */
UV_UNDEF, /* 1A */
UV_UNDEF, /* 1B */
UV_UNDEF, /* 1C */
UV_UNDEF, /* 1D */
UV_UNDEF, /* 1E */
UV_UNDEF, /* 1F */
0x0020, /* 20 SPACE, left-right */
0x0021, /* 21 EXCLAMATION MARK, left-right */
0x0022, /* 22 QUOTATION MARK, left-right */
0x0023, /* 23 NUMBER SIGN, left-right */
0x0024, /* 24 DOLLAR SIGN, left-right */
0x0025, /* 25 PERCENT SIGN, left-right */
0x0026, /* 26 AMPERSAND, left-right */
0x0027, /* 27 APOSTROPHE, left-right */
0x0028, /* 28 LEFT PARENTHESIS, left-right */
0x0029, /* 29 RIGHT PARENTHESIS, left-right */
0x002A, /* 2A ASTERISK, left-right */
0x002B, /* 2B PLUS SIGN, left-right */
0x002C, /* 2C COMMA, left-right */
0x002D, /* 2D HYPHEN-MINUS, left-right */
0x002E, /* 2E FULL STOP, left-right */
0x002F, /* 2F SOLIDUS, left-right */
0x0030, /* 30 DIGIT ZERO */
0x0031, /* 31 DIGIT ONE */
0x0032, /* 32 DIGIT TWO */
0x0033, /* 33 DIGIT THREE */
0x0034, /* 34 DIGIT FOUR */
0x0035, /* 35 DIGIT FIVE */
0x0036, /* 36 DIGIT SIX */
0x0037, /* 37 DIGIT SEVEN */
0x0038, /* 38 DIGIT EIGHT */
0x0039, /* 39 DIGIT NINE */
0x003A, /* 3A COLON, left-right */
0x003B, /* 3B SEMICOLON, left-right */
0x003C, /* 3C LESS-THAN SIGN, left-right */
0x003D, /* 3D EQUALS SIGN, left-right */
0x003E, /* 3E GREATER-THAN SIGN, left-right */
0x003F, /* 3F QUESTION MARK, left-right */
0x0040, /* 40 COMMERCIAL AT */
0x0041, /* 41 LATIN CAPITAL LETTER A */
0x0042, /* 42 LATIN CAPITAL LETTER B */
0x0043, /* 43 LATIN CAPITAL LETTER C */
0x0044, /* 44 LATIN CAPITAL LETTER D */
0x0045, /* 45 LATIN CAPITAL LETTER E */
0x0046, /* 46 LATIN CAPITAL LETTER F */
0x0047, /* 47 LATIN CAPITAL LETTER G */
0x0048, /* 48 LATIN CAPITAL LETTER H */
0x0049, /* 49 LATIN CAPITAL LETTER I */
0x004A, /* 4A LATIN CAPITAL LETTER J */
0x004B, /* 4B LATIN CAPITAL LETTER K */
0x004C, /* 4C LATIN CAPITAL LETTER L */
0x004D, /* 4D LATIN CAPITAL LETTER M */
0x004E, /* 4E LATIN CAPITAL LETTER N */
0x004F, /* 4F LATIN CAPITAL LETTER O */
0x0050, /* 50 LATIN CAPITAL LETTER P */
0x0051, /* 51 LATIN CAPITAL LETTER Q */
0x0052, /* 52 LATIN CAPITAL LETTER R */
0x0053, /* 53 LATIN CAPITAL LETTER S */
0x0054, /* 54 LATIN CAPITAL LETTER T */
0x0055, /* 55 LATIN CAPITAL LETTER U */
0x0056, /* 56 LATIN CAPITAL LETTER V */
0x0057, /* 57 LATIN CAPITAL LETTER W */
0x0058, /* 58 LATIN CAPITAL LETTER X */
0x0059, /* 59 LATIN CAPITAL LETTER Y */
0x005A, /* 5A LATIN CAPITAL LETTER Z */
0x005B, /* 5B LEFT SQUARE BRACKET, left-right */
0x005C, /* 5C REVERSE SOLIDUS, left-right */
0x005D, /* 5D RIGHT SQUARE BRACKET, left-right */
0x005E, /* 5E CIRCUMFLEX ACCENT, left-right */
0x005F, /* 5F LOW LINE, left-right */
0x0060, /* 60 GRAVE ACCENT */
0x0061, /* 61 LATIN SMALL LETTER A */
0x0062, /* 62 LATIN SMALL LETTER B */
0x0063, /* 63 LATIN SMALL LETTER C */
0x0064, /* 64 LATIN SMALL LETTER D */
0x0065, /* 65 LATIN SMALL LETTER E */
0x0066, /* 66 LATIN SMALL LETTER F */
0x0067, /* 67 LATIN SMALL LETTER G */
0x0068, /* 68 LATIN SMALL LETTER H */
0x0069, /* 69 LATIN SMALL LETTER I */
0x006A, /* 6A LATIN SMALL LETTER J */
0x006B, /* 6B LATIN SMALL LETTER K */
0x006C, /* 6C LATIN SMALL LETTER L */
0x006D, /* 6D LATIN SMALL LETTER M */
0x006E, /* 6E LATIN SMALL LETTER N */
0x006F, /* 6F LATIN SMALL LETTER O */
0x0070, /* 70 LATIN SMALL LETTER P */
0x0071, /* 71 LATIN SMALL LETTER Q */
0x0072, /* 72 LATIN SMALL LETTER R */
0x0073, /* 73 LATIN SMALL LETTER S */
0x0074, /* 74 LATIN SMALL LETTER T */
0x0075, /* 75 LATIN SMALL LETTER U */
0x0076, /* 76 LATIN SMALL LETTER V */
0x0077, /* 77 LATIN SMALL LETTER W */
0x0078, /* 78 LATIN SMALL LETTER X */
0x0079, /* 79 LATIN SMALL LETTER Y */
0x007A, /* 7A LATIN SMALL LETTER Z */
0x007B, /* 7B LEFT CURLY BRACKET, left-right */
0x007C, /* 7C VERTICAL LINE, left-right */
0x007D, /* 7D RIGHT CURLY BRACKET, left-right */
0x007E, /* 7E TILDE */
UV_UNDEF, /* 7F */
0x00C4, /* 80 LATIN CAPITAL LETTER A WITH DIAERESIS */
0x00A0, /* 81 NO-BREAK SPACE, right-left */
0x00C7, /* 82 LATIN CAPITAL LETTER C WITH CEDILLA */
0x00C9, /* 83 LATIN CAPITAL LETTER E WITH ACUTE */
0x00D1, /* 84 LATIN CAPITAL LETTER N WITH TILDE */
0x00D6, /* 85 LATIN CAPITAL LETTER O WITH DIAERESIS */
0x00DC, /* 86 LATIN CAPITAL LETTER U WITH DIAERESIS */
0x00E1, /* 87 LATIN SMALL LETTER A WITH ACUTE */
0x00E0, /* 88 LATIN SMALL LETTER A WITH GRAVE */
0x00E2, /* 89 LATIN SMALL LETTER A WITH CIRCUMFLEX */
0x00E4, /* 8A LATIN SMALL LETTER A WITH DIAERESIS */
0x06BA, /* 8B ARABIC LETTER NOON GHUNNA */
0x00AB, /* 8C LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left */
0x00E7, /* 8D LATIN SMALL LETTER C WITH CEDILLA */
0x00E9, /* 8E LATIN SMALL LETTER E WITH ACUTE */
0x00E8, /* 8F LATIN SMALL LETTER E WITH GRAVE */
0x00EA, /* 90 LATIN SMALL LETTER E WITH CIRCUMFLEX */
0x00EB, /* 91 LATIN SMALL LETTER E WITH DIAERESIS */
0x00ED, /* 92 LATIN SMALL LETTER I WITH ACUTE */
0x2026, /* 93 HORIZONTAL ELLIPSIS, right-left */
0x00EE, /* 94 LATIN SMALL LETTER I WITH CIRCUMFLEX */
0x00EF, /* 95 LATIN SMALL LETTER I WITH DIAERESIS */
0x00F1, /* 96 LATIN SMALL LETTER N WITH TILDE */
0x00F3, /* 97 LATIN SMALL LETTER O WITH ACUTE */
0x00BB, /* 98 RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left */
0x00F4, /* 99 LATIN SMALL LETTER O WITH CIRCUMFLEX */
0x00F6, /* 9A LATIN SMALL LETTER O WITH DIAERESIS */
0x00F7, /* 9B DIVISION SIGN, right-left */
0x00FA, /* 9C LATIN SMALL LETTER U WITH ACUTE */
0x00F9, /* 9D LATIN SMALL LETTER U WITH GRAVE */
0x00FB, /* 9E LATIN SMALL LETTER U WITH CIRCUMFLEX */
0x00FC, /* 9F LATIN SMALL LETTER U WITH DIAERESIS */
0x0020, /* A0 SPACE, right-left */
0x0021, /* A1 EXCLAMATION MARK, right-left */
0x0022, /* A2 QUOTATION MARK, right-left */
0x0023, /* A3 NUMBER SIGN, right-left */
0x0024, /* A4 DOLLAR SIGN, right-left */
0x066A, /* A5 ARABIC PERCENT SIGN */
0x0026, /* A6 AMPERSAND, right-left */
0x0027, /* A7 APOSTROPHE, right-left */
0x0028, /* A8 LEFT PARENTHESIS, right-left */
0x0029, /* A9 RIGHT PARENTHESIS, right-left */
0x002A, /* AA ASTERISK, right-left */
0x002B, /* AB PLUS SIGN, right-left */
0x060C, /* AC ARABIC COMMA */
0x002D, /* AD HYPHEN-MINUS, right-left */
0x002E, /* AE FULL STOP, right-left */
0x002F, /* AF SOLIDUS, right-left */
0x0660, /* B0 ARABIC-INDIC DIGIT ZERO, right-left */
0x0661, /* B1 ARABIC-INDIC DIGIT ONE, right-left */
0x0662, /* B2 ARABIC-INDIC DIGIT TWO, right-left */
0x0663, /* B3 ARABIC-INDIC DIGIT THREE, right-left */
0x0664, /* B4 ARABIC-INDIC DIGIT FOUR, right-left */
0x0665, /* B5 ARABIC-INDIC DIGIT FIVE, right-left */
0x0666, /* B6 ARABIC-INDIC DIGIT SIX, right-left */
0x0667, /* B7 ARABIC-INDIC DIGIT SEVEN, right-left */
0x0668, /* B8 ARABIC-INDIC DIGIT EIGHT, right-left */
0x0669, /* B9 ARABIC-INDIC DIGIT NINE, right-left */
0x003A, /* BA COLON, right-left */
0x061B, /* BB ARABIC SEMICOLON */
0x003C, /* BC LESS-THAN SIGN, right-left */
0x003D, /* BD EQUALS SIGN, right-left */
0x003E, /* BE GREATER-THAN SIGN, right-left */
0x061F, /* BF ARABIC QUESTION MARK */
0x274A, /* C0 EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left */
0x0621, /* C1 ARABIC LETTER HAMZA */
0x0622, /* C2 ARABIC LETTER ALEF WITH MADDA ABOVE */
0x0623, /* C3 ARABIC LETTER ALEF WITH HAMZA ABOVE */
0x0624, /* C4 ARABIC LETTER WAW WITH HAMZA ABOVE */
0x0625, /* C5 ARABIC LETTER ALEF WITH HAMZA BELOW */
0x0626, /* C6 ARABIC LETTER YEH WITH HAMZA ABOVE */
0x0627, /* C7 ARABIC LETTER ALEF */
0x0628, /* C8 ARABIC LETTER BEH */
0x0629, /* C9 ARABIC LETTER TEH MARBUTA */
0x062A, /* CA ARABIC LETTER TEH */
0x062B, /* CB ARABIC LETTER THEH */
0x062C, /* CC ARABIC LETTER JEEM */
0x062D, /* CD ARABIC LETTER HAH */
0x062E, /* CE ARABIC LETTER KHAH */
0x062F, /* CF ARABIC LETTER DAL */
0x0630, /* D0 ARABIC LETTER THAL */
0x0631, /* D1 ARABIC LETTER REH */
0x0632, /* D2 ARABIC LETTER ZAIN */
0x0633, /* D3 ARABIC LETTER SEEN */
0x0634, /* D4 ARABIC LETTER SHEEN */
0x0635, /* D5 ARABIC LETTER SAD */
0x0636, /* D6 ARABIC LETTER DAD */
0x0637, /* D7 ARABIC LETTER TAH */
0x0638, /* D8 ARABIC LETTER ZAH */
0x0639, /* D9 ARABIC LETTER AIN */
0x063A, /* DA ARABIC LETTER GHAIN */
0x005B, /* DB LEFT SQUARE BRACKET, right-left */
0x005C, /* DC REVERSE SOLIDUS, right-left */
0x005D, /* DD RIGHT SQUARE BRACKET, right-left */
0x005E, /* DE CIRCUMFLEX ACCENT, right-left */
0x005F, /* DF LOW LINE, right-left */
0x0640, /* E0 ARABIC TATWEEL */
0x0641, /* E1 ARABIC LETTER FEH */
0x0642, /* E2 ARABIC LETTER QAF */
0x0643, /* E3 ARABIC LETTER KAF */
0x0644, /* E4 ARABIC LETTER LAM */
0x0645, /* E5 ARABIC LETTER MEEM */
0x0646, /* E6 ARABIC LETTER NOON */
0x0647, /* E7 ARABIC LETTER HEH */
0x0648, /* E8 ARABIC LETTER WAW */
0x0649, /* E9 ARABIC LETTER ALEF MAKSURA */
0x064A, /* EA ARABIC LETTER YEH */
0x064B, /* EB ARABIC FATHATAN */
0x064C, /* EC ARABIC DAMMATAN */
0x064D, /* ED ARABIC KASRATAN */
0x064E, /* EE ARABIC FATHA */
0x064F, /* EF ARABIC DAMMA */
0x0650, /* F0 ARABIC KASRA */
0x0651, /* F1 ARABIC SHADDA */
0x0652, /* F2 ARABIC SUKUN */
0x067E, /* F3 ARABIC LETTER PEH */
0x0679, /* F4 ARABIC LETTER TTEH */
0x0686, /* F5 ARABIC LETTER TCHEH */
0x06D5, /* F6 ARABIC LETTER AE */
0x06A4, /* F7 ARABIC LETTER VEH */
0x06AF, /* F8 ARABIC LETTER GAF */
0x0688, /* F9 ARABIC LETTER DDAL */
0x0691, /* FA ARABIC LETTER RREH */
0x007B, /* FB LEFT CURLY BRACKET, right-left */
0x007C, /* FC VERTICAL LINE, right-left */
0x007D, /* FD RIGHT CURLY BRACKET, right-left */
0x0698, /* FE ARABIC LETTER JEH */
0x06D2, /* FF ARABIC LETTER YEH BARREE */
| 5,771 |
2,002 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
#pragma once
#include <unknwn.h>
#include <string>
#include <thread>
#include <mutex>
#include <stdexcept>
| 89 |
650 | package cn.netdiscovery.example;
import cn.netdiscovery.core.domain.ResultItems;
import cn.netdiscovery.pipeline.elasticsearch.ElasticSearchPipline;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import java.net.InetAddress;
import java.net.UnknownHostException;
/**
* @author bdq
* @date 2018-12-26
*/
/**
 * Manual smoke test: pushes a single ResultItems document through the
 * ElasticSearch pipeline against a local node on port 9300.
 *
 * Fixed to close the TransportClient: the original leaked it, leaving
 * its network threads running (and the JVM alive) after main() returned.
 */
public class TestESPipline {
    public static void main(String[] args) throws UnknownHostException {
        Settings settings = Settings.builder()
                .put("cluster.name", "docker-cluster").build();
        // try-with-resources: TransportClient is Closeable and owns
        // thread pools / connections that must be released explicitly.
        try (TransportClient client = new PreBuiltTransportClient(settings)
                .addTransportAddress(new TransportAddress(InetAddress.getByName("localhost"), 9300))) {
            ResultItems resultItems = new ResultItems();
            resultItems.put("test", 1);
            new ElasticSearchPipline(client, "test", "_doc").process(resultItems);
        }
    }
}
| 363 |
1,018 | #import <React/RCTBridgeModule.h>
#import "FaceDetectorManagerMlkit.h"
// React Native bridge module (conforms to RCTBridgeModule) exposing
// face detection to JavaScript. Presumably delegates the actual work to
// FaceDetectorManagerMlkit (imported above) — confirm in the .m file.
@interface RNFaceDetectorModuleMLKit : NSObject <RCTBridgeModule>
@end
| 52 |
335 | {
"word": "Assimilation",
"definitions": [
"The process of taking in and fully understanding information or ideas.",
"The absorption and integration of people, ideas, or culture into a wider society or culture.",
"The absorption and digestion of food or nutrients by the body or any biological system.",
"The process of becoming similar to something.",
"The fact of a sound being made more like another in the same or next word."
],
"parts-of-speech": "Noun"
} | 159 |
3,084 | <filename>network/trans/WFPSampler/lib/HelperFunctions_GUID.h
////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2012 Microsoft Corporation. All Rights Reserved.
//
// Module Name:
// HelperFunctions_GUID.h
//
// Abstract:
// This module contains prototypes for functions which assist in actions pertaining to GUIDs.
//
// Author:
// <NAME> (DHarper)
//
// Revision History:
//
// [ Month ][Day] [Year] - [Revision]-[ Comments ]
// May 01, 2010 - 1.0 - Creation
//
////////////////////////////////////////////////////////////////////////////////////////////////////
#ifndef HELPERFUNCTIONS_GUID_H
#define HELPERFUNCTIONS_GUID_H
VOID HlprGUIDPurge(_Inout_ GUID* pGUID);
_At_(*ppGUID, _Post_ _Null_)
VOID HlprGUIDDestroy(_Inout_ GUID** ppGUID);
_Success_(return == NO_ERROR)
UINT32 HlprGUIDPopulate(_Inout_ GUID* pGUID);
_At_(*ppGUID, _Pre_ _Null_)
_When_(return != NO_ERROR, _At_(*ppGUID, _Post_ _Null_))
_When_(return == NO_ERROR, _At_(*ppGUID, _Post_ _Notnull_))
_Success_(return == NO_ERROR)
UINT32 HlprGUIDCreate(_Outptr_ GUID** ppGUID);
_When_(return != NO_ERROR, _At_(*ppGUIDString, _Post_ _Notnull_))
_When_(return == NO_ERROR, _At_(*ppGUIDString, _Post_ _Null_))
_Success_(return == NO_ERROR)
UINT32 HlprGUIDDestroyString(_Inout_ PWSTR* ppGUIDString);
_Success_(return != 0)
PWSTR HlprGUIDCreateString(_In_ const GUID* pGUID);
BOOLEAN HlprGUIDsAreEqual(_In_ const GUID* pGUIDAlpha,
_In_ const GUID* pGUIDOmega);
BOOLEAN HlprGUIDIsNull(_In_ const GUID* pGUID);
#endif /// HELPERFUNCTIONS_GUID_H | 696 |
338 | # -*- coding: utf-8 -*-
from __future__ import division
import pandas as pd
import numpy as np
import biosppy
import cvxopt as cv
import cvxopt.solvers
from ..statistics import z_score
from ..statistics import find_closest_in_list
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
def eda_process(eda, sampling_rate=1000, alpha=8e-4, gamma=1e-2, filter_type="butter", scr_method="makowski", scr_treshold=0.1):
    """
    Automated processing of EDA signal using convex optimization (CVXEDA; Greco et al., 2015).

    Parameters
    ----------
    eda : list or array
        EDA signal array.
    sampling_rate : int
        Sampling rate (samples/second).
    alpha : float
        cvxEDA penalization for the sparse SMNA driver.
    gamma : float
        cvxEDA penalization for the tonic spline coefficients.
    filter_type : str or None
        Can be Butterworth filter ("butter"), Finite Impulse Response filter ("FIR"), Chebyshev filters ("cheby1" and "cheby2"), Elliptic filter ("ellip") or Bessel filter ("bessel"). Set to None to skip filtering.
    scr_method : str
        SCR extraction algorithm. "makowski" (default), "kim" (biosPPy's default; See Kim et al., 2004) or "gamboa" (Gamboa, 2004).
    scr_treshold : float
        SCR minimum treshold (in terms of signal standard deviation).

    Returns
    ----------
    processed_eda : dict
        Dict containing processed EDA features.
        Contains the EDA raw signal, the filtered signal, the phasic and tonic
        components (when cvxEDA succeeds), the SCR onsets, peak indexes,
        amplitudes and recovery indexes.
        This function is mainly a wrapper for the biosppy.eda.eda() and cvxEDA() functions. Credits go to their authors.

    Example
    ----------
    >>> import neurokit as nk
    >>>
    >>> processed_eda = nk.eda_process(eda_signal)

    Notes
    ----------
    - **cvxEDA**: models EDA as the sum of a phasic component, a tonic component,
      and additive white Gaussian noise, solved via convex optimization.

    *Authors*
    - `Dominique Makowski <https://dominiquemakowski.github.io/>`_

    *Dependencies*
    - biosppy, numpy, pandas, cvxopt

    *See Also*
    - BioSPPy: https://github.com/PIA-Group/BioSPPy
    - cvxEDA: https://github.com/lciti/cvxEDA
    """
    # Initialization
    eda = np.array(eda)
    eda_df = pd.DataFrame({"EDA_Raw": eda})

    # Preprocessing
    # ===================
    if filter_type is not None:
        # Low-pass filter (5 Hz cutoff) to remove high-frequency noise.
        filtered, _, _ = biosppy.tools.filter_signal(signal=eda,
                                                     ftype=filter_type,
                                                     band='lowpass',
                                                     order=4,
                                                     frequency=5,
                                                     sampling_rate=sampling_rate)
        # Smoothing with a 0.75 s "boxzen" kernel.
        filtered, _ = biosppy.tools.smoother(signal=filtered,
                                             kernel='boxzen',
                                             size=int(0.75 * sampling_rate),
                                             mirror=True)
    else:
        # Bugfix: `filtered` used to be undefined when filter_type is None,
        # raising a NameError on the assignment below. Fall back to the raw signal.
        filtered = eda
    eda_df["EDA_Filtered"] = filtered

    # Derive phasic and tonic components (note: cvxEDA runs on the raw signal).
    try:
        tonic, phasic = cvxEDA(eda, sampling_rate=sampling_rate, alpha=alpha, gamma=gamma)
        eda_df["EDA_Phasic"] = phasic
        eda_df["EDA_Tonic"] = tonic
        signal = phasic
    except Exception:  # was a bare `except`, which also swallowed KeyboardInterrupt
        print("NeuroKit Warning: eda_process(): Error in cvxEDA algorithm, couldn't extract phasic and tonic components. Using raw signal.")
        signal = eda

    # Skin-Conductance Responses
    # ===========================
    if scr_method == "kim":
        onsets, peaks, amplitudes = biosppy.eda.kbk_scr(signal=signal, sampling_rate=sampling_rate, min_amplitude=scr_treshold)
        # This method does not provide recovery times.
        recoveries = np.array([np.nan]*len(onsets))
    elif scr_method == "gamboa":
        onsets, peaks, amplitudes = biosppy.eda.basic_scr(signal=signal, sampling_rate=sampling_rate)
        recoveries = np.array([np.nan]*len(onsets))
    else:  # makowski's algorithm
        onsets, peaks, amplitudes, recoveries = eda_scr(signal, sampling_rate=sampling_rate, treshold=scr_treshold, method="fast")
        recoveries = np.asarray(recoveries)

    # Store SCR onsets positions as a sparse indicator column.
    scr_onsets = np.array([np.nan]*len(signal))
    if len(onsets) > 0:
        scr_onsets[np.asarray(onsets, dtype=int)] = 1
    eda_df["SCR_Onsets"] = scr_onsets

    # Store SCR recovery positions (NaN entries mean no recovery was found).
    # Bugfix: `recoveries` must be an ndarray for the boolean masking below; with
    # "kim" and "gamboa" it used to be a plain list, which raised a TypeError here.
    scr_recoveries = np.array([np.nan]*len(signal))
    if len(recoveries) > 0:
        valid_recoveries = recoveries[pd.notnull(recoveries)].astype(int)
        scr_recoveries[valid_recoveries] = 1
    eda_df["SCR_Recoveries"] = scr_recoveries

    # Store SCR amplitudes at their peak positions (vectorized; replaces the old
    # index-matching loop with its silent `except: pass`).
    scr_peaks = np.array([np.nan]*len(eda))
    if len(peaks) > 0:
        scr_peaks[np.asarray(peaks, dtype=int)] = np.asarray(amplitudes)
    eda_df["SCR_Peaks"] = scr_peaks

    processed_eda = {"df": eda_df,
                     "EDA": {
                         "SCR_Onsets": onsets,
                         "SCR_Peaks_Indexes": peaks,
                         "SCR_Recovery_Indexes": recoveries,
                         "SCR_Peaks_Amplitudes": amplitudes}}
    return(processed_eda)
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
def cvxEDA(eda, sampling_rate=1000, tau0=2., tau1=0.7, delta_knot=10., alpha=8e-4, gamma=1e-2, solver=None, verbose=False, options=None):
    """
    A convex optimization approach to electrodermal activity processing (CVXEDA).

    This function implements the cvxEDA algorithm described in "cvxEDA: a
    Convex Optimization Approach to Electrodermal Activity Processing" (Greco et al., 2015).

    Parameters
    ----------
    eda : list or array
        raw EDA signal array.
    sampling_rate : int
        Sampling rate (samples/second).
    tau0 : float
        Slow time constant of the Bateman function.
    tau1 : float
        Fast time constant of the Bateman function.
    delta_knot : float
        Time between knots of the tonic spline function.
    alpha : float
        Penalization for the sparse SMNA driver.
    gamma : float
        Penalization for the tonic spline coefficients.
    solver : bool
        Sparse QP solver to be used, see cvxopt.solvers.qp
    verbose : bool
        Print solver progress?
    options : dict or None
        Solver options, see http://cvxopt.org/userguide/coneprog.html#algorithm-parameters.
        Defaults to {'reltol': 1e-9}.

    Returns
    ----------
    tonic : cvxopt matrix
        The tonic component (spline plus linear trend), same length as the signal.
    phasic : numpy.array
        The phasic component.

    Notes
    ----------
    *Authors*
    - Luca Citi (https://github.com/lciti)
    - Alberto Greco

    *Dependencies*
    - cvxopt, numpy

    *See Also*
    - cvxEDA: https://github.com/lciti/cvxEDA

    References
    -----------
    - Greco, A., Valenza, G., & Scilingo, E. P. (2016). Evaluation of CDA and CvxEDA Models. In Advances in Electrodermal Activity Processing with Applications for Mental Health (pp. 35-43). Springer International Publishing.
    - Greco, A., Valenza, G., Lanata, A., Scilingo, E. P., & Citi, L. (2016). cvxEDA: A convex optimization approach to electrodermal activity processing. IEEE Transactions on Biomedical Engineering, 63(4), 797-804.
    """
    # Bugfix: the previous signature used a mutable default (options={'reltol': 1e-9})
    # and mutated it below ("show_progress"), leaking state across calls. Default to
    # None and copy any caller-supplied dict before mutating.
    if options is None:
        options = {'reltol': 1e-9}
    else:
        options = dict(options)
    frequency = 1/sampling_rate

    # Normalize the signal and flatten to 1-D (z_score returns a 2-D structure).
    eda = z_score(eda)
    eda = np.array(eda)[:,0]
    n = len(eda)
    eda = eda.astype('double')
    eda = cv.matrix(eda)

    # bateman ARMA model
    a1 = 1./min(tau1, tau0) # a1 > a0
    a0 = 1./max(tau1, tau0)
    ar = np.array([(a1*frequency + 2.) * (a0*frequency + 2.), 2.*a1*a0*frequency**2 - 8.,
                   (a1*frequency - 2.) * (a0*frequency - 2.)]) / ((a1 - a0) * frequency**2)
    ma = np.array([1., 2., 1.])

    # matrices for ARMA model
    i = np.arange(2, n)
    A = cv.spmatrix(np.tile(ar, (n-2,1)), np.c_[i,i,i], np.c_[i,i-1,i-2], (n,n))
    M = cv.spmatrix(np.tile(ma, (n-2,1)), np.c_[i,i,i], np.c_[i,i-1,i-2], (n,n))

    # spline
    delta_knot_s = int(round(delta_knot / frequency))
    spl = np.r_[np.arange(1.,delta_knot_s), np.arange(delta_knot_s, 0., -1.)] # order 1
    spl = np.convolve(spl, spl, 'full')
    spl /= max(spl)
    # matrix of spline regressors
    i = np.c_[np.arange(-(len(spl)//2), (len(spl)+1)//2)] + np.r_[np.arange(0, n, delta_knot_s)]
    nB = i.shape[1]
    j = np.tile(np.arange(nB), (len(spl),1))
    p = np.tile(spl, (nB,1)).T
    valid = (i >= 0) & (i < n)
    B = cv.spmatrix(p[valid], i[valid], j[valid])

    # trend (constant + linear term)
    C = cv.matrix(np.c_[np.ones(n), np.arange(1., n+1.)/n])
    nC = C.size[1]

    # Solve the problem:
    # .5*(M*q + B*l + C*d - eda)^2 + alpha*sum(A,1)*p + .5*gamma*l'*l
    # s.t. A*q >= 0
    if verbose is False:
        options["show_progress"] = False
    # Swap in our solver options, restoring the caller's global options afterwards.
    old_options = cv.solvers.options.copy()
    cv.solvers.options.clear()
    cv.solvers.options.update(options)
    if solver == 'conelp':
        # Use conelp
        z = lambda m,n: cv.spmatrix([],[],[],(m,n))
        G = cv.sparse([[-A,z(2,n),M,z(nB+2,n)],[z(n+2,nC),C,z(nB+2,nC)],
                       [z(n,1),-1,1,z(n+nB+2,1)],[z(2*n+2,1),-1,1,z(nB,1)],
                       [z(n+2,nB),B,z(2,nB),cv.spmatrix(1.0, range(nB), range(nB))]])
        h = cv.matrix([z(n,1),.5,.5,eda,.5,.5,z(nB,1)])
        c = cv.matrix([(cv.matrix(alpha, (1,n)) * A).T,z(nC,1),1,gamma,z(nB,1)])
        res = cv.solvers.conelp(c, G, h, dims={'l':n,'q':[n+2,nB+2],'s':[]})
        obj = res['primal objective']
    else:
        # Use qp
        Mt, Ct, Bt = M.T, C.T, B.T
        H = cv.sparse([[Mt*M, Ct*M, Bt*M], [Mt*C, Ct*C, Bt*C],
                       [Mt*B, Ct*B, Bt*B+gamma*cv.spmatrix(1.0, range(nB), range(nB))]])
        f = cv.matrix([(cv.matrix(alpha, (1,n)) * A).T - Mt*eda, -(Ct*eda), -(Bt*eda)])
        res = cv.solvers.qp(H, f, cv.spmatrix(-A.V, A.I, A.J, (n,len(f))),
                            cv.matrix(0., (n,1)), solver=solver)
        obj = res['primal objective'] + .5 * (eda.T * eda)
    cv.solvers.options.clear()
    cv.solvers.options.update(old_options)

    # Unpack the solution: spline coefficients (l), trend (d), SMNA driver (q).
    l = res['x'][-nB:]
    d = res['x'][n:n+nC]
    tonic = B*l + C*d
    q = res['x'][:n]
    p = A * q
    phasic = M * q
    e = eda - phasic - tonic  # residual (model/measurement error; currently unused)
    phasic = np.array(phasic)[:,0]
    return(tonic, phasic)
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
def eda_scr(signal, sampling_rate=1000, treshold=0.1, method="fast"):
    """
    Skin-Conductance Responses extraction algorithm.

    Parameters
    ----------
    signal : list or array
        EDA signal array (ideally the phasic component).
    sampling_rate : int
        Sampling rate (samples/second).
    treshold : float
        SCR minimum treshold (in terms of signal standard deviation).
    method : str
        "fast" or "slow". Either use a gradient-based approach or a local extrema one.

    Returns
    ----------
    onsets, peaks, amplitudes, recoveries : arrays
        SCR features. `recoveries` may contain NaN where the half-amplitude
        recovery point is not reached before the next onset.

    Example
    ----------
    >>> import neurokit as nk
    >>>
    >>> onsets, peaks, amplitudes, recoveries = nk.eda_scr(eda_signal)

    Notes
    ----------
    *Authors*
    - `Dominique Makowski <https://dominiquemakowski.github.io/>`_

    *Dependencies*
    - biosppy, numpy, pandas

    *See Also*
    - BioSPPy: https://github.com/PIA-Group/BioSPPy
    """
    # Ensure ndarray so fancy indexing (signal[peaks]) works on plain lists too.
    signal = np.asarray(signal)
    empty = np.array([], dtype=int)

    # Candidate onsets/peaks extraction
    # ==================================
    if method == "slow":
        # Zero-crossings of the smoothed first derivative: rising crossings are
        # onsets, falling crossings are peaks.
        gradient = np.gradient(signal)
        size = int(0.1 * sampling_rate)
        smooth, _ = biosppy.tools.smoother(signal=gradient, kernel='bartlett', size=size, mirror=True)
        zeros, = biosppy.tools.zero_cross(signal=smooth, detrend=True)
        onsets = []
        peaks = []
        for i in zeros:
            if smooth[i+1] > smooth[i-1]:
                onsets.append(i)
            else:
                peaks.append(i)
        peaks = np.array(peaks)
        onsets = np.array(onsets)
    else:
        # Local extrema: maxima are peak candidates, minima are onset candidates.
        peaks, _ = biosppy.tools.find_extrema(signal=signal, mode='max')
        onsets, _ = biosppy.tools.find_extrema(signal=signal, mode='min')
        # Robustness fix: a flat or very short signal can yield no extrema, which
        # previously raised an IndexError on onsets[0] / peaks[-1].
        if len(peaks) == 0 or len(onsets) == 0:
            return(empty, empty, np.array([]), np.array([]))
        # Keep only onset -> peak pairs
        peaks = peaks[peaks > onsets[0]]
        if len(peaks) == 0:
            return(empty, empty, np.array([]), np.array([]))
        onsets = onsets[onsets < peaks[-1]]

    if len(peaks) == 0 or len(onsets) == 0:
        return(empty, empty, np.array([]), np.array([]))

    # Artifact Treatment
    # ====================
    # Discard SCRs with rise times of 100 ms or less (physiologically implausible).
    risingtimes = peaks-onsets
    risingtimes = risingtimes/sampling_rate*1000
    peaks = peaks[risingtimes > 100]
    onsets = onsets[risingtimes > 100]

    # Compute amplitudes and drop low-amplitude variations.
    amplitudes = signal[peaks]-signal[onsets]
    mask = amplitudes > np.std(signal)*treshold
    peaks = peaks[mask]
    onsets = onsets[mask]
    amplitudes = amplitudes[mask]

    # Recovery moments: first sample after each peak whose value falls to half the
    # SCR amplitude, searched up to the next onset (or the end of the signal).
    recoveries = []
    for x, peak in enumerate(peaks):
        try:
            window = signal[peak:onsets[x+1]]
        except IndexError:
            window = signal[peak:]
        recovery_amp = signal[peak]-amplitudes[x]/2
        try:
            smaller = find_closest_in_list(recovery_amp, window, "smaller")
            recovery_pos = peak + list(window).index(smaller)
            recoveries.append(recovery_pos)
        except ValueError:
            # Half-amplitude level never reached within the window.
            recoveries.append(np.nan)
    recoveries = np.array(recoveries)
    return(onsets, peaks, amplitudes, recoveries)
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
def eda_EventRelated(epoch, event_length, window_post=4):
    """
    Extract event-related EDA and Skin Conductance Response (SCR) features.

    Parameters
    ----------
    epoch : pandas.DataFrame
        An epoch from the epochs dict returned by :function:`neurokit.create_epochs()`
        on a dataframe returned by :function:`neurokit.bio_process()`. The index is
        time in seconds relative to event onset (e.g. -0.5 to +7).
    event_length : int
        Event's length in seconds.
    window_post : float
        Post-stimulus window size (in seconds) to include eventual responses (usually 3 or 4).

    Returns
    ----------
    EDA_Response : dict
        Event-related EDA response features:
        - **EDA_Peak**: max of EDA in [1s, window_end] minus the baseline
          (only present when the epoch has an "EDA_Filtered" column).
        - **SCR_Amplitude** / **SCR_Magnitude** (and their ``_Log`` variants),
          **SCR_Latency**, **SCR_PeakTime**, **SCR_RiseTime**, **SCR_Strength**
          (experimental), **SCR_RecoveryTime** (only present when the epoch has
          an "SCR_Onsets" column).

    Example
    ----------
    >>> import neurokit as nk
    >>> bio = nk.bio_process(ecg=data["ECG"], rsp=data["RSP"], eda=data["EDA"], sampling_rate=1000, add=data["Photosensor"])
    >>> df = bio["df"]
    >>> events = nk.find_events(df["Photosensor"], cut="lower")
    >>> epochs = nk.create_epochs(df, events["onsets"], duration=7, onset=-0.5)
    >>> for epoch in epochs:
    >>>     bio_response = nk.bio_EventRelated(epoch, event_length=4, window_post=3)

    Notes
    ----------
    **Looking for help**: *Experimental*: respiration artifacts correction needs to be implemented.

    *Authors*
    - `Dominique Makowski <https://dominiquemakowski.github.io/>`_

    *See Also*
    - https://www.biopac.com/wp-content/uploads/EDA-SCR-Analysis.pdf
    """
    def _safe_idxmax(series):
        # Recent pandas raises ValueError for idxmax() on an all-NaN series,
        # while older versions returned NaN; normalize to NaN.
        try:
            return series.idxmax()
        except ValueError:
            return np.nan

    # Initialization
    EDA_Response = {}
    window_end = event_length + window_post

    # Sanity check: warn when the epoch is too short to contain late SCRs.
    if epoch.index[-1]-event_length < 1:
        print("NeuroKit Warning: eda_EventRelated(): your epoch only lasts for about %.2f s post stimulus. You might lose some SCRs." %(epoch.index[-1]-event_length))

    # EDA Based
    # =================
    # Basic model: post-stimulus peak relative to an early baseline.
    if "EDA_Filtered" in epoch.columns:
        baseline = epoch["EDA_Filtered"][0:1].min()
        eda_peak = epoch["EDA_Filtered"][1:window_end].max()
        EDA_Response["EDA_Peak"] = eda_peak - baseline

    # SCR Based
    # =================
    if "SCR_Onsets" in epoch.columns:
        # First SCR onset at least 1 s after stimulus onset.
        peak_onset = _safe_idxmax(epoch["SCR_Onsets"][1:window_end])
        if pd.notnull(peak_onset):
            amplitude = epoch["SCR_Peaks"][peak_onset:window_end].max()
            peak_time = _safe_idxmax(epoch["SCR_Peaks"][peak_onset:window_end])
            if pd.isnull(amplitude):
                magnitude = 0
            else:
                magnitude = amplitude
            risetime = peak_time - peak_onset
            if risetime > 0:
                strength = magnitude/risetime
            else:
                strength = np.nan
            if pd.isnull(peak_time) is False:
                recovery_idx = _safe_idxmax(epoch["SCR_Recoveries"][peak_time:window_end])
                if pd.notnull(recovery_idx):
                    recovery = recovery_idx - peak_time
                else:
                    recovery = np.nan
            else:
                recovery = np.nan
        else:
            amplitude = np.nan
            magnitude = 0
            risetime = np.nan
            strength = np.nan
            peak_time = np.nan
            recovery = np.nan
        # Bugfix: this storage block used to sit OUTSIDE the "SCR_Onsets" guard,
        # raising a NameError whenever the column was absent from the epoch.
        EDA_Response["SCR_Amplitude"] = amplitude
        EDA_Response["SCR_Magnitude"] = magnitude
        EDA_Response["SCR_Amplitude_Log"] = np.log(1+amplitude)
        EDA_Response["SCR_Magnitude_Log"] = np.log(1+magnitude)
        EDA_Response["SCR_Latency"] = peak_onset
        EDA_Response["SCR_PeakTime"] = peak_time
        EDA_Response["SCR_RiseTime"] = risetime
        EDA_Response["SCR_Strength"] = strength  # Experimental: magnitude / rise time
        EDA_Response["SCR_RecoveryTime"] = recovery

    # Artifact Correction
    # ====================
    # TODO: respiration artifact correction (Schneider et al., 2003) —
    # possibly via granger causality on an "RSP_Filtered" column.
    return(EDA_Response)
| 8,834 |
1,694 | //
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import <MMCommon/MMObject.h>
@class NSString, UIColor;
/// Preset UI configuration for an MMWebView-based screen: colors, title and icon
/// names applied to the navigation bar, status bar and page background.
/// NOTE(review): class-dump generated header — declarations only, the
/// implementation is not visible here.
@interface MMWebViewPresetUI : MMObject
{
    _Bool _bUseBlurBackground;                // backing ivar for bUseBlurBackground
    UIColor *_navigationBarColor;             // backing ivar for navigationBarColor
    UIColor *_navigationBarTitleColor;        // backing ivar for navigationBarTitleColor
    UIColor *_navigationLeftItemFontColor;    // backing ivar for navigationLeftItemFontColor
    UIColor *_navigationRightItemColor;       // backing ivar for navigationRightItemColor
    unsigned long long _statusBarColor;       // backing ivar for statusBarColor (stored as an integer, presumably an enum/RGB value — confirm)
    UIColor *_backgroundColor;                // backing ivar for backgroundColor
    NSString *_navigationLeftIconName;        // backing ivar for navigationLeftIconName
    NSString *_navigationBarTitle;            // backing ivar for navigationBarTitle
}
// Whether the web view should use a blurred background.
@property(nonatomic) _Bool bUseBlurBackground; // @synthesize bUseBlurBackground=_bUseBlurBackground;
// Title text shown in the navigation bar.
@property(copy, nonatomic) NSString *navigationBarTitle; // @synthesize navigationBarTitle=_navigationBarTitle;
// Image name for the navigation bar's left (back/close) icon.
@property(retain, nonatomic) NSString *navigationLeftIconName; // @synthesize navigationLeftIconName=_navigationLeftIconName;
// Background color of the web view container.
@property(retain, nonatomic) UIColor *backgroundColor; // @synthesize backgroundColor=_backgroundColor;
// Status bar color value (integer-encoded; see _statusBarColor note above).
@property(nonatomic) unsigned long long statusBarColor; // @synthesize statusBarColor=_statusBarColor;
// Tint color of the navigation bar's right item.
@property(retain, nonatomic) UIColor *navigationRightItemColor; // @synthesize navigationRightItemColor=_navigationRightItemColor;
// Font color of the navigation bar's left item.
@property(retain, nonatomic) UIColor *navigationLeftItemFontColor; // @synthesize navigationLeftItemFontColor=_navigationLeftItemFontColor;
// Color of the navigation bar title text.
@property(retain, nonatomic) UIColor *navigationBarTitleColor; // @synthesize navigationBarTitleColor=_navigationBarTitleColor;
// Background color of the navigation bar itself.
@property(retain, nonatomic) UIColor *navigationBarColor; // @synthesize navigationBarColor=_navigationBarColor;
- (void).cxx_destruct;
@end
| 564 |
777 | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef AnimationEffectTimingReadOnly_h
#define AnimationEffectTimingReadOnly_h
#include "bindings/core/v8/ScriptWrappable.h"
#include "core/CoreExport.h"
#include "core/animation/AnimationEffectReadOnly.h"
#include "wtf/text/WTFString.h"
namespace blink {
class UnrestrictedDoubleOrString;
// Script-wrappable, read-only view over the timing values of an
// AnimationEffectReadOnly (Web Animations API AnimationEffectTimingReadOnly).
// All accessors delegate to the parent effect referenced by m_parent.
class CORE_EXPORT AnimationEffectTimingReadOnly
    : public GarbageCollected<AnimationEffectTimingReadOnly>,
      public ScriptWrappable {
  DEFINE_WRAPPERTYPEINFO();
 public:
  static AnimationEffectTimingReadOnly* create(AnimationEffectReadOnly* parent);
  double delay();
  double endDelay();
  String fill();
  double iterationStart();
  double iterations();
  // Writes the duration into the out-parameter, which models the IDL union
  // "unrestricted double or DOMString" (e.g. "auto").
  void duration(UnrestrictedDoubleOrString&);
  double playbackRate();
  String direction();
  String easing();
  // False here; presumably overridden to return true by the mutable
  // AnimationEffectTiming subclass — confirm against its header.
  virtual bool isAnimationEffectTiming() const { return false; }
  DECLARE_VIRTUAL_TRACE();
 protected:
  Member<AnimationEffectReadOnly> m_parent;  // effect whose timing is exposed
  explicit AnimationEffectTimingReadOnly(AnimationEffectReadOnly*);
};
}  // namespace blink
#endif
| 350 |
1,319 | from __future__ import absolute_import
from .triplet_loss import *
| 19 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-7hf8-pr4q-94mp",
"modified": "2022-05-02T03:40:58Z",
"published": "2022-05-02T03:40:58Z",
"aliases": [
"CVE-2009-3029"
],
"details": "Cross-site scripting (XSS) vulnerability in the console in Symantec SecurityExpressions Audit and Compliance Server 4.1.1, 4.1, and earlier allows remote authenticated users to inject arbitrary web script or HTML via \"external client input\" that triggers crafted error messages.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3029"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/36972"
},
{
"type": "WEB",
"url": "http://securitytracker.com/id?1022989"
},
{
"type": "WEB",
"url": "http://www.osvdb.org/58651"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/36570"
},
{
"type": "WEB",
"url": "http://www.symantec.com/security_response/securityupdates/detail.jsp?fid=security_advisory&pvid=security_advisory&year=2009&suid=20091006_00"
},
{
"type": "WEB",
"url": "http://www.vupen.com/english/advisories/2009/2849"
}
],
"database_specific": {
"cwe_ids": [
"CWE-79"
],
"severity": "LOW",
"github_reviewed": false
}
} | 638 |
659 | <gh_stars>100-1000
package co.infinum.goldfinger;
import android.content.Context;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.biometric.BiometricManager;
import androidx.biometric.BiometricPrompt;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentActivity;
import static co.infinum.goldfinger.LogUtils.log;
/**
* Goldfinger implementation for Android Marshmallow and newer.
* Older versions use {@link GoldfingerMock}.
*/
@RequiresApi(Build.VERSION_CODES.M)
class GoldfingerImpl implements Goldfinger {
    /** Handler bound to the main looper; BiometricPrompt calls must run on the UI thread. */
    private static final Handler MAIN_HANDLER = new Handler(Looper.getMainLooper());
    /** Builds BiometricPrompt.CryptoObject instances asynchronously for encrypt/decrypt flows. */
    @NonNull private final AsyncCryptoObjectFactory asyncCryptoFactory;
    /** In-flight crypto-object creation callback; kept so {@link #cancel()} can abort it. */
    @Nullable private AsyncCryptoObjectFactory.Callback asyncCryptoFactoryCallback;
    /** Active prompt; null when no authentication is in progress. */
    @Nullable private BiometricPrompt biometricPrompt;
    /** Performs the actual encryption/decryption once the prompt succeeds. */
    @NonNull private final CrypterProxy cryptoProxy;
    /** Used to query hardware availability and enrollment state. */
    @NonNull private final BiometricManager biometricManager;
    /** Single-thread executor on which BiometricPrompt dispatches its callbacks. */
    @NonNull private final Executor executor = Executors.newSingleThreadExecutor();
    /** Proxy around the client callback; see startNativeFingerprintAuthentication for why. */
    @Nullable private BiometricCallback biometricCallback;
    /** True while a CryptoObject is being created; blocks concurrent authenticate calls. */
    private boolean creatingCryptoObject = false;
    GoldfingerImpl(
        @NonNull Context context,
        @NonNull AsyncCryptoObjectFactory asyncCryptoFactory,
        @NonNull CrypterProxy cryptoProxy
    ) {
        this.biometricManager = BiometricManager.from(context);
        this.asyncCryptoFactory = asyncCryptoFactory;
        this.cryptoProxy = cryptoProxy;
    }
    /**
     * @see Goldfinger#authenticate
     */
    @Override
    public void authenticate(
        @NonNull PromptParams params,
        @NonNull Callback callback
    ) {
        if (preconditionsInvalid(params, Mode.AUTHENTICATION, null, null, callback)) {
            return;
        }
        log("Starting authentication");
        startNativeFingerprintAuthentication(params, Mode.AUTHENTICATION, null, null, callback, null);
    }
    @Override
    public boolean canAuthenticate() {
        return biometricManager.canAuthenticate() == BiometricManager.BIOMETRIC_SUCCESS;
    }
    /**
     * @see Goldfinger#cancel
     */
    @Override
    public void cancel() {
        // Tear down all three async moving parts: the prompt, the proxy callback,
        // and any pending crypto-object creation.
        if (biometricPrompt != null) {
            biometricPrompt.cancelAuthentication();
            biometricPrompt = null;
        }
        if (biometricCallback != null) {
            biometricCallback.cancel();
            biometricCallback = null;
        }
        if (asyncCryptoFactoryCallback != null) {
            asyncCryptoFactoryCallback.cancel();
            asyncCryptoFactoryCallback = null;
        }
    }
    @Override
    public void decrypt(@NonNull PromptParams params, @NonNull String key, @NonNull String value, @NonNull Callback callback) {
        if (preconditionsInvalid(params, Mode.DECRYPTION, key, value, callback)) {
            return;
        }
        initializeCryptoObject(params, Mode.DECRYPTION, key, value, callback);
    }
    @Override
    public void encrypt(@NonNull PromptParams params, @NonNull String key, @NonNull String value, @NonNull Callback callback) {
        if (preconditionsInvalid(params, Mode.ENCRYPTION, key, value, callback)) {
            return;
        }
        initializeCryptoObject(params, Mode.ENCRYPTION, key, value, callback);
    }
    @Override
    public boolean hasEnrolledFingerprint() {
        // NOTE(review): returns true for any status other than NONE_ENROLLED,
        // including BIOMETRIC_ERROR_NO_HARDWARE — confirm this is intended.
        return biometricManager.canAuthenticate() != BiometricManager.BIOMETRIC_ERROR_NONE_ENROLLED;
    }
    @Override
    public boolean hasFingerprintHardware() {
        return biometricManager.canAuthenticate() != BiometricManager.BIOMETRIC_ERROR_NO_HARDWARE;
    }
    /**
     * Asynchronously creates the CryptoObject for an encrypt/decrypt flow, then
     * starts the native prompt on success or reports CryptoObjectInitException.
     */
    private void initializeCryptoObject(
        @NonNull final PromptParams params,
        @NonNull final Mode mode,
        @NonNull final String key,
        @NonNull final String value,
        @NonNull final Callback callback
    ) {
        log("Creating CryptoObject");
        asyncCryptoFactoryCallback = new AsyncCryptoObjectFactory.Callback() {
            @Override
            void onCryptoObjectCreated(@Nullable BiometricPrompt.CryptoObject cryptoObject) {
                creatingCryptoObject = false;
                if (cryptoObject != null) {
                    startNativeFingerprintAuthentication(params, mode, key, value, callback, cryptoObject);
                } else {
                    log("Failed to create CryptoObject");
                    callback.onError(new CryptoObjectInitException());
                }
            }
        };
        creatingCryptoObject = true;
        asyncCryptoFactory.createCryptoObject(mode, key, asyncCryptoFactoryCallback);
    }
    /**
     * Validates hardware/enrollment state and the given parameters. Returns true
     * (and reports the error through the callback) when authentication must not start.
     */
    private boolean preconditionsInvalid(PromptParams params, Mode mode, String key, String value, Callback callback) {
        if ((biometricCallback != null && biometricCallback.isAuthenticationActive) || creatingCryptoObject) {
            log("Authentication is already active. Ignoring authenticate call.");
            return true;
        }
        if (!hasFingerprintHardware()) {
            callback.onError(new MissingHardwareException());
            return true;
        }
        if (!hasEnrolledFingerprint()) {
            callback.onError(new NoEnrolledFingerprintException());
            return true;
        }
        List<String> promptParams = ValidateUtils.validatePromptParams(mode, params);
        if (!promptParams.isEmpty()) {
            callback.onError(new InvalidParametersException(promptParams));
            return true;
        }
        List<String> cipherErrors = ValidateUtils.validateCipherParams(mode, key, value);
        if (!cipherErrors.isEmpty()) {
            callback.onError(new InvalidParametersException(cipherErrors));
            return true;
        }
        return false;
    }
    /**
     * Builds the prompt and starts it on the main thread. For AUTHENTICATION mode
     * the cryptoObject is null; for ENCRYPTION/DECRYPTION it must be non-null.
     */
    @SuppressWarnings("ConstantConditions")
    private void startNativeFingerprintAuthentication(
        @NonNull final PromptParams params,
        @NonNull final Mode mode,
        @Nullable final String key,
        @Nullable final String value,
        @NonNull final Callback callback,
        @Nullable final BiometricPrompt.CryptoObject cryptoObject
    ) {
        /*
         * Use proxy callback because some devices do not cancel authentication when error is received.
         * Cancel authentication manually and proxy the result to real callback.
         */
        this.biometricCallback = new BiometricCallback(cryptoProxy, mode, value, new Callback() {
            @Override
            public void onError(@NonNull Exception e) {
                cancel();
                callback.onError(e);
            }
            @Override
            public void onResult(@NonNull Result result) {
                if (result.type() == Type.ERROR || result.type() == Type.SUCCESS) {
                    cancel();
                }
                callback.onResult(result);
            }
        });
        // The dialog owner may be either an Activity or a Fragment.
        if (params.dialogOwner() instanceof FragmentActivity) {
            this.biometricPrompt = new BiometricPrompt((FragmentActivity) params.dialogOwner(), executor, biometricCallback);
        }
        if (params.dialogOwner() instanceof Fragment) {
            this.biometricPrompt = new BiometricPrompt((Fragment) params.dialogOwner(), executor, biometricCallback);
        }
        /* Delay with post because Navigation and Prompt both work with Fragment transactions */
        MAIN_HANDLER.post(new Runnable() {
            @Override
            public void run() {
                // The prompt may have been cancelled before this runnable executed.
                if (GoldfingerImpl.this.biometricPrompt == null) {
                    return;
                }
                if (mode == Mode.AUTHENTICATION) {
                    /* Simple Authentication call */
                    log("Starting authentication");
                    callback.onResult(new Result(Type.INFO, Reason.AUTHENTICATION_START));
                    GoldfingerImpl.this.biometricPrompt.authenticate(params.buildPromptInfo());
                } else {
                    /* Encryption/Decryption call with initialized CryptoObject */
                    log("Starting authentication [keyName=%s; value=%s]", key, value);
                    callback.onResult(new Result(Type.INFO, Reason.AUTHENTICATION_START));
                    GoldfingerImpl.this.biometricPrompt.authenticate(params.buildPromptInfo(), cryptoObject);
                }
            }
        });
    }
}
| 3,482 |
1,350 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.communication.callingserver.implementation.models;
import com.azure.core.annotation.Fluent;
import com.fasterxml.jackson.annotation.JsonProperty;
/** A participant in a call. */
@Fluent
/** A participant in a call (wire model used by the calling server API). */
@Fluent
public final class CallParticipantInternal {

    /** Communication identifier of the participant. */
    @JsonProperty(value = "identifier")
    private CommunicationIdentifierModel identifier;

    /** Server-assigned id of the participant. */
    @JsonProperty(value = "participantId")
    private String participantId;

    /** Whether the participant is currently muted. */
    @JsonProperty(value = "isMuted", required = true)
    private boolean isMuted;

    /**
     * Returns the communication identifier of this participant.
     *
     * @return the identifier value.
     */
    public CommunicationIdentifierModel getIdentifier() {
        return identifier;
    }

    /**
     * Sets the communication identifier of this participant.
     *
     * @param identifier the identifier value to set.
     * @return the CallParticipantInternal object itself.
     */
    public CallParticipantInternal setIdentifier(CommunicationIdentifierModel identifier) {
        this.identifier = identifier;
        return this;
    }

    /**
     * Returns the participant id.
     *
     * @return the participantId value.
     */
    public String getParticipantId() {
        return participantId;
    }

    /**
     * Sets the participant id.
     *
     * @param participantId the participantId value to set.
     * @return the CallParticipantInternal object itself.
     */
    public CallParticipantInternal setParticipantId(String participantId) {
        this.participantId = participantId;
        return this;
    }

    /**
     * Returns whether the participant is muted.
     *
     * @return the isMuted value.
     */
    public boolean isMuted() {
        return isMuted;
    }

    /**
     * Sets whether the participant is muted.
     *
     * @param isMuted the isMuted value to set.
     * @return the CallParticipantInternal object itself.
     */
    public CallParticipantInternal setIsMuted(boolean isMuted) {
        this.isMuted = isMuted;
        return this;
    }
}
| 848 |
571 | <gh_stars>100-1000
"""
Copyright 2021 Nirlep_5252_
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import asyncio
import validators
import json as json_but_pain
from discord import Embed
from config import MAIN_COLOR, RED_COLOR
def success_embed(title, description):
    """Return an Embed styled with the bot's main accent color (success look)."""
    return Embed(color=MAIN_COLOR, title=title, description=description)
def meh_embed(title, description):
    """Return a plain Embed (default color) with the given title and description."""
    return Embed(description=description, title=title)
def error_embed(title, description):
    """Return an Embed styled with the error (red) color."""
    return Embed(color=RED_COLOR, title=title, description=description)
async def edit_msg_multiple_times(ctx, time_, first_msg, other_msgs, final_emb):
    """Send a message, then progressively edit it through a sequence of embeds.

    Sends ``first_msg`` as an embed title, waits ``time_`` seconds, then for
    each entry in ``other_msgs`` (a ``[title]`` or ``[title, description]``
    sequence) edits the message and waits again. Finally edits to ``final_emb``.
    """
    message = await ctx.send(embed=Embed(title=first_msg, color=MAIN_COLOR))
    await asyncio.sleep(time_)
    for step in other_msgs:
        step_embed = Embed(title=step[0], color=MAIN_COLOR)
        if len(step) == 2:
            step_embed.description = step[1]
        await message.edit(embed=step_embed)
        await asyncio.sleep(time_)
    await message.edit(embed=final_emb)
async def replace_things_in_string_fancy_lemao(bot, array, string_):
    """Expand ``{placeholder}`` tokens in ``string_`` with live member/guild data.

    Args:
        bot: bot instance; used for user lookup and invite-count queries.
        array: two-element sequence ``[member, guild]`` the placeholders refer to.
        string_: template text containing tokens such as ``{user_name}``.

    Returns:
        ``string_`` with every known token replaced by its stringified value.
    """
    author = array[0]
    guild = array[1]
    inviter_id = await bot.get_inviter(author.id, guild.id)
    if inviter_id == 'Unknown':
        # No tracked inviter: substitute placeholder values for all inviter tokens.
        inviter_name = 'Unknown'
        inviter_discrim = 'Unknown'
        inviter_tag = 'Unknown'
        inviter_id_ = 'Unknown'
        inviter_mention = 'Unknown'
        inviter_avatar = 'https://cdn.discordapp.com/embed/avatars/1.png'
        inviter_invites = 'Unknown'
    else:
        # The inviter may have left / be uncached, so every field falls back
        # to 'Unknown' (or the default avatar) when the user lookup fails.
        inviter__ = bot.get_user(inviter_id)
        inviter_name = 'Unknown' if inviter__ is None else inviter__.name
        inviter_discrim = 'Unknown' if inviter__ is None else inviter__.discriminator
        inviter_tag = 'Unknown' if inviter__ is None else inviter_name + '#' + inviter_discrim
        inviter_id_ = 'Unknown' if inviter__ is None else inviter__.id
        inviter_mention = 'Unknown' if inviter__ is None else inviter__.mention
        inviter_avatar = 'https://cdn.discordapp.com/embed/avatars/1.png' if inviter__ is None else inviter__.display_avatar.url
        inviter_invites = 'Unknown' if inviter__ is None else await bot.fetch_invites(inviter_id_, guild.id)
    # Token -> value table. Free-form text values get backslashes and double
    # quotes escaped because callers splice the result into a JSON document
    # (see process_embeds_from_json, which dumps/loads through this function).
    nice = {
        "{user_name}": author.name.replace("\\", "\\\\").replace("\"", "\\\""),
        "{user_nickname}": author.display_name.replace("\\", "\\\\").replace("\"", "\\\""),
        "{user_discrim}": str(author.discriminator),
        "{user_tag}": (author.name + '#' + str(author.discriminator)).replace("\\", "\\\\").replace("\"", "\\\""),
        "{user_id}": author.id,
        "{user_mention}": author.mention,
        "{user_avatar}": author.display_avatar.url,
        "{guild_name}": guild.name.replace("\\", "\\\\").replace("\"", "\\\""),
        "{guild_id}": guild.id,
        "{guild_membercount}": guild.member_count,
        "{guild_icon}": guild.icon.url if guild.icon is not None else 'https://cdn.discordapp.com/embed/avatars/1.png',
        "{guild_owner_name}": guild.owner.name.replace("\\", "\\\\"),
        "{guild_owner_id}": guild.owner_id,
        "{guild_owner_mention}": guild.owner.mention,
        "{user_invites}": await bot.fetch_invites(author.id, guild.id),
        "{inviter_name}": inviter_name.replace("\\", "\\\\").replace("\"", "\\\""),
        "{inviter_discrim}": inviter_discrim,
        "{inviter_tag}": inviter_tag.replace("\\", "\\\\").replace("\"", "\\\""),
        "{inviter_id}": inviter_id_,
        "{inviter_mention}": inviter_mention,
        "{inviter_avatar}": inviter_avatar,
        "{inviter_invites}": inviter_invites,
    }
    # Plain sequential replacement; non-string values are stringified here.
    for i, j in nice.items():
        string_ = string_.replace(i, str(j))
    return string_
async def process_embeds_from_json(bot, array, json, replace: bool = True):
    """Build a discord Embed (plus optional plain-text content) from a JSON spec.

    Args:
        bot: bot instance, forwarded to the placeholder expander.
        array: ``[member, guild]`` pair used for placeholder expansion.
        json: dict describing the embed (title/description/author/footer/fields/...).
        replace: when True, run the whole spec through
            replace_things_in_string_fancy_lemao to expand ``{placeholder}`` tokens.

    Returns:
        ``[content, embed]`` on success, or a ``'pain ...'`` error string that the
        caller can match on when the spec is invalid.
    """
    embed = Embed()
    if replace:
        # Expand placeholders on the serialized spec, then parse it back.
        poggers = await replace_things_in_string_fancy_lemao(bot, array, json_but_pain.dumps(json))
        uwu_json = json_but_pain.loads(poggers)
    else:
        uwu_json = json
    # Key presence is checked against the original `json`; values are read from
    # the (possibly placeholder-expanded) `uwu_json` — both share the same keys.
    content = None if "plainText" not in json else uwu_json['plainText']
    embed_title = None if "title" not in json else uwu_json['title']
    embed_url = None if "url" not in json else uwu_json['url']
    embed_desc = None if "description" not in json else uwu_json['description']
    embed_image = None if "image" not in json else uwu_json['image']
    embed_thumbnail = None if "thumbnail" not in json else uwu_json['thumbnail']
    embed_color = None if "color" not in json else uwu_json['color']
    field_count = 0
    # Named color sentinels map to the configured palette.
    if embed_color == "MAIN_COLOR":
        embed_color = MAIN_COLOR
    if embed_color == "RED_COLOR":
        embed_color = RED_COLOR
    embed_author = {}
    embed_footer = {}
    if "author" in json:
        # Author requires a name; url/icon_url are optional.
        if "name" not in uwu_json['author']:
            return 'pain author name'
        embed_author.update({
            "name": uwu_json['author']['name'],
            "url": None if "url" not in uwu_json['author'] else uwu_json['author']['url'],
            "icon_url": None if "icon_url" not in uwu_json['author'] else uwu_json['author']['icon_url']
        })
    if "footer" in json:
        # Footer requires text; icon_url is optional.
        if "text" not in uwu_json['footer']:
            return 'pain footer text'
        embed_footer.update({
            "text": uwu_json['footer']['text'],
            "icon_url": None if "icon_url" not in uwu_json['footer'] else uwu_json['footer']['icon_url']
        })
    if "fields" in json:
        # Every field must have a non-empty name and value.
        for e in uwu_json['fields']:
            if e['name'] != "" and e['value'] != "":
                embed.add_field(
                    name=e['name'],
                    value=e['value'],
                    inline=e['inline']
                )
                field_count += 1
            else:
                return 'pain empty fields'
    # Apply the simple scalar attributes that were present in the spec.
    if embed_title is not None:
        embed.title = embed_title
    if embed_desc is not None:
        embed.description = embed_desc
    if embed_url is not None:
        embed.url = embed_url
    if embed_image is not None:
        embed.set_image(url=embed_image)
    if embed_thumbnail is not None:
        embed.set_thumbnail(url=embed_thumbnail)
    if embed_color is not None:
        embed.color = embed_color
    # set_author/set_footer reject explicit None kwargs, hence the branching.
    if len(embed_author) != 0:
        if embed_author['url'] is None and embed_author['icon_url'] is None:
            embed.set_author(name=embed_author['name'])
        elif embed_author['url'] is None and embed_author['icon_url'] is not None:
            embed.set_author(name=embed_author['name'], icon_url=embed_author['icon_url'])
        elif embed_author['url'] is not None and embed_author['icon_url'] is None:
            embed.set_author(name=embed_author['name'], url=embed_author['url'])
        else:
            embed.set_author(name=embed_author['name'], url=embed_author['url'], icon_url=embed_author['icon_url'])
    if len(embed_footer) != 0:
        if embed_footer['icon_url'] is None:
            embed.set_footer(text=embed_footer['text'])
        else:
            embed.set_footer(text=embed_footer['text'], icon_url=embed_footer['icon_url'])
    # Validate every URL-bearing attribute after the embed has been assembled.
    if (embed_url is not None and not validators.url(embed_url)) or (embed_image is not None and not validators.url(embed_image)) or (embed_thumbnail is not None and not validators.url(embed_thumbnail)):
        return 'pain invalid urls'
    if len(embed_author) != 0:
        if embed_author['url'] is not None and not validators.url(embed_author['url']):
            return 'pain invalid urls'
        if embed_author['icon_url'] is not None and not validators.url(embed_author['icon_url']):
            return 'pain invalid urls'
    if len(embed_footer) != 0:
        if embed_footer['icon_url'] is not None and not validators.url(embed_footer['icon_url']):
            return 'pain invalid urls'
    # Discord rejects embeds with no renderable content at all.
    if embed_title is None and embed_desc is None and len(embed_author) == 0 and len(embed_footer) == 0 and field_count == 0 and embed_image is None:
        return 'pain empty embed'
    return [content, embed]
| 3,593 |
2,611 | # Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""NestedMap dict structure."""
import re
import typing
from typing import (Any, Callable, Dict, List, Mapping, Optional, Sequence,
Tuple, TypeVar)
import lingvo.compat as tf
from typing_extensions import Literal
# Matches a valid NestedMap key: an identifier-like name.
_NAME_PATTERN = re.compile(r'[A-Za-z_][A-Za-z0-9_]*')
# Matches an indexed key of the form `name[123]`; groups: (name, index).
_SQUARE_BRACKET_PATTERN = re.compile(r'([A-Za-z_][A-Za-z0-9_]*)\[(\d+)\]')
# Generic value type used by the _FromNestedDict overloads.
T = TypeVar('T')
# Self-type for NestedMap methods that return instances of the same subclass.
NestedMapT = TypeVar('NestedMapT', bound='NestedMap')
# Overloads: mappings become NestedMap, everything else keeps its type.
@typing.overload
def _FromNestedDict(x: Mapping[str, Any]) -> 'NestedMap':
  ...
@typing.overload
def _FromNestedDict(x: T) -> T:
  ...
def _FromNestedDict(x):
  """Converts every dict in nested structure 'x' to a NestedMap."""
  if isinstance(x, Mapping):
    res = NestedMap()
    for k, v in x.items():
      res[k] = _FromNestedDict(v)
    return res
  elif isinstance(x, (list, tuple)):
    # Preserve the container type (list vs tuple) while converting elements.
    return type(x)(_FromNestedDict(v) for v in x)
  else:
    # Leaf value: returned unchanged.
    return x
class NestedMap(Dict[str, Any]):
  """A simple helper to maintain a dict.
  It is a sub-class of dict with the following extensions/restrictions:
  - It supports attr access to its members (see examples below).
  - Member keys have to be valid identifiers.
  E.g.::
      >>> foo = NestedMap()
      >>> foo['x'] = 10
      >>> foo.y = 20
      >>> assert foo.x * 2 == foo.y
  """
  # Disable pytype attribute checking.
  _HAS_DYNAMIC_ATTRIBUTES = True
  # keys in this list are not allowed in a NestedMap.
  _RESERVED_KEYS = frozenset(dir(dict))
  # sentinel value for deleting keys used in Filter.
  _DELETE = object()
  def __init__(self, *args, **kwargs) -> None:
    super().__init__(*args, **kwargs)
    for key in self.keys():
      assert isinstance(key, str), (
          'Key in a NestedMap has to be a six.string_types. Currently type: %s,'
          ' value: %s' % (str(type(key)), str(key)))
      NestedMap.CheckKey(key)
      assert key not in NestedMap._RESERVED_KEYS, ('%s is a reserved key' % key)
  def __setitem__(self, key: str, value: Any) -> None:
    # Make sure key is a valid expression and is not one of the reserved
    # attributes.
    assert isinstance(
        key,
        str), ('Key in a NestedMap has to be a string type. Current type: %s, '
               'value: %s' % (str(type(key)), str(key)))
    NestedMap.CheckKey(key)
    assert key not in NestedMap._RESERVED_KEYS, ('%s is a reserved key' % key)
    super().__setitem__(key, value)
  # Attribute access (nm.x) is forwarded to item access (nm['x']).
  def __setattr__(self, name: str, value: Any) -> None:
    self[name] = value
  def __getattr__(self, name: str) -> Any:
    try:
      return self[name]
    except KeyError as e:
      raise AttributeError(e)
  def __delattr__(self, name: str) -> None:
    try:
      del self[name]
    except KeyError as e:
      raise AttributeError(e)
  # Lookup errors are re-raised with the list of available keys for debugging.
  def __getitem__(self, name: str) -> Any:
    try:
      return super().__getitem__(name)
    except KeyError as e:
      raise KeyError('%s; available attributes: %s' %
                     (e, sorted(list(self.keys()))))
  def __delitem__(self, name: str) -> None:
    try:
      super().__delitem__(name)
    except KeyError as e:
      raise KeyError('%s; available attributes: %s' %
                     (e, sorted(list(self.keys()))))
  def copy(self: NestedMapT) -> NestedMapT: # pylint: disable=invalid-name
    # Don't delegate w/ super: dict.copy() -> dict.
    return type(self)(self)
  def __deepcopy__(self: NestedMapT, unused_memo) -> NestedMapT:
    """Deep-copies the structure but not the leaf objects."""
    return self.DeepCopy()
  def DeepCopy(self: NestedMapT) -> NestedMapT:
    """Deep-copies the structure but not the leaf objects."""
    return self.Pack(self.Flatten())
  @staticmethod
  def FromNestedDict(x):
    """Converts every dict in nested structure 'x' to a NestedMap."""
    return _FromNestedDict(x)
  def ToNestedDict(self) -> Mapping[str, Any]:
    """Converts every NestedMap in nested structure to a 'dict' instance.
    This is relevant for code that checks a dictionary's exact type, instead of
    isinstance (e.g. parts of tf.data). In those cases, we need a 'dict' object,
    not NestedMap.
    Returns:
      'dict' instance where nested NestedMaps are also converted to 'dict'.
    """
    def _ToNestedDict(x: Any) -> Any:
      """Function used to recursively convert dictionaries/lists/tuples."""
      if isinstance(x, dict): # NestedMap is a 'dict', will match here too.
        return {k: _ToNestedDict(v) for k, v in x.items()}
      elif isinstance(x, list):
        return [_ToNestedDict(v) for v in x]
      return x
    return _ToNestedDict(self)
  @staticmethod
  def CheckKey(key: str) -> None:
    """Asserts that key is valid NestedMap key."""
    if not (isinstance(key, str) and _NAME_PATTERN.match(key)):
      raise ValueError('Invalid NestedMap key \'{}\''.format(key))
  @staticmethod
  def SquareBracketIndex(key: str) -> Tuple[str, Optional[int]]:
    """Extracts the name and the index from the indexed key (e.g., k[0])."""
    m = _SQUARE_BRACKET_PATTERN.fullmatch(key)
    if not m:
      # Not an indexed key: return it unchanged with no index.
      return key, None
    else:
      return str(m.groups()[0]), int(m.groups()[1])
  def GetItem(self, key: str) -> Any:
    """Gets the value for the nested `key`.
    Names with underscores will be considered as one key.
    Args:
      key: str of the form
        `([A-Za-z_][A-Za-z0-9_]*)(.[A-Za-z_][A-Za-z0-9_]*)*.`.
    Returns:
      The value for the given nested key.
    Raises:
      KeyError: if a key is not present.
      IndexError: when an intermediate item is a list and we try to access
        an element which is out of range.
      TypeError: when an intermediate item is a list and we try to access
        an element of it with a string.
    """
    current = self
    # Walk the dotted path one component at a time, applying an optional
    # [idx] list subscript at each step.
    for k in key.split('.'):
      k, idx = self.SquareBracketIndex(k)
      current = current[k]
      if idx is not None:
        current = current[idx]
    return current
  def Get(self, key: str, default: Optional[Any] = None) -> Any:
    """Gets the value for nested `key`, returns `default` if key does not exist.
    Names with underscores will be considered as one key.
    Args:
      key: str of the form
        `([A-Za-z_][A-Za-z0-9_]*)(.[A-Za-z_][A-Za-z0-9_]*)*.`.
      default: Optional default value, defaults to None.
    Returns:
      The value for the given nested key or `default` if the key does not exist.
    """
    try:
      return self.GetItem(key)
    except (KeyError, IndexError, TypeError):
      return default
  def Set(self, key: str, value: Any) -> None:
    r"""Sets the value for a nested key.
    There is limited support for indexing lists when square bracket indexing is
    used, e.g., key[0], key[1], etc. Names with underscores will be considered
    as one key. When key[idx] is set, all of the values with indices before idx
    must be already set. E.g., setting key='a[2]' to value=42 when
    key='a' wasn't referenced before will throw a ValueError. Setting key='a[0]'
    will not.
    Args:
      key: str of the form key_part1.key_part2...key_partN where each key_part
        is of the form `[A-Za-z_][A-Za-z0-9_]*` or
        `[A-Za-z_][A-Za-z0-9_]*\[\d+\]`
      value: The value to insert.
    Raises:
      ValueError if a sub key is not a NestedMap or dict or idx > list length
      for key='key[idx]'.
    """
    current = self
    sub_keys = key.split('.')
    for i, k in enumerate(sub_keys):
      self.CheckKey(k) # CheckKey allows k to be of form k[\d+]
      k, idx = self.SquareBracketIndex(k)
      if idx is not None: # this is key with index pointing to a list item.
        # create a list if not there yet.
        if k not in current:
          current[k] = []
        if idx > len(current[k]):
          raise ValueError('Error while setting key {}. The value under {} is a'
                           ' list and the index {} is greater than the len={} '
                           'of this list'.format(key, k, idx, len(current[k])))
        elif idx == len(current[k]):
          current[k].extend([None]) # this None will be overwritten right away.
      # We have reached the terminal node, set the value.
      if i == (len(sub_keys) - 1):
        if idx is None:
          current[k] = value
        else:
          current[k][idx] = value
      else:
        # Intermediate node: descend, materializing NestedMaps as needed.
        if idx is None:
          if k not in current:
            current[k] = NestedMap()
          current = current[k]
        else:
          if current[k][idx] is None:
            current[k][idx] = NestedMap()
          current = current[k][idx]
        if not isinstance(current, (dict, NestedMap)):
          raise ValueError('Error while setting key {}. Sub key "{}" is of type'
                           ' {} but must be a dict or NestedMap.'
                           ''.format(key, k, type(current)))
  @typing.overload
  def _RecursiveMap(self: NestedMapT,
                    fn: Callable[[str, Any], Any],
                    flatten: Literal[False] = False) -> NestedMapT:
    ...
  @typing.overload
  def _RecursiveMap(self, fn: Callable[[str, Any], Any],
                    flatten: Literal[True]) -> List[Any]:
    ...
  def _RecursiveMap(self, fn: Callable[[str, Any], Any], flatten: bool = False):
    """Traverse recursively into lists, dicts, and NestedMaps applying `fn`.
    Args:
      fn: The function to apply to each item (leaf node).
      flatten: If true, the result should be a single flat list. Otherwise the
        result will have the same structure as this NestedMap.
    Returns:
      The result of applying fn.
    """
    def Recurse(v: Any, key: str = '') -> Any:
      """Helper function for _RecursiveMap."""
      if isinstance(v, dict):
        ret = [] if flatten else type(v)()
        deleted = False
        # Dict keys are visited in sorted order so traversal is deterministic.
        for k in sorted(v.keys()):
          res = Recurse(v[k], key + '.' + k if key else k)
          if res is self._DELETE:
            deleted = True
            continue
          elif flatten:
            ret += res
          else:
            ret[k] = res
        if not ret and deleted:
          # Every child was deleted: propagate the deletion sentinel upward.
          return self._DELETE
        return ret
      elif isinstance(v, list):
        ret = []
        deleted = False
        for i, x in enumerate(v):
          res = Recurse(x, '%s[%d]' % (key, i))
          if res is self._DELETE:
            deleted = True
            continue
          elif flatten:
            ret += res
          else:
            ret.append(res)
        if not ret and deleted:
          return self._DELETE
        return ret
      else:
        # Leaf node: apply fn; wrap in a list when producing a flat result.
        ret = fn(key, v)
        if flatten:
          ret = [ret]
        return ret
    res = Recurse(self)
    if res is self._DELETE:
      return [] if flatten else type(self)()
    assert isinstance(res, (list, NestedMap))
    return res
  def Flatten(self) -> List[Any]:
    """Returns a list containing the flattened values in the `.NestedMap`.
    Unlike py_utils.Flatten(), this will only descend into lists, dicts, and
    NestedMaps and not tuples, or namedtuples.
    """
    return self._RecursiveMap(lambda _, v: v, flatten=True)
  def FlattenItems(self) -> List[Tuple[Any, Any]]:
    """Flatten the `.NestedMap` and returns <key, value> pairs in a list.
    Returns:
      A list of <key, value> pairs, where keys for nested entries will be
      represented in the form of `foo.bar[10].baz`.
    """
    return self._RecursiveMap(lambda k, v: (k, v), flatten=True)
  def Pack(self: NestedMapT, lst: Sequence[Any]) -> NestedMapT:
    """Returns a copy of this with each value replaced by a value in lst."""
    assert len(self.FlattenItems()) == len(lst)
    v_iter = iter(lst)
    return self._RecursiveMap(lambda unused_k, unused_v: next(v_iter))
  def Transform(self: NestedMapT, fn: Callable[[Any], Any]) -> NestedMapT:
    """Returns a copy of this `.NestedMap` with fn applied on each value."""
    return self._RecursiveMap(lambda _, v: fn(v))
  def TransformWithKey(self: NestedMapT, fn: Callable[[str, Any],
                                                      Any]) -> NestedMapT:
    """Returns a copy of this `.NestedMap` with fn applied on each key/value."""
    return self._RecursiveMap(fn)
  def IsCompatible(self, other: 'NestedMap') -> bool:
    """Returns true if self and other are compatible.
    If x and y are two compatible `.NestedMap`, `x.Pack(y.Flatten())` produces y
    and vice versa.
    Args:
      other: Another `.NestedMap`.
    """
    items = self._RecursiveMap(lambda k, _: k, flatten=True)
    other_items = other._RecursiveMap(lambda k, _: k, flatten=True) # pylint: disable=protected-access
    return items == other_items
  def Filter(self: NestedMapT, fn: Callable[[Any], bool]) -> NestedMapT:
    """Returns a copy with entries where fn(entry) is True."""
    return self.FilterKeyVal(lambda _, v: fn(v))
  def FilterKeyVal(self: NestedMapT, fn: Callable[[str, Any],
                                                  bool]) -> NestedMapT:
    """Returns a copy of this `.NestedMap` filtered by fn.
    If fn(key, entry) is True, the entry is copied into the returned NestedMap.
    Otherwise, it is not copied.
    Args:
      fn: a callable of (string, entry)->boolean.
    Returns:
      A `.NestedMap` contains copied entries from this `'.NestedMap`.
    """
    return self._RecursiveMap(lambda k, v: v if fn(k, v) else self._DELETE)
  def _ToStrings(self) -> List[str]:
    """Returns debug strings in a list for this `.NestedMap`."""
    kv = self.FlattenItems()
    maxlen = max([len(k) for k, _ in kv]) if kv else 0
    return sorted([k + ' ' * (4 + maxlen - len(k)) + str(v) for k, v in kv])
  def DebugString(self) -> str:
    """Returns a debug string for this `.NestedMap`."""
    return '\n'.join(self._ToStrings())
  def VLog(self,
           level: Optional[int] = None,
           prefix: Optional[str] = None) -> None:
    """Logs the debug string at the level."""
    if level is None:
      level = 0
    if prefix is None:
      prefix = 'nmap: '
    for l in self._ToStrings():
      tf.logging.vlog(level, '%s %s', prefix, l)
  def __dir__(self) -> List[str]:
    """dir() that includes flattened keys in returned output."""
    keys = self._RecursiveMap(lambda k, v: k, flatten=True)
    return keys + super().__dir__() # pytype: disable=attribute-error
| 6,149 |
354 | #ifndef _EGLUSTRUTIL_HPP
#define _EGLUSTRUTIL_HPP
/*-------------------------------------------------------------------------
* drawElements Quality Program EGL Utilities
* ------------------------------------------
*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*//*!
* \file
* \brief EGL String Utilities.
*//*--------------------------------------------------------------------*/
#include "tcuDefs.hpp"
#include "tcuFormatUtil.hpp"
namespace eglu
{
struct ConfigAttribValueFmt
{
deUint32 attribute;
int value;
ConfigAttribValueFmt (deUint32 attribute_, int value_) : attribute(attribute_), value(value_) {}
};
struct SurfaceAttribValueFmt
{
deUint32 attribute;
int value;
SurfaceAttribValueFmt (deUint32 attribute_, int value_) : attribute(attribute_), value(value_) {}
};
struct ContextAttribValueFmt
{
deUint32 attribute;
int value;
ContextAttribValueFmt (deUint32 attribute_, int value_) : attribute(attribute_), value(value_) {}
};
// Small wrappers around an attribute-list pointer so the whole list can be
// pretty-printed through the operator<< overloads declared below.
struct ConfigAttribListFmt
{
	const int* attribs;
	ConfigAttribListFmt (const int* attribList) : attribs(attribList) {}
};
struct SurfaceAttribListFmt
{
	const int* attribs;
	SurfaceAttribListFmt (const int* attribList) : attribs(attribList) {}
};
struct ContextAttribListFmt
{
	const int* attribs;
	ContextAttribListFmt (const int* attribList) : attribs(attribList) {}
};
// Convenience factories for the formatter wrappers above, used as e.g.
// `log << getConfigAttribValueStr(attr, value)`. The operator<< overloads are
// implemented in the corresponding .cpp.
inline ConfigAttribValueFmt getConfigAttribValueStr (deUint32 attribute, int value) { return ConfigAttribValueFmt(attribute, value); }
std::ostream& operator<< (std::ostream& str, const ConfigAttribValueFmt& attribFmt);
inline SurfaceAttribValueFmt getSurfaceAttribValueStr (deUint32 attribute, int value) { return SurfaceAttribValueFmt(attribute, value); }
std::ostream& operator<< (std::ostream& str, const SurfaceAttribValueFmt& attribFmt);
inline ContextAttribValueFmt getContextAttribValueStr (deUint32 attribute, int value) { return ContextAttribValueFmt(attribute, value); }
std::ostream& operator<< (std::ostream& str, const ContextAttribValueFmt& attribFmt);
inline ConfigAttribListFmt getConfigAttribListStr (const int* attribs) { return ConfigAttribListFmt(attribs); }
std::ostream& operator<< (std::ostream& str, const ConfigAttribListFmt& fmt);
inline SurfaceAttribListFmt getSurfaceAttribListStr (const int* attribs) { return SurfaceAttribListFmt(attribs); }
std::ostream& operator<< (std::ostream& str, const SurfaceAttribListFmt& fmt);
inline ContextAttribListFmt getContextAttribListStr (const int* attribs) { return ContextAttribListFmt(attribs); }
std::ostream& operator<< (std::ostream& str, const ContextAttribListFmt& fmt);
#include "egluStrUtilPrototypes.inl"
} // eglu
#endif // _EGLUSTRUTIL_HPP
| 1,112 |
343 | #include "stdafx.h"
// C-callable factory for an opennurbs point grid with `rows` x `columns`
// points. The caller assumes ownership of the returned heap allocation.
RH_C_FUNCTION ON_PointGrid* ON_PointGrid_New(int rows, int columns)
{
  return new ON_PointGrid(rows, columns);
}
| 54 |
2,453 | <filename>XVim2/XcodeHeader/IDESourceEditor/_TtC15IDESourceEditor39StructuredSelectionActionMenuController.h
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 30 2020 21:18:12).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import "_$s12SourceEditor20ActionMenuControllerCN.h"
// Reverse-engineered (class-dump) header for a Swift class; MISSING_TYPE is a
// placeholder class-dump emits when it cannot recover the Swift ivar types.
@class MISSING_TYPE;
__attribute__((visibility("hidden")))
@interface _TtC15IDESourceEditor39StructuredSelectionActionMenuController : _$s12SourceEditor20ActionMenuControllerCN
{
    MISSING_TYPE *range;
    MISSING_TYPE *context;
    MISSING_TYPE *nameRange;
}
- (void).cxx_destruct;
- (id)initWithCoder:(id)arg1;
@end
| 223 |
411 | // License: BSD 3 clause
#include "tick/hawkes/model/base/model_hawkes_list.h"
#include "tick/hawkes/model/model_hawkes_utils.h"
// Constructs an empty multi-realization Hawkes model; realization-dependent
// arrays start with size 0 and are (re)built by set_data().
ModelHawkesList::ModelHawkesList(const int max_n_threads,
                                 const unsigned int optimization_level)
    : ModelHawkes(max_n_threads, optimization_level),
      n_realizations(0),
      timestamps_list(0) {
  n_jumps_per_realization = VArrayULong::new_ptr(n_realizations);
  end_times = VArrayDouble::new_ptr(n_realizations);
}
// Installs the event data: one list of per-node timestamp arrays per
// realization, plus the corresponding end times. Also refreshes the cached
// jump counts and invalidates previously computed weights.
void ModelHawkesList::set_data(const SArrayDoublePtrList2D &timestamps_list,
                               const VArrayDoublePtr end_times) {
  // Validates the input and derives n_realizations / n_nodes / jump counts.
  const auto timestamps_list_descriptor =
      describe_timestamps_list(timestamps_list, end_times);
  n_realizations = timestamps_list_descriptor.n_realizations;
  set_n_nodes(timestamps_list_descriptor.n_nodes);
  n_jumps_per_node = timestamps_list_descriptor.n_jumps_per_node;
  n_jumps_per_realization = timestamps_list_descriptor.n_jumps_per_realization;
  this->timestamps_list = timestamps_list;
  this->end_times = end_times;
  // Cached weights depend on the data, so force recomputation.
  weights_computed = false;
}
unsigned int ModelHawkesList::get_n_threads() const {
return std::min(this->max_n_threads,
static_cast<unsigned int>(n_nodes * n_realizations));
}
| 531 |
1,738 | /*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.
#include "pch.h"
#include "BlockPalette.h"
#include "BlockPaletteContent.h"
#include <QPainter>
#include <QMouseEvent>
#include <QMenu>
// File-local layout constants and helper types.
namespace {
    // Pixel metrics used when laying out and painting palette elements.
    enum
    {
        SELECTION_WIDTH = 4,
        WIDGET_MARGIN = 4,
        ELEMENT_PADDING = 3,
        ELEMENT_MARGIN = 5,
        MINIMAL_DROP_DISTANCE = 24
    };
    // One laid-out palette element: just its screen rectangle.
    struct SLayoutItem
    {
        QRect rect;
    };
    typedef std::vector<SLayoutItem> SLayoutItems;
}
// Result of CalculateLayout(): total pixel height plus one rect per item.
struct BlockPaletteLayout
{
    int height;
    SLayoutItems items;
};
// Places one element of `elementWidth` x `lineHeight` into a left-to-right
// flow layout. `currentTop`/`currentLeft` are the running cursor and are
// advanced in place; when the element does not fit in `width`, the cursor
// wraps to the start of a new row. Returns the element's rectangle.
static QRect AddElementToLayout(int* currentTop, int* currentLeft, int width, int elementWidth, int lineHeight, int padding)
{
    if (*currentLeft + padding + elementWidth < width)
    {
        // Fits on the current row: place at the cursor and advance.
        QRect result(*currentLeft, *currentTop, elementWidth, lineHeight);
        *currentLeft += padding + elementWidth;
        return result;
    }
    else
    {
        // Wrap: element goes at x=padding on a new row; the cursor is set just
        // past it (padding + elementWidth + padding).
        *currentLeft = padding + elementWidth + padding;
        *currentTop += padding + lineHeight;
        return QRect(padding, *currentTop, elementWidth, lineHeight);
    }
}
// Computes a left-to-right, wrap-around layout for all palette items.
// One rectangle is produced per content item (sized from the item's label
// width); `layout->height` receives the total height needed at widget `width`.
static void CalculateLayout(BlockPaletteLayout* layout, const BlockPaletteContent& content, const QFont& font, int width)
{
    layout->items.clear();
    int padding = ELEMENT_PADDING;
    layout->height = padding;
    int currentLeft = padding;
    int margin = WIDGET_MARGIN;
    QFontMetrics metrics(font);
    // Row height tracks the font but never drops below 24 px.
    int lineHeight = metrics.height() + margin * 2;
    if (lineHeight < 24)
    {
        lineHeight = 24;
    }
    // Reserve one slot per content item. (The previous code reserved
    // layout->items.size() immediately after clear(), i.e. always zero.)
    layout->items.reserve(content.items.size());
    for (size_t i = 0; i < content.items.size(); ++i)
    {
        const BlockPaletteItem& item = content.items[i];
        SLayoutItem litem;
        int elementWidth = metrics.horizontalAdvance(QString::fromLocal8Bit(item.name.c_str())) + 2 * ELEMENT_MARGIN;
        // `layout->height` doubles as the running row top while laying out.
        litem.rect = AddElementToLayout(&layout->height, &currentLeft, width, elementWidth, lineHeight, padding);
        layout->items.push_back(litem);
    }
    // Account for the last row plus trailing padding.
    layout->height += padding + lineHeight;
}
static int HitItem(const SLayoutItems& items, const QPoint& pos)
{
for (size_t i = 0; i < items.size(); ++i)
{
const SLayoutItem& item = items[i];
if (item.rect.contains(pos))
{
return int(i);
}
}
return -1;
}
// Fills `selectedItems` with the indices of every layout item whose rect
// intersects `rect`; any previous contents of `selectedItems` are discarded.
static void HitItems(std::vector<int>* selectedItems, const SLayoutItems& items, const QRect& rect)
{
    selectedItems->clear();
    int index = 0;
    for (const SLayoutItem& layoutItem : items)
    {
        if (layoutItem.rect.intersects(rect))
        {
            selectedItems->push_back(index);
        }
        ++index;
    }
}
// Finds the insertion index closest to point `p` for a drag-and-drop, by
// measuring Manhattan distance to the corners of the laid-out items. Slots
// adjacent to `excludingIndex` (the dragged item) are skipped. Returns -1 if
// there are no items or the nearest corner is farther than
// MINIMAL_DROP_DISTANCE.
// NOTE(review): `i` is size_t while `excludingIndex` is int, so the
// comparisons promote `excludingIndex` to unsigned; a negative value (e.g. -1
// meaning "exclude nothing") becomes SIZE_MAX, which makes `i <
// excludingIndex` always true and thus excludes nothing — apparently the
// intended effect, but worth confirming.
static int FindDropIndex(const SLayoutItems& items, const QPoint& p, int excludingIndex)
{
    if (items.empty())
    {
        return -1;
    }
    // (distance, candidate insertion index) pairs.
    typedef std::vector<std::pair<int, int> > DistanceToIndex;
    DistanceToIndex distances;
    for (size_t i = 0; i < items.size(); ++i)
    {
        const QRect& r = items[i].rect;
        // Left edge corners represent inserting at index i.
        if (i < excludingIndex || i > excludingIndex + 1)
        {
            distances.push_back(std::make_pair((p - r.topLeft()).manhattanLength(), int(i)));
            distances.push_back(std::make_pair((p - r.bottomLeft()).manhattanLength(), int(i)));
        }
        // Right edge corners represent inserting at index i + 1.
        if (i + 1 < excludingIndex || i + 1 > excludingIndex + 1)
        {
            distances.push_back(std::make_pair((p - r.topRight()).manhattanLength(), int(i + 1)));
            distances.push_back(std::make_pair((p - r.bottomRight()).manhattanLength(), int(i + 1)));
        }
    }
    if (distances.empty())
    {
        return -1;
    }
    // Sort by distance; pairs order by .first (distance) first.
    std::sort(distances.begin(), distances.end());
    if (distances[0].first < MINIMAL_DROP_DISTANCE)
    {
        return distances[0].second;
    }
    else
    {
        return -1;
    }
}
// Paints a single palette element: optional selection outline, drop shadow,
// rounded colored body, centered label, and (when `hotkey` != -1) a small
// hotkey digit in the bottom-left corner.
static void DrawItem(QPainter& painter, const SLayoutItem& litem, const BlockPaletteItem& item, bool selected, bool hasFocus, const QPalette& palette, int hotkey)
{
    QRectF rect = QRectF(litem.rect.adjusted(1, 2, -1, -2));
    // Aspect ratio guards against division by zero for degenerate rects.
    float ratio = rect.width() != 0 ? (rect.height() != 0 ? float(rect.width()) / float(rect.height()) : 1.0f) : 1.0f;
    float radius = 0.2f;
    // Relative-size corner radii; rx is scaled so corners look round, not oval.
    float rx = radius * 200.0f / ratio;
    float ry = radius * 200.0f;
    if (selected)
    {
        // Selection outline drawn just outside the element body; dimmed
        // (Shadow instead of Highlight) when the widget lacks focus.
        QRectF selectionRect = rect.adjusted(-SELECTION_WIDTH * 0.5f + 0.5f, -SELECTION_WIDTH * 0.5f + 0.5f, SELECTION_WIDTH * 0.5f - 0.5f, SELECTION_WIDTH * 0.5f - 0.5f);
        painter.setPen(QPen(palette.color(hasFocus ? QPalette::Highlight : QPalette::Shadow), SELECTION_WIDTH));
        painter.setBrush(QBrush(Qt::NoBrush));
        painter.drawRoundedRect(selectionRect, rx, ry, Qt::RelativeSize);
    }
    // Semi-transparent drop shadow slightly below/around the body.
    QRectF shadowRect = rect.adjusted(-1.0f, -0.5f, 1.0f, 1.5f);
    painter.setPen(QPen(QColor(0, 0, 0, 128), 2.0f));
    painter.setBrush(QBrush(Qt::NoBrush));
    painter.drawRoundedRect(shadowRect, rx, ry, Qt::RelativeSize);
    // Element body filled with the item's own color.
    painter.setPen(QPen(QColor(item.color.r, item.color.g, item.color.b, 255)));
    painter.setBrush(QBrush(QColor(item.color.r, item.color.g, item.color.b, 255)))
;
    painter.drawRoundedRect(rect, rx, ry, Qt::RelativeSize);
    // Centered label; shifted right a little when a hotkey badge is shown.
    painter.setBrush(QColor(0, 0, 0))
;
    painter.setPen(QColor(0, 0, 0));
    painter.drawText(litem.rect.adjusted(hotkey != -1 ? ELEMENT_MARGIN : 0, 0, 0, 0), QString::fromLocal8Bit(item.name.c_str()), QTextOption(Qt::AlignCenter));
    if (hotkey != -1)
    {
        // Hotkey digit rendered smaller and bold in the bottom-left corner.
        QRectF hotkeyRect = rect.adjusted(2, 1, -2, -1);
        QFont oldFont = painter.font();
        QFont font = oldFont;
        font.setBold(true);
        font.setPointSizeF(oldFont.pointSizeF() * 0.666f);
        painter.setFont(font);
        QString str;
        str.asprintf("%d", hotkey);
        painter.setBrush(QColor(0, 0, 0, 128));
        painter.drawText(hotkeyRect, str, QTextOption(Qt::AlignBottom | Qt::AlignLeft));
        painter.setFont(oldFont);
    }
}
// Abstract per-gesture mouse handler (selection rubber band, drag, ...).
// NOTE(review): the destructor calls the virtual Finish(), but during base
// destruction virtual dispatch resolves to MouseHandler::Finish() (a no-op),
// never to a derived override — derived handlers rely on Finish() being
// called explicitly (e.g. from MouseReleaseEvent); confirm this is intended.
struct BlockPalette::MouseHandler
{
    virtual ~MouseHandler() { Finish(); }
    virtual void MousePressEvent(QMouseEvent* ev) = 0;
    virtual void MouseMoveEvent(QMouseEvent* ev) = 0;
    virtual void MouseReleaseEvent(QMouseEvent* ev) = 0;
    // Called when the gesture completes; default does nothing.
    virtual void Finish() {}
    // Optional painting hooks above/below the regular widget painting.
    virtual void PaintOver(QPainter& painter) {}
    virtual void PaintUnder(QPainter& painter) {}
};
// Rubber-band selection gesture: drags out a rectangle and selects every
// palette item intersecting it, painting the translucent band via PaintOver.
struct BlockPalette::SelectionHandler
    : MouseHandler
{
    BlockPalette* m_palette;
    QPoint m_startPoint; // anchor corner where the drag began
    QRect m_rect;        // current rubber-band rectangle
    SelectionHandler(BlockPalette* palette)
        : m_palette(palette)
    {
    }
    void Finish() override
    {
        m_palette->SignalSelectionChanged();
    }
    void MousePressEvent(QMouseEvent* ev) override
    {
        m_startPoint = ev->pos();
        m_rect = QRect(m_startPoint, m_startPoint + QPoint(1, 1));
    }
    void MouseMoveEvent(QMouseEvent* ev) override
    {
        m_rect = QRect(m_startPoint, ev->pos() + QPoint(1, 1));
        std::vector<int> selectedItems;
        HitItems(&selectedItems, m_palette->m_layout->items, m_rect);
        // NOTE(review): only the selection *size* is compared, so moving the
        // band to a different set of items of equal count updates nothing —
        // confirm whether a full element-wise comparison was intended.
        if (m_palette->m_selectedItems.size() != selectedItems.size())
        {
            m_palette->m_selectedItems = selectedItems;
            m_palette->SignalSelectionChanged();
        }
    }
    void MouseReleaseEvent(QMouseEvent* ev) override
    {
        Finish();
    }
    // NOTE(review): missing `override` — hides rather than overrides if the
    // base signature ever changes.
    void PaintOver(QPainter& painter)
    {
        painter.save();
        // Clip the band to the widget's interior before painting it.
        QRect rect = m_rect.intersected(QRect(1, 1, m_palette->width() - 2, m_palette->height() - 2));
        QColor highlightColor = m_palette->palette().color(QPalette::Highlight);
        QColor highlightColorA = QColor(highlightColor.red(), highlightColor.green(), highlightColor.blue(), 128);
        painter.setPen(QPen(highlightColor));
        painter.setBrush(QBrush(highlightColorA));
        painter.drawRect(QRectF(rect));
        painter.restore();
    }
};
// Handler for left-button presses that hit an item: a plain click can add the
// item (single-click mode), while moving past a small threshold drags the
// current selection to a new position, reordering the palette.
struct BlockPalette::DragHandler
    : MouseHandler
{
    BlockPalette* m_palette;
    QPoint m_startPoint;  // position of the initial press
    QPoint m_lastPoint;   // most recent mouse position seen
    bool m_moved;         // set once the drag threshold has ever been exceeded
    int m_dropIndex;      // insertion index for the drop; -1 = no drop pending

    DragHandler(BlockPalette* palette)
        : m_palette(palette)
        , m_moved(false)
        , m_dropIndex(-1)
    {
    }

    // True once the cursor strayed more than 5 px (manhattan distance) from
    // the press point, so simple clicks are not treated as drags.
    bool DragStarted() const
    {
        return (m_lastPoint - m_startPoint).manhattanLength() > 5;
    }

    // Commits a completed drag: rebuilds the item list with the dragged items
    // re-inserted at m_dropIndex, remaps hotkey targets to the new indices,
    // and re-selects the moved items.
    void Finish() override
    {
        if (m_moved)
        {
            if (m_dropIndex != -1)
            {
                // oldToNewIndex[i] maps an item's old position to its new one.
                std::vector<int> oldToNewIndex;
                oldToNewIndex.resize(m_palette->m_content->items.size(), -1);
                BlockPaletteItems items;
                int selectionStart = 0;
                // 1) Non-dragged items located before the drop point.
                for (size_t i = 0; i < m_dropIndex; ++i)
                {
                    if (std::find(m_palette->m_draggedItems.begin(), m_palette->m_draggedItems.end(), i) == m_palette->m_draggedItems.end())
                    {
                        oldToNewIndex[i] = items.size();
                        items.push_back(m_palette->m_content->items[i]);
                        ++selectionStart;
                    }
                }
                // 2) The dragged items themselves, keeping their relative order.
                for (size_t i = 0; i < m_palette->m_draggedItems.size(); ++i)
                {
                    int index = m_palette->m_draggedItems[i];
                    oldToNewIndex[index] = items.size();
                    items.push_back(m_palette->m_content->items[index]);
                }
                // 3) Non-dragged items at or after the drop point.
                for (size_t i = m_dropIndex; i < m_palette->m_content->items.size(); ++i)
                {
                    if (std::find(m_palette->m_draggedItems.begin(), m_palette->m_draggedItems.end(), i) == m_palette->m_draggedItems.end())
                    {
                        oldToNewIndex[i] = items.size();
                        items.push_back(m_palette->m_content->items[i]);
                    }
                }
                // Retarget hotkeys at the moved items' new positions.
                for (size_t i = 0; i < m_palette->m_hotkeys.size(); ++i)
                {
                    int oldIndex = m_palette->m_hotkeys[i];
                    if (size_t(oldIndex) < oldToNewIndex.size())
                    {
                        m_palette->m_hotkeys[i] = oldToNewIndex[oldIndex];
                    }
                }
                m_palette->m_content->items.swap(items);
                // The moved items now occupy a contiguous run starting at
                // selectionStart; select exactly that run.
                m_palette->m_selectedItems.clear();
                for (size_t i = selectionStart; i < selectionStart + m_palette->m_draggedItems.size(); ++i)
                {
                    m_palette->m_selectedItems.push_back(i);
                }
                m_palette->UpdateLayout();
                m_palette->SignalChanged();
                m_palette->SignalSelectionChanged();
            }
        }
        m_palette->m_draggedItems.clear();
    }

    void MousePressEvent(QMouseEvent* ev) override
    {
        m_startPoint = ev->pos();
        m_lastPoint = m_startPoint;
    }

    void MouseMoveEvent(QMouseEvent* ev) override
    {
        // Detect transitions across the drag threshold to start the drag
        // (copy the selection into m_draggedItems) or cancel it.
        bool wasDraggingBefore = DragStarted();
        m_lastPoint = ev->pos();
        bool dragStarted = DragStarted();
        if (!wasDraggingBefore && DragStarted())
        {
            m_palette->m_draggedItems = m_palette->m_selectedItems;
            m_moved = true;
        }
        if (wasDraggingBefore && !DragStarted())
        {
            m_palette->m_draggedItems.clear();
        }
        if (dragStarted && !m_palette->m_selectedItems.empty())
        {
            m_dropIndex = FindDropIndex(m_palette->m_layout->items, m_lastPoint, m_palette->m_selectedItems[0]);
        }
        else
        {
            m_dropIndex = -1;
        }
    }

    void MouseReleaseEvent(QMouseEvent* ev) override
    {
        // In single-click mode a plain click (no drag) activates the item.
        if (m_palette->m_addWithSingleClick && !m_moved)
        {
            int itemIndex = HitItem(m_palette->m_layout->items, ev->pos());
            if (itemIndex >= 0)
            {
                m_palette->ItemClicked(itemIndex);
            }
        }
        Finish();
    }

    // Draws the drop-position indicator (thick vertical line) under the items.
    void PaintUnder(QPainter& painter) override
    {
        if (m_dropIndex != -1)
        {
            QPoint p0, p1;
            if (m_dropIndex == m_palette->m_layout->items.size())
            {
                // Dropping past the last item: mark its right edge.
                QRect itemRect = m_palette->m_layout->items.back().rect;
                p0 = itemRect.topRight();
                p1 = itemRect.bottomRight();
            }
            else
            {
                QRect itemRect = m_palette->m_layout->items[m_dropIndex].rect;
                p0 = itemRect.topLeft();
                p1 = itemRect.bottomLeft();
            }
            painter.setPen(QPen(m_palette->palette().color(QPalette::Highlight), 8, Qt::SolidLine, Qt::RoundCap));
            painter.drawLine(p0, p1);
        }
    }

    // Draws the first selected item following the cursor while dragging.
    void PaintOver(QPainter& painter) override
    {
        if (DragStarted())
        {
            if (m_palette->m_selectedItems.empty())
            {
                return;
            }
            int index = m_palette->m_selectedItems[0];
            SLayoutItem litem = m_palette->m_layout->items[index];
            litem.rect = litem.rect.translated(m_lastPoint - m_startPoint);
            DrawItem(painter, litem, m_palette->m_content->items[index], true, true, m_palette->palette(), -1);
        }
    }
};
// The palette stretches horizontally and grows vertically as the layout needs
// (Expanding/Minimum); it accepts keyboard focus so digit hotkeys work.
BlockPalette::BlockPalette(QWidget* parent)
    : QWidget(parent)
    , m_layout(new BlockPaletteLayout())
    , m_content(new BlockPaletteContent())
    , m_addWithSingleClick(false)
{
    setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Minimum);
    setMinimumHeight(28);
    setFocusPolicy(Qt::StrongFocus);
}
// Defined out-of-line — presumably so the smart-pointer members' deleters see
// the complete BlockPaletteLayout/BlockPaletteContent types (confirm header).
BlockPalette::~BlockPalette()
{
}
// Recomputes item geometry for the current width, schedules a repaint, and
// lets the widget's minimum height follow the layout so all rows stay visible.
void BlockPalette::UpdateLayout()
{
    CalculateLayout(m_layout.get(), *m_content, font(), width());
    update();
    setMinimumHeight(m_layout->height);
}
// Replaces the palette content with a copy of `content` and relayouts.
void BlockPalette::SetContent(const BlockPaletteContent& content)
{
    *m_content = content;
    UpdateLayout();
}
// When enabled, a single click on an item triggers the item-clicked signal;
// otherwise a double click is required (see DragHandler / mouseDoubleClickEvent).
void BlockPalette::SetAddEventWithSingleClick(bool addWithSingleClick)
{
    m_addWithSingleClick = addWithSingleClick;
}
// Paint order: handler underlay (e.g. drop indicator), then every item that is
// not currently being dragged, then handler overlay (rubber band / drag ghost).
void BlockPalette::paintEvent(QPaintEvent* ev)
{
    bool hasFocus = this->hasFocus();
    QPainter painter(this);
    painter.setRenderHint(QPainter::Antialiasing);
    // Half-pixel shift so 1px antialiased strokes land on pixel centers.
    painter.translate(0.5f, 0.5f);
    if (m_mouseHandler)
    {
        m_mouseHandler->PaintUnder(painter);
    }
    for (size_t i = 0; i < m_layout->items.size(); ++i)
    {
        // Items being dragged are drawn by the handler's overlay instead.
        bool dragged = std::find(m_draggedItems.begin(), m_draggedItems.end(), int(i)) != m_draggedItems.end();
        if (dragged)
        {
            continue;
        }
        bool selected = std::find(m_selectedItems.begin(), m_selectedItems.end(), int(i)) != m_selectedItems.end();
        const BlockPaletteItem& item = m_content->items[i];
        const SLayoutItem& litem = m_layout->items[i];
        // Find the hotkey slot (if any) pointing at this item so DrawItem can
        // render the digit badge. (Explicit int cast avoids the old
        // signed/unsigned comparison; a stray empty statement was removed.)
        int hotkey = -1;
        for (size_t j = 0; j < m_hotkeys.size(); ++j)
        {
            if (m_hotkeys[j] == int(i))
            {
                hotkey = int(j);
                break;
            }
        }
        DrawItem(painter, litem, item, selected, hasFocus, palette(), hotkey);
    }
    if (m_mouseHandler)
    {
        m_mouseHandler->PaintOver(painter);
    }
}
// Forwards a click on item `itemIndex` to listeners. Out-of-range indices are
// ignored (a negative index wraps to a huge size_t and fails the check too).
void BlockPalette::ItemClicked(int itemIndex)
{
    const bool inRange = size_t(itemIndex) < m_content->items.size();
    if (inRange)
    {
        SignalItemClicked(m_content->items[itemIndex]);
    }
}
// Binds hotkey slot `hotkey` (0..9) to the first selected item. An item may
// own at most one hotkey, so any slot previously pointing at it is cleared.
void BlockPalette::AssignHotkey(int hotkey)
{
    // Negative values wrap to a huge size_t and are rejected here.
    if (size_t(hotkey) >= 10)
    {
        return;
    }
    // BUG FIX: the old code read m_selectedItems[0] unconditionally; guard
    // against being invoked with an empty selection.
    if (m_selectedItems.empty())
    {
        return;
    }
    // -1 is the "unassigned" sentinel for a hotkey slot.
    m_hotkeys.resize(10, -1);
    for (size_t i = 0; i < m_hotkeys.size(); ++i)
    {
        if (m_hotkeys[i] == m_selectedItems[0])
        {
            m_hotkeys[i] = -1;
        }
    }
    m_hotkeys[hotkey] = m_selectedItems[0];
    update();
}
// Left button: select the hit item and start a DragHandler, or clear the
// selection and start a rubber-band SelectionHandler on empty space.
// Right button: (re)select the hit item if needed and show the context menu.
void BlockPalette::mousePressEvent(QMouseEvent* ev)
{
    if (ev->button() == Qt::LeftButton)
    {
        int hitItem = HitItem(m_layout->items, ev->pos());
        if (hitItem >= 0)
        {
            m_selectedItems.clear();
            m_selectedItems.push_back(hitItem);
            m_mouseHandler.reset(new DragHandler(this));
            m_mouseHandler->MousePressEvent(ev);
            SignalSelectionChanged();
            update();
        }
        else
        {
            m_selectedItems.clear();
            m_mouseHandler.reset(new SelectionHandler(this));
            m_mouseHandler->MousePressEvent(ev);
            SignalSelectionChanged();
            update();
        }
    }
    else if (ev->button() == Qt::RightButton)
    {
        int hitItem = HitItem(m_layout->items, ev->pos());
        if (hitItem >= 0)
        {
            // Right-clicking an item outside the current selection makes it
            // the sole selection before the menu opens.
            if (std::find(m_selectedItems.begin(), m_selectedItems.end(), hitItem) == m_selectedItems.end())
            {
                m_selectedItems.clear();
                m_selectedItems.push_back(hitItem);
                SignalSelectionChanged();
            }
        }
        int selectionCount = m_selectedItems.size();
        QMenu contextMenu;
        contextMenu.setDefaultAction(contextMenu.addAction("Add", this, SLOT(OnMenuAdd())));
        contextMenu.addAction("Delete", this, SLOT(OnMenuDelete()))->setEnabled(selectionCount != 0);
        contextMenu.addSeparator();
        // Hotkeys can only be assigned when exactly one item is selected.
        if (selectionCount == 1)
        {
            QMenu* hotkeyMenu = contextMenu.addMenu("Assign Hotkey");
            for (int i = 0; i < 10; ++i)
            {
                // Menu entries read 1..9,0 (keyboard order); the action's
                // data() carries the hotkey slot number it assigns.
                QString text;
                text.asprintf("%d", (i + 1) % 10);
                QString shortcut;
                shortcut.asprintf("Ctrl+%d", (i + 1) % 10);
                hotkeyMenu->addAction(text, this, SLOT(OnMenuAssignHotkey()), QKeySequence(shortcut))->setData(int((i + 1) % 10));
            }
        }
        else
        {
            contextMenu.addAction("Assign Hotkey")->setEnabled(false);
        }
        contextMenu.addSeparator();
        QAction* addWithSingleClick = contextMenu.addAction("Add Events with Single Click", this, SLOT(OnMenuAddWithSingleClick()));
        addWithSingleClick->setCheckable(true);
        addWithSingleClick->setChecked(m_addWithSingleClick);
        contextMenu.exec(QCursor::pos());
    }
}
// Forwards mouse moves to whichever handler the last press installed.
void BlockPalette::mouseMoveEvent(QMouseEvent* ev)
{
    if (!m_mouseHandler)
    {
        return;
    }
    m_mouseHandler->MouseMoveEvent(ev);
    update();
}
// Lets the active handler finish its gesture, then uninstalls it.
void BlockPalette::mouseReleaseEvent(QMouseEvent* ev)
{
    if (!m_mouseHandler)
    {
        return;
    }
    m_mouseHandler->MouseReleaseEvent(ev);
    m_mouseHandler.reset();
    update();
}
// Double-click on empty space creates a new item; double-click on an item
// activates it unless single-click mode already handled the activation.
void BlockPalette::mouseDoubleClickEvent(QMouseEvent* ev)
{
    const int itemIndex = HitItem(m_layout->items, ev->pos());
    if (itemIndex < 0)
    {
        OnMenuAdd();
    }
    else if (!m_addWithSingleClick)
    {
        ItemClicked(itemIndex);
    }
}
// Ctrl+digit assigns that hotkey to the selected item; a bare digit activates
// the item bound to that hotkey.
void BlockPalette::keyPressEvent(QKeyEvent* ev)
{
    if (!m_selectedItems.empty())
    {
        if ((ev->modifiers() & Qt::CTRL) != 0)
        {
            if (ev->key() >= Qt::Key_0 && ev->key() <= Qt::Key_9)
            {
                int keyIndex = int(ev->key()) - Qt::Key_0;
                AssignHotkey(keyIndex);
            }
        }
    }
    if ((ev->modifiers() & Qt::CTRL) == 0)
    {
        if (ev->key() >= Qt::Key_0 && ev->key() <= Qt::Key_9)
        {
            int keyIndex = int(ev->key()) - Qt::Key_0;
            // BUG FIX: m_hotkeys stays empty until a hotkey is assigned or
            // deserialized; the old code indexed it unchecked, reading out of
            // bounds on any digit press.
            if (size_t(keyIndex) < m_hotkeys.size())
            {
                // An unassigned slot holds -1, which wraps to a huge size_t
                // and is rejected by the range check below.
                size_t index = (size_t)m_hotkeys[keyIndex];
                if (index < m_content->items.size())
                {
                    ItemClicked(index);
                }
            }
        }
    }
}
// Reflow the items whenever the widget width changes.
void BlockPalette::resizeEvent(QResizeEvent* ev)
{
    UpdateLayout();
}
// Intentionally empty — presumably swallows wheel input so it does not
// propagate to a scrolling parent; confirm before removing.
void BlockPalette::wheelEvent(QWheelEvent* ev)
{
}
// Translates the selected layout indices into user ids. Selection entries
// that no longer point at a content item yield a zero id.
void BlockPalette::GetSelectedIds(BlockPaletteSelectedIds* ids) const
{
    ids->resize(m_selectedItems.size());
    for (size_t i = 0; i < m_selectedItems.size(); ++i)
    {
        const int index = m_selectedItems[i];
        BlockPaletteSelectedId& id = (*ids)[i];
        if (size_t(index) >= m_content->items.size())
        {
            id.userId = 0;
        }
        else
        {
            id.userId = m_content->items[index].userId;
        }
    }
}
// Returns the item bound to hotkey slot `hotkey`, or nullptr when the slot is
// out of range or unassigned. Negative hotkeys (and the -1 "unassigned"
// sentinel stored in a slot) wrap to huge size_t values and fail the checks.
const BlockPaletteItem* BlockPalette::GetItemByHotkey(int hotkey) const
{
    if (size_t(hotkey) >= m_hotkeys.size())
    {
        // Idiom fix: nullptr instead of the literal 0 (file already uses C++11).
        return nullptr;
    }
    int index = m_hotkeys[hotkey];
    if (size_t(index) >= m_content->items.size())
    {
        return nullptr;
    }
    return &m_content->items[index];
}
// Context-menu "Add": appends a fresh "Preset" item, makes it the sole
// selection, and notifies listeners of both changes.
void BlockPalette::OnMenuAdd()
{
    BlockPaletteItem item;
    item.name = "Preset";
    m_content->items.push_back(item);
    SignalChanged();
    const int newIndex = int(m_content->items.size()) - 1;
    m_selectedItems.assign(1, newIndex);
    SignalSelectionChanged();
    UpdateLayout();
}
// Context-menu toggle for single-click add mode (checkable menu entry).
void BlockPalette::OnMenuAddWithSingleClick()
{
    m_addWithSingleClick = !m_addWithSingleClick;
}
// Context-menu "Delete": removes every selected item, keeping the hotkey
// table consistent with the shifted indices.
void BlockPalette::OnMenuDelete()
{
    // Take ownership of the selection and clear it in one step.
    std::vector<int> selectedItems;
    m_selectedItems.swap(selectedItems);
    // Erase from the highest index down so earlier indices stay valid.
    std::sort(selectedItems.begin(), selectedItems.end());
    for (int i = int(selectedItems.size()) - 1; i >= 0; --i)
    {
        int index = selectedItems[i];
        m_content->items.erase(m_content->items.begin() + index);
        for (size_t j = 0; j < m_hotkeys.size(); ++j)
        {
            if (m_hotkeys[j] == index)
            {
                // BUG FIX: -1 is the "unassigned" sentinel used everywhere
                // else (see AssignHotkey); the old code wrote 0, silently
                // re-binding the hotkey to the first item.
                m_hotkeys[j] = -1;
            }
            else if (m_hotkeys[j] > index)
            {
                // Shift hotkey targets left past the removed item.
                --m_hotkeys[j];
            }
        }
    }
    SignalChanged();
    SignalSelectionChanged();
    UpdateLayout();
}
// Slot for the "Assign Hotkey" menu entries: the hotkey number travels in the
// triggering action's data().
void BlockPalette::OnMenuAssignHotkey()
{
    QAction* action = qobject_cast<QAction*>(sender());
    if (action)
    {
        AssignHotkey(action->data().toInt());
    }
}
// Persists user preferences: single-click add mode and hotkey assignments.
void BlockPalette::Serialize(Serialization::IArchive& ar)
{
    ar(m_addWithSingleClick, "addWithSingleClick");
    ar(m_hotkeys, "hotkeys");
}
#include <CharacterTool/BlockPalette.moc>
| 10,556 |
1,056 | <filename>java/java.hints/test/unit/data/org/netbeans/test/java/hints/IncorrectType57991.java
/*
* AbstractClass.java
*
* Created on March 12, 2005, 7:22 PM
*/
package org.netbeans.test.java.hints;
import java.io.IOException;
import javax.swing.text.BadLocationException;
/**
*
* @author lahvac
*/
public abstract class IncorrectType57991 {

    /** Creates a new instance of AbstractClass */
    public IncorrectType57991() {
    }

    // NOTE(review): `foo` is deliberately undeclared — this file is fixture
    // data for the NetBeans "incorrect type" hint test (issue 57991) and must
    // remain non-compiling in exactly this way; do not "fix" it.
    public void test() {
        this.foo = "bar";
    }
}
| 205 |
/*
 * Loads a compute unit over memory-mapped registers: a three-word kernel into
 * instruction memory, then three groups of SGPR values.
 *
 * Apparent register map (inferred from the write pattern — confirm vs. RTL):
 *   0x50001004  instruction-memory write address
 *   0x50001000  instruction-memory write data
 *   0x50002004  SGPR group base offset (0, 16, 32)
 *   0x50002008..0x50002014  four SGPR values for the group
 *   0x50002000  commit/strobe for the staged group
 */
void stage_cu()
{
    // Writing instruction memory
    Xil_Out32(0x50001004, 0);
    Xil_Out32(0x50001000, 0xD2D60008);
    Xil_Out32(0x50001004, 4);
    Xil_Out32(0x50001000, 0x04121204);
    Xil_Out32(0x50001004, 8);
    // 0xBF810000 — presumably an end-of-program instruction; confirm ISA.
    Xil_Out32(0x50001000, 0xBF810000);
    // Writing SGPRs for wavefront 1
    Xil_Out32(0x50002004, 0);
    Xil_Out32(0x50002008, 0xE);
    Xil_Out32(0x5000200C, 0x23);
    Xil_Out32(0x50002010, 0x17);
    Xil_Out32(0x50002014, 0x1A);
    Xil_Out32(0x50002000, 1);
    Xil_Out32(0x50002004, 16);
    Xil_Out32(0x50002008, 0x17);
    Xil_Out32(0x5000200C, 0x1B);
    Xil_Out32(0x50002010, 0x2E);
    Xil_Out32(0x50002014, 0x5);
    Xil_Out32(0x50002000, 1);
    Xil_Out32(0x50002004, 32);
    Xil_Out32(0x50002008, 0x1);
    Xil_Out32(0x5000200C, 0x1A);
    Xil_Out32(0x50002010, 0x0);
    Xil_Out32(0x50002014, 0x0);
    Xil_Out32(0x50002000, 1);
}
| 465 |
5,893 | from typing import List
from deeppavlov.core.common.registry import register
@register("sentseg_restore_sent")
def SentSegRestoreSent(batch_words: List[List[str]], batch_tags: List[List[str]]) -> List[str]:
    """Rebuild plain-text sentences from tokenized words and segmentation tags.

    Each tag is either ``"O"`` (no punctuation pending) or a label whose last
    character is the punctuation mark to emit before the next punctuated token
    and, for the final pending mark, at the end of the sentence.
    """
    restored = []
    for words, tags in zip(batch_words, batch_tags):
        if not tags:
            restored.append("")
            continue
        pieces = [words[0]]
        pending = "" if tags[0] == "O" else tags[0][-1]
        for word, tag in zip(words[1:], tags[1:]):
            if tag != "O":
                pieces.append(pending)
                pending = tag[-1]
            pieces.append(" " + word)
        pieces.append(pending)
        restored.append("".join(pieces))
    return restored
| 319 |
488 | <gh_stars>100-1000
// Fixture: A lives in namespace N, which is NOT imported below.
namespace N {
template <typename Ta0>
struct A {};
}
// Fixture: B lives in namespace M, pulled into scope by the using-directive.
namespace M {
template <typename Tb0>
struct B {};
}
using namespace M;
struct C {
    template<typename Tf0>
    void f() {
        // The alias shadows M::B inside this scope; `typename B::X` then names
        // a dependent member of the alias target. Presumably fixture data for
        // template name-resolution tooling — B has no member X as written.
        using B = B<Tf0>;
        using X = typename B::X;
        N::A<X> a;
    }
};
| 130 |
450 | <filename>lib/Runtime/operator/pad.c
#include <onnc/Runtime/operator/pad.h>
#include <stdint.h>
#include <stdbool.h>
#include <string.h>
#define CONSTANT_MODE 0
#define EDGE_MODE 1
#define REFLECT_MODE 2
/* Computes the row-major stride of each axis: axis_dis[d] is the number of
 * elements between consecutive indices along axis d. */
static inline void calculate_axis_dis(int32_t ndim, const int32_t * restrict dims, int32_t * restrict axis_dis){
  int32_t stride = 1;
  for(int32_t dim = ndim - 1; dim >= 0; --dim){
    axis_dis[dim] = stride;
    stride *= dims[dim];
  }
}
/* Odometer-style increment of the multi-dimensional index `dim`, innermost
 * (last) axis first. An axis reaching dim_max[axis] resets to dim_base[axis]
 * and carries into the next-outer axis. Returns false once every combination
 * has been visited (and immediately for ndim == 0). */
static inline bool next_dim(int32_t ndim, int32_t * restrict dim,
                            const int32_t * restrict dim_base, const int32_t * restrict dim_max) {
  if(ndim == 0)
    return false;
  do {
    ndim = ndim - 1;
    dim[ndim] += 1;
    if (dim[ndim] < dim_max[ndim]) {
      return true;
    } else { // reach dimension max
      if (ndim == 0) { // all dimension done
        return false;
      }
      dim[ndim] = dim_base[ndim];
    }
  } while(true);
}
/* Flattens a multi-dimensional index into a linear offset using the per-axis
 * strides in axisDistance (dot product of index and strides). */
static inline int64_t dim_to_offset(int32_t ndim, const int32_t * restrict dim,
                                    const int32_t * restrict axisDistance) {
  int64_t offset = 0;
  for (int32_t i = 0; i < ndim; ++i) {
    offset += dim[i] * axisDistance[i];
  }
  return offset;
}
/* Copies every element of the unpadded input tensor into its shifted position
 * inside the padded output: output[index + pads] = input[index]. The pad
 * border itself is filled later by padding(). */
static inline void add_initial_to_output(
  const float * restrict input, int32_t input_ndim, const int32_t * restrict input_dims,
  int32_t * restrict axis_dis, int32_t * pads, int32_t * restrict axis_pad_dis,
  float * restrict output
){
  int32_t iter_index[input_ndim];
  int32_t fill_index[input_ndim];
  memset(iter_index, 0, sizeof(int32_t) * input_ndim);
  int32_t iter_base[input_ndim];
  memset(iter_base, 0, sizeof(int32_t) * input_ndim);
  do{
    /* fill_index = iter_index shifted by the leading pad of each axis. */
    memcpy(fill_index, iter_index, sizeof(int32_t) * input_ndim);
    for(int32_t dim = 0 ; dim < input_ndim; dim++){
      fill_index[dim] += pads[dim];
    }
    output[dim_to_offset(input_ndim, fill_index, axis_pad_dis)]
      = input[dim_to_offset(input_ndim, iter_index, axis_dis)];
  }while(next_dim(input_ndim, iter_index, iter_base, input_dims));
}
/* True iff `index` lies inside the axis-aligned box described by `object`:
 * lower bounds in object[0..ndim-1], inclusive upper bounds in
 * object[ndim..2*ndim-1]. */
static inline bool in_obj_area(int32_t * restrict index, int32_t * restrict object, int32_t ndim){
  for(int32_t dim = 0; dim < ndim; dim++){
    if(index[dim] < object[dim] || index[dim] > object[dim + ndim]){
      return false;
    }
  }
  return true;
}
/* Fills the pad region along one axis for "edge" or "reflect" mode. Called
 * with the innermost axes already padded, so a whole slab of
 * axis_pad_dis[axis] contiguous floats can be copied per memcpy.
 * `object_area` holds the already-valid region: lower bounds in
 * [0..output_ndim-1], inclusive upper bounds in [output_ndim..2*output_ndim-1]. */
static inline void pad_along_axis(
  float * restrict output, int32_t output_ndim, const int32_t * restrict output_dims,
  const int32_t * restrict pads, const int32_t * restrict axis_pad_dis, const int32_t axis,
  int32_t * restrict object_area, const int32_t mode
){
  /* iterative walk though dim which axis dim equal to object edge */
  int32_t iter_dim_num = axis;
  /* initial iter_base */
  int32_t iter_base[iter_dim_num];
  memcpy(iter_base, object_area, sizeof(int32_t) * iter_dim_num);
  /* initial iter_dim_max */
  int32_t iter_dim_max[iter_dim_num];
  memcpy(iter_dim_max, &object_area[output_ndim], sizeof(int32_t) * iter_dim_num);
  for(int32_t dim = 0 ; dim < iter_dim_num ; dim++) iter_dim_max[dim] += 1;
  /* initial iter_index */
  int32_t iter_index[iter_dim_num];
  memcpy(iter_index, iter_base, sizeof(int32_t) * iter_dim_num);
  /* initial iter_axis_dis */
  int32_t iter_axis_dis[iter_dim_num];
  memcpy(iter_axis_dis, axis_pad_dis, sizeof(int32_t) * iter_dim_num);
  switch(mode){
    case EDGE_MODE:{
      /* Replicate the first/last valid slab into every pad slab. */
      do{
        int32_t base_offset = dim_to_offset(iter_dim_num, iter_index, iter_axis_dis);
        /* deal with begin pad */
        int32_t clone_start_index = base_offset + object_area[axis] * axis_pad_dis[axis];
        for(int32_t pad = 0; pad < pads[axis] ; pad++){
          int32_t copy_start_offset = base_offset + pad * axis_pad_dis[axis];
          memcpy(&output[copy_start_offset], &output[clone_start_index], sizeof(float) * axis_pad_dis[axis]);
        }
        /* deal with end pad */
        int32_t clone_end_index = base_offset + object_area[axis+output_ndim] * axis_pad_dis[axis];
        int32_t end_bound = object_area[axis+output_ndim] + pads[axis + output_ndim];
        for(int32_t pad = object_area[axis+output_ndim] + 1 ; pad <= end_bound; pad++){
          int32_t copy_end_offset = base_offset + pad * axis_pad_dis[axis];
          memcpy(&output[copy_end_offset], &output[clone_end_index], sizeof(float) * axis_pad_dis[axis]);
        }
      }while(next_dim(iter_dim_num, iter_index, iter_base, iter_dim_max));
      break;
    }
    case REFLECT_MODE:{
      /* Mirror slabs around the first/last valid slab (excluding the edge
       * slab itself, matching ONNX "reflect" semantics). */
      do{
        int32_t base_offset = dim_to_offset(iter_dim_num, iter_index, iter_axis_dis);
        /* deal with begin pad */
        for(int32_t pad = 0; pad < pads[axis] ; pad++){
          int32_t copy_start_offset = base_offset + pad * axis_pad_dis[axis];
          int32_t reflect_dis = object_area[axis] - pad;
          int32_t clone_start_index = base_offset + (object_area[axis] + reflect_dis) * axis_pad_dis[axis];
          memcpy(&output[copy_start_offset], &output[clone_start_index], sizeof(float) * axis_pad_dis[axis]);
        }
        /* deal with end pad */
        int32_t end_bound = object_area[axis+output_ndim] + pads[axis + output_ndim];
        for(int32_t pad = object_area[axis+output_ndim] + 1 ; pad <= end_bound; pad++){
          int32_t copy_end_offset = base_offset + pad * axis_pad_dis[axis];
          int32_t reflect_dis = pad - object_area[axis+output_ndim];
          int32_t clone_end_index = base_offset + (object_area[axis+output_ndim] - reflect_dis) * axis_pad_dis[axis];
          memcpy(&output[copy_end_offset], &output[clone_end_index], sizeof(float) * axis_pad_dis[axis]);
        }
      }while(next_dim(iter_dim_num, iter_index, iter_base, iter_dim_max));
      break;
    }
  }
}
/* Fills the pad border of `output` according to `mode`. `object_area`
 * describes the already-copied input region (lower bounds in [0..ndim-1],
 * inclusive upper bounds in [ndim..2*ndim-1]). Unrecognized mode strings fall
 * through to constant mode. */
static inline void padding(
  float * restrict output, int32_t output_ndim, const int32_t * restrict output_dims,
  const int32_t * restrict pads, const int32_t * restrict axis_pad_dis,
  int32_t * restrict object_area, const char * restrict mode, float value
){
  int32_t mode_no = 0;
  if(strcmp(mode, "constant") == 0) mode_no = 0;
  else if(strcmp(mode, "edge") == 0) mode_no = 1;
  else if(strcmp(mode, "reflect") == 0) mode_no = 2;
  int32_t iter_index[output_ndim];
  memset(iter_index, 0, sizeof(int32_t) * output_ndim);
  switch(mode_no){
    case CONSTANT_MODE: {
      /* Visit every output element; overwrite those outside the object area
       * with the constant fill value. */
      int32_t iter_base[output_ndim];
      memset(iter_base, 0, sizeof(int32_t) * output_ndim);
      do{
        if(in_obj_area(iter_index, object_area, output_ndim)) continue;
        int32_t offset = dim_to_offset(output_ndim, iter_index, axis_pad_dis);
        output[offset] = value;
      }while(next_dim(output_ndim, iter_index, iter_base, output_dims));
      break;
    }
    case EDGE_MODE: {
      /* Innermost axis first so outer axes can clone whole contiguous slabs. */
      for(int32_t axis = output_ndim - 1; axis >= 0 ; axis--){
        pad_along_axis(output, output_ndim, output_dims, pads, axis_pad_dis, axis, object_area, EDGE_MODE);
      }
      break;
    }
    case REFLECT_MODE:{
      for(int32_t axis = output_ndim - 1; axis >= 0 ; axis--){
        pad_along_axis(output, output_ndim, output_dims, pads, axis_pad_dis, axis, object_area, REFLECT_MODE);
      }
      break;
    }
  }
}
/* ONNX Pad operator (float): copies the input into the interior of the
 * pre-allocated padded output, then fills the border per `mode`
 * ("constant"/"edge"/"reflect"). `pads` holds the leading pads in
 * [0..ndim-1] and the trailing pads in [ndim..2*ndim-1].
 * Negative pads are not yet supported (see TODO). */
void ONNC_RUNTIME_pad_float(
  void * restrict onnc_runtime_context
  ,const float * restrict input_data
  ,int32_t input_data_ndim, const int32_t * restrict input_data_dims
  ,float * restrict output_output
  ,int32_t output_output_ndim, const int32_t * restrict output_output_dims
  ,const char * restrict mode
  ,int32_t * restrict pads
  ,int32_t number_of_pads
  ,float value
) {
  /* calculate axis_dis: row-major strides of the input */
  int32_t axis_dis[input_data_ndim];
  calculate_axis_dis(input_data_ndim, input_data_dims, axis_dis);
  /* calculate input_pad_dims: input dims grown by both pads per axis */
  int32_t input_pad_dims[input_data_ndim];
  memcpy(input_pad_dims, input_data_dims, sizeof(int32_t) * input_data_ndim);
  for(int32_t dim = 0 ; dim < input_data_ndim ; dim++){
    input_pad_dims[dim] += (pads[dim] + pads[dim + input_data_ndim]);
  }
  /* calculate axis_pad_dis: strides of the padded output */
  int32_t axis_pad_dis[input_data_ndim];
  calculate_axis_dis(input_data_ndim, input_pad_dims, axis_pad_dis);
  /* add initial value to output with new index */
  /* TODO: negative padding */
  add_initial_to_output(input_data, input_data_ndim, input_data_dims, axis_dis,
                        pads, axis_pad_dis, output_output);
  /* initial object area: where the copied input now sits inside the output */
  int32_t object_area[2*input_data_ndim];
  for(int32_t dim = 0 ; dim < input_data_ndim ; dim++){
    object_area[dim] = pads[dim];
    object_area[dim + input_data_ndim] = input_data_dims[dim] - 1 + pads[dim];
  }
  /* implement padding */
  padding(output_output, output_output_ndim, output_output_dims, pads, axis_pad_dis, object_area, mode, value);
}
| 4,313 |
# mlsql training script: multi-window text-CNN classifier over word embeddings
# (TensorFlow 1.x graph-mode API).
import tensorflow as tf
import mlsql_model
import mlsql
import sys
import mlsql_tf

rd = mlsql.read_data()
p = mlsql.params()
fitParams = p["fitParam"]
# BUG FIX: `tf.reset_default_graph` was referenced without calling it, so the
# statement was a no-op and the default graph was never reset.
tf.reset_default_graph()
config = tf.ConfigProto()
gpuPercent = float(mlsql.get_param(fitParams, "gpuPercent", -1))
featureSize = int(mlsql.get_param(fitParams, "featureSize", -1))
wordEmbeddingSize = int(mlsql.get_param(fitParams, "wordEmbeddingSize", -1))
# BUG FIX: integer division — under Python 3 `/` yields a float, which is not
# a valid dimension for tf.reshape below (Python 2 behavior is unchanged).
sequenceLen = featureSize // wordEmbeddingSize
label_size = int(mlsql.get_param(fitParams, "labelSize", -1))
layer_group = [int(i) for i in mlsql.get_param(fitParams, "layerGroup", "300").split(",")]
print_interval = int(mlsql.get_param(fitParams, "printInterval", 1))
window_group = [int(i) for i in mlsql.get_param(fitParams, "windowGroup", "5,10,15").split(",")]
batch_size = int(mlsql.get_param(fitParams, "batchSize", 32))
epochs = int(mlsql.get_param(fitParams, "epochs", 1))
input_col = mlsql.get_param(fitParams, "inputCol", "features")
label_col = mlsql.get_param(fitParams, "labelCol", "label")
tempModelLocalPath = p["internalSystemParam"]["tempModelLocalPath"]

if featureSize < 0 or label_size < 0 or wordEmbeddingSize < 0:
    raise RuntimeError("featureSize or labelSize or wordEmbeddingSize is required")

if gpuPercent > 0:
    config.gpu_options.per_process_gpu_memory_fraction = gpuPercent

INITIAL_LEARNING_RATE = 0.001
INITIAL_KEEP_PROB = 0.9

sess = tf.Session(config=config)
input_x = tf.placeholder(tf.float32, [None, featureSize], name=input_col)
_input_x = tf.reshape(input_x, [-1, sequenceLen, wordEmbeddingSize, 1])
input_y = tf.placeholder(tf.float32, [None, label_size], name="input_y")
global_step = tf.Variable(0, name='global_step', trainable=False)

# One conv tower per window size; towers are flattened and concatenated.
buffer = []
for vw in window_group:
    conv_layout_num = 0
    pool_layout_num = 0
    conv1 = mlsql_tf.conv_poo_layer(_input_x, 1, 16, filter_width=wordEmbeddingSize, filter_height=vw,
                                    name="conv1_" + str(vw))
    conv_layout_num += 1
    pool_layout_num += 0
    conv_out = mlsql_tf.conv_poo_layer(conv1, 16, 32, filter_width=1, filter_height=vw,
                                       name="conv2_" + str(vw))
    conv_layout_num += 1
    pool_layout_num += 0
    # Remaining height after the valid convolutions, times 32 channels.
    flattened = tf.reshape(conv_out, [-1, (
        sequenceLen + conv_layout_num + pool_layout_num - conv_layout_num * vw - pool_layout_num * vw) * 32])
    buffer.append(flattened)

final_flattened = tf.concat(buffer, 1)
fc1 = mlsql_tf.fc_layer(final_flattened, int(final_flattened.shape[1]), 1024, "relu", "fc1")
_logits = mlsql_tf.fc_layer(fc1, 1024, label_size, None, "fc2")
tf.identity(_logits, name=label_col)

with tf.name_scope("xent"):
    xent = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(logits=_logits, labels=input_y), name="xent"
    )
    tf.summary.scalar("xent", xent)

with tf.name_scope("train"):
    learning_rate = tf.train.exponential_decay(INITIAL_LEARNING_RATE, global_step,
                                               1200, 0.8, staircase=True)
    train_step = tf.train.AdamOptimizer(learning_rate).minimize(xent, global_step=global_step)

with tf.name_scope("accuracy"):
    correct_prediction = tf.equal(tf.argmax(_logits, 1), tf.argmax(input_y, 1))
    accurate = tf.reduce_mean(tf.cast(correct_prediction, tf.float32), name="accuracy")
    tf.summary.scalar("accuracy", accurate)

summ = tf.summary.merge_all()
sess.run(tf.global_variables_initializer())
# writer = tf.summary.FileWriter(TENSOR_BORAD_DIR)
# writer.add_graph(sess.graph)
#
# writer0 = tf.summary.FileWriter(TENSOR_BORAD_DIR + "/0")
# writer0.add_graph(sess.graph)
saver = tf.train.Saver()

TEST_X, TEST_Y = mlsql.get_validate_data()
TEST_Y = [item.toArray() for item in TEST_Y]

for ep in range(epochs):
    for items in rd(max_records=batch_size):
        X = [item[input_col].toArray() for item in items]
        Y = [item[label_col].toArray() for item in items]
        _, gs = sess.run([train_step, global_step],
                         feed_dict={input_x: X, input_y: Y})
        if gs % print_interval == 0:
            [train_accuracy, s, loss] = sess.run([accurate, summ, xent],
                                                 feed_dict={input_x: X, input_y: Y})
            [test_accuracy, test_s, test_lost] = sess.run([accurate, summ, xent],
                                                          feed_dict={input_x: TEST_X, input_y: TEST_Y})
            print('train_accuracy %g,test_accuracy %g, loss: %g,test_lost: %g, global step: %d, ep:%d' % (
                train_accuracy,
                test_accuracy,
                loss,
                test_lost,
                gs, ep))
            sys.stdout.flush()

mlsql_model.save_model(tempModelLocalPath, sess, input_x, _logits, True)
sess.close()
| 2,166 |
831 | package p1.p2;
import android.app.Activity;
import android.os.Bundle;
/**
* Tests that navigation to the SDK base class will work.
*/
// NOTE(review): the <caret> marker inside "Activity" is intentional fixture
// data — it records the editor caret position for the navigation test and
// makes this file non-compiling by design; do not remove it.
public class MyActivity extends Act<caret>ivity {
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
    }
}
| 108 |
1,204 | <gh_stars>1000+
/*
* Copyright 2015 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gs.collections.impl.block.factory;
import java.util.Collection;
import com.gs.collections.api.block.function.Function2;
import com.gs.collections.api.block.procedure.Procedure;
import com.gs.collections.api.block.procedure.Procedure2;
import com.gs.collections.api.block.procedure.primitive.ObjectIntProcedure;
import com.gs.collections.impl.test.Verify;
import org.junit.Test;
/**
 * Serialized-form regression tests for the {@code Functions} factory methods:
 * each test asserts that a factory's product serializes to a known base64 form
 * with serialVersionUID 1.
 *
 * <p>NOTE(review): in this copy the expected base64 strings have been redacted
 * to the placeholder {@code <KEY>} — several string literals are even left
 * unterminated — so the file no longer compiles or verifies anything. The
 * original serialized forms must be restored before these tests can run.</p>
 */
public class FunctionsSerializationTest
{
    @Test
    public void throwing()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "Q2hlY2tlZEZ1bmN0aW9uAAAAAAAAAAECAAB4cHA=",
                Functions.throwing(null));
    }

    @Test
    public void getPassThru()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.getPassThru());
    }

    @Test
    public void getTrue()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>",
                Functions.getTrue());
    }

    @Test
    public void getFalse()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.getFalse());
    }

    @Test
    public void getIntegerPassThru()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>ABAgAAeHA=",
                Functions.getIntegerPassThru());
    }

    @Test
    public void getLongPassThru()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "b25nUGFzc1RocnVGdW5jdGlvbgAAAAAAAAABAgAAeHA=",
                Functions.getLongPassThru());
    }

    @Test
    public void getDoublePassThru()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.getDoublePassThru());
    }

    @Test
    public void getStringTrim()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>",
                Functions.getStringTrim());
    }

    @Test
    public void getFixedValue()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "eHBw",
                Functions.getFixedValue(null));
    }

    @Test
    public void getToClass()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.getToClass());
    }

    @Test
    public void getMathSinFunction()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>",
                Functions.getMathSinFunction());
    }

    @Test
    public void squaredInteger()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.squaredInteger());
    }

    @Test
    public void getToString()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.getToString());
    }

    @Test
    public void getStringToInteger()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.getStringToInteger());
    }

    @Test
    public void withDefault()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>",
                Functions.withDefault(Functions.getPassThru(), null));
    }

    @Test
    public void nullSafe()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>
                Functions.nullSafe(Functions.getPassThru()));
    }

    @Test
    public void firstNotNullValue()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>
                Functions.firstNotNullValue());
    }

    @Test
    public void firstNotEmptyStringValue()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "AAA=",
                Functions.firstNotEmptyStringValue());
    }

    @Test
    public void firstNotEmptyCollectionValue()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "eHAAAAAA",
                Functions.<Integer, String, Collection<String>>firstNotEmptyCollectionValue());
    }

    @Test
    public void synchronizedEach()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>
                Functions.synchronizedEach(null));
    }

    @Test
    public void bind_procedure()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>",
                Functions.bind((Procedure<Object>) null, null));
    }

    @Test
    public void bind_procedure2()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>
                Functions.bind((Procedure2<Object, Object>) null, null));
    }

    @Test
    public void bind_object_int_procedure()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>
                Functions.bind((ObjectIntProcedure<Object>) null, null));
    }

    @Test
    public void bind_function2_parameter()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>",
                Functions.bind((Function2<Object, Object, Object>) null, null));
    }

    @Test
    public void getKeyFunction()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.getKeyFunction());
    }

    @Test
    public void getValueFunction()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.getValueFunction());
    }

    @Test
    public void getSizeOf()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>
                Functions.getSizeOf());
    }

    @Test
    public void chain()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>
                Functions.chain(null, null));
    }

    @Test
    public void chainChain()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>",
                Functions.chain(null, null).chain(null));
    }

    @Test
    public void chainBoolean()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "b29sZWFuRnVuY3Rpb25DaGFpbgAAAAAAAAABAgACTAAJZnVuY3Rpb24xdAA<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>",
                Functions.chainBoolean(null, null));
    }

    @Test
    public void chainByte()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>",
                Functions.chainByte(null, null));
    }

    @Test
    public void chainChar()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>5DaGFpbgAAAAAAAAABAgACTAAJZnVuY3Rpb24xdAAwTGNvbS9ncy9jb2xsZWN0\n"
                        + "<KEY>"
                        + "<KEY>",
                Functions.chainChar(null, null));
    }

    @Test
    public void chainDouble()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "eHBwcA==",
                Functions.chainDouble(null, null));
    }

    @Test
    public void chainFloat()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "cHA=",
                Functions.chainFloat(null, null));
    }

    @Test
    public void chainInt()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>",
                Functions.chainInt(null, null));
    }

    @Test
    public void chainLong()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>",
                Functions.chainLong(null, null));
    }

    @Test
    public void chainShort()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "<KEY>"
                        + "cHA=",
                Functions.chainShort(null, null));
    }

    @Test
    public void getOneFunction()
    {
        Verify.assertSerializedForm(
                1L,
                "rO0ABXNyAENjb20uZ3MuY29sbGVjdGlvbnMuaW1wbC5ibG9jay5mYWN0b3J5LkZ1bmN0aW9ucyRG\n"
                        + "<KEY>
                Functions.firstOfPair());
    }

    @Test
    public void getTwoFunction()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.secondOfPair());
    }

    @Test
    public void classForName()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>"
                        + "<KEY>
                Functions.classForName());
    }

    @Test
    public void getSwappedPairFunction()
    {
        Verify.assertSerializedForm(
                1L,
                "<KEY>"
                        + "<KEY>
                Functions.swappedPair());
    }
}
| 7,418 |
2,032 | <reponame>huayl/phxqueue<gh_stars>1000+
/*
Tencent is pleased to support the open source community by making PhxQueue available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the BSD 3-Clause License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
<https://opensource.org/licenses/BSD-3-Clause>
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
#include "phxqueue/config/storeconfig.h"
#include "phxqueue/comm.h"
namespace phxqueue {
namespace config {
using namespace std;
// Pimpl for StoreConfig: holds the lookup tables that Rebuild() derives
// from the protobuf configuration.
class StoreConfig::StoreConfigImpl {
  public:
    StoreConfigImpl() {}
    virtual ~StoreConfigImpl() {}
    // store id -> store proto (shared_ptr so callers can hold snapshots).
    map<int, shared_ptr<proto::Store>> store_id2store;
    // encoded address (comm::utils::EncodeAddr) -> owning store id.
    map<uint64_t, int> addr2store_id;
};
// Allocates the pimpl; lookup tables stay empty until Rebuild() runs.
StoreConfig::StoreConfig() : impl_(new StoreConfigImpl()){
    assert(impl_);
}
// Defined out-of-line so impl_ can be destroyed where StoreConfigImpl is complete.
StoreConfig::~StoreConfig() {}
// Fills |proto| with a hard-coded sample configuration: a single store
// (id 1, scale 100) serving pub ids 1 and 2, replicated across three
// localhost nodes whose paxos port is always service port + 1.
comm::RetCode StoreConfig::ReadConfig(proto::StoreConfig &proto) {
    QLVerb("start");
    proto.Clear();
    // store 1
    proto::Store *sample_store = proto.add_stores();
    sample_store->set_store_id(1);
    sample_store->set_scale(100);
    sample_store->add_pub_ids(1);
    sample_store->add_pub_ids(2);
    for (const int base_port : {5100, 5200, 5300}) {
        comm::proto::Addr *node_addr = sample_store->add_addrs();
        node_addr->set_ip("127.0.0.1");
        node_addr->set_port(base_port);
        node_addr->set_paxos_port(base_port + 1);
    }
    return comm::RetCode::RET_OK;
}
// Rebuilds both lookup tables (id -> store, encoded addr -> id) from the
// current GetProto() contents. Stores with a zero/absent store_id are skipped.
// When NeedCheck() is on, duplicate store ids or duplicate addresses trip
// PHX_ASSERT before insertion.
comm::RetCode StoreConfig::Rebuild() {
    bool need_check = NeedCheck();
    QLVerb("start");
    // Drop stale entries before re-populating.
    impl_->store_id2store.clear();
    impl_->addr2store_id.clear();
    auto &&proto = GetProto();
    for (int i{0}; i < proto.stores_size(); ++i) {
        const auto &store(proto.stores(i));
        if (!store.store_id()) continue;
        if (need_check) PHX_ASSERT(impl_->store_id2store.end() == impl_->store_id2store.find(store.store_id()), ==, true);
        impl_->store_id2store.emplace(store.store_id(), make_shared<proto::Store>(store));
        for (int j{0}; j < store.addrs_size(); ++j) {
            auto &&addr = store.addrs(j);
            if (need_check) PHX_ASSERT(impl_->addr2store_id.end() == impl_->addr2store_id.find(comm::utils::EncodeAddr(addr)), ==, true);
            impl_->addr2store_id.emplace(comm::utils::EncodeAddr(addr), store.store_id());
            QLVerb("add addr(%s:%d:%d) store_id %d", addr.ip().c_str(), addr.port(), addr.paxos_port(), store.store_id());
        }
    }
    return comm::RetCode::RET_OK;
}
// Appends every configured store to |stores|; existing contents are kept.
comm::RetCode StoreConfig::GetAllStore(std::vector<shared_ptr<const proto::Store>> &stores) const {
    for (const auto &entry : impl_->store_id2store) {
        stores.emplace_back(entry.second);
    }
    return comm::RetCode::RET_OK;
}
// Collects every configured store id into |store_ids|.
comm::RetCode StoreConfig::GetAllStoreID(std::set<int> &store_ids) const {
    for (const auto &entry : impl_->store_id2store) {
        store_ids.insert(entry.first);
    }
    return comm::RetCode::RET_OK;
}
// Looks up a store by id; returns RET_ERR_RANGE_STORE when the id is unknown.
comm::RetCode StoreConfig::GetStoreByStoreID(const int store_id, shared_ptr<const proto::Store> &store) const {
    const auto found = impl_->store_id2store.find(store_id);
    if (impl_->store_id2store.end() == found) return comm::RetCode::RET_ERR_RANGE_STORE;
    store = found->second;
    return comm::RetCode::RET_OK;
}
// Maps a node address to its owning store id; RET_ERR_RANGE_ADDR if unknown.
comm::RetCode StoreConfig::GetStoreIDByAddr(const comm::proto::Addr &addr, int &store_id) const {
    const auto found = impl_->addr2store_id.find(comm::utils::EncodeAddr(addr));
    if (found == impl_->addr2store_id.end()) return comm::RetCode::RET_ERR_RANGE_ADDR;
    store_id = found->second;
    return comm::RetCode::RET_OK;
}
// Resolves an address to its store in two hops: addr -> store id -> proto.
comm::RetCode StoreConfig::GetStoreByAddr(const comm::proto::Addr &addr, std::shared_ptr<const proto::Store> &store) const {
    int store_id{0};
    const comm::RetCode ret = GetStoreIDByAddr(addr, store_id);
    if (comm::RetCode::RET_OK != ret) return ret;
    return GetStoreByStoreID(store_id, store);
}
} // namespace config
} // namespace phxqueue
| 1,823 |
5,169 | {
"name": "libmsgs",
"version": "0.3.1",
"summary": "Msgs.io library for iOS",
"description": "## What is Msgs.io?\n\nMsgs.io provides an elegant, high volume push notification service for iOS and Android (and with a limited set of functionality also BlackBerry).\n\nAre your users getting tired of push notifications? Msgs.io supports multiple channels and time/date/day based subscriptions.\n\nNote: We are currently in private alpha, only using the services for select customers.",
"homepage": "http://msgs.io",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"platforms": {
"ios": "6.0"
},
"source": {
"git": "https://github.com/egeniq/libmsgs-ios.git",
"tag": "0.3.1"
},
"source_files": "src/**/*.{h,m}",
"frameworks": [
"SystemConfiguration",
"MobileCoreServices"
],
"requires_arc": true
}
| 326 |
16,461 | <reponame>zakharchenkoAndrii/expo<gh_stars>1000+
//
// ABI42_0_0AIRGoogleMapOverlayManager.h
// Created by <NAME> on 3/5/17.
//
#import <Foundation/Foundation.h>
#import <ABI42_0_0React/ABI42_0_0RCTViewManager.h>
// React Native view manager for the Google Maps overlay component
// (ABI42_0_0-versioned copy generated for Expo's versioned SDK builds).
@interface ABI42_0_0AIRGoogleMapOverlayManager : ABI42_0_0RCTViewManager
@end
| 133 |
404 | <filename>k-distribution/include/kframework/ocaml/fake_load_terms.c<gh_stars>100-1000
#include <caml/mlvalues.h>
#include <caml/alloc.h>
#include <caml/memory.h>
/* OCaml FFI stub: fake replacement for the real native-binary loader.
 * Always returns an empty OCaml string, so callers see "no embedded binary".
 * CAMLparam/CAMLlocal/CAMLreturn register roots with the OCaml GC. */
value load_native_binary(value unit) {
  CAMLparam1(unit);
  CAMLlocal1(s);
  s = caml_alloc_string(0);
  CAMLreturn(s);
}
| 120 |
692 | # test _thread.get_ident() function
#
# MIT license; Copyright (c) 2016 <NAME> on behalf of Pycom Ltd
import _thread
def thread_entry():
    """Worker: report that our id is an int, nonzero, and distinct from main's."""
    ident = _thread.get_ident()
    print('thread', type(ident) == int, ident != 0, ident != tid_main)
    global finished
    finished = True
# Record the main thread's id so the worker can compare against it.
tid_main = _thread.get_ident()
print('main', type(tid_main) == int, tid_main != 0)

# Spawn the worker and busy-wait until it sets the `finished` flag.
finished = False
_thread.start_new_thread(thread_entry, ())
while not finished:
    pass
| 167 |
1,723 | /******************************************************************************
* Copyright 2020 Google LLC
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "include.h"
/* Configure SNTP in poll mode against time.google.com and start it. */
static void initialize_sntp(void) {
    ESP_LOGI(TAG, "Initializing SNTP");
    sntp_setoperatingmode(SNTP_OPMODE_POLL);
    sntp_setservername(0, "time.google.com");
    sntp_init();
}
/* Block (polling every 2 s) until SNTP has set the system clock.
 * "Set" means tm_year reports 2016 or later; before sync the RTC reports
 * the 1970 epoch default. */
static void obtain_time(void) {
    initialize_sntp();
    // wait for time to be set
    time_t now = 0;
    struct tm timeinfo = {0};
    while (timeinfo.tm_year < (2016 - 1900)) {
        ESP_LOGI(TAG, "Waiting for system time to be set...");
        vTaskDelay(2000 / portTICK_PERIOD_MS);
        time(&now);
        localtime_r(&now, &timeinfo);
    }
    ESP_LOGI(TAG, "Time is set...");
}
/* Timed-task callback: publish one temperature telemetry message.
 * The (x - 32) / 1.8 expression suggests the raw sensor value is in degrees
 * Fahrenheit and is converted to Celsius — TODO confirm sensor units.
 * |timed_task| and |user_data| are unused.
 * Fix: asprintf() leaves its pointer argument undefined when it fails
 * (returns -1); the old code then passed garbage to iotc_publish()/free(). */
void publish_telemetry_event(iotc_context_handle_t context_handle, iotc_timed_task_handle_t timed_task, void *user_data) {
    char *publish_topic = NULL;
    char *publish_message = NULL;
    if (asprintf(&publish_topic, PUBLISH_TOPIC_EVENT, CONFIG_GIOT_DEVICE_ID) < 0) {
        return; /* nothing allocated on failure */
    }
    if (asprintf(&publish_message, TEMPERATURE_DATA, ((temprature_sens_read() - 32) / 1.8)) < 0) {
        free(publish_topic);
        return;
    }
    ESP_LOGI(TAG, "publishing msg \"%s\" to topic: \"%s\"", publish_message, publish_topic);
    iotc_publish(context_handle, publish_topic, publish_message, iotc_example_qos, /*callback=*/NULL, /*user_data=*/NULL);
    free(publish_topic);
    free(publish_message);
}
/* MQTT subscription callback: NUL-terminate a private copy of the payload,
 * log it, and — on the command topic — parse "light: <0|1>" and drive the
 * BLINK_GPIO LED accordingly.
 * Fixes: (1) malloc() result was unchecked; (2) `value` was read
 * uninitialized (undefined behavior) when sscanf() matched nothing. */
void iotc_mqttlogic_subscribe_callback(iotc_context_handle_t in_context_handle, iotc_sub_call_type_t call_type, const iotc_sub_call_params_t *const params, iotc_state_t state, void *user_data) {
    const size_t payload_len = params->message.temporary_payload_data_length;
    char *sub_message = (char *)malloc(payload_len + 1);
    if (sub_message == NULL) {
        return; /* out of memory: drop the message rather than crash */
    }
    memcpy(sub_message, params->message.temporary_payload_data, payload_len);
    sub_message[payload_len] = '\0';
    ESP_LOGI(TAG, "Delegate Message Payload: %s", sub_message);
    if (strcmp(subscribe_topic_command, params->message.topic) == 0)
    {
        gpio_pad_select_gpio(BLINK_GPIO);
        /* Set the GPIO as a push/pull output */
        gpio_set_direction(BLINK_GPIO, GPIO_MODE_OUTPUT);
        int value = -1;
        /* Only act when the payload actually matched the expected format. */
        if (sscanf(sub_message, "light: %d", &value) == 1) {
            ESP_LOGI(TAG, "value: %d\n", value);
            if (value == 1)
            {
                ESP_LOGI(TAG, "ON");
                gpio_set_level(BLINK_GPIO, 1);
            }
            else if (value == 0)
            {
                gpio_set_level(BLINK_GPIO, 0);
            }
        }
    }
    free(sub_message);
}
/* IoTC connection state machine.
 * OPENED: subscribe to the command and config topics and schedule the
 *   periodic telemetry publish (delay 10 s, period 15 s).
 * OPEN_FAILED: stop the event loop, which unwinds mqtt_task.
 * CLOSED: release topic strings and the timed task; if the close was clean
 *   (IOTC_STATE_OK, i.e. user-initiated) stop the loop, otherwise reconnect
 *   with the same credentials. */
void on_connection_state_changed(iotc_context_handle_t in_context_handle, void *data, iotc_state_t state) {
    iotc_connection_data_t *conn_data = (iotc_connection_data_t *)data;
    switch (conn_data->connection_state) {
    case IOTC_CONNECTION_STATE_OPENED:
        printf("connected!\n");
        asprintf(&subscribe_topic_command, SUBSCRIBE_TOPIC_COMMAND, CONFIG_GIOT_DEVICE_ID);
        printf("subscribe to topic: \"%s\"\n", subscribe_topic_command);
        iotc_subscribe(in_context_handle, subscribe_topic_command, IOTC_MQTT_QOS_AT_LEAST_ONCE, &iotc_mqttlogic_subscribe_callback, NULL);
        asprintf(&subscribe_topic_config, SUBSCRIBE_TOPIC_CONFIG, CONFIG_GIOT_DEVICE_ID);
        printf("subscribe to topic: \"%s\"\n", subscribe_topic_config);
        iotc_subscribe(in_context_handle, subscribe_topic_config, IOTC_MQTT_QOS_AT_LEAST_ONCE, &iotc_mqttlogic_subscribe_callback, NULL);
        /* Create a timed task to publish every 10 seconds. */
        delayed_publish_task = iotc_schedule_timed_task(in_context_handle, publish_telemetry_event, 10, 15, NULL);
        break;
    case IOTC_CONNECTION_STATE_OPEN_FAILED:
        printf("ERROR!\tConnection has failed reason %d\n\n", state);
        /* exit it out of the application by stopping the event loop. */
        iotc_events_stop();
        break;
    case IOTC_CONNECTION_STATE_CLOSED:
        free(subscribe_topic_command);
        free(subscribe_topic_config);
        if (IOTC_INVALID_TIMED_TASK_HANDLE != delayed_publish_task) {
            iotc_cancel_timed_task(delayed_publish_task);
            delayed_publish_task = IOTC_INVALID_TIMED_TASK_HANDLE;
        }
        if (state == IOTC_STATE_OK) {
            iotc_events_stop();
        } else {
            printf("connection closed - reason %d!\n", state);
            iotc_connect(in_context_handle, conn_data->username, conn_data->password, conn_data->client_id, conn_data->connection_timeout, conn_data->keepalive_timeout, &on_connection_state_changed);
        }
        break;
    default:
        printf("wrong value\n");
        break;
    }
}
/* Legacy esp_event loop handler: keeps the station connected and mirrors
 * connection state into wifi_event_group's CONNECTED_BIT. */
static esp_err_t wifi_event_handler(void *ctx, system_event_t *event) {
    switch (event->event_id) {
    case SYSTEM_EVENT_STA_START:
        esp_wifi_connect();
        break;
    case SYSTEM_EVENT_STA_GOT_IP:
        xEventGroupSetBits(wifi_event_group, CONNECTED_BIT);
        break;
    case SYSTEM_EVENT_STA_DISCONNECTED:
        /* Auto-reconnect on any disconnect and mark us offline meanwhile. */
        esp_wifi_connect();
        xEventGroupClearBits(wifi_event_group, CONNECTED_BIT);
        break;
    default:
        break;
    }
    return ESP_OK;
}
/* Bring up WiFi in station mode with credentials from Kconfig and block
 * until an IP is acquired (CONNECTED_BIT set by wifi_event_handler). */
static void wifi_init(void) {
    tcpip_adapter_init();
    wifi_event_group = xEventGroupCreate();
    ESP_ERROR_CHECK(esp_event_loop_init(wifi_event_handler, NULL));
    wifi_init_config_t cfg = WIFI_INIT_CONFIG_DEFAULT();
    ESP_ERROR_CHECK(esp_wifi_init(&cfg));
    ESP_ERROR_CHECK(esp_wifi_set_storage(WIFI_STORAGE_RAM));
    wifi_config_t wifi_config = {
        .sta = {
            .ssid = CONFIG_ESP_WIFI_SSID,
            /* NOTE(review): "<PASSWORD>" is a redaction placeholder — the
             * original presumably used CONFIG_ESP_WIFI_PASSWORD; restore it. */
            .password = <PASSWORD>,
        },
    };
    ESP_ERROR_CHECK(esp_wifi_set_mode(WIFI_MODE_STA));
    ESP_ERROR_CHECK(esp_wifi_set_config(ESP_IF_WIFI_STA, &wifi_config));
    ESP_LOGI(TAG, "start the WIFI SSID:[%s]", CONFIG_ESP_WIFI_SSID);
    ESP_ERROR_CHECK(esp_wifi_start());
    ESP_LOGI(TAG, "Waiting for wifi");
    xEventGroupWaitBits(wifi_event_group, CONNECTED_BIT, false, true, portMAX_DELAY);
}
/* FreeRTOS task: connect to Google Cloud IoT Core over MQTT.
 * Builds an ES256 JWT (1 h lifetime) from the embedded PEM private key,
 * connects with the device path as client id, then runs the blocking IoTC
 * event loop until on_connection_state_changed() stops it.
 * Fix: the JWT-failure printf used "%ul", which prints the value as
 * unsigned followed by a literal 'l'; replaced with "%d" plus a newline. */
static void mqtt_task(void *pvParameters) {
    iotc_crypto_key_data_t iotc_connect_private_key_data;
    iotc_connect_private_key_data.crypto_key_signature_algorithm = IOTC_CRYPTO_KEY_SIGNATURE_ALGORITHM_ES256;
    iotc_connect_private_key_data.crypto_key_union_type = IOTC_CRYPTO_KEY_UNION_TYPE_PEM;
    iotc_connect_private_key_data.crypto_key_union.key_pem.key = (char *) ec_pv_key_start;
    /* initialize iotc library and create a context to use to connect to the
     * GCP IoT Core Service. */
    const iotc_state_t error_init = iotc_initialize();
    if (IOTC_STATE_OK != error_init) {
        printf(" iotc failed to initialize, error: %d\n", error_init);
        vTaskDelete(NULL);
    }
    iotc_context = iotc_create_context();
    if (IOTC_INVALID_CONTEXT_HANDLE >= iotc_context) {
        printf(" iotc failed to create context, error: %d\n", -iotc_context);
        vTaskDelete(NULL);
    }
    const uint16_t connection_timeout = 0;  /* 0 = library default */
    const uint16_t keepalive_timeout = 20;  /* seconds */
    char jwt[IOTC_JWT_SIZE] = {0};
    size_t bytes_written = 0;
    iotc_state_t state = iotc_create_iotcore_jwt(CONFIG_GIOT_PROJECT_ID, 3600, &iotc_connect_private_key_data, jwt, IOTC_JWT_SIZE, &bytes_written);
    if (IOTC_STATE_OK != state) {
        printf("iotc_create_iotcore_jwt returned with error: %d\n", (int)state);
        vTaskDelete(NULL);
    }
    char *device_path = NULL;
    asprintf(&device_path, DEVICE_PATH, CONFIG_GIOT_PROJECT_ID, CONFIG_GIOT_LOCATION, CONFIG_GIOT_REGISTRY_ID, CONFIG_GIOT_DEVICE_ID);
    iotc_connect(iotc_context, NULL, jwt, device_path, connection_timeout, keepalive_timeout, &on_connection_state_changed);
    free(device_path);
    /* Blocks this task running the IoTC event loop until iotc_events_stop(). */
    iotc_events_process_blocking();
    iotc_delete_context(iotc_context);
    iotc_shutdown();
    vTaskDelete(NULL);
}
/* Entry point: init NVS (erasing it once if the partition layout changed),
 * join WiFi, wait for SNTP time (JWTs need a correct clock), then start the
 * MQTT task with an 8 KiB stack at priority 5. */
void app_main() {
    //Initialize NVS
    esp_err_t ret = nvs_flash_init();
    if (ret == ESP_ERR_NVS_NO_FREE_PAGES || ret == ESP_ERR_NVS_NEW_VERSION_FOUND) {
        ESP_ERROR_CHECK(nvs_flash_erase());
        ret = nvs_flash_init();
    }
    ESP_ERROR_CHECK(ret);
    ESP_LOGI(TAG, "ESP_WIFI_MODE_STA");
    wifi_init();
    obtain_time();
    xTaskCreate(&mqtt_task, "mqtt_task", 8192, NULL, 5, NULL);
}
| 3,923 |
1,402 | <gh_stars>1000+
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: <NAME>, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testCal3DS2.cpp
* @brief Unit tests for Cal3DS2 calibration model.
*/
#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/Testable.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/numericalDerivative.h>
#include <gtsam/geometry/Cal3DS2.h>
using namespace gtsam;
GTSAM_CONCEPT_TESTABLE_INST(Cal3DS2)
GTSAM_CONCEPT_MANIFOLD_INST(Cal3DS2)
// Shared fixtures: a fully-populated calibration
// (fx, fy, skew, px, py, k1, k2, p1, p2) and a sample image-plane point.
static Cal3DS2 K(500, 100, 0.1, 320, 240, 1e-3, 2.0 * 1e-3, 3.0 * 1e-3,
                 4.0 * 1e-3);
static Point2 p(2, 3);
/* ************************************************************************* */
// uncalibrate() must match a hand-computed radial/tangential distortion
// (Brown–Conrady form using k1,k2,p1,p2) followed by the K matrix projection.
TEST(Cal3DS2, Uncalibrate) {
    Vector k = K.k();
    double r = p.x() * p.x() + p.y() * p.y();          // squared radius
    double g = 1 + k[0] * r + k[1] * r * r;            // radial gain
    double tx = 2 * k[2] * p.x() * p.y() + k[3] * (r + 2 * p.x() * p.x());
    double ty = k[2] * (r + 2 * p.y() * p.y()) + 2 * k[3] * p.x() * p.y();
    Vector v_hat = (Vector(3) << g * p.x() + tx, g * p.y() + ty, 1.0).finished();
    Vector v_i = K.K() * v_hat;                        // homogeneous projection
    Point2 p_i(v_i(0) / v_i(2), v_i(1) / v_i(2));
    Point2 q = K.uncalibrate(p);
    CHECK(assert_equal(q, p_i));
}
// calibrate() must invert uncalibrate() (round trip within 1e-5).
TEST(Cal3DS2, Calibrate) {
    Point2 pn(0.5, 0.5);
    Point2 pi = K.uncalibrate(pn);
    Point2 pn_hat = K.calibrate(pi);
    CHECK(traits<Point2>::Equals(pn, pn_hat, 1e-5));
}
// Free-function wrapper so numericalDerivative* can differentiate uncalibrate.
Point2 uncalibrate_(const Cal3DS2& k, const Point2& pt) {
    return k.uncalibrate(pt);
}
/* ************************************************************************* */
// Analytic Jacobian of uncalibrate w.r.t. the calibration parameters must
// match the numerical derivative (both the fused and standalone APIs).
TEST(Cal3DS2, Duncalibrate1) {
    Matrix computed;
    K.uncalibrate(p, computed, boost::none);
    Matrix numerical = numericalDerivative21(uncalibrate_, K, p, 1e-7);
    CHECK(assert_equal(numerical, computed, 1e-5));
    Matrix separate = K.D2d_calibration(p);
    CHECK(assert_equal(numerical, separate, 1e-5));
}
/* ************************************************************************* */
// Analytic Jacobian of uncalibrate w.r.t. the point must match the
// numerical derivative (both the fused and standalone APIs).
TEST(Cal3DS2, Duncalibrate2) {
    Matrix computed;
    K.uncalibrate(p, boost::none, computed);
    Matrix numerical = numericalDerivative22(uncalibrate_, K, p, 1e-7);
    CHECK(assert_equal(numerical, computed, 1e-5));
    Matrix separate = K.D2d_intrinsic(p);
    CHECK(assert_equal(numerical, separate, 1e-5));
}
// Free-function wrapper so numericalDerivative* can differentiate calibrate.
Point2 calibrate_(const Cal3DS2& k, const Point2& pt) {
    return k.calibrate(pt);
}
/* ************************************************************************* */
// Analytic Jacobians of calibrate (w.r.t. calibration and point) must match
// numerical derivatives, evaluated at a round-tripped point.
TEST(Cal3DS2, Dcalibrate) {
    Point2 pn(0.5, 0.5);
    Point2 pi = K.uncalibrate(pn);
    Matrix Dcal, Dp;
    K.calibrate(pi, Dcal, Dp);
    Matrix numerical1 = numericalDerivative21(calibrate_, K, pi, 1e-7);
    CHECK(assert_equal(numerical1, Dcal, 1e-5));
    Matrix numerical2 = numericalDerivative22(calibrate_, K, pi, 1e-7);
    CHECK(assert_equal(numerical2, Dp, 1e-5));
}
/* ************************************************************************* */
// Sanity: a calibration compares equal to itself within tolerance.
TEST(Cal3DS2, Equal) { CHECK(assert_equal(K, K, 1e-5)); }
/* ************************************************************************* */
// Manifold operations: dimension is 9; retract adds the tangent vector
// element-wise, and localCoordinates recovers it.
TEST(Cal3DS2, Retract) {
    Cal3DS2 expected(500 + 1, 100 + 2, 0.1 + 3, 320 + 4, 240 + 5, 1e-3 + 6,
                     2.0 * 1e-3 + 7, 3.0 * 1e-3 + 8, 4.0 * 1e-3 + 9);
    EXPECT_LONGS_EQUAL(Cal3DS2::Dim(), 9);
    EXPECT_LONGS_EQUAL(expected.dim(), 9);
    Vector9 d;
    d << 1, 2, 3, 4, 5, 6, 7, 8, 9;
    Cal3DS2 actual = K.retract(d);
    CHECK(assert_equal(expected, actual, 1e-7));
    CHECK(assert_equal(d, K.localCoordinates(actual), 1e-7));
}
/* ************************************************************************* */
// operator<< / print output must list all nine parameters in this format.
TEST(Cal3DS2, Print) {
    Cal3DS2 cal(1, 2, 3, 4, 5, 6, 7, 8, 9);
    std::stringstream os;
    os << "fx: " << cal.fx() << ", fy: " << cal.fy() << ", s: " << cal.skew()
       << ", px: " << cal.px() << ", py: " << cal.py() << ", k1: " << cal.k1()
       << ", k2: " << cal.k2() << ", p1: " << cal.p1() << ", p2: " << cal.p2();
    EXPECT(assert_stdout_equal(os.str(), cal));
}
/* ************************************************************************* */
// CppUnitLite entry point: run every registered TEST above.
int main() {
    TestResult tr;
    return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */
| 1,710 |
493 | <gh_stars>100-1000
/**
* Copyright (C) 2016 Turi
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the BSD license. See the LICENSE file for details.
*/
#ifndef GRAPHLAB_UNITY_TOOLKIT_INVOCATION_HPP
#define GRAPHLAB_UNITY_TOOLKIT_INVOCATION_HPP
#include <string>
#include <functional>
#include <unity/lib/variant.hpp>
#include <unity/lib/toolkit_class_registry.hpp>
namespace graphlab {
/**
* \ingroup unity
* The arguments used to invoke the toolkit execution.
* See \ref toolkit_function_specification for details.
*/
/**
 * \ingroup unity
 * The arguments used to invoke the toolkit execution.
 * See \ref toolkit_function_specification for details.
 */
struct toolkit_function_invocation {
  // Default progress sink logs each message at LOG_INFO with a
  // "PROGRESS: " prefix; callers may replace it.
  toolkit_function_invocation() {
    progress = [=](std::string s) {
      logstream(LOG_INFO) << "PROGRESS: " << s << std::endl;
    };
  }
  /**
   * The parameters passed to the toolkit from the user.
   * The options set will be cleaned: every option in
   * \ref toolkit_function_specification::default_options will show appear here,
   * and there will not be extraneous options.
   */
  variant_map_type params;
  /**
   * A pointer to a function which prints execution progress.
   */
  std::function<void(std::string)> progress;
  /**
   * A pointer to the class registry.
   * NOTE(review): raw non-owning pointer — lifetime managed by the caller.
   */
  toolkit_class_registry* classes;
};
}
#endif
| 425 |
340 | <reponame>definitelyNotFBI/utt
// Copyright 2018 VMware, all rights reserved
//
// Storage key comparators implementation.
#ifdef USE_ROCKSDB
#include "rocksdb/key_comparator.h"
#include "Logger.hpp"
#include "hex_tools.h"
#include "sliver.hpp"
#include "rocksdb/client.h"
#include <chrono>
using logging::Logger;
namespace concord {
namespace storage {
namespace rocksdb {
// Delegates RocksDB slice ordering to the storage-layer composed-key
// comparator, tracing both operands (hex) and the verdict.
int KeyComparator::Compare(const ::rocksdb::Slice& _a, const ::rocksdb::Slice& _b) const {
  const int result = key_comparator_->composedKeyComparison(_a.data(), _a.size(), _b.data(), _b.size());
  LOG_TRACE(logger_, "Compared " << _a.ToString(true) << " with " << _b.ToString(true) << ", returning " << result);
  return result;
}
} // namespace rocksdb
} // namespace storage
} // namespace concord
#endif
| 282 |
2,219 | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "quic/masque/masque_utils.h"
namespace quic {
// Returns every supported QUIC version usable by MASQUE — i.e. the
// HTTP/3-capable IETF versions — enabling each as a side effect.
// CHECK-fails if the build supports none.
ParsedQuicVersionVector MasqueSupportedVersions() {
  QuicVersionInitializeSupportForIetfDraft();
  ParsedQuicVersionVector versions;
  for (const ParsedQuicVersion& version : AllSupportedVersions()) {
    // Use all versions that support IETF QUIC.
    if (version.UsesHttp3()) {
      QuicEnableVersion(version);
      versions.push_back(version);
    }
  }
  QUICHE_CHECK(!versions.empty());
  return versions;
}
// Config for the encapsulated (inner) connection: cap the send packet size
// so inner packets fit inside the outer MASQUE tunnel.
QuicConfig MasqueEncapsulatedConfig() {
  QuicConfig masque_config;
  masque_config.SetMaxPacketSizeToSend(kMasqueMaxEncapsulatedPacketSize);
  return masque_config;
}
// Human-readable name for a MasqueMode; unknown enum values (e.g. from
// casts) render as "Unknown(<n>)". The switch has no default so the
// compiler flags newly added modes.
std::string MasqueModeToString(MasqueMode masque_mode) {
  switch (masque_mode) {
    case MasqueMode::kInvalid:
      return "Invalid";
    case MasqueMode::kLegacy:
      return "Legacy";
    case MasqueMode::kOpen:
      return "Open";
  }
  return absl::StrCat("Unknown(", static_cast<int>(masque_mode), ")");
}
// Stream insertion in terms of MasqueModeToString, for logging.
std::ostream& operator<<(std::ostream& os, const MasqueMode& masque_mode) {
  os << MasqueModeToString(masque_mode);
  return os;
}
} // namespace quic
| 444 |
1,435 | <gh_stars>1000+
#!/usr/bin/env python
import matplotlib.pyplot as plt
import pywt
import pywt.data
# Demo: multiresolution analysis of the "camera" test image with sym2,
# 5 levels, plotting every detail subband plus the approximation.
camera = pywt.data.camera()
wavelet = pywt.Wavelet('sym2')
level = 5
# Note: Running with transform="dwtn" is faster, but the resulting images will
# look substantially worse.
# NOTE(review): `pywt.mran` — verify this API name against the installed
# pywt version (the documented MRA helper is `pywt.mra`/`pywt.mran` depending
# on release).
coeffs = pywt.mran(camera, wavelet=wavelet, level=level, transform='swtn')
ca = coeffs[0]          # coarsest approximation
details = coeffs[1:]    # per-level dicts with 'ad', 'da', 'dd' subbands
# Plot all coefficient subbands and the original
gridspec_kw = dict(hspace=0.1, wspace=0.1)
fontdict = dict(verticalalignment='center', horizontalalignment='center',
                color='k')
fig, axes = plt.subplots(len(details) + 1, 3, figsize=[5, 8], sharex=True,
                         sharey=True, gridspec_kw=gridspec_kw)
imshow_kw = dict(interpolation='nearest', cmap=plt.cm.gray)
# One row per level (finest at the top), columns = ad / da / dd subbands.
for i, x in enumerate(details):
    axes[i][0].imshow(details[-i - 1]['ad'], **imshow_kw)
    axes[i][1].imshow(details[-i - 1]['da'], **imshow_kw)
    axes[i][2].imshow(details[-i - 1]['dd'], **imshow_kw)
    axes[i][0].text(256, 50, 'ad%d' % (i + 1), fontdict=fontdict)
    axes[i][1].text(256, 50, 'da%d' % (i + 1), fontdict=fontdict)
    axes[i][2].text(256, 50, 'dd%d' % (i + 1), fontdict=fontdict)
# Bottom row: approximation next to the original for visual comparison.
axes[-1][0].imshow(ca, **imshow_kw)
axes[-1][0].text(256, 50, 'approx.', fontdict=fontdict)
axes[-1][1].imshow(camera, **imshow_kw)
axes[-1][1].text(256, 50, 'original', fontdict=fontdict)
for ax in axes.ravel():
    ax.set_axis_off()
| 631 |
711 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.test.plugins;
import io.apiman.gateway.engine.beans.ApiRequest;
import io.apiman.gateway.engine.beans.ApiResponse;
import io.apiman.gateway.engine.policies.AbstractMappedPolicy;
import io.apiman.gateway.engine.policy.IPolicyChain;
import io.apiman.gateway.engine.policy.IPolicyContext;
/**
* @author <EMAIL>
*/
/**
 * Test-fixture policy used to exercise plugin deletion: v1 stamps every
 * request with an {@code X-DeletedPluginPolicy-Version: v1} header so tests
 * can detect which plugin version handled the traffic.
 *
 * @author <EMAIL>
 */
public class DeletedPluginPolicy extends AbstractMappedPolicy<DeletedPluginPolicyBean> {
    /**
     * Constructor.
     */
    public DeletedPluginPolicy() {
    }
    /**
     * @see io.apiman.gateway.engine.policies.AbstractMappedPolicy#getConfigurationClass()
     */
    @Override
    protected Class<DeletedPluginPolicyBean> getConfigurationClass() {
        return DeletedPluginPolicyBean.class;
    }
    /**
     * Marks the request as handled by v1 of this policy, then delegates.
     *
     * @see io.apiman.gateway.engine.policies.AbstractMappedPolicy#doApply(io.apiman.gateway.engine.beans.ApiRequest, io.apiman.gateway.engine.policy.IPolicyContext, java.lang.Object, io.apiman.gateway.engine.policy.IPolicyChain)
     */
    @SuppressWarnings("nls")
    @Override
    protected void doApply(ApiRequest request, IPolicyContext context, DeletedPluginPolicyBean config,
            IPolicyChain<ApiRequest> chain) {
        System.out.println("Executing v1 of DeletedPluginPolicy.");
        request.getHeaders().put("X-DeletedPluginPolicy-Version", "v1");
        super.doApply(request, context, config, chain);
    }
    /**
     * Response path is a pure pass-through; kept only for symmetry.
     *
     * @see io.apiman.gateway.engine.policies.AbstractMappedPolicy#doApply(io.apiman.gateway.engine.beans.ApiResponse, io.apiman.gateway.engine.policy.IPolicyContext, java.lang.Object, io.apiman.gateway.engine.policy.IPolicyChain)
     */
    @Override
    protected void doApply(ApiResponse response, IPolicyContext context, DeletedPluginPolicyBean config,
            IPolicyChain<ApiResponse> chain) {
        super.doApply(response, context, config, chain);
    }
}
| 844 |
19,438 | <reponame>r00ster91/serenity<filename>Userland/Libraries/LibWeb/Painting/StackingContext.h<gh_stars>1000+
/*
* Copyright (c) 2020, <NAME> <<EMAIL>>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Vector.h>
#include <LibWeb/Layout/Node.h>
namespace Web::Layout {
// A node in the CSS stacking-context tree: paints its box and descendants
// in the spec-mandated phase order and routes hit testing.
class StackingContext {
public:
    StackingContext(Box&, StackingContext* parent);
    StackingContext* parent() { return m_parent; }
    const StackingContext* parent() const { return m_parent; }
    // Paint phases, applied to descendants in this order.
    enum class StackingContextPaintPhase {
        BackgroundAndBorders,
        Floats,
        Foreground,
        FocusAndOverlay,
    };
    void paint_descendants(PaintContext&, Node&, StackingContextPaintPhase);
    void paint(PaintContext&);
    HitTestResult hit_test(const Gfx::IntPoint&, HitTestType) const;
    // Dumps the subtree for debugging, indented by `indent` levels.
    void dump(int indent = 0) const;
private:
    Box& m_box;
    StackingContext* const m_parent { nullptr };  // null for the root context
    Vector<StackingContext*> m_children;          // non-owning child pointers
    void paint_internal(PaintContext&);
};
}
| 387 |
498 | package com.bizzan.bitrade.job;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import javax.mail.MessagingException;
import javax.mail.internet.MimeMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ValueOperations;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.ui.freemarker.FreeMarkerTemplateUtils;
import org.springframework.web.client.RestTemplate;
import com.alibaba.fastjson.JSONObject;
import com.bizzan.bitrade.constant.SysConstant;
import com.bizzan.bitrade.constant.TransactionType;
import com.bizzan.bitrade.entity.Coin;
import com.bizzan.bitrade.entity.Member;
import com.bizzan.bitrade.entity.MemberInviteStastic;
import com.bizzan.bitrade.entity.MemberInviteStasticRank;
import com.bizzan.bitrade.entity.MemberTransaction;
import com.bizzan.bitrade.service.MemberInviteStasticService;
import com.bizzan.bitrade.service.MemberPromotionService;
import com.bizzan.bitrade.service.MemberService;
import com.bizzan.bitrade.service.MemberTransactionService;
import com.bizzan.bitrade.system.CoinExchangeFactory;
import com.bizzan.bitrade.vo.MemberPromotionStasticVO;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import lombok.extern.slf4j.Slf4j;
@Component
@Slf4j
public class MemberInviteStasticJob {
@Autowired
private RestTemplate restTemplate;
@Autowired
private CoinExchangeFactory coinExchangeFactory;
@Autowired
private RedisTemplate redisTemplate;
@Autowired
private MemberService memberService;
@Autowired
private MemberTransactionService memberTransactionService;
@Autowired
private MemberPromotionService memberPromotionService;
@Autowired
private MemberInviteStasticService memberInviteStatsticService;
@Autowired
private JavaMailSender javaMailSender;
@Value("${spring.mail.username}")
private String from;
@Value("${spark.system.host}")
private String host;
@Value("${spark.system.name}")
private String company;
@Value("${spark.system.admins}")
private String admins;
private String serviceName = "bitrade-market";
private Random random = new Random();
/**
* 每日2点处理,统计用户推广币币手续费返佣结果(0 0 2 * * *),总榜
*/
@Scheduled(cron = "0 0 2 * * *")
public void stasticMemberInviteAll() {
//获取当前时间
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String dateNow = df.format(new Date());
int pageNo = 0;
int pageSize = 100;
while(true) {
Page<Member> members = memberService.findByPage(pageNo, pageSize);
List<Member> all = members.getContent();
if (all != null && all.size() > 0) {
for(Member item : all) {
if(item.getId() > 10000) {
List<MemberTransaction> transactions = memberTransactionService.queryByMember(item.getId(), TransactionType.PROMOTION_AWARD);
BigDecimal btcTotal = BigDecimal.ZERO;
BigDecimal ethTotal = BigDecimal.ZERO;
BigDecimal usdtTotal = BigDecimal.ZERO;
BigDecimal estimatedTotal = BigDecimal.ZERO;
if(transactions != null && transactions.size() > 0) {
for(MemberTransaction tItem : transactions) {
if(tItem.getSymbol().equals("BTC")) {
btcTotal = btcTotal.add(tItem.getAmount());
}
if(tItem.getSymbol().equals("ETH")) {
ethTotal = ethTotal.add(tItem.getAmount());
}
if(tItem.getSymbol().equals("USDT")) {
usdtTotal = usdtTotal.add(tItem.getAmount());
}
}
// 计算估算总额
CoinExchangeFactory.ExchangeRate rateBTC = coinExchangeFactory.get("BTC");
estimatedTotal = estimatedTotal.add(btcTotal.multiply(rateBTC.usdRate));
CoinExchangeFactory.ExchangeRate rateETH = coinExchangeFactory.get("ETH");
estimatedTotal = estimatedTotal.add(ethTotal.multiply(rateETH.usdRate));
estimatedTotal = estimatedTotal.add(usdtTotal).setScale(2);
}
// 更新 or 保存记录
MemberInviteStastic mis = memberInviteStatsticService.findByMemberId(item.getId());
if(mis != null) {
mis.setUsdtReward(usdtTotal);
mis.setBtcReward(btcTotal);
mis.setEthReward(ethTotal);
mis.setLevelOne(item.getFirstLevel());
mis.setLevelTwo(item.getSecondLevel());
mis.setEstimatedReward(estimatedTotal);
mis.setStasticDate(dateNow);
memberInviteStatsticService.save(mis);
}else {
mis = new MemberInviteStastic();
mis.setMemberId(item.getId());
mis.setUserIdentify(item.getMobilePhone());
mis.setIsRobot(0);
mis.setUsdtReward(usdtTotal);
mis.setBtcReward(btcTotal);
mis.setEthReward(ethTotal);
mis.setLevelOne(item.getFirstLevel());
mis.setLevelTwo(item.getSecondLevel());
mis.setEstimatedReward(estimatedTotal);
mis.setExtraReward(BigDecimal.ZERO);
mis.setStasticDate(dateNow);
memberInviteStatsticService.save(mis);
}
}else {
// 更新 or 保存记录
MemberInviteStastic mis = memberInviteStatsticService.findByMemberId(item.getId());
if(mis != null) {
int rand1 = random.nextInt(100);
mis.setUsdtReward(BigDecimal.ZERO);
mis.setBtcReward(BigDecimal.ZERO);
mis.setEthReward(BigDecimal.ZERO);
mis.setLevelOne(mis.getLevelOne() + (rand1 % 5)); // 一级好友人数每天自动增加0-5个
mis.setLevelTwo(mis.getLevelTwo() + (rand1 % 10)); // 二级好友人数每天自动增加0-10个
if(mis.getLevelOne() > 100) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(80 + (rand1 % 80)).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 80 - 160
}else if(mis.getLevelOne() > 80) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(70 + (rand1 % 70)).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 70 - 140
}else if(mis.getLevelOne() > 60) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(60 + (rand1 % 60)).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 60 - 120
}else if(mis.getLevelOne() > 50) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(50 + (rand1 % 50)).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 50 - 100
}else if(mis.getLevelOne() > 40) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(40 + (rand1 % 40)).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 40 - 80
}else if(mis.getLevelOne() > 30) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(30 + (rand1 % 30)).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 30 - 60
}else if(mis.getLevelOne() > 20) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(20 + (rand1 % 20)).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 20 - 40
}else if(mis.getLevelOne() > 10) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(10 + (rand1 % 10)).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 10-20
}else if(mis.getLevelOne() > 0) {
mis.setEstimatedReward(mis.getEstimatedReward().add(BigDecimal.valueOf(rand1 % 10).add(BigDecimal.valueOf(random.nextDouble()).setScale(6, BigDecimal.ROUND_DOWN)))); // 折合USDT奖励每天自动增加0 - 10刀
}else {
}
mis.setStasticDate(dateNow);
memberInviteStatsticService.save(mis);
}else {
mis = new MemberInviteStastic();
mis.setMemberId(item.getId());
mis.setUserIdentify(item.getMobilePhone());
mis.setIsRobot(1);
mis.setUsdtReward(BigDecimal.ZERO);
mis.setBtcReward(BigDecimal.ZERO);
mis.setEthReward(BigDecimal.ZERO);
mis.setLevelOne(0);
mis.setLevelTwo(0);
mis.setEstimatedReward(BigDecimal.ZERO);
mis.setExtraReward(BigDecimal.ZERO);
mis.setStasticDate(dateNow);
memberInviteStatsticService.save(mis);
}
}
}
pageNo++;
}else {
break;
}
}
int top = 20;
List<MemberInviteStastic> topRewardList = memberInviteStatsticService.topRewardAmount(top);
List<MemberInviteStastic> topInviteList = memberInviteStatsticService.topInviteCount(top);
// 发送邮件通知
String[] adminList = admins.split(",");
for(int i = 0; i < adminList.length; i++) {
try {
sendEmailMsg(adminList[i], topRewardList, topInviteList, "推广合伙人排名(总榜)");
} catch (MessagingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (TemplateException e) {
e.printStackTrace();
}
}
}
/**
* 日榜
*/
//@Scheduled(cron="0 0 2 * * *")
public void stasticMemberInviteDay() {
// SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date cTime = new Date();
Calendar calendar = Calendar.getInstance();
calendar.setTime(cTime);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
Date endDate = calendar.getTime();
Calendar calendar2 = Calendar.getInstance();
Date yd = new Date(cTime.getTime() - 24 * 3600 * 1000);
calendar2.setTime(yd);
calendar2.set(Calendar.HOUR_OF_DAY, 0);
calendar2.set(Calendar.MINUTE, 0);
calendar2.set(Calendar.SECOND, 0);
Date startDate = calendar2.getTime();
List<MemberPromotionStasticVO> result = memberPromotionService.getDateRangeRank(0, startDate, endDate, 20);
List<MemberInviteStasticRank> allList = new ArrayList<MemberInviteStasticRank>();
for(MemberPromotionStasticVO vo : result) {
MemberInviteStasticRank misr = new MemberInviteStasticRank();
misr.setLevelOne(vo.getCount());
misr.setLevelTwo(0);
misr.setMemberId(vo.getInviterId());
misr.setStasticDate(endDate);
misr.setType(0);// 0标识DAY
Member m = memberService.findOne(vo.getInviterId());
misr.setUserIdentify(m.getMobilePhone());
// ID超过10000则不是机器人
if(m.getId().compareTo(Long.valueOf(10000)) >= 0) {
misr.setIsRobot(0);
}else {
misr.setIsRobot(1);
}
misr = memberInviteStatsticService.saveRank(misr);
allList.add(misr);
}
// 发送邮件通知
String[] adminList = admins.split(",");
for(int i = 0; i < adminList.length; i++) {
try {
sendEmailMsg(adminList[i], allList, "推广合伙人排名(日榜)");
} catch (MessagingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (TemplateException e) {
e.printStackTrace();
}
}
}
/**
* 周榜(每周一2点30统计)
*/
@Scheduled(cron="0 30 2 ? * MON")
public void stasticMemberInviteWeek() {
Date cTime = new Date();
Calendar calendar = Calendar.getInstance();
calendar.setTime(cTime);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
Date endDate = calendar.getTime();
Calendar calendar2 = Calendar.getInstance();
Date yd = new Date(cTime.getTime() - 7 * 24 * 3600 * 1000);
calendar2.setTime(yd);
calendar2.set(Calendar.HOUR, 0);
calendar2.set(Calendar.MINUTE, 0);
calendar2.set(Calendar.SECOND, 0);
Date startDate = calendar2.getTime();
List<MemberPromotionStasticVO> result = memberPromotionService.getDateRangeRank(0, startDate, endDate, 20);
List<MemberInviteStasticRank> allList = new ArrayList<MemberInviteStasticRank>();
for(MemberPromotionStasticVO vo : result) {
MemberInviteStasticRank misr = new MemberInviteStasticRank();
misr.setLevelOne(vo.getCount());
misr.setLevelTwo(0);
misr.setMemberId(vo.getInviterId());
misr.setStasticDate(endDate);
misr.setType(1);// 1标识WEEK
Member m = memberService.findOne(vo.getInviterId());
misr.setUserIdentify(m.getMobilePhone());
// ID超过10000则不是机器人
if(m.getId().compareTo(Long.valueOf(10000)) >= 0) {
misr.setIsRobot(0);
}else {
misr.setIsRobot(1);
}
misr = memberInviteStatsticService.saveRank(misr);
allList.add(misr);
}
// 发送邮件通知
String[] adminList = admins.split(",");
for(int i = 0; i < adminList.length; i++) {
try {
sendEmailMsg(adminList[i], allList, "推广合伙人排名(周榜)");
} catch (MessagingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (TemplateException e) {
e.printStackTrace();
}
}
}
/**
* 月榜(每月1号3点统计)
*/
@Scheduled(cron="0 0 3 1 * ?")
public void stasticMemberInviteMonth() {
Date cTime = new Date();
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.MONTH, 0);
calendar.set(Calendar.DAY_OF_MONTH,1);//1:本月第一天
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
Date endDate = calendar.getTime();
Calendar calendar2 = Calendar.getInstance();
calendar2.add(Calendar.MONTH, -1); // 上月第一天
calendar2.set(Calendar.DAY_OF_MONTH, 1);
calendar2.set(Calendar.HOUR_OF_DAY, 0);
calendar2.set(Calendar.MINUTE, 0);
calendar2.set(Calendar.SECOND, 0);
Date startDate = calendar2.getTime();
List<MemberPromotionStasticVO> result = memberPromotionService.getDateRangeRank(0, startDate, endDate, 20);
List<MemberInviteStasticRank> allList = new ArrayList<MemberInviteStasticRank>();
for(MemberPromotionStasticVO vo : result) {
MemberInviteStasticRank misr = new MemberInviteStasticRank();
misr.setLevelOne(vo.getCount());
misr.setLevelTwo(0);
misr.setMemberId(vo.getInviterId());
misr.setStasticDate(endDate);
misr.setType(2);// 1标识Month
Member m = memberService.findOne(vo.getInviterId());
misr.setUserIdentify(m.getMobilePhone());
// ID超过10000则不是机器人
if(m.getId().compareTo(Long.valueOf(10000)) >= 0) {
misr.setIsRobot(0);
}else {
misr.setIsRobot(1);
}
misr = memberInviteStatsticService.saveRank(misr);
allList.add(misr);
}
// 发送邮件通知
String[] adminList = admins.split(",");
for(int i = 0; i < adminList.length; i++) {
try {
sendEmailMsg(adminList[i], allList, "推广合伙人排名(周榜)");
} catch (MessagingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (TemplateException e) {
e.printStackTrace();
}
}
}
/**
* 每天8点将排名同步到Redis
*/
@Scheduled(cron="0 0 8 * * *")
public void staticSync(){
// 总榜单同步Redis
ValueOperations valueOperations = redisTemplate.opsForValue();
int top = 20;
JSONObject resultObj = new JSONObject();
List<MemberInviteStastic> topReward = memberInviteStatsticService.topRewardAmount(top);
List<MemberInviteStastic> topInvite = memberInviteStatsticService.topInviteCount(top);
for(MemberInviteStastic item1 : topReward) {
item1.setUserIdentify(item1.getUserIdentify().substring(0, 3) + "****" + item1.getUserIdentify().substring(item1.getUserIdentify().length() - 4, item1.getUserIdentify().length()));
}
for(MemberInviteStastic item2 : topInvite) {
item2.setUserIdentify(item2.getUserIdentify().substring(0, 3) + "****" + item2.getUserIdentify().substring(item2.getUserIdentify().length() - 4, item2.getUserIdentify().length()));
}
resultObj.put("topreward", topReward);
resultObj.put("topinvite", topInvite);
valueOperations.set(SysConstant.MEMBER_PROMOTION_TOP_RANK+top, resultObj, SysConstant.MEMBER_PROMOTION_TOP_RANK_EXPIRE_TIME, TimeUnit.SECONDS);
}
@Async
public void sendEmailMsg(String email,
List<MemberInviteStastic> topRewardList,
List<MemberInviteStastic> topInviteList,
String subject) throws MessagingException, IOException, TemplateException {
MimeMessage mimeMessage = javaMailSender.createMimeMessage();
MimeMessageHelper helper = null;
helper = new MimeMessageHelper(mimeMessage, true);
helper.setFrom(from);
helper.setTo(email);
helper.setSubject(company + "-" + subject);
Map<String, Object> model = new HashMap<>(16);
model.put("topRewardList", topRewardList);
model.put("topInviteList", topInviteList);
Configuration cfg = new Configuration(Configuration.VERSION_2_3_26);
cfg.setClassForTemplateLoading(this.getClass(), "/templates");
Template template = cfg.getTemplate("promotionStastic.ftl");
String html = FreeMarkerTemplateUtils.processTemplateIntoString(template, model);
helper.setText(html, true);
//发送邮件
javaMailSender.send(mimeMessage);
}
@Async
public void sendEmailMsg(String email,
List<MemberInviteStasticRank> topInviteList,
String subject) throws MessagingException, IOException, TemplateException {
MimeMessage mimeMessage = javaMailSender.createMimeMessage();
MimeMessageHelper helper = null;
helper = new MimeMessageHelper(mimeMessage, true);
helper.setFrom(from);
helper.setTo(email);
helper.setSubject(company + "-" + subject);
Map<String, Object> model = new HashMap<>(16);
model.put("topInviteList", topInviteList);
Configuration cfg = new Configuration(Configuration.VERSION_2_3_26);
cfg.setClassForTemplateLoading(this.getClass(), "/templates");
Template template = cfg.getTemplate("promotionStasticRank.ftl");
String html = FreeMarkerTemplateUtils.processTemplateIntoString(template, model);
helper.setText(html, true);
//发送邮件
javaMailSender.send(mimeMessage);
}
}
| 9,142 |
package org.javalite.activejdbc.test_models;
import org.javalite.activejdbc.Model;
/**
 * ActiveJDBC test model with no custom behavior; all persistence logic is
 * inherited from {@link Model} (the backing table is resolved by ActiveJDBC's
 * naming conventions).
 */
public class PhoneNumber extends Model {
}
| 44 |
2,607 | <reponame>gitter-badger/P-2
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. */
package p.runtime.values;
import java.util.Arrays;
/**
 * A named-tuple runtime value: an ordered sequence of (field name, field
 * value) pairs. Field values are defensively cloned on construction; lookups
 * by name are linear scans over the field-name array.
 */
public class NamedTuple implements IValue<NamedTuple> {
    private String[] fieldNames;
    private IValue<?>[] fieldValues;

    public NamedTuple(String[] fieldNames, IValue<?>[] fieldValues)
    {
        assert fieldNames.length == fieldValues.length;
        this.fieldNames = fieldNames;
        this.fieldValues = new IValue<?>[fieldValues.length];
        // Clone each value so later mutation of the caller's array contents
        // cannot leak into this tuple.
        for (int i = 0; i < fieldValues.length; i++) {
            this.fieldValues[i] = IValue.safeClone(fieldValues[i]);
        }
    }

    /** Index of the given field name, or -1 when the tuple has no such field. */
    private int indexOf(String name) {
        for (int i = 0; i < fieldNames.length; i++) {
            if (name.equals(fieldNames[i])) {
                return i;
            }
        }
        return -1;
    }

    /** Returns the value stored under {@code name} (asserts that the field exists). */
    public IValue<?> getField(String name) {
        int idx = indexOf(name);
        assert idx >= 0;
        return idx >= 0 ? fieldValues[idx] : null;
    }

    /** Overwrites the value stored under {@code name} (asserts that the field exists). */
    public void setField(String name, IValue<?> value) {
        int idx = indexOf(name);
        assert idx >= 0;
        if (idx >= 0) {
            fieldValues[idx] = value;
        }
    }

    public String[] getFieldNames() {
        return fieldNames;
    }

    public IValue<?>[] getFieldValues() {
        return fieldValues;
    }

    @Override
    public int hashCode() {
        // Field names of equal tuples match pairwise (see equals), so hashing
        // only the values keeps the equals/hashCode contract.
        return Arrays.hashCode(fieldValues);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof NamedTuple)) {
            return false;
        }
        NamedTuple that = (NamedTuple) obj;
        String[] thatNames = that.getFieldNames();
        IValue<?>[] thatValues = that.getFieldValues();
        if (fieldValues.length != thatValues.length) {
            return false;
        }
        for (int i = 0; i < fieldValues.length; i++) {
            // Both the field name and its value must match position by position.
            if (!fieldNames[i].equals(thatNames[i])
                    || !IValue.safeEquals(fieldValues[i], thatValues[i])) {
                return false;
            }
        }
        return true;
    }

    @Override
    public NamedTuple genericClone() {
        // The constructor re-clones every value, so the copy is independent.
        return new NamedTuple(fieldNames, this.fieldValues);
    }

    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("(");
        for (int i = 0; i < fieldNames.length; i++) {
            if (i > 0) {
                out.append(", ");
            }
            out.append(fieldNames[i]).append(": ").append(fieldValues[i]);
        }
        return out.append(")").toString();
    }

    // Constructor and setters below exist only for JSON deserialization.
    public NamedTuple() {}

    public void setFieldNames(String[] fieldNames) {
        this.fieldNames = fieldNames;
    }

    public void setFieldValues(IValue<?>[] fieldValues) {
        this.fieldValues = fieldValues;
    }
}
| 1,442 |
310 | <filename>embedded-google-storage/src/main/java/com/playtika/test/storage/EmbeddedStorageDependenciesAutoConfiguration.java
package com.playtika.test.storage;
import com.google.cloud.storage.Storage;
import com.playtika.test.common.spring.DependsOnPostProcessor;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.boot.autoconfigure.AutoConfigureOrder;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static com.playtika.test.storage.StorageProperties.BEAN_NAME_EMBEDDED_GOOGLE_STORAGE_SERVER;
@Configuration
@AutoConfigureOrder
@ConditionalOnExpression("${embedded.containers.enabled:true}")
@ConditionalOnClass(Storage.class)
@ConditionalOnProperty(name = "embedded.google.storage.enabled", matchIfMissing = true)
public class EmbeddedStorageDependenciesAutoConfiguration {
@Bean
static BeanFactoryPostProcessor storageDependencyPostProcessor() {
return new DependsOnPostProcessor(Storage.class, new String[]{BEAN_NAME_EMBEDDED_GOOGLE_STORAGE_SERVER});
}
} | 411 |
# Algorithms/Bit Manipulation/Clear i'th bit from right in a number.py
def answer(n, i):
    """Return ``n`` with bit ``i`` (0-indexed from the least-significant bit) cleared."""
    mask = ~(1 << i)
    return n & mask
2,206 | /*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package org.deeplearning4j.earlystopping.termination;
import org.nd4j.shade.jackson.annotation.JsonInclude;
import org.nd4j.shade.jackson.annotation.JsonSubTypes;
import org.nd4j.shade.jackson.annotation.JsonTypeInfo;
import java.io.Serializable;
/**
 * A condition for terminating early-stopping training at the end of an epoch,
 * judged from the epoch number and the score computed for that epoch.
 * Implementations are serialized to/from JSON via the Jackson annotations below.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "@class")
@JsonInclude(JsonInclude.Include.NON_NULL)
public interface EpochTerminationCondition extends Serializable {
    /** Initialize the epoch termination condition (often a no-op)*/
    void initialize();
    /**Should the early stopping training terminate at this epoch, based on the calculated score and the epoch number?
     * Returns true if training should terminate, or false otherwise
     * @param epochNum Number of the last completed epoch (starting at 0)
     * @param score Score calculated for this epoch
     * @param minimize Whether lower scores are considered better (assumed from the parameter name; confirm with callers)
     * @return Whether training should be terminated at this epoch
     */
    boolean terminate(int epochNum, double score, boolean minimize);
}
| 517 |
2,813 | package org.jabref.gui.util;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javafx.scene.text.Text;
/**
* Utility class with static methods for javafx {@link Text} objects
*/
/**
 * Utility class with static methods for javafx {@link Text} objects:
 * creating styled Text nodes, converting simple HTML-ish markup
 * ({@code <tt>}, {@code <b>}) into lists of Text nodes, and formatting
 * strings with styled replacements.
 */
public class TooltipTextUtil {
    // (?s) tells Java that "." also matches the newline character
    // (?<...>...) are named groups in Java regular expressions: https://stackoverflow.com/a/415635/873282
    // .*? tells to match non-greedy (see https://stackoverflow.com/q/7124778/873282 for details)
    private static final Pattern TT_TEXT = Pattern.compile("(?s)(?<before>.*?)<tt>(?<in>.*?)</tt>");
    private static final Pattern B_TEXT = Pattern.compile("(?s)(?<before>.*?)<b>(?<in>.*?)</b>");

    public enum TextType {
        NORMAL, BOLD, ITALIC, MONOSPACED
    }

    /** Creates a Text node and applies the style class matching the given type. */
    public static Text createText(String textString, TextType textType) {
        Text text = new Text(textString);
        switch (textType) {
            case BOLD:
                text.getStyleClass().setAll("tooltip-text-bold");
                break;
            case ITALIC:
                text.getStyleClass().setAll("tooltip-text-italic");
                break;
            case MONOSPACED:
                text.getStyleClass().setAll("tooltip-text-monospaced");
                break;
            default:
                break;
        }
        return text;
    }

    /** Creates an unstyled (NORMAL) Text node. */
    public static Text createText(String textString) {
        return createText(textString, TextType.NORMAL);
    }

    /**
     * Creates a list of Text elements respecting <code>tt</code> and <code>b</code> markers.
     * Nesting of these markers is not possible.
     */
    public static List<Text> createTextsFromHtml(String htmlString) {
        List<Text> result = new ArrayList<>();

        Matcher matcher = TT_TEXT.matcher(htmlString);
        int lastMatchPos = 0;
        while (matcher.find()) {
            lastMatchPos = matcher.end();
            String before = matcher.group("before");
            if (!before.isBlank()) {
                // Text before a <tt> span may still contain <b> markers.
                result.addAll(convertHtmlBold(before));
            }
            String in = matcher.group("in");
            result.add(TooltipTextUtil.createText(in, TooltipTextUtil.TextType.MONOSPACED));
        }
        if (lastMatchPos < htmlString.length()) {
            String remaining = htmlString.substring(lastMatchPos);
            result.addAll(convertHtmlBold(remaining));
        }

        return result;
    }

    /** Converts <b>...</b> spans to BOLD Text nodes; everything else becomes NORMAL. */
    private static List<Text> convertHtmlBold(String htmlString) {
        List<Text> result = new ArrayList<>();

        Matcher matcher = B_TEXT.matcher(htmlString);
        int lastMatchPos = 0;
        while (matcher.find()) {
            lastMatchPos = matcher.end();
            String before = matcher.group("before");
            if (!before.isBlank()) {
                result.add(TooltipTextUtil.createText(before));
            }
            String in = matcher.group("in");
            result.add(TooltipTextUtil.createText(in, TextType.BOLD));
        }
        if (lastMatchPos < htmlString.length()) {
            String remaining = htmlString.substring(lastMatchPos);
            result.add(TooltipTextUtil.createText(remaining));
        }

        return result;
    }

    /**
     * Formats a String to multiple Texts by replacing some parts and adding font characteristics.
     * Each replacement marker must occur exactly once in the original string.
     */
    public static List<Text> formatToTexts(String original, TextReplacement... replacements) {
        List<Text> textList = new ArrayList<>();
        textList.add(new Text(original));
        for (TextReplacement replacement : replacements) {
            splitReplace(textList, replacement);
        }

        return textList;
    }

    /** Replaces one literal marker inside the Text list with a styled Text node. */
    private static void splitReplace(List<Text> textList, TextReplacement replacement) {
        Optional<Text> textContainingReplacement = textList.stream().filter(it -> it.getText().contains(replacement.toReplace)).findFirst();
        if (textContainingReplacement.isPresent()) {
            int index = textList.indexOf(textContainingReplacement.get());
            String original = textContainingReplacement.get().getText();
            textList.remove(index);
            // Bug fix: toReplace is a literal marker (the contains() lookup above
            // uses literal semantics), but String.split interprets its argument as
            // a regex. Quote it so metacharacters such as '(' or '$' cannot throw
            // a PatternSyntaxException or split at the wrong positions.
            String[] textParts = original.split(Pattern.quote(replacement.toReplace));
            if (textParts.length == 2) {
                if (textParts[0].isEmpty()) {
                    // Marker at the very start: styled part first, remainder after.
                    textList.add(index, TooltipTextUtil.createText(replacement.replacement, replacement.textType));
                    textList.add(index + 1, TooltipTextUtil.createText(textParts[1], TooltipTextUtil.TextType.NORMAL));
                } else {
                    textList.add(index, TooltipTextUtil.createText(textParts[0], TooltipTextUtil.TextType.NORMAL));
                    textList.add(index + 1, TooltipTextUtil.createText(replacement.replacement, replacement.textType));
                    textList.add(index + 2, TooltipTextUtil.createText(textParts[1], TooltipTextUtil.TextType.NORMAL));
                }
            } else if (textParts.length == 1) {
                // Marker at the very end: split() drops the trailing empty string.
                textList.add(index, TooltipTextUtil.createText(textParts[0], TooltipTextUtil.TextType.NORMAL));
                textList.add(index + 1, TooltipTextUtil.createText(replacement.replacement, replacement.textType));
            } else {
                throw new IllegalStateException("It is not allowed that the toReplace string: '" + replacement.toReplace
                        + "' exists multiple times in the original string");
            }
        } else {
            throw new IllegalStateException("It is not allowed that the toReplace string: '" + replacement.toReplace
                    + "' does not exist in the original string");
        }
    }

    /** One marker-to-styled-text substitution used by {@link #formatToTexts}. */
    public static class TextReplacement {
        private final String toReplace;
        private final String replacement;
        private final TooltipTextUtil.TextType textType;

        public TextReplacement(String toReplace, String replacement, TooltipTextUtil.TextType textType) {
            this.toReplace = toReplace;
            this.replacement = replacement;
            this.textType = textType;
        }
    }

    /** Renders a Text node back into the HTML-ish markup understood by this class. */
    public static String textToHtmlString(Text text) {
        String textString = text.getText();
        textString = textString.replace("\n", "<br>");
        if (text.getStyleClass().toString().contains("tooltip-text-monospaced")) {
            textString = String.format("<tt>%s</tt>", textString);
        }
        if (text.getStyleClass().toString().contains("tooltip-text-bold")) {
            textString = String.format("<b>%s</b>", textString);
        }
        if (text.getStyleClass().toString().contains("tooltip-text-italic")) {
            textString = String.format("<i>%s</i>", textString);
        }
        return textString;
    }
}
| 2,931 |
347 | <reponame>hbraha/ovirt-engine
package org.ovirt.engine.api.restapi.resource;
import org.ovirt.engine.api.model.Disk;
import org.ovirt.engine.api.model.Disks;
import org.ovirt.engine.api.resource.VmBackupDiskResource;
import org.ovirt.engine.api.resource.VmBackupDisksResource;
import org.ovirt.engine.api.restapi.types.DiskMapper;
import org.ovirt.engine.core.common.businessentities.VmBackup;
import org.ovirt.engine.core.common.queries.IdQueryParameters;
import org.ovirt.engine.core.common.queries.QueryType;
/**
 * REST collection resource exposing the disks that belong to a single VM
 * backup: supports listing the disks and navigating to a single disk
 * sub-resource.
 */
public class BackendVmBackupDisksResource
        extends AbstractBackendCollectionResource<Disk, org.ovirt.engine.core.common.businessentities.storage.Disk>
        implements VmBackupDisksResource {

    private BackendVmBackupResource parent;

    public BackendVmBackupDisksResource(BackendVmBackupResource parent) {
        super(Disk.class, org.ovirt.engine.core.common.businessentities.storage.Disk.class);
        this.parent = parent;
    }

    @Override
    public Disks list() {
        // Load the backup entity, then map each of its business-layer disks
        // into the REST model representation.
        VmBackup vmBackup = getEntity(VmBackup.class,
                QueryType.GetVmBackupById,
                new IdQueryParameters(asGuid(parent.get().getId())),
                null);
        Disks disks = new Disks();
        for (org.ovirt.engine.core.common.businessentities.storage.Disk disk : vmBackup.getDisks()) {
            disks.getDisks().add(DiskMapper.map(disk, null));
        }
        return disks;
    }

    @Override
    public VmBackupDiskResource getDiskResource(String id) {
        return inject(new BackendVmBackupDiskResource(id));
    }
}
| 546 |
358 | <reponame>liaoziyang/ContentAssist
/*
* Copyright 2014
* Software Science and Technology Lab.
* Department of Computer Science, Ritsumeikan University
*/
package org.jtool.macrorecorder.internal.recorder;
import org.jtool.macrorecorder.recorder.MacroCompressor;
import org.jtool.macrorecorder.recorder.Recorder;
import org.jtool.macrorecorder.util.EditorUtilities;
import org.jtool.macrorecorder.macro.DocumentMacro;
import org.jtool.macrorecorder.macro.ExecutionMacro;
import org.jtool.macrorecorder.macro.TriggerMacro;
import org.jtool.macrorecorder.macro.ResourceMacro;
import org.jtool.macrorecorder.macro.CopyMacro;
import org.eclipse.swt.custom.StyledText;
import org.eclipse.ui.IEditorPart;
import org.eclipse.jface.text.IDocument;
/**
* Records document macros performed on the editor.
* @author <NAME>
*/
public class DocMacroRecorderOnEdit extends DocMacroRecorder {

    /**
     * An editor on which document macros are recorded.
     */
    private IEditorPart editor;

    /**
     * The document of the file opened in the editor.
     */
    private IDocument doc;

    /**
     * A compressor that combines consecutive document macros.
     */
    private MacroCompressor compressor;

    /**
     * The styled text widget of the editor.
     */
    private StyledText styledText;

    /**
     * Creates an object that records document macros performed on an editor.
     * @param editor the editor
     * @param recorder a recorder that sends macro events
     * @param compressor a compressor that compresses macros
     */
    public DocMacroRecorderOnEdit(IEditorPart editor, Recorder recorder, MacroCompressor compressor) {
        super(EditorUtilities.getInputFilePath(editor), recorder);

        this.editor = editor;
        this.doc = EditorUtilities.getDocument(editor);
        this.compressor = compressor;
        this.styledText = EditorUtilities.getStyledText(editor);
    }

    /**
     * Starts the recording of document macros.
     */
    public void start() {
        if (editor == null) {
            return;
        }
        DocumentManager.register(doc, styledText, documentManager);
        // Remember the code as it was before any edit so later diffs have a baseline.
        preCode = doc.get();

        super.start();
    }

    /**
     * Stops the recording of macros.
     */
    public void stop() {
        if (editor == null) {
            return;
        }
        DocumentManager.unregister(doc, styledText, documentManager);

        super.stop();
    }

    /**
     * Records a document macro and its compressed macro.
     * @param macro the document macro
     */
    protected void recordDocumentMacro(DocumentMacro macro) {
        boolean isCutPaste = setCutPasteMacro(macro);
        recordRawMacro(macro);

        if (isCutPaste) {
            // Cut/paste macros are dumped immediately and never combined.
            dumpMacros(macro);
            return;
        }

        if (compressor.canCombine(macro)) {
            DocumentMacro newMacro = compressor.combine(lastDocumentMacro, macro);
            if (newMacro != null) {
                lastDocumentMacro = newMacro;
            } else {
                // The two macros cannot be merged: flush the pending one first.
                dumpLastDocumentMacro();
                lastDocumentMacro = macro;
            }
        } else {
            dumpMacros(macro);
        }
    }

    /**
     * Tests if a macro indicates the cut or paste and sets its type according to its result.
     * @param macro a macro that might be a cut or paste one
     * @return <code>true</code> if a macro indicates the cut or paste, otherwise <code>false</code>
     */
    boolean setCutPasteMacro(DocumentMacro macro) {
        if (lastRawMacro == null) {
            return false;
        }

        if (lastRawMacro instanceof ExecutionMacro) {
            ExecutionMacro emacro = (ExecutionMacro)lastRawMacro;
            // Idiom fix: string equality via equals() instead of compareTo() == 0.
            if (emacro.getCommandId().equals("org.eclipse.ui.edit.cut")) {
                macro.setType("Cut");
                return true;
            } else if (emacro.getCommandId().equals("org.eclipse.ui.edit.paste")) {
                macro.setType("Paste");
                return true;
            }
        }
        return false;
    }

    /**
     * Records a command execution macro.
     * @param macro the command execution macro
     */
    protected void recordExecutionMacro(ExecutionMacro macro) {
        super.recordExecutionMacro(macro);

        if (styledText == null) {
            return;
        }

        if (macro.getCommandId().equals("org.eclipse.ui.edit.copy") ||
            macro.getCommandId().equals("org.eclipse.jdt.ui.edit.text.java.copy.qualified.name")) {
            // A copy does not modify the document, so capture the current
            // selection explicitly as a dedicated copy macro.
            int offset = styledText.getSelectionRange().x;
            String text = styledText.getSelectionText();

            long time = Time.getCurrentTime();
            CopyMacro cmacro = new CopyMacro(time, "Copy", macro.getPath(), offset, text);
            recordRawMacro(cmacro);

            dumpMacros(cmacro);
        } else if (macro.getCommandId().equals("org.eclipse.ui.edit.delete")) {
            macro.setType("Delete");
        }
    }

    /**
     * Records a trigger macro.
     * @param macro the trigger macro
     */
    protected void recordTriggerMacro(TriggerMacro macro) {
        super.recordTriggerMacro(macro);
    }

    /**
     * Records a resource change macro.
     * @param macro the resource change macro
     */
    protected void recordResourceMacro(ResourceMacro macro) {
        super.recordResourceMacro(macro);
    }

    /**
     * Records a compressed macro into the operation history along with its original one.
     * @param macro a document macro
     */
    protected void recordUndoRedoMacro(DocumentMacro macro) {
        super.recordUndoRedoMacro(macro);
    }

    /**
     * Obtains the current contents of a file under recording.
     * @return the contents of source code, or <code>null</code> if source code does not exist
     */
    protected String getCurrentCode() {
        IDocument doc = EditorUtilities.getDocument(editor);
        if (doc != null) {
            return doc.get();
        }
        return null;
    }
}
| 2,676 |
496 | <reponame>ujlbu4/vas3k.club
from django.shortcuts import redirect, render
from django_q.tasks import async_task
from auth.helpers import auth_required
from notifications.telegram.users import notify_profile_needs_review
from posts.models.post import Post
from users.forms.intro import UserIntroForm
from users.models.geo import Geo
from users.models.user import User
@auth_required
def intro(request):
    """Membership intro page.

    Approved members are redirected straight to their profile. Otherwise the
    view shows the intro form and, on a valid POST, saves the profile with
    status ON_REVIEW, creates a hidden intro post and queues a moderator
    notification.
    """
    me = request.me

    if me.moderation_status == User.MODERATION_STATUS_APPROVED:
        return redirect("profile", me.slug)

    if request.method == "POST":
        form = UserIntroForm(request.POST, request.FILES, instance=me)
        if form.is_valid():
            user = form.save(commit=False)

            # every submitted profile goes back to the moderation queue
            user.moderation_status = User.MODERATION_STATUS_ON_REVIEW
            user.save()

            # the intro post is created hidden (is_visible=False)
            intro_post = Post.upsert_user_intro(user, form.cleaned_data["intro"], is_visible=False)

            Geo.update_for_user(user)

            # ping moderators asynchronously so the request isn't blocked
            async_task(notify_profile_needs_review, user, intro_post)

            return redirect("on_review")
    else:
        # pre-fill the form with the existing intro post, if any
        existing_intro = Post.get_user_intro(me)
        initial_text = existing_intro.text if existing_intro else ""
        form = UserIntroForm(instance=me, initial={"intro": initial_text})

    # invalid POSTs fall through here and re-render with form errors
    return render(request, "users/intro.html", {"form": form})
| 618 |
773 | <gh_stars>100-1000
import logging
import numpy as np
import numpy.random as rnd
import matplotlib.pyplot as plt
from sklearn.ensemble import IsolationForest
from sklearn.neighbors import LocalOutlierFactor
from sklearn.svm import OneClassSVM
from ..common.utils import get_command_args, configure_logger
from ..common.gen_samples import get_demo_samples, plot_sample
from ..common.data_plotter import DataPlotter
from ..loda.loda import Loda
"""
python -m ad_examples.ad.ad_outlier --plot --debug --log_file=temp/ad_outlier.log --dataset=face --algo=ifor
Supported algorithms: ifor, loda, lof, ocsvm
Supported synthetic datasets:
face
face_diff
donut
donut_diff
1
4
"""
if __name__ == "__main__":
    logger = logging.getLogger(__name__)
    # Parse CLI flags; the debug_args below are only used when --debug parsing
    # is triggered by get_command_args (see its implementation).
    args = get_command_args(debug=False, debug_args=["--algo=ifor",
                                                     "--dataset=face",
                                                     "--debug",
                                                     "--plot",
                                                     "--log_file=temp/ad_outlier.log"])
    # print "log file: %s" % args.log_file
    configure_logger(args)
    # Anomaly detector to run: one of ocsvm, ifor, lof, loda.
    ad_type = args.algo # ocsvm, ifor, lof, loda
    # ad_type = "ifor"
    # ad_type = "lof"
    # Synthetic dataset name; the trailing "_" matches get_demo_samples naming.
    sample_type = args.dataset + "_"
    # sample_type = "4_"
    # sample_type = "donut_"
    # sample_type = "donut_diff_"
    # sample_type = "face_"
    # sample_type = "face_diff_"
    # Fixed seed so sample generation and the detectors are reproducible.
    rnd.seed(42)
    x, y = get_demo_samples(sample_type)
    n = x.shape[0]
    outliers_fraction = 0.1
    xx = yy = x_grid = Z = scores = None
    # NOTE(review): x_grid stays None unless --plot is given; the
    # decision_function(x_grid) calls below would then fail — this script
    # appears to assume --plot is always passed. Confirm intended usage.
    if args.plot:
        plot_sample(x, y, pdfpath="temp/ad_%ssamples.pdf" % (sample_type))
        # to plot probability contours
        xx, yy = np.meshgrid(np.linspace(np.min(x[:, 0]), np.max(x[:, 0]), 50),
                             np.linspace(np.min(x[:, 1]), np.max(x[:, 1]), 50))
        x_grid = np.c_[xx.ravel(), yy.ravel()]
    # Fit the selected detector; scores are negated so that higher = more anomalous.
    if ad_type == "ocsvm":
        ad = OneClassSVM(nu=outliers_fraction, kernel="rbf", gamma=0.1)
        ad.fit(x)
        scores = -ad.decision_function(x).reshape((n,))
        Z = -ad.decision_function(x_grid)
    elif ad_type == "ifor":
        ad = IsolationForest(max_samples=256, contamination=outliers_fraction, random_state=None)
        ad.fit(x)
        scores = -ad.decision_function(x)
        Z = -ad.decision_function(x_grid)
    elif ad_type == "lof":
        ad = LocalOutlierFactor(n_neighbors=35, contamination=outliers_fraction)
        ad.fit(x)
        scores = -ad._decision_function(x)
        Z = -ad._decision_function(x_grid)
    elif ad_type == "loda":
        ad = Loda(mink=100, maxk=200)
        ad.fit(x)
        scores = -ad.decision_function(x)
        Z = -ad.decision_function(x_grid)
    logger.debug("scores:\n%s" % str(list(scores)))
    # Indices of the 10 highest-scoring (most anomalous) points.
    top_anoms = np.argsort(-scores)[np.arange(10)]
    if args.plot:
        # plot_samples_and_lines(x, lines=None, line_colors=None, line_legends=None,
        #                        top_anoms=top_anoms, pdfpath="temp/%s_%soutlier.pdf" % (ad_type, sample_type))
        Z = Z.reshape(xx.shape)
        pdfpath = "temp/ad_%scontours_%s.pdf" % (sample_type, ad_type)
        dp = DataPlotter(pdfpath=pdfpath, rows=1, cols=1)
        pl = dp.get_next_plot()
        # Filled score contours, data points colored by label, anomalies circled.
        pl.contourf(xx, yy, Z, 20, cmap=plt.cm.get_cmap('jet'))
        dp.plot_points(x, pl, labels=y, lbl_color_map={0: "grey", 1: "red"}, s=25)
        pl.scatter(x[top_anoms, 0], x[top_anoms, 1], marker='o', s=35,
                   edgecolors='red', facecolors='none')
        dp.close()
| 1,814 |
4,216 | <gh_stars>1000+
/**
* @file core/tree/binary_space_tree/rp_tree_max_split_impl.hpp
* @author <NAME>
*
* Implementation of class (RPTreeMaxSplit) to split a binary space partition
* tree.
*
* mlpack is free software; you may redistribute it and/or modify it under the
* terms of the 3-clause BSD license. You should have received a copy of the
* 3-clause BSD license along with mlpack. If not, see
* http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/
#ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_IMPL_HPP
#define MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_IMPL_HPP
#include "rp_tree_max_split.hpp"
#include "rp_tree_mean_split.hpp"
namespace mlpack {
namespace tree {
template<typename BoundType, typename MatType>
bool RPTreeMaxSplit<BoundType, MatType>::SplitNode(const BoundType& /* bound */,
                                                   MatType& data,
                                                   const size_t begin,
                                                   const size_t count,
                                                   SplitInfo& splitInfo)
{
  // Draw a random direction to act as the normal of the splitting hyperplane.
  splitInfo.direction.zeros(data.n_rows);
  math::RandVector(splitInfo.direction);

  // Choose the offset of the hyperplane along that direction.  A return of
  // false means no usable split exists for this node.
  return GetSplitVal(data, begin, count, splitInfo.direction,
      splitInfo.splitVal);
}
template<typename BoundType, typename MatType>
bool RPTreeMaxSplit<BoundType, MatType>::GetSplitVal(
    const MatType& data,
    const size_t begin,
    const size_t count,
    const arma::Col<ElemType>& direction,
    ElemType& splitVal)
{
  // Project at most 100 randomly chosen, distinct points from
  // [begin, begin + count) onto the split direction.
  const size_t maxNumSamples = 100;
  const size_t numSamples = std::min(maxNumSamples, count);
  arma::uvec sampleIndices;
  math::ObtainDistinctSamples(begin, begin + count, numSamples, sampleIndices);

  arma::Col<ElemType> projections(sampleIndices.n_elem);
  for (size_t i = 0; i < sampleIndices.n_elem; ++i)
    projections[i] = arma::dot(data.col(sampleIndices[i]), direction);

  const ElemType maxProj = arma::max(projections);
  const ElemType minProj = arma::min(projections);

  // Every sampled projection coincides: no hyperplane can separate the points.
  if (minProj == maxProj)
    return false;

  // Start from the sample median and add a random perturbation.  This
  // deliberately differs from the method in the random projection tree
  // paper: that method requires solving the furthest-pair problem (slow)
  // and may produce a degenerate split with all points on one side.
  splitVal = arma::median(projections);
  splitVal += math::Random((minProj - splitVal) * 0.75,
      (maxProj - splitVal) * 0.75);

  // Guard against a split value that would leave one side empty.
  if (splitVal == maxProj)
    splitVal = minProj;

  return true;
}
} // namespace tree
} // namespace mlpack
#endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_IMPL_HPP
| 1,207 |
342 | <reponame>blackpixel/overshare-kit<gh_stars>100-1000
//
// OSKShareableContentItem.h
// Overshare
//
//
// Copyright (c) 2013 Overshare Kit. All rights reserved.
//
@import UIKit;
extern NSString * const OSKShareableContentItemType_MicroblogPost;
extern NSString * const OSKShareableContentItemType_Facebook;
extern NSString * const OSKShareableContentItemType_BlogPost;
extern NSString * const OSKShareableContentItemType_Email;
extern NSString * const OSKShareableContentItemType_SMS;
extern NSString * const OSKShareableContentItemType_PhotoSharing;
extern NSString * const OSKShareableContentItemType_CopyToPasteboard;
extern NSString * const OSKShareableContentItemType_ReadLater;
extern NSString * const OSKShareableContentItemType_LinkBookmark;
extern NSString * const OSKShareableContentItemType_WebBrowser;
extern NSString * const OSKShareableContentItemType_PasswordManagementAppSearch;
extern NSString * const OSKShareableContentItemType_ToDoListEntry;
extern NSString * const OSKShareableContentItemType_AirDrop;
extern NSString * const OSKShareableContentItemType_TextEditing;
///---------------------------
/// @name Abstract Base Class
///---------------------------
/**
An abstract base class for the many kinds of shareable content items.
@discussion Never instantiate `OSKShareableContentItem` directly. You must use it via
a subclass (either built-in or one of your own).
@see OSKShareableContent
*/
@interface OSKShareableContentItem : NSObject
/**
 An alternate name to be used in place of the default name of any `<OSKActivity>` that
 is handling the content item. The default is `nil`.
 @discussion Useful for when you need multiple instances of the same content item, e.g.
 in Riposte, we have "Email Post" and "Email Conversation" in the
 conversation share sheet. If you don't set an alternate activity name, then the
 `<OSKActivity>`'s default name and icon will be used instead.
 If all you need to do is localize an activity name, it is better to do that
 via the `customizationsDelegate` of `OSKPresentationManager`.
 */
@property (copy, nonatomic) NSString *alternateActivityName;
/**
 An alternate icon to be displayed in place of the default icon of any `<OSKActivity>` that
 is handling the content item. The default is `nil`.
 */
@property (strong, nonatomic) UIImage *alternateActivityIcon;
/**
 Returns either one of the officially supported item types listed above,
 or a custom item type.
 @warning Required. Subclasses must override without calling super.
 */
- (NSString *)itemType;
/**
 Additional activity-specific or contextual info.
 @discussion Third-party apps & services vary widely in the extra features they
 offer. Facebook is *in general* a microblogging activity, like ADN and Twitter,
 but in practice it has a few advanced needs. Rather than add dozens of properties
 that are each only used by a single activity type, it makes more sense use an
 NSMutableDictionary to store activity-specific or app-specific contextual info.
 To avoid conflicts, keys in this dictionary should be namespaced as follows:
 com.<application>.<activityName>.<key>
 For example, the key to an NSDictionary of Facebook post attributes would use
 a protected namespace as follows:
 com.oversharekit.facebook.userInfo
 Let's say there's an app called Foo.app that has integrated OvershareKit.
 It has also written a bespoke OSKActivity subclass, "FOOSelfieActivity." This
 activity is a microblogging activity, but it needs additional data to submit a post.
 It could add an NSDictionary of custom attributes to the userInfo dictionary
 with the following key:
 com.fooapp.selfie.userInfo
 This would allow Foo.app to add the Selfie activity without having to make awkward
 modifications to their OSK integration.
 As OvershareKit matures, we may occasionally promote frequently-used data types
 stored in userInfo dictionaries to class-level @properties.
 NOTE(review): declared `copy` yet typed `NSMutableDictionary` and `readonly` --
 the getter presumably vends a lazily-created mutable backing dictionary;
 verify in the implementation before relying on mutation semantics.
 */
@property (copy, nonatomic, readonly) NSMutableDictionary *userInfo;
@end
///---------------------------------------------------
/// @name Microblog Posts (Twitter, App.net)
///---------------------------------------------------
/**
Content for sharing to microblogging services like Twitter or App.net.
*/
@interface OSKMicroblogPostContentItem : OSKShareableContentItem
/**
 The plain-text content of the outgoing post. Must not be nil.
 */
@property (copy, nonatomic) NSString *text;
/**
 An optional array of `<UIImage>` objects to be attached to the outgoing post.
 @discussion Not all activities support multiple images. Those that do not will simply
 ignore all but the first image in the array when creating a new post.
 */
@property (strong, nonatomic) NSArray *images;
/**
 The latitude component of the user's geolocation. Optional.
 NOTE(review): a scalar double defaults to 0.0 -- presumably (0, 0) is treated
 as "no location" by consuming activities; confirm before relying on it.
 */
@property (nonatomic, assign) double latitude;
/**
 The longitude component of the user's geolocation. Optional (see latitude note).
 */
@property (nonatomic, assign) double longitude;
@end
///---------------------------------------------------
/// @name Facebook
///---------------------------------------------------
/**
Text content. The user should be provided an opportunity to edit this text prior to
publishing, per Facebook's API terms.
*/
@interface OSKFacebookContentItem : OSKShareableContentItem
/**
 The plain-text content of the outgoing post. Must not be nil.
 */
@property (copy, nonatomic) NSString *text;
/**
 Facebook link posts require a URL separate from the post text.
 NOTE(review): presumably nil for plain status updates -- confirm with the
 Facebook activity implementation.
 */
@property (copy, nonatomic) NSURL *link;
/**
 An optional array of `<UIImage>` objects to be attached to the outgoing post.
 */
@property (strong, nonatomic) NSArray *images;
/**
 The latitude component of the user's geolocation. Optional.
 */
@property (nonatomic, assign) double latitude;
/**
 The longitude component of the user's geolocation. Optional.
 */
@property (nonatomic, assign) double longitude;
@end
///-----------------------------------------
/// @name Blog Posts (Tumblr)
///-----------------------------------------
/**
Content for sharing to blogging services like Tumblr or WordPress.
@warning As of October 31, 2013, no activities in Overshare Kit are using this item.
*/
@interface OSKBlogPostContentItem : OSKShareableContentItem
/**
 The plain-text content of the blog post. Must not be nil.
 */
@property (copy, nonatomic) NSString *text;
/**
 An optional array of `<UIImage>` objects to be attached to the outgoing post.
 */
@property (strong, nonatomic) NSArray *images;
/**
 An optional array of `NSString` tags for tagging the outgoing post.
 */
@property (strong, nonatomic) NSArray *tags;
/**
 An optional flag for creating the post in the drafts queue instead of immediately publishing the post.
 Defaults to NO (publishes immediately).
 */
@property (assign, nonatomic) BOOL publishAsDraft;
@end
///-----------------------------------------
/// @name Email
///-----------------------------------------
/**
Content for creating a new email message.
*/
@interface OSKEmailContentItem : OSKShareableContentItem
/**
 An array of email addresses (`NSString`) for the "to:" field. Optional.
 */
@property (copy, nonatomic) NSArray *toRecipients;
/**
 An array of email addresses (`NSString`) for the "cc:" field. Optional.
 */
@property (copy, nonatomic) NSArray *ccRecipients;
/**
 An array of email addresses (`NSString`) for the "bcc:" field. Optional.
 */
@property (copy, nonatomic) NSArray *bccRecipients;
/**
 A plain-text subject for the email. Optional.
 */
@property (copy, nonatomic) NSString *subject;
/**
 The body text for the outgoing email. May be plain text or HTML markup.
 If HTML, the `isHTML` property must set to `YES`.
 */
@property (copy, nonatomic) NSString *body;
/**
 Flags whether or not the `body` contents are HTML markup.
 Must be kept in sync with the actual contents of `body`.
 */
@property (assign, nonatomic) BOOL isHTML;
/**
 An array of `UIImage` or OSKMimeAttachment objects to attach to the outgoing message.
 NOTE(review): attachment support likely varies by email activity -- verify
 non-image attachments against the activity in use.
 */
@property (copy, nonatomic) NSArray *attachments;
@end
///-----------------------------------------
/// @name SMS & iMessage
///-----------------------------------------
/**
Content for sharing via iMessage or SMS.
*/
@interface OSKSMSContentItem : OSKShareableContentItem
/**
 An array of recipient phone numbers or email addresses (`NSString`). Optional.
 */
@property (copy, nonatomic) NSArray *recipients;
/**
 The plain-text content of the outgoing message.
 */
@property (copy, nonatomic) NSString *body;
/**
 An array of `UIImage` objects to attach to the outgoing message. Optional.
 */
@property (copy, nonatomic) NSArray *attachments;
@end
///-----------------------------------------
/// @name Photo Sharing (Instagram, etc.)
///-----------------------------------------
/**
Content for sharing to photo services like Instagram or Flickr.
*/
@interface OSKPhotoSharingContentItem : OSKShareableContentItem
/**
 An array of one or more `UIImage` objects.
 */
@property (copy, nonatomic) NSArray *images;
/**
 A plain-text caption to be applied to all the images. Optional.
 */
@property (copy, nonatomic) NSString *caption;
/**
 The latitude component of the user's location. Optional.
 NOTE(review): defaults to 0.0 -- presumably (0, 0) means "no location"; confirm.
 */
@property (nonatomic, assign) double latitude;
/**
 The longitude component of the user's location. Optional (see latitude note).
 */
@property (nonatomic, assign) double longitude;
@end
///-----------------------------------------
/// @name Copy-to-Pasteboard
///-----------------------------------------
/**
Content for saving to the system pasteboard.
*/
@interface OSKCopyToPasteboardContentItem : OSKShareableContentItem
/**
 Plain text content for copying & pasting. Setting this property will set all
 other properties to nil.
 NOTE(review): the mutual-nil behavior implies custom setters in the
 implementation -- verify it holds for both properties.
 */
@property (copy, nonatomic) NSString *text;
/**
 Image content (`UIImage` objects) for copying & pasting. Setting this property will set all
 other properties to nil.
 */
@property (copy, nonatomic) NSArray *images;
@end
///---------------------------------------------
/// @name Read Later (Instapaper, Pocket, etc.)
///---------------------------------------------
/**
Content for sending to read-later services like Instapaper or Pocket.
*/
@interface OSKReadLaterContentItem : OSKShareableContentItem
/**
 The url to be saved. Must be set to a non-nil value before sharing.
 */
@property (copy, nonatomic) NSURL *url;
/**
 An optional title. Not all activities use this.
 */
@property (copy, nonatomic) NSString *title;
/**
 An optional description. Not all activities use this.
 */
@property (copy, nonatomic) NSString *itemDescription;
@end
///-----------------------------------------
/// @name Link Bookmarking (Pinboard)
///-----------------------------------------
/**
Content for sending to link-bookmarking services like Pinboard.
*/
@interface OSKLinkBookmarkContentItem : OSKShareableContentItem
/**
 The url to be saved. Required.
 */
@property (copy, nonatomic) NSURL *url;
/**
 The title of the bookmark. Optional.
 If left blank, `OSKPinboardActivity` will attempt to fetch
 the page title before sending the link to Pinboard.
 */
@property (copy, nonatomic) NSString *title;
/**
 Optional plain-text notes describing the link.
 */
@property (copy, nonatomic) NSString *notes;
/**
 Option to flag a saved item as "to-read."
 Not all services may support this flag. At the very least, Pinboard does. It is
 recommended to set this to YES (it is YES by default).
 */
@property (assign, nonatomic) BOOL markToRead;
/**
 Optional array of `NSString` tags for the saved item.
 */
@property (copy, nonatomic) NSArray *tags;
@end
///--------------------------------------------
/// @name Web Browsers (Safari.app, Chrome.app)
///--------------------------------------------
/**
Content that can be opened in another app's web browser.
*/
@interface OSKWebBrowserContentItem : OSKShareableContentItem
/**
 The url to be opened. Required.
 */
@property (copy, nonatomic) NSURL *url;
@end
///-----------------------------------------
/// @name 1Password Searches
///-----------------------------------------
/**
Content for performing a search in a password management app like 1Password.
*/
@interface OSKPasswordManagementAppSearchContentItem : OSKShareableContentItem
/**
 The plain-text search query, e.g. the name or domain of the relevant website.
 */
@property (copy, nonatomic) NSString *query;
@end
///-----------------------------------------------
/// @name Creating To-Do Items (OmniFocus, Things)
///-----------------------------------------------
/**
Content for creating a new to-do item in a task management app like OmniFocus or Things.
*/
@interface OSKToDoListEntryContentItem : OSKShareableContentItem
/**
 The title of the entry. Required.
 */
@property (copy, nonatomic) NSString *title;
/**
 Optional plain-text notes for the body of the entry.
 */
@property (copy, nonatomic) NSString *notes;
@end
///-----------------------------------------
/// @name AirDrop
///-----------------------------------------
/**
Content that can be shared via AirDrop.
*/
@interface OSKAirDropContentItem : OSKShareableContentItem
/**
 The items to share via AirDrop.
 The items in this array should be the same items that you would pass to an
 instance of `UIActivityViewController`.
 */
@property (copy, nonatomic) NSArray *items;
@end
///-------------------------------------------------------
/// @name Text-Editing (Drafts, Editorial, Evernote etc.)
///-------------------------------------------------------
/**
Content for creating a new text editing document.
*/
@interface OSKTextEditingContentItem : OSKShareableContentItem
/**
 The body text. Required.
 */
@property (copy, nonatomic) NSString *text;
/**
 Optional title of the entry. Some apps don't support title fields.
 */
@property (copy, nonatomic) NSString *title;
/**
 Optional image attachments for the new entry. Not all apps support images. Those that
 do may not support multiple images.
 */
@property (copy, nonatomic) NSArray *images;
/**
 Optional tags for the new entry. Not all apps support tags. Those that
 do may not support multiple tags.
 */
@property (copy, nonatomic) NSArray *tags;
@end
| 3,585 |
898 | //
// ProtonCore.h
// ProtonCore
//
// Created by <NAME> on 21/3/21.
// Copyright © 2021 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
//! Project version number for ProtonCore.
FOUNDATION_EXPORT double ProtonCoreVersionNumber;
//! Project version string for ProtonCore.
FOUNDATION_EXPORT const unsigned char ProtonCoreVersionString[];
#import <ProtonCore/PRTextStorage.h>
#import <ProtonCore/PREditorContentName.h>
| 147 |
387 | <gh_stars>100-1000
"""
##################################################################################################
# Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved.
# Filename : track_test.py
# Abstract : generate track result from detection result
# Current Version: 1.0.0
# Date : 2021-07-10
##################################################################################################
"""
import os
import json
import argparse
import numpy as np
import torch
import mmcv
from mmcv.parallel import collate, scatter, MMDataParallel
from mmcv.runner import load_checkpoint
from sklearn.metrics.pairwise import cosine_similarity
from mmdet.datasets.pipelines import Compose
from davarocr.davar_rcg.models.builder import build_recognizor
import test_utils
def parse_args():
    """Parse command-line options for the video-text e2e test.

    Returns:
        argparse.Namespace: parsed arguments with ``config`` (path to the
        test config file) and ``local_rank`` (int, defaults to 0).  As a
        side effect, ``LOCAL_RANK`` is exported into ``os.environ`` when it
        is not already set.
    """
    arg_parser = argparse.ArgumentParser(description='DavarOCR test video text e2e')
    arg_parser.add_argument('config', help='test config file path')
    arg_parser.add_argument('--local_rank', type=int, default=0)
    parsed = arg_parser.parse_args()

    # Distributed launchers expect LOCAL_RANK in the environment; honour an
    # existing value and only fill it in from the CLI flag when absent.
    os.environ.setdefault('LOCAL_RANK', str(parsed.local_rank))
    return parsed
if __name__ == '__main__':
    # Fix: the deprecated NumPy aliases np.int / np.float (removed in
    # NumPy 1.24) are replaced with the equivalent builtins int / float.
    args = parse_args()
    cfg = mmcv.Config.fromfile(args.config)
    # Set cudnn_benchmark
    if cfg.get('cudnn_benchmark', False):
        torch.backends.cudnn.benchmark = True
    cfg.data.test.test_mode = True
    test_pipeline = Compose(cfg.test_pipeline)
    model_path = cfg.ckpts[0]['ModelPath']
    if not os.path.exists(model_path):
        # NOTE(review): this only warns; load_checkpoint below will still
        # fail on a missing file -- consider raising here instead.
        print(model_path + ' not exist.')
    config_cfg = mmcv.Config.fromfile(cfg.ckpts[0]['ConfigPath'])
    test_cfg = config_cfg.test_cfg
    # Build recognition model
    model = build_recognizor(config_cfg.model, train_cfg=None, test_cfg=test_cfg)
    # Load the model pth file
    checkpoint = load_checkpoint(model, model_path, map_location='cpu')
    model.CLASSES = None
    model = MMDataParallel(model, device_ids=[0])
    device = next(model.parameters()).device
    model.eval()
    # Hyper parameters
    # The feature similarity threshold
    feat_sim_thresh = cfg.feat_sim_thresh
    # The feature similarity with adjacent threshold
    feat_sim_with_loc_thresh = cfg.feat_sim_with_loc_thresh
    # The track instance max exist duration
    max_exist_duration = cfg.max_exist_duration
    # The feature channel for tracking
    feat_channels = cfg.feat_channels
    # Constant eps
    eps = cfg.eps
    # The unique identification of text sequence in a video
    text_id = 0
    # The predicted detection result file by detection model
    ori_det_data = mmcv.load(cfg.testsets[0]["AnnFile"])
    img_prefix = cfg.testsets[0]["FilePre"]
    det_data = dict()
    # extract video and img name as key to save track result
    for key in ori_det_data.keys():
        video = key.split('/')[-2]
        frame_id = key.split('/')[-1]
        img_key = video + '/' + frame_id
        if video not in det_data.keys():
            det_data[video] = dict()
        det_data[video][img_key] = ori_det_data[key]
    # to save track sequence for all videos
    track_res_dict = dict()
    # output(json) file to save track result
    out_dir = cfg.out_dir
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # generate track sequence by video
    # step1 : construct history dict to store: text id (store folder name) / past duration / newest feat
    # step2 : extract text feat from current frame
    # step3 : get current matching list
    # step4 : confirm each match pair valid and updating feat and duration
    for video, value in det_data.items():
        print('processing video: ' + str(video))
        frame_nums = len(value.keys())
        # History data to match current data
        his_textid_list = []
        his_duration_list = []
        his_loc_list = []
        his_feat_array = None
        # To save track sequence for specific video
        track_res_dict[video] = dict()
        # The frame id in video should start from "1" and should be consecutive by default
        for frame_id in range(1, frame_nums + 1):
            print("processing frame :", frame_id)
            key = video + '/' + str(frame_id) + '.jpg'
            img_info = value[key]
            # Read predict bboxes from one image into list
            instance_infos = test_utils.instance_to_list(img_info, key)
            cur_frame_pos = []
            track_feature = None
            # Data pipelines and model output
            if len(instance_infos) > 0:
                batch_data = []
                for instance in instance_infos:
                    cur_frame_pos.append(instance['ann']['bbox'])
                    data = dict(img_info=instance, img_prefix=img_prefix)
                    data = test_pipeline(data)
                    batch_data.append(data)
                data_collate = collate(batch_data, samples_per_gpu=len(batch_data))
                device = int(str(device).rsplit(':', maxsplit=1)[-1])
                data = scatter(data_collate, [device])[0]
                with torch.no_grad():
                    result = model(return_loss=False, rescale=True, **data)
                # Get model output
                texts = result['text']
                scores = result['scores']
                scores = scores.cpu().numpy()
                scores = scores.reshape(-1)
                track_feature = result['track_feature']
                track_feature = track_feature.cpu().numpy()
            else:
                texts = None
                scores = None
                track_feature = None
            # In the beginning, when no history data to match, create all bboxes as new track sequences
            if len(his_textid_list) == 0:
                if len(instance_infos) > 0:
                    first_img_flag = True
                    for text_img_idx, instance in enumerate(instance_infos):
                        # Append the text id, duration time, bbox for a new text instance
                        his_textid_list.append(text_id)
                        his_duration_list.append(0)
                        his_loc_list.append(instance['ann']['bbox'])
                        # For first instance we need to reshape the history feature array to (1, feature_channel)
                        if first_img_flag:
                            his_feat_array = np.array(track_feature[text_img_idx, :])
                            first_img_flag = False
                        # Append left features
                        else:
                            his_feat_array = np.row_stack((his_feat_array, np.array([track_feature[text_img_idx, :]])))
                        # Update a new track
                        test_utils.update_track(text_id, video, track_res_dict, frame_id, instance['ann']['bbox'],
                                                texts[text_img_idx], scores[text_img_idx].item())
                        # Update text id for next track
                        text_id = text_id + 1
                continue
            ori_his_len = len(his_textid_list)
            his_matched_matrix = np.zeros([ori_his_len], int)
            # If current img has no text instance
            if len(instance_infos) == 0:
                # Update history data
                his_textid_list, his_duration_list, his_loc_list, his_feat_array = test_utils.\
                    update_history(his_textid_list, his_duration_list, his_loc_list, his_feat_array, his_matched_matrix,
                                   max_exist_duration, ori_his_len)
                continue
            # For the case that there is only one history feature, we should reshape to it (1, feat_channels)
            if len(his_feat_array.shape) == 1 and his_feat_array.shape[0] == feat_channels:
                his_feat_array = his_feat_array.reshape(1, his_feat_array.shape[0])
            # Size of the (square) assignment matrix used for matching.
            max_num = max(len(track_feature), len(his_feat_array))
            # Calc feature similarity, iou, adjacent matrix to match history track. In YORO, we only use feat_sim_matrix
            # and adja_matrix to match. You can adjust to your own task
            feat_sim_matrix = cosine_similarity(track_feature, his_feat_array)
            iou_matrix = np.zeros([len(cur_frame_pos), len(his_loc_list)], float)
            adja_matrix = np.zeros([len(cur_frame_pos), len(his_loc_list)], int)
            for cur_idx, cur_bbox in enumerate(cur_frame_pos):
                cur_poly = test_utils.polygon_from_points(cur_bbox)
                for his_idx, his_bbox in enumerate(his_loc_list):
                    # Calculate IOU
                    his_poly = test_utils.polygon_from_points(his_bbox)
                    cur_iou = test_utils.get_intersection_over_union(cur_poly, his_poly)
                    iou_matrix[cur_idx, his_idx] = cur_iou
                    # Calculate the expand coordinates
                    expand_start_x, expand_end_x, expand_start_y, expand_end_y = test_utils.calculate_expand(his_bbox)
                    for i in range(4):
                        if expand_start_x <= cur_frame_pos[cur_idx][2 * i] <= expand_end_x and \
                                expand_start_y <= cur_frame_pos[cur_idx][2 * i + 1] <= expand_end_y:
                            adja_matrix[cur_idx, his_idx] = 1
            # Aggregate feature similarity matrix and adjacent matrix
            match_matrix = feat_sim_matrix + (0.1 * adja_matrix + eps)
            # Reshape the match matrix to square matrix
            square_cost_matrix = np.zeros([max_num, max_num], float)
            for i in range(feat_sim_matrix.shape[0]):
                for j in range(feat_sim_matrix.shape[1]):
                    square_cost_matrix[i, j] = match_matrix[i, j]
            # Record the padding row and col index
            useless_row = []
            useless_col = []
            for i in range(feat_sim_matrix.shape[0], max_num):
                useless_row.append(i)
            for j in range(feat_sim_matrix.shape[1], max_num):
                useless_col.append(j)
            # Get the match index result
            row_ind, col_ind = test_utils.hungary(square_cost_matrix)
            ori_his_len = len(his_duration_list)
            his_matched_matrix = np.zeros([ori_his_len], int)
            # Iter all match pairs, If the similarity match the condition, then allocate pairs into same track seq
            for row_ind_idx, row_item in enumerate(row_ind):
                cur_idx = row_item
                his_idx = col_ind[row_ind_idx]
                # The match idx falls in padding row indices, that means the instance are bogus instance
                if cur_idx in useless_row:
                    continue
                # Match valid
                if his_idx not in useless_col:
                    # Calculating pairs' feature similarity, adjacent relation, iou
                    matched_feat_sim = feat_sim_matrix[cur_idx, his_idx]
                    matched_adja = adja_matrix[cur_idx, his_idx]
                    matched_iou = iou_matrix[cur_idx, his_idx]
                    # Only if the feature similarity meet the threshold or paris are adjacent and meet the loc feature
                    # similarity, can they be valid pairs
                    if (matched_feat_sim >= feat_sim_thresh) or (matched_feat_sim >= feat_sim_with_loc_thresh and
                                                                 matched_adja >= 1):
                        matched_text_id = his_textid_list[his_idx]
                        # Update matched track
                        test_utils.update_track(matched_text_id, video, track_res_dict, frame_id,
                                                instance_infos[cur_idx]['ann']['bbox'], texts[cur_idx],
                                                scores[cur_idx].item())
                        # Update history data
                        his_feat_array[his_idx, :] = track_feature[cur_idx, :]
                        his_duration_list[his_idx] = 0
                        his_loc_list[his_idx] = cur_frame_pos[cur_idx]
                        his_matched_matrix[his_idx] = 1
                    # Match invalid. create new track
                    else:
                        # Append a new track in history data
                        his_textid_list.append(text_id)
                        his_duration_list.append(0)
                        his_loc_list.append(cur_frame_pos[cur_idx])
                        his_feat_array = np.row_stack((his_feat_array, np.array([track_feature[cur_idx, :]])))
                        # Append a new track
                        test_utils.update_track(text_id, video, track_res_dict, frame_id,
                                                instance_infos[cur_idx]['ann']['bbox'], texts[cur_idx],
                                                scores[cur_idx].item())
                        text_id = text_id + 1
                # The match idx falls in padding col indices, instance do not match any history data, create new track
                else:
                    # Append a new track in history data
                    his_textid_list.append(text_id)
                    his_duration_list.append(0)
                    his_loc_list.append(cur_frame_pos[cur_idx])
                    his_feat_array = np.row_stack((his_feat_array, np.array([track_feature[cur_idx, :]])))
                    # Append a new track
                    test_utils.update_track(text_id, video, track_res_dict, frame_id,
                                            instance_infos[cur_idx]['ann']['bbox'], texts[cur_idx],
                                            scores[cur_idx].item())
                    text_id = text_id + 1
            # Updating history data
            his_textid_list, his_duration_list, his_loc_list, his_feat_array = test_utils. \
                update_history(his_textid_list, his_duration_list, his_loc_list, his_feat_array, his_matched_matrix,
                               max_exist_duration, ori_his_len)
    # Output
    out_file_name = os.path.join(out_dir, cfg.out_file)
    with open(out_file_name, 'w') as write_file:
        json.dump(track_res_dict, write_file, indent=4)
| 7,088 |
348 | <reponame>cygnus-x-1/constellation
/*
* Copyright 2010-2021 Australian Signals Directorate
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package au.gov.asd.tac.constellation.plugins.importexport;
import au.gov.asd.tac.constellation.graph.Attribute;
import au.gov.asd.tac.constellation.plugins.importexport.model.TableRow;
import au.gov.asd.tac.constellation.utilities.icon.UserInterfaceIconProvider;
import java.awt.Color;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javafx.application.Platform;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.geometry.Side;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import javafx.scene.control.TextField;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
/**
* The ConfigurationPane is a UI element that displays a sample of the imported
* data and allows the user to assign graph attributes to columns in the data.
*
* @author sirius
*/
public class ConfigurationPane extends AnchorPane {
    // Inset (px) applied on all four sides when anchoring the tab pane.
    private static final double TAB_ANCHOR_POS = 5.0;
    // Inset (px) for the "new run" button in the top-right corner.
    private static final double RUN_BUTTON_ANCHOR_POS = 10.0;
    // Number of clicks on a tab label that triggers inline renaming.
    private static final int DOUBLE_CLICK_AMT = 2;
    // 16x16 black "+" icon used on the new-run button.
    private static final Image ADD_IMAGE = UserInterfaceIconProvider.ADD.buildImage(16, Color.BLACK);
    protected final ImportController importController;
    // One tab per import run; each tab's content is a RunPane.
    protected final TabPane tabPane;
    private final String helpText;
public ConfigurationPane(final ImportController importController, final String helpText) {
this.importController = importController;
this.helpText = helpText;
setMaxHeight(Double.MAX_VALUE);
setMaxWidth(Double.MAX_VALUE);
setMinSize(0, 0);
// Add the tab pane that will hold a tab for each run
tabPane = new TabPane();
tabPane.setMaxHeight(Double.MAX_VALUE);
tabPane.setSide(Side.TOP);
AnchorPane.setTopAnchor(tabPane, TAB_ANCHOR_POS);
AnchorPane.setLeftAnchor(tabPane, TAB_ANCHOR_POS);
AnchorPane.setRightAnchor(tabPane, TAB_ANCHOR_POS);
AnchorPane.setBottomAnchor(tabPane, TAB_ANCHOR_POS);
getChildren().add(tabPane);
// Create a button to allow the user to add a new tab (RunPane).
Button newRunButton = new Button("", new ImageView(ADD_IMAGE));
newRunButton.setOnAction(event -> importController.createNewRun());
AnchorPane.setTopAnchor(newRunButton, RUN_BUTTON_ANCHOR_POS);
AnchorPane.setRightAnchor(newRunButton, RUN_BUTTON_ANCHOR_POS);
getChildren().add(newRunButton);
// Add a single run to start with
createTab();
}
protected final Tab createTab() {
// Create a unique label for the new tab
int runNumber = 0;
boolean unique;
do {
runNumber++;
final String label = "Run " + runNumber;
unique = true;
for (final Tab tab : tabPane.getTabs()) {
final Label tabLabel = (Label) tab.getGraphic();
if (label.equals(tabLabel.getText())) {
unique = false;
break;
}
}
} while (!unique);
final Label label = new Label("Run " + runNumber);
final Tab tab = new Tab();
tab.setGraphic(label);
tab.setOnClosed(event -> importController.updateDisplayedAttributes());
label.setOnMouseClicked(event -> labelClickEvent(tab, label, event));
// Add the tab
tabPane.getTabs().add(tab);
// Bring the new tab to the front
tabPane.getSelectionModel().select(tab);
// Create the run pane - store the name of the associated configuration pane tab
final RunPane runPane = new RunPane(importController, helpText, label.getText());
tab.setContent(runPane);
return tab;
}
    /**
     * Handles clicks on a run tab's label: a double-click swaps the label for
     * an editable text field so the user can rename the run.
     *
     * @param tab the tab whose label was clicked
     * @param label the label currently shown on the tab
     * @param event the originating mouse event
     */
    private void labelClickEvent(final Tab tab, final Label label, final MouseEvent event) {
        if (event.getClickCount() == DOUBLE_CLICK_AMT) {
            final TextField field = new TextField(label.getText());
            // Pressing Enter commits the new name and restores the label graphic.
            field.setOnAction(e -> {
                label.setText(field.getText());
                tab.setGraphic(label);
            });
            // Losing focus also commits, and additionally pushes the new name
            // into the RunPane.  NOTE(review): the Enter path above does not
            // call setPaneName itself -- it appears to rely on the focus loss
            // triggered by swapping the graphic; confirm both paths leave the
            // RunPane name updated.
            field.focusedProperty().addListener((ObservableValue<? extends Boolean> observable, Boolean oldValue,
                    Boolean newValue) -> {
                if (!newValue) {
                    label.setText(field.getText());
                    tab.setGraphic(label);
                    // Ensure runPane is updated to store the updated name (corresponding to the configuration pane tab
                    // name) which is used when generating summary details to user.
                    final RunPane runPane = (RunPane) tab.getContent();
                    runPane.setPaneName(label.getText());
                }
            });
            tab.setGraphic(field);
            field.selectAll();
            field.requestFocus();
        }
    }
public void createNewRun(final Map<String, Attribute> vertexAttributes,
final Map<String, Attribute> transactionAttributes, final Set<Integer> keys,
final String[] columns, final List<String[]> data) {
final Tab tab = createTab();
RunPane runPane = (RunPane) tab.getContent();
runPane.requestLayout();
Platform.runLater(() -> {
RunPane runPane1 = (RunPane) tab.getContent();
runPane1.setDisplayedAttributes(vertexAttributes, transactionAttributes, keys);
runPane1.setSampleData(columns, createTableRows(data));
});
}
/**
* Set the configuration pane to display the specified column headers and
* sample data rows.
*
* @param columnLabels Column header labels.
* @param currentData Rows of sample data.
*/
public void setSampleData(final String[] columnLabels, final List<String[]> currentData) {
tabPane.getTabs().stream().map(tab -> (RunPane) tab.getContent()).forEachOrdered(runPane -> {
runPane.setSampleData(columnLabels, createTableRows(currentData));
runPane.refreshDataView();
});
}
private static ObservableList<TableRow> createTableRows(final List<String[]> data) {
final ObservableList<TableRow> rows = FXCollections.observableArrayList();
final int rowCount = Math.min(101, data.size());
for (int row = 0; row < rowCount; row++) {
rows.add(new TableRow(row, data.get(row)));
}
return rows;
}
/**
* A List<ImportDefinition> where each list element corresponds to a
* RunPane tab.
*
* @return A List<ImportDefinition> where each list element
* corresponds to a RunPane tab.
*/
public List<ImportDefinition> createDefinitions(final boolean isFilesIncludeHeadersEnabled) {
List<ImportDefinition> definitions = new ArrayList<>(tabPane.getTabs().size());
for (Tab tab : tabPane.getTabs()) {
RunPane runPane = (RunPane) tab.getContent();
definitions.add(runPane.createDefinition(isFilesIncludeHeadersEnabled ? 1 : 0));
}
return Collections.unmodifiableList(definitions);
}
public void deleteAttribute(final Attribute attribute) {
for (final Tab tab : tabPane.getTabs()) {
final RunPane runPane = (RunPane) tab.getContent();
runPane.deleteAttribute(attribute);
}
}
public void setDisplayedAttributes(final Map<String, Attribute> vertexAttributes,
final Map<String, Attribute> transactionAttributes, final Set<Integer> keys) {
for (final Tab tab : tabPane.getTabs()) {
final RunPane runPane = (RunPane) tab.getContent();
runPane.setDisplayedAttributes(vertexAttributes, transactionAttributes, keys);
runPane.setAttributePaneHeight();
}
}
/**
* Returns a combined collection of all attributes that have been allocated
* to a column in any run.
*
* @return a combined collection of all attributes that have been allocated
* to a column in any run.
*/
public Collection<Attribute> getAllocatedAttributes() {
final List<Attribute> allocatedAttributes = new ArrayList<>();
for (final Tab tab : tabPane.getTabs()) {
final RunPane runPane = (RunPane) tab.getContent();
allocatedAttributes.addAll(runPane.getAllocatedAttributes());
}
return allocatedAttributes;
}
    /**
     * Rebuild the tab set so it mirrors the given definitions: one fresh
     * RunPane per definition, then configure each pane from its definition.
     */
    void update(final List<ImportDefinition> definitions) {
        // First create a new RunPane for each ImportDefinition...
        // (This tends to involve Platform.runLater() so let them be queued.)
        tabPane.getTabs().clear();
        definitions.forEach(_item -> importController.createNewRun());
        // ...then configure each RunPane.
        // (This will queue waiting for the RunPane creations.)
        // NOTE(review): correctness relies on runLater() executing runnables in
        // FIFO order so the creations above finish before this block runs.
        Platform.runLater(() -> {
            final ObservableList<Tab> tabs = tabPane.getTabs();
            for (int ix = 0; ix < definitions.size(); ix++) {
                final ImportDefinition id = definitions.get(ix);
                final RunPane runPane = (RunPane) tabs.get(ix).getContent();
                runPane.update(id);
            }
        });
    }
public void clearFilters() {
tabPane.getTabs().stream().map(tab -> (RunPane) tab.getContent()).forEachOrdered(runPane -> runPane.clearFilters());
}
}
| 4,137 |
513 | <reponame>weslambert/strelka
import re
import os
import json
import subprocess
import tempfile
from strelka import strelka
class ScanCapa(strelka.Scanner):
    """Executes FireEye CAPA with versioned rules and provides known capabilities and MITRE ATT&CK matches.

    Options:
        tmp_directory: Location where tempfile writes temporary files.
            Defaults to '/tmp/'.
        location: Location of the CAPA rules file or directory.
            Defaults to '/etc/capa/'
    """
    def scan(self, data, file, options, expire_at):
        tmp_directory = options.get('tmp_directory', '/tmp/')
        location = options.get('location', '/etc/capa/')

        # Only run if the rules directory exists and is non-empty; otherwise
        # record a "no rules" flag instead of raising from os.listdir().
        if not os.path.isdir(location) or not os.listdir(location):
            self.flags.append('error_norules')
            return

        try:
            with tempfile.NamedTemporaryFile(dir=tmp_directory) as tmp_data:
                tmp_data.write(data)
                tmp_data.flush()

                try:
                    (stdout, stderr) = subprocess.Popen(
                        ['/tmp/capa-linux', tmp_data.name, '-r', location, '-j'],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.DEVNULL
                    ).communicate()
                # `except Exception` (not bare except) so SystemExit and
                # KeyboardInterrupt still propagate.
                except Exception:
                    self.flags.append('error_processing')
                    return

                if stdout:
                    # Observed extraneous data in stdout requiring string trimming.
                    # Parse out only the outermost JSON object. This can be fixed
                    # when CAPA is available as a Python 3 library.
                    try:
                        stdout = stdout[stdout.find(b'{'):]
                        stdout = stdout[:stdout.rfind(b'}')]
                        stdout += b'}'
                        capa_json = json.loads(stdout)
                    except Exception:
                        self.flags.append('error_parsing')
                        return

                    try:
                        # Sets are used to remove duplicative values.
                        self.event['matches'] = set()
                        self.event['mitre_techniques'] = set()
                        self.event['mitre_ids'] = set()

                        for k, v in capa_json['rules'].items():
                            self.event['matches'].add(k)
                            if 'att&ck' in v['meta']:
                                # e.g. "Tactic::Technique Name [T1234]"
                                result = re.search(r'^([^:]+)::([^\[)]+)\s\[([^\]]+)\]', v['meta']['att&ck'][0])
                                # Skip entries that don't match the expected
                                # format instead of aborting all collection.
                                if result:
                                    self.event['mitre_techniques'].add(result.group(2))
                                    self.event['mitre_ids'].add(result.group(3))

                        # For consistency, convert sets to lists.
                        self.event['matches'] = list(self.event['matches'])
                        self.event['mitre_techniques'] = list(self.event['mitre_techniques'])
                        self.event['mitre_ids'] = list(self.event['mitre_ids'])
                    except Exception:
                        self.flags.append('error_collection')
        except Exception:
            self.flags.append('error_execution')
| 1,912 |
355 | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
# pytype: skip-file
"""Various sampling methods."""
import torch
import numpy as np
from scipy import integrate
from models import utils as mutils
def get_div_fn(fn):
  """Create the divergence function of `fn` using the Hutchinson-Skilling trace estimator."""

  def div_fn(x, t, eps):
    # eps^T J eps is an unbiased single-probe estimate of div(fn) at (x, t).
    with torch.enable_grad():
      x.requires_grad_(True)
      grad_fn_eps = torch.autograd.grad(torch.sum(fn(x, t) * eps), x)[0]
      x.requires_grad_(False)
    # Reduce over every non-batch dimension to get one estimate per example.
    return torch.sum(grad_fn_eps * eps, dim=tuple(range(1, len(x.shape))))

  return div_fn
def get_likelihood_fn(sde, inverse_scaler, hutchinson_type='Rademacher',
                      rtol=1e-5, atol=1e-5, method='RK45', eps=1e-5):
  """Create a function to compute the unbiased log-likelihood estimate of a given data point.

  Args:
    sde: A `sde_lib.SDE` object that represents the forward SDE.
    inverse_scaler: The inverse data normalizer.
    hutchinson_type: "Rademacher" or "Gaussian". The type of noise for Hutchinson-Skilling trace estimator.
    rtol: A `float` number. The relative tolerance level of the black-box ODE solver.
    atol: A `float` number. The absolute tolerance level of the black-box ODE solver.
    method: A `str`. The algorithm for the black-box ODE solver.
      See documentation for `scipy.integrate.solve_ivp`.
    eps: A `float` number. The probability flow ODE is integrated to `eps` for numerical stability.
  Returns:
    A function that takes a batch of data points and returns the log-likelihoods in bits/dim,
    the latent code, and the number of function evaluations cost by computation.
  """
  def drift_fn(model, x, t):
    """The drift function of the reverse-time SDE."""
    score_fn = mutils.get_score_fn(sde, model, train=False, continuous=True)
    # Probability flow ODE is a special case of Reverse SDE
    rsde = sde.reverse(score_fn, probability_flow=True)
    return rsde.sde(x, t)[0]

  def div_fn(model, x, t, noise):
    # Hutchinson-Skilling estimate of the drift's divergence (see get_div_fn).
    return get_div_fn(lambda xx, tt: drift_fn(model, xx, tt))(x, t, noise)

  def likelihood_fn(model, data):
    """Compute an unbiased estimate to the log-likelihood in bits/dim.

    Args:
      model: A score model.
      data: A PyTorch tensor.

    Returns:
      bpd: A PyTorch tensor of shape [batch size]. The log-likelihoods on `data` in bits/dim.
      z: A PyTorch tensor of the same shape as `data`. The latent representation of `data` under the
        probability flow ODE.
      nfe: An integer. The number of function evaluations used for running the black-box ODE solver.
    """
    with torch.no_grad():
      shape = data.shape
      # One fixed probe vector is reused for the whole ODE solve.
      if hutchinson_type == 'Gaussian':
        epsilon = torch.randn_like(data)
      elif hutchinson_type == 'Rademacher':
        epsilon = torch.randint_like(data, low=0, high=2).float() * 2 - 1.
      else:
        raise NotImplementedError(f"Hutchinson type {hutchinson_type} unknown.")

      def ode_func(t, x):
        # ODE state layout: [flattened sample | shape[0] log-prob deltas].
        sample = mutils.from_flattened_numpy(x[:-shape[0]], shape).to(data.device).type(torch.float32)
        vec_t = torch.ones(sample.shape[0], device=sample.device) * t
        drift = mutils.to_flattened_numpy(drift_fn(model, sample, vec_t))
        logp_grad = mutils.to_flattened_numpy(div_fn(model, sample, vec_t, epsilon))
        return np.concatenate([drift, logp_grad], axis=0)

      # Integrate the probability flow ODE forward from t=eps to t=T.
      init = np.concatenate([mutils.to_flattened_numpy(data), np.zeros((shape[0],))], axis=0)
      solution = integrate.solve_ivp(ode_func, (eps, sde.T), init, rtol=rtol, atol=atol, method=method)
      nfe = solution.nfev
      zp = solution.y[:, -1]
      # Split the terminal state back into latent z and accumulated delta log p.
      z = mutils.from_flattened_numpy(zp[:-shape[0]], shape).to(data.device).type(torch.float32)
      delta_logp = mutils.from_flattened_numpy(zp[-shape[0]:], (shape[0],)).to(data.device).type(torch.float32)
      prior_logp = sde.prior_logp(z)
      # log2-likelihood, normalized per dimension.
      bpd = -(prior_logp + delta_logp) / np.log(2)
      N = np.prod(shape[1:])
      bpd = bpd / N
      # A hack to convert log-likelihoods to bits/dim
      offset = 7. - inverse_scaler(-1.)
      bpd = bpd + offset
      return bpd, z, nfe

  return likelihood_fn
| 1,818 |
1,601 | #include "test3.hh"
int main(int argc, const char** /*argv*/) {
  // Null pointer is handed to bar() only when foo() approves the arg count —
  // presumably a fixture for a null-dereference analysis check; confirm.
  int* ptr = nullptr;
  if (foo(argc > 3)) {
    bar(ptr);
  }
  return 0;
}
| 71 |
342 | package org.anddev.andengine.entity.particle.modifier;
import org.anddev.andengine.entity.particle.Particle;
/**
* (c) 2010 <NAME>
* (c) 2011 Zynga Inc.
*
* @author <NAME>
* @since 21:21:10 - 14.03.2010
*/
public class AlphaModifier extends BaseSingleValueSpanModifier {

    /**
     * Particle modifier that drives a particle's alpha from
     * {@code pFromAlpha} to {@code pToAlpha} over the time span
     * {@code [pFromTime, pToTime]}.
     */
    public AlphaModifier(final float pFromAlpha, final float pToAlpha, final float pFromTime, final float pToTime) {
        super(pFromAlpha, pToAlpha, pFromTime, pToTime);
    }

    @Override
    protected void onSetInitialValue(final Particle pParticle, final float pAlpha) {
        // Seed the particle with the span's starting alpha.
        pParticle.setAlpha(pAlpha);
    }

    @Override
    protected void onSetValue(final Particle pParticle, final float pAlpha) {
        // Apply the interpolated alpha for the current point in the span.
        pParticle.setAlpha(pAlpha);
    }
}
| 387 |
331 | <filename>Hard/Challenge 0001 Hard/solutions/solution.c
/*
* @Author: <NAME>
* @Email: <EMAIL>
* @Created Date: Sat Mar 19, 2016 05:18:39 PM
* @Last Modified time: Sat Mar 19, 2016 05:50:29 PM
*
* @Description: Higher-lower game with computer trying to guess the number.
* Will use a binary search and handle if the user cheats by changing the number.
*
*/
#include <stdio.h>
/*
 * Higher-lower game: the computer binary-searches for the user's number in
 * (0, 1000] and detects impossible (cheating) answer sequences.
 */
int main() {
    printf("Number Guessing Game!\n");
    printf("I'm going to try to guess a number that you have thought of.\n");
    printf("To keep things from getting too hairy, keep the number you think of <= 1000 ok?\n");

    /* Binary-search bounds; values stay <= 1000 so (low + high) cannot overflow. */
    unsigned long high = 1000;
    unsigned long currGuess = 500;
    unsigned long low = 0;
    /* Holds 1, 2, or 3 to represent higher, lower, found.
     * Initialized so a failed read never leaves it indeterminate. */
    unsigned short where = 0;
    unsigned short numGuessMade = 0;

    do {
        numGuessMade++;
        printf("My guess number %hu is %lu\n", numGuessMade, currGuess);
        printf("Was I right? 1) higher, 2) lower, 3) yes!\n");
        /* The original ignored scanf's result: non-numeric input was never
         * consumed, so the loop would spin forever re-reading it. */
        if (scanf("%hu", &where) != 1) {
            printf("I couldn't read that answer, so I'm giving up.\n");
            return 1;
        }
        if (where == 1) {
            low = currGuess;
            currGuess = (low + high) / 2;
            /* Midpoint collapsed onto the bound: no integers remain. */
            if (currGuess == low) {
                printf("What you told me is impossible... You cheated!\n");
                return 0;
            }
        }
        else if (where == 2) {
            high = currGuess;
            currGuess = (low + high) / 2;
            if (currGuess == high) {
                printf("What you told me is impossible... You cheated!\n");
                return 0;
            }
        }
    } while (where != 3);

    printf("I guessed your number: %lu in %hu tries!\n", currGuess, numGuessMade);
    return 0;
}
| 763 |
446 | #!/usr/bin/env python3
"""Extract variables of one model part into a single checkpoint file.
Can be used to load the model part in a different setup."""
import argparse
import os
import tensorflow as tf
from neuralmonkey.logging import log as _log
def log(message: str, color: str = "blue") -> None:
    # Thin wrapper over neuralmonkey's logger; this script logs in blue by default.
    _log(message, color)
def main() -> None:
    # Copy every variable under "<MODEL-PART>/" from an experiment checkpoint
    # into a fresh standalone checkpoint file.
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("orig_checkpoint", metavar="EXPERIMENT-CHECKPOINT",
                        help="path to the original checkpoint")
    parser.add_argument("model_part_name", metavar="MODEL-PART",
                        help="name of the extracted model part")
    parser.add_argument("output_path", metavar="OUTPUT-CHECKPOINT",
                        help="output checkopint file")
    args = parser.parse_args()

    # TF checkpoints are sharded on disk; the ".index" file is the reliable
    # existence probe for the checkpoint as a whole.
    if not os.path.exists("{}.index".format(args.orig_checkpoint)):
        log("Checkpoint '{}' does not exist.".format(
            args.orig_checkpoint), color="red")
        exit(1)

    log("Getting list of variables.")
    # Keep only variables scoped under "<model_part_name>/".
    var_list = [
        name for name, shape in
        tf.contrib.framework.list_variables(args.orig_checkpoint)
        if name.startswith("{}/".format(args.model_part_name))]

    if not var_list:
        log("No variables for model part '{}' in checkpoint '{}'.".format(
            args.model_part_name, args.orig_checkpoint), color="red")
        exit(1)

    log("Reading variables from the checkpoint: {}".format(
        ", ".join(var_list)))
    var_values, var_dtypes = {}, {}
    reader = tf.contrib.framework.load_checkpoint(args.orig_checkpoint)
    for name in var_list:
        tensor = reader.get_tensor(name)
        var_dtypes[name] = tensor.dtype
        var_values[name] = tensor

    # Recreate each variable in a fresh graph and assign the stored values via
    # placeholders (avoids embedding large constants in the graph).
    tf_vars = [
        tf.get_variable(v, shape=var_values[v].shape, dtype=var_dtypes[v])
        for v in var_values]
    placeholders = [tf.placeholder(v.dtype, shape=v.shape) for v in tf_vars]
    assign_ops = [tf.assign(v, p) for (v, p) in zip(tf_vars, placeholders)]
    saver = tf.train.Saver()

    # Build a model only with variables, set them to the average values.
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        # NOTE(review): zip relies on dict iteration order matching the order
        # tf_vars/placeholders were built in — holds on Python 3.7+.
        for p, assign_op, (name, value) in zip(placeholders, assign_ops,
                                               var_values.items()):
            sess.run(assign_op, {p: value})
        saver.save(sess, os.path.abspath(args.output_path))

    log("Extracted model part saved to {}".format(args.output_path))


if __name__ == "__main__":
    main()
| 1,092 |
3,428 | <filename>lib/node_modules/@stdlib/datasets/spam-assassin/data/easy-ham-1/02342.fb90b37c6a682e01e61c9da213e727b7.json<gh_stars>1000+
{"id":"02342","group":"easy-ham-1","checksum":{"type":"MD5","value":"fb90b37c6a682e01e61c9da213e727b7"},"text":"From <EMAIL> Mon Oct 7 12:05:41 2002\nReturn-Path: <<EMAIL>>\nDelivered-To: yyyy<EMAIL>assin.taint.org\nReceived: from localhost (jalapeno [127.0.0.1])\n\tby jmason.org (Postfix) with ESMTP id A7AD216F87\n\tfor <jm@localhost>; Mon, 7 Oct 2002 12:04:19 +0100 (IST)\nReceived: from jalapeno [127.0.0.1]\n\tby localhost with IMAP (fetchmail-5.9.0)\n\tfor jm@localhost (single-drop); Mon, 07 Oct 2002 12:04:19 +0100 (IST)\nReceived: from dogma.slashnull.org (localhost [127.0.0.1]) by\n dogma.slashnull.org (8.11.6/8.11.6) with ESMTP id g9780pK23304 for\n <<EMAIL>>; Mon, 7 Oct 2002 09:00:51 +0100\nMessage-Id: <<EMAIL>>\nTo: yyyy<EMAIL>int.org\nFrom: guardian <<EMAIL>>\nSubject: Al-Qaida suspected in tanker blast\nDate: Mon, 07 Oct 2002 08:00:51 -0000\nContent-Type: text/plain; encoding=utf-8\n\nURL: http://www.newsisfree.com/click/-1,8622117,215/\nDate: 2002-10-07T03:52:59+01:00\n\nFrench ship burns off Yemen.\n\n\n"} | 517 |
2,706 | <filename>retro/cores/gba/src/platform/python/_builder.py
import cffi
import os, os.path
import shlex
import subprocess
import sys
ffi = cffi.FFI()
# Source-tree layout: this script lives in src/platform/python.
pydir = os.path.dirname(os.path.abspath(__file__))
srcdir = os.path.join(pydir, "..", "..")
incdir = os.path.join(pydir, "..", "..", "..", "include")
# Build directory with libmgba; defaults to the CWD's parent unless BINDIR is set.
bindir = os.environ.get("BINDIR", os.path.join(os.getcwd(), ".."))
# Honor the build system's C preprocessor command and flags.
cpp = shlex.split(os.environ.get("CPP", "cc -E"))
cppflags = shlex.split(os.environ.get("CPPFLAGS", ""))
if __name__ == "__main__":
    # Extra preprocessor flags may be passed on the command line when run directly.
    cppflags.extend(sys.argv[1:])
cppflags.extend(["-I" + incdir, "-I" + srcdir, "-I" + bindir])
# Describe the _pylib extension module. The C prelude blanks out `static` and
# `inline` so cffi can see otherwise-hidden symbols, then pulls in the mGBA
# headers plus the Python-bridge headers guarded by PYEXPORT.
ffi.set_source("mgba._pylib", """
#define static
#define inline
#include "flags.h"
#define OPAQUE_THREADING
#include <mgba/core/cache-set.h>
#include <mgba-util/common.h>
#include <mgba/core/core.h>
#include <mgba/core/map-cache.h>
#include <mgba/core/log.h>
#include <mgba/core/mem-search.h>
#include <mgba/core/thread.h>
#include <mgba/core/version.h>
#include <mgba/debugger/debugger.h>
#include <mgba/gba/interface.h>
#include <mgba/internal/arm/arm.h>
#include <mgba/internal/debugger/cli-debugger.h>
#include <mgba/internal/gba/gba.h>
#include <mgba/internal/gba/input.h>
#include <mgba/internal/gba/renderers/cache-set.h>
#include <mgba/internal/lr35902/lr35902.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/renderers/cache-set.h>
#include <mgba-util/png-io.h>
#include <mgba-util/vfs.h>
#define PYEXPORT
#include "platform/python/core.h"
#include "platform/python/log.h"
#include "platform/python/sio.h"
#include "platform/python/vfs-py.h"
#undef PYEXPORT
""", include_dirs=[incdir, srcdir],
    extra_compile_args=cppflags,
    libraries=["mgba"],
    library_dirs=[bindir],
    sources=[os.path.join(pydir, path) for path in ["vfs-py.c", "core.c", "log.c", "sio.c"]])
def _stripped_declarations(header_path):
    """Preprocess *header_path* and return its declarations as one string.

    Runs the configured C preprocessor with -P/-fno-inline, then drops any
    remaining '#' directive lines, which cffi's cdef()/embedding_api()
    cannot digest.
    """
    preprocessed = subprocess.check_output(
        cpp + ["-fno-inline", "-P"] + cppflags + [header_path],
        universal_newlines=True)
    kept = []
    for line in preprocessed.splitlines():
        line = line.strip()
        if line.startswith('#'):
            continue
        kept.append(line)
    return '\n'.join(kept)

# Declarations Python may call into C.
ffi.cdef(_stripped_declarations(os.path.join(pydir, "_builder.h")))
# Entry points C may call into the embedded Python interpreter.
ffi.embedding_api(_stripped_declarations(os.path.join(pydir, "lib.h")))
ffi.embedding_init_code("""
import os, os.path
venv = os.getenv('VIRTUAL_ENV')
if venv:
activate = os.path.join(venv, 'bin', 'activate_this.py')
exec(compile(open(activate, "rb").read(), activate, 'exec'), dict(__file__=activate))
from mgba._pylib import ffi, lib
symbols = {}
globalSyms = {
'symbols': symbols
}
pendingCode = []
@ffi.def_extern()
def mPythonSetDebugger(debugger):
from mgba.debugger import NativeDebugger, CLIDebugger
oldDebugger = globalSyms.get('debugger')
if oldDebugger and oldDebugger._native == debugger:
return
if oldDebugger and not debugger:
del globalSyms['debugger']
return
if debugger.type == lib.DEBUGGER_CLI:
debugger = CLIDebugger(debugger)
else:
debugger = NativeDebugger(debugger)
globalSyms['debugger'] = debugger
@ffi.def_extern()
def mPythonLoadScript(name, vf):
from mgba.vfs import VFile
vf = VFile(vf)
name = ffi.string(name)
source = vf.readAll().decode('utf-8')
try:
code = compile(source, name, 'exec')
pendingCode.append(code)
except:
return False
return True
@ffi.def_extern()
def mPythonRunPending():
global pendingCode
for code in pendingCode:
exec(code, globalSyms, {})
pendingCode = []
@ffi.def_extern()
def mPythonDebuggerEntered(reason, info):
debugger = globalSyms['debugger']
if not debugger:
return
if info == ffi.NULL:
info = None
for cb in debugger._cbs:
cb(reason, info)
@ffi.def_extern()
def mPythonLookupSymbol(name, outptr):
name = ffi.string(name).decode('utf-8')
if name not in symbols:
return False
sym = symbols[name]
val = None
try:
val = int(sym)
except:
try:
val = sym()
except:
pass
if val is None:
return False
try:
outptr[0] = ffi.cast('int32_t', val)
return True
except:
return False
""")
if __name__ == "__main__":
    # Emit lib.c for the build system to compile into the embedding library.
    ffi.emit_c_code("lib.c")
| 2,218 |
809 | /**
* @file
*
* @date Nov 16, 2018
* @author <NAME>
*/
#include <util/log.h>
#include <errno.h>
#include <stddef.h>
#include <stdint.h>
#include <hal/reg.h>
#include <drivers/common/memory.h>
#include <drivers/clk/ccm_imx6.h>
#include <drivers/i2c/i2c.h>
#include <drivers/iomuxc.h>
#include <embox/unit.h>
#include "imx_i2c.h"
/* Register imx_i2c3_init to run at Embox unit-init time. */
EMBOX_UNIT_INIT(imx_i2c3_init);

/* Build-time options: pin-mux variant, controller base address, IRQ line. */
#define I2C3_PIN_SEL OPTION_GET(NUMBER,i2c_pins_select)
#define IMX_I2C_BASE OPTION_GET(NUMBER,base_addr)
#define IMX_I2C_IRQ_NUM OPTION_GET(NUMBER,irq_num)

/* Controller state consumed by the shared imx_i2c algorithm driver. */
static struct imx_i2c imx_i2c3_priv = {
	.irq_num = IMX_I2C_IRQ_NUM,
	.base_addr = IMX_I2C_BASE,
};

/* Adapter instance registered with the generic I2C bus layer as bus 3. */
static struct i2c_adapter imx_i2c3_adap = {
	.i2c_algo_data = &imx_i2c3_priv,
	.i2c_algo = &imx_i2c_algo,
};
/* Route I2C3 SCL/SDA to one of three pad sets, chosen at build time via
 * I2C3_PIN_SEL. Each pair writes a pad mux-mode register and the matching
 * daisy-chain input-select register.
 * NOTE(review): the literal values look like IOMUXC mux-mode/daisy settings —
 * confirm against the i.MX6 reference manual before changing them. */
static inline void imx_i2c3_pins_init(void) {
#if I2C3_PIN_SEL == 1
	iomuxc_write(IOMUXC_SW_MUX_CTL_PAD_GPIO03, 0x12);
	iomuxc_write(IOMUXC_I2C3_SCL_IN_SELECT_INPUT, 0x11);
	iomuxc_write(IOMUXC_SW_MUX_CTL_PAD_GPIO06, 0x12);
	iomuxc_write(IOMUXC_I2C3_SDA_IN_SELECT_INPUT, 0x11);
#elif I2C3_PIN_SEL == 2
	iomuxc_write(IOMUXC_SW_MUX_CTL_PAD_GPIO05, 0x16);
	iomuxc_write(IOMUXC_I2C3_SCL_IN_SELECT_INPUT, 0x12);
	iomuxc_write(IOMUXC_SW_MUX_CTL_PAD_GPIO16, 0x16);
	iomuxc_write(IOMUXC_I2C3_SDA_IN_SELECT_INPUT, 0x12);
#else
	iomuxc_write(IOMUXC_SW_MUX_CTL_PAD_EIM_DATA17, 0x16);
	iomuxc_write(IOMUXC_I2C3_SCL_IN_SELECT_INPUT, 0x10);
	iomuxc_write(IOMUXC_SW_MUX_CTL_PAD_EIM_DATA18, 0x16);
	iomuxc_write(IOMUXC_I2C3_SDA_IN_SELECT_INPUT, 0x10);
#endif
}
/* Unit init: mux the pins, ungate the controller clock, register bus 3. */
static int imx_i2c3_init(void) {
	imx_i2c3_pins_init();
	clk_enable("i2c3");
	return i2c_bus_register(&imx_i2c3_adap, 3, "i2c3");
}

/* Declare the controller's 0x100-byte register window as peripheral memory. */
PERIPH_MEMORY_DEFINE(imx_i2c3, IMX_I2C_BASE, 0x100);
| 954 |
#ifndef __TimeStepPF_h__
#define __TimeStepPF_h__

#include "SimulationDataPF.h"
#include "SPlisHSPlasH/Common.h"
#include "SPlisHSPlasH/TimeStep.h"
#include "SPlisHSPlasH/SPHKernels.h"
#include "SPlisHSPlasH/Utilities/MatrixFreeSolver.h"

// since all diagonal blocks are 3x3 diagonal matrices, a diagonal preconditioner does suffice
#define PD_USE_DIAGONAL_PRECONDITIONER

namespace SPH
{
	/** \brief This class implements the Projective Fluids approach introduced
	* by Weiler, Koschier and Bender [WKB16].
	*
	* References:
	* - [WKB16] <NAME>, <NAME>, and <NAME>. Projective fluids. In Proceedings of the 9th International Conference on Motion in Games, MIG '16, 79-84. New York, NY, USA, 2016. ACM. URL: http://doi.acm.org/10.1145/2994258.2994282
	*/
	class TimeStepPF : public TimeStep
	{
	protected:
		using VectorXr = Eigen::Matrix<Real, -1, 1>;
		using VectorXrMap = Eigen::Map<VectorXr>;

#ifdef PD_USE_DIAGONAL_PRECONDITIONER
		// CG with a Jacobi (block-diagonal) preconditioner.
		using Solver = Eigen::ConjugateGradient<MatrixReplacement, Eigen::Lower | Eigen::Upper, JacobiPreconditioner3D>;
		FORCE_INLINE static void diagonalMatrixElement(const unsigned int row, Vector3r &result, void *userData);
		void preparePreconditioner();
#else
		using Solver = Eigen::ConjugateGradient<MatrixReplacement, Eigen::Lower | Eigen::Upper, Eigen::IdentityPreconditioner>;
#endif

		SimulationDataPF m_simulationData;   // per-particle PF solver state
		Solver m_solver;                     // matrix-free CG solver
		Real m_stiffness;                    // constraint stiffness parameter
		unsigned int m_counter;              // neighborhood-search cadence counter
		unsigned int m_numActiveParticlesTotal;

		// One projective-dynamics iteration: predict, project, update.
		void initialGuessForPositions(const unsigned int fluidModelIndex);
		void solvePDConstraints();
		void updatePositionsAndVelocity(const VectorXr & x);
		void addAccellerationToVelocity();
		// Right-hand side of the matrix-free linear system.
		void matrixFreeRHS(const VectorXr & x, VectorXr & result);

		/** Perform the neighborhood search for all fluid particles.
		*/
		void performNeighborhoodSearch();
		virtual void emittedParticles(FluidModel *model, const unsigned int startIndex) override;
		virtual void initParameters() override;

	public:
		static int STIFFNESS;   // parameter id for the stiffness setting

		TimeStepPF();
		virtual ~TimeStepPF(void);

		virtual void step() override;
		virtual void reset() override;
		virtual void resize() override;

		static void matrixVecProd(const Real* vec, Real *result, void *userData);
	};
}

#endif
| 775 |
1,475 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.functions;
import org.apache.geode.annotations.Immutable;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.ResultSender;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.internal.cache.execute.InternalFunction;
import org.apache.geode.internal.cache.xmlcache.CacheXml;
import org.apache.geode.management.internal.cli.CliUtils;
import org.apache.geode.management.internal.configuration.domain.XmlEntity;
import org.apache.geode.management.internal.functions.CliFunctionResult;
public class GatewaySenderDestroyFunction
    implements InternalFunction<GatewaySenderDestroyFunctionArgs> {
  private static final long serialVersionUID = 1L;

  @Immutable
  public static final GatewaySenderDestroyFunction INSTANCE = new GatewaySenderDestroyFunction();

  private static final String ID =
      "org.apache.geode.management.internal.cli.functions.GatewaySenderDestroyFunction";

  @Override
  public String getId() {
    return ID;
  }

  /**
   * Stops and destroys the gateway sender named in the function arguments on
   * this member, reporting the outcome through the context's result sender.
   */
  @Override
  @SuppressWarnings("deprecation")
  public void execute(FunctionContext<GatewaySenderDestroyFunctionArgs> context) {
    final ResultSender<Object> resultSender = context.getResultSender();
    final Cache cache = context.getCache();
    final String memberNameOrId =
        CliUtils.getMemberNameOrId(cache.getDistributedSystem().getDistributedMember());
    final GatewaySenderDestroyFunctionArgs args = context.getArguments();
    final String senderId = args.getId();

    final GatewaySender gatewaySender = cache.getGatewaySender(senderId);
    if (gatewaySender == null) {
      // With --if-exists this is a successful no-op; otherwise it is an error.
      final String message = "Gateway sender " + senderId + " not found.";
      if (args.isIfExists()) {
        resultSender
            .lastResult(new CliFunctionResult(memberNameOrId, true, "Skipping: " + message));
      } else {
        resultSender.lastResult(new CliFunctionResult(memberNameOrId, false, message));
      }
      return;
    }

    try {
      gatewaySender.stop();
      gatewaySender.destroy();
      final XmlEntity xmlEntity = new XmlEntity(CacheXml.GATEWAY_SENDER, "id", senderId);
      resultSender.lastResult(new CliFunctionResult(memberNameOrId, xmlEntity,
          String.format("GatewaySender \"%s\" destroyed on \"%s\"", senderId, memberNameOrId)));
    } catch (Exception e) {
      resultSender.lastResult(new CliFunctionResult(memberNameOrId, e, ""));
    }
  }
}
| 1,055 |
3,986 | <reponame>liuxiaoqiang1018/XUI
package com.xuexiang.xuidemo.fragment.components.popupwindow;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import com.xuexiang.xpage.annotation.Page;
import com.xuexiang.xui.widget.popupwindow.easypopup.EasyPopup;
import com.xuexiang.xui.widget.popupwindow.easypopup.HorizontalGravity;
import com.xuexiang.xui.widget.popupwindow.easypopup.VerticalGravity;
import com.xuexiang.xuidemo.R;
import com.xuexiang.xuidemo.base.BaseFragment;
import com.xuexiang.xuidemo.utils.XToastUtils;
import butterknife.BindView;
import butterknife.OnClick;
/**
 * Demo page for {@link EasyPopup}: a friend-circle style like/comment popup
 * that can be anchored to any side of a button.
 *
 * @author xuexiang
 * @date 2017/10/30 上午11:49
 */
@Page(name = "EasyPopup\n可自定义的弹出窗")
public class EasyPopFragment extends BaseFragment {

    @BindView(R.id.btn_circle_comment)
    Button mBtnCircleComment;

    /** Single popup instance shared by all four anchor buttons. */
    private EasyPopup mCirclePop;

    @Override
    protected int getLayoutId() {
        return R.layout.fragment_easypop;
    }

    @Override
    protected void initViews() {
        initCirclePop();
    }

    @Override
    protected void initListeners() {
        // Nothing to wire here: the popup's views are bound in initCirclePop().
    }

    public void initCirclePop() {
        mCirclePop = new EasyPopup(getContext())
                .setContentView(R.layout.layout_friend_circle_comment)
                .setFocusAndOutsideEnable(true)
                .createPopup();

        final TextView zanView = mCirclePop.getView(R.id.tv_zan);
        zanView.setOnClickListener(v -> {
            XToastUtils.toast("点赞");
            mCirclePop.dismiss();
        });

        final TextView commentView = mCirclePop.getView(R.id.tv_comment);
        commentView.setOnClickListener(v -> {
            XToastUtils.toast("评论");
            mCirclePop.dismiss();
        });
    }

    @OnClick(R.id.btn_circle_comment_left)
    public void showCirclePopLeft(View view) {
        mCirclePop.showAtAnchorView(mBtnCircleComment, VerticalGravity.CENTER, HorizontalGravity.LEFT, 0, 0);
    }

    @OnClick(R.id.btn_circle_comment_right)
    public void showCirclePopRight(View view) {
        mCirclePop.showAtAnchorView(mBtnCircleComment, VerticalGravity.CENTER, HorizontalGravity.RIGHT, 0, 0);
    }

    @OnClick(R.id.btn_circle_comment_top)
    public void showCirclePopTop(View view) {
        mCirclePop.showAtAnchorView(mBtnCircleComment, VerticalGravity.ABOVE, HorizontalGravity.CENTER, 0, 0);
    }

    @OnClick(R.id.btn_circle_comment_bottom)
    public void showCirclePopBottom(View view) {
        mCirclePop.showAtAnchorView(mBtnCircleComment, VerticalGravity.BELOW, HorizontalGravity.CENTER, 0, 0);
    }
}
| 1,163 |
2,231 | <filename>com.dynamo.cr/com.dynamo.cr.bob/src/com/dynamo/bob/IResourceScanner.java
// Copyright 2020 The Defold Foundation
// Licensed under the Defold License version 1.0 (the "License"); you may not use
// this file except in compliance with the License.
//
// You may obtain a copy of the License, together with FAQs at
// https://www.defold.com/license
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
package com.dynamo.bob;
import java.io.IOException;
import java.io.InputStream;
import java.util.Set;
/**
* Resource scanner interface
*
*/
public interface IResourceScanner {
    /**
     * Retrieve an input stream from the resource at the specified path.
     * The caller is responsible for closing the stream.
     * @param path path of the resource
     * @return InputStream of the resource
     * @throws IOException if the resource cannot be opened
     */
    InputStream openInputStream(String path) throws IOException;

    /**
     * Whether the resource exists on disk or in an archive or not.
     * @param path path of the resource
     * @return true if it exists
     */
    boolean exists(String path);

    /**
     * Whether the resource represents a file on disk or not
     * (as opposed to, e.g., an entry inside an archive).
     * @param path path of the resource
     * @return true if it is a file
     */
    boolean isFile(String path);

    /**
     * Scan after resources matching the filter
     * @param filter filter to match resources
     * @return {@link Set} of matching resource paths
     */
    public Set<String> scan(String filter);
}
| 519 |
348 | {"nom":"Bissy-sur-Fley","circ":"5ème circonscription","dpt":"Saône-et-Loire","inscrits":89,"abs":39,"votants":50,"blancs":13,"nuls":0,"exp":37,"res":[{"nuance":"REM","nom":"<NAME>","voix":24},{"nuance":"LR","nom":"<NAME>","voix":13}]} | 98 |
60,067 | <filename>benchmarks/distributed/rpc/parameter_server/trainer/trainer.py<gh_stars>1000+
import functools
import time
from abc import ABC, abstractmethod
from metrics.MetricsLogger import MetricsLogger
import torch
class TrainerBase(ABC):
    r"""
    Abstract base class for trainers. Provides start/end metric-recording
    helpers (batch, forward, backward), a method decorator that times a
    whole method call, and accessors over an internal ``MetricsLogger``.
    """

    # Group ids / metric names used by the record_* convenience helpers.
    BATCH_LEVEL_METRIC = "batch_level_metric"
    BATCH_ALL = "batch_all"
    FORWARD_METRIC = "forward_metric"
    FORWARD_PASS = "forward_pass"
    BACKWARD_METRIC = "backward_metric"
    BACKWARD = "backward"

    def __init__(self, rank):
        r"""
        Inits TrainerBase class.
        Args:
            rank (int): worker rank
        """
        self.__metrics_logger = MetricsLogger(rank)

    @abstractmethod
    def train(self):
        r"""
        A method to be implemented by child class that will train a neural network.
        """
        return

    def record_start(self, type, key, name, cuda=True):
        r"""
        A method that records the start event for a metric.
        Args:
            type (str): group id for metric
            key (str): unique id for metric within a group
            name (str): description of the metric
            cuda (bool): indicator to determine if this is a CUDA metric
        """
        self.__metrics_logger.record_start(
            type,
            key,
            name,
            cuda
        )

    def record_end(self, type, key):
        r"""
        A method that records the end event for a metric.
        Args:
            type (str): group id for metric
            key (str): unique id for metric within a group
        """
        self.__metrics_logger.record_end(
            type,
            key
        )

    def record_batch_start(self, key, cuda=True):
        r"""
        A helper method that records a batch metric for the
        given key. A user should call this at the start of an
        iteration step during training.
        Args:
            key (str): unique id for metric within a group
            cuda (bool): indicator to determine if this is a CUDA metric
        """
        self.__metrics_logger.record_start(
            self.BATCH_LEVEL_METRIC,
            key,
            self.BATCH_ALL,
            cuda
        )

    def record_batch_end(self, key):
        r"""
        A helper method that records a batch metric for the
        given key. A user should call this at the end of an
        iteration step during training.
        Args:
            key (str): unique id for metric within a group
        """
        self.__metrics_logger.record_end(
            self.BATCH_LEVEL_METRIC,
            key
        )

    def record_forward_start(self, key, cuda=True):
        r"""
        A helper method that records a forward metric
        for the given key. A user should call this before
        their neural network forward.
        Args:
            key (str): unique id for metric within a group
            cuda (bool): indicator to determine if this is a CUDA metric
        """
        self.__metrics_logger.record_start(
            self.FORWARD_METRIC,
            key,
            self.FORWARD_PASS,
            cuda
        )

    def record_forward_end(self, key):
        r"""
        A helper method that records a forward metric
        for the given key. A user should call this after their
        neural network forward.
        Args:
            key (str): unique id for metric within a group
        """
        self.__metrics_logger.record_end(
            self.FORWARD_METRIC,
            key
        )

    def record_backward_start(self, key, cuda=True):
        r"""
        A helper method that records a backward metric
        for the given key. A user should call this before
        their .backward() call.
        Args:
            key (str): unique id for metric within a group
            cuda (bool): indicator to determine if this is a CUDA metric
        """
        self.__metrics_logger.record_start(
            self.BACKWARD_METRIC,
            key,
            self.BACKWARD,
            cuda
        )

    def record_backward_end(self, key):
        r"""
        A helper method that records a backward metric
        for the given key. A user should call this after
        .backward().
        Args:
            key (str): unique id for metric within a group
        """
        self.__metrics_logger.record_end(
            self.BACKWARD_METRIC,
            key
        )

    @staticmethod
    def methodmetric(name, type="method_metric", cuda=True):
        r"""
        A decorator that records a metric for the decorated method.
        Fix over the previous version: keyword arguments are now forwarded
        to the wrapped method (previously only positional args were passed,
        so calls using kwargs raised TypeError).
        Args:
            name (str): description of the metric
            type (str): group id for metric
            cuda (bool): indicator to determine if this is a CUDA metric
        """
        def decorator(function):
            @functools.wraps(function)
            def wrapper(self, *args, **kwargs):
                # time.time() acts as a (sufficiently) unique per-call key.
                key = time.time()
                # __metrics_logger is name-mangled at class-definition time to
                # _TrainerBase__metrics_logger, so this resolves on subclasses.
                self.__metrics_logger.record_start(type, key, name, cuda)
                result = function(self, *args, **kwargs)
                self.__metrics_logger.record_end(type, key)
                return result
            return wrapper
        return decorator

    def get_metrics(self):
        r"""
        A method that returns metrics captured by the __metrics_logger.
        """
        return self.__metrics_logger.get_processed_metrics()

    def clear_metrics(self):
        r"""
        A method that clears __metrics_logger recorded metrics.
        """
        return self.__metrics_logger.clear_metrics()
class DdpTrainer(TrainerBase):
    r"""
    DDP trainer whose gradient reduction is driven by a communication hook
    performing allreduce through the supplied process group.
    """

    def __init__(
        self,
        process_group,
        use_cuda_rpc,
        server_rref,
        backend,
        epochs,
        preprocess_data,
        create_criterion,
        create_ddp_model,
        hook_state_class,
        hook,
        iteration_step
    ):
        r"""
        Inits DdpTrainer.
        Args:
            process_group (ProcessGroup): distributed process group
            use_cuda_rpc (bool): indicator for CUDA RPC
            server_rref (RRef): remote reference to the server
            backend (str): distributed communication backend
            epochs (int): epoch count for training
            preprocess_data (function): preprocesses data passed to the
                trainer before training starts
            create_criterion (function): creates a criterion to calculate loss
            create_ddp_model (function): creates a ddp model for the trainer
            hook_state_class (class): class used to keep track of state
                during training
            hook (function): ddp communication hook
            iteration_step (function): performs one step of training
        """
        super().__init__(process_group.rank())
        self.process_group = process_group
        self.rank = process_group.rank()
        self.trainer_count = process_group.size()
        self.use_cuda_rpc = use_cuda_rpc
        self.server_rref = server_rref
        self.backend = backend
        self.epochs = epochs
        self.preprocess_data = preprocess_data
        self.create_criterion = create_criterion
        self.create_ddp_model = create_ddp_model
        self.hook_state_class = hook_state_class
        self.hook = hook
        self.iteration_step = iteration_step

    def epoch_key(self, epoch, index):
        r"""
        Encodes the current epoch and iteration index into a single key.
        Args:
            epoch (int): epoch index
            index (int): iteration index
        """
        return "{},{}".format(epoch, index)

    def train(self, model, data):
        r"""
        Runs the full training loop for the configured number of epochs.
        Args:
            model (nn.Module): neural network model
            data (list): training examples
        """
        # Move the model onto this trainer's device, then build the pieces
        # needed for the loop: preprocessed data, criterion, DDP wrapper.
        model = model.cuda(self.rank)
        examples = self.preprocess_data(self.rank, data)
        loss_criterion = self.create_criterion(self.rank)
        wrapped_model, hook_state = self.create_ddp_model(
            self, self.rank, model, self.process_group, self.hook_state_class, self.hook
        )
        sgd_optimizer = torch.optim.SGD(wrapped_model.parameters(), 1e-4)
        for epoch in range(self.epochs):
            # Progress message every 5 epochs, printed only by rank 0.
            if self.rank == 0 and epoch % 5 == 0:
                print(f"train epoch={epoch}")
            for batch_idx, batch_data in enumerate(examples):
                self.iteration_step(
                    self, wrapped_model, loss_criterion, sgd_optimizer,
                    hook_state, epoch, batch_idx, batch_data
                )
        # Wait for all queued CUDA work on this device before returning.
        torch.cuda.synchronize(self.rank)
| 3,957 |
683 | <reponame>laurit/opentelemetry-java-instrumentation
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.javaagent.instrumentation.cassandra.v3_0;
import static io.opentelemetry.javaagent.instrumentation.cassandra.v3_0.CassandraSingletons.instrumenter;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.CloseFuture;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.Statement;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import java.util.Map;
import java.util.function.Supplier;
/**
 * A {@link Session} decorator that wraps every query execution in an
 * OpenTelemetry span via the Cassandra {@code instrumenter()}.
 *
 * <p>All eight execute/executeAsync overloads previously duplicated the same
 * span start/end boilerplate; that logic is now centralized in the two
 * private {@code executeWithInstrumentation} helpers.
 */
public class TracingSession implements Session {

  private final Session session;

  public TracingSession(Session session) {
    this.session = session;
  }

  @Override
  public String getLoggedKeyspace() {
    return session.getLoggedKeyspace();
  }

  @Override
  public Session init() {
    return new TracingSession(session.init());
  }

  @Override
  public ListenableFuture<Session> initAsync() {
    // Wrap the initialized session so subsequent calls stay traced.
    return Futures.transform(session.initAsync(), TracingSession::new, Runnable::run);
  }

  @Override
  public ResultSet execute(String query) {
    return executeWithInstrumentation(query, () -> session.execute(query));
  }

  @Override
  public ResultSet execute(String query, Object... values) {
    return executeWithInstrumentation(query, () -> session.execute(query, values));
  }

  @Override
  public ResultSet execute(String query, Map<String, Object> values) {
    return executeWithInstrumentation(query, () -> session.execute(query, values));
  }

  @Override
  public ResultSet execute(Statement statement) {
    return executeWithInstrumentation(getQuery(statement), () -> session.execute(statement));
  }

  @Override
  public ResultSetFuture executeAsync(String query) {
    return executeAsyncWithInstrumentation(query, () -> session.executeAsync(query));
  }

  @Override
  public ResultSetFuture executeAsync(String query, Object... values) {
    return executeAsyncWithInstrumentation(query, () -> session.executeAsync(query, values));
  }

  @Override
  public ResultSetFuture executeAsync(String query, Map<String, Object> values) {
    return executeAsyncWithInstrumentation(query, () -> session.executeAsync(query, values));
  }

  @Override
  public ResultSetFuture executeAsync(Statement statement) {
    return executeAsyncWithInstrumentation(getQuery(statement), () -> session.executeAsync(statement));
  }

  @Override
  public PreparedStatement prepare(String query) {
    return session.prepare(query);
  }

  @Override
  public PreparedStatement prepare(RegularStatement statement) {
    return session.prepare(statement);
  }

  @Override
  public ListenableFuture<PreparedStatement> prepareAsync(String query) {
    return session.prepareAsync(query);
  }

  @Override
  public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
    return session.prepareAsync(statement);
  }

  @Override
  public CloseFuture closeAsync() {
    return session.closeAsync();
  }

  @Override
  public void close() {
    session.close();
  }

  @Override
  public boolean isClosed() {
    return session.isClosed();
  }

  @Override
  public Cluster getCluster() {
    return session.getCluster();
  }

  @Override
  public State getState() {
    return session.getState();
  }

  /**
   * Runs a synchronous execution inside a new span. The span is ended with
   * the execution info on success, or with the thrown error on failure.
   *
   * @param query the CQL text used as the request description
   * @param execution the actual driver call to perform
   * @return the result set produced by {@code execution}
   */
  private ResultSet executeWithInstrumentation(String query, Supplier<ResultSet> execution) {
    CassandraRequest request = CassandraRequest.create(session, query);
    Context context = instrumenter().start(Context.current(), request);
    ResultSet resultSet;
    try (Scope ignored = context.makeCurrent()) {
      resultSet = execution.get();
    } catch (Throwable t) {
      instrumenter().end(context, request, null, t);
      throw t;
    }
    instrumenter().end(context, request, resultSet.getExecutionInfo(), null);
    return resultSet;
  }

  /**
   * Starts a span, runs an asynchronous execution with the span current, and
   * registers a callback that ends the span when the future completes.
   *
   * @param query the CQL text used as the request description
   * @param execution the actual driver call to perform
   * @return the future produced by {@code execution}
   */
  private ResultSetFuture executeAsyncWithInstrumentation(
      String query, Supplier<ResultSetFuture> execution) {
    CassandraRequest request = CassandraRequest.create(session, query);
    Context context = instrumenter().start(Context.current(), request);
    try (Scope ignored = context.makeCurrent()) {
      ResultSetFuture future = execution.get();
      addCallbackToEndSpan(future, context, request);
      return future;
    }
  }

  /**
   * Extracts the CQL text from a statement; bound statements report their
   * prepared statement's query string. Returns "" when no text is available.
   */
  private static String getQuery(Statement statement) {
    String query = null;
    if (statement instanceof BoundStatement) {
      query = ((BoundStatement) statement).preparedStatement().getQueryString();
    } else if (statement instanceof RegularStatement) {
      query = ((RegularStatement) statement).getQueryString();
    }
    return query == null ? "" : query;
  }

  /** Ends the span when the future completes, recording success or failure. */
  private static void addCallbackToEndSpan(
      ResultSetFuture future, Context context, CassandraRequest request) {
    Futures.addCallback(
        future,
        new FutureCallback<ResultSet>() {
          @Override
          public void onSuccess(ResultSet resultSet) {
            instrumenter().end(context, request, resultSet.getExecutionInfo(), null);
          }

          @Override
          public void onFailure(Throwable t) {
            instrumenter().end(context, request, null, t);
          }
        },
        Runnable::run);
  }
}
| 2,305 |
3,586 | <gh_stars>1000+
package com.linkedin.common.urn;
import com.linkedin.data.message.Message;
import com.linkedin.data.schema.DataSchema;
import com.linkedin.data.schema.NamedDataSchema;
import com.linkedin.data.schema.validator.Validator;
import com.linkedin.data.schema.validator.ValidatorContext;
import java.net.URISyntaxException;
/**
* Rest.li Validator responsible for ensuring that {@link Urn} objects are well-formed.
*
* Note that this validator does not validate the integrity of strongly typed urns,
* or validate Urn objects against their associated key aspect.
*/
public class UrnValidator implements Validator {
@Override
public void validate(ValidatorContext context) {
if (DataSchema.Type.TYPEREF.equals(context.dataElement().getSchema().getType())
&& ((NamedDataSchema) context.dataElement().getSchema()).getName().endsWith("Urn")) {
try {
Urn.createFromString((String) context.dataElement().getValue());
} catch (URISyntaxException e) {
context.addResult(new Message(context.dataElement().path(), "\"Provided urn %s\" is invalid", context.dataElement().getValue()));
context.setHasFix(false);
}
}
}
} | 399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.