max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
562 | #if LIB_VERSION == 2
#include "coap2/coap.h"
#elif LIB_VERSION == 3
#include "coap3/coap.h"
#else
#error "Version not supported"
#endif
#include <iostream>
// Smoke test: initializes libcoap, creates and destroys a CoAP context,
// and prints start/stop markers. Returns 0 on success, 1 if the context
// could not be created.
int main() {
  std::cout << "starting" << std::endl;
  coap_startup();
  // Create a CoAP context (no listen address: client-side only).
  coap_context_t *ctx = coap_new_context(nullptr);
  if (ctx == nullptr) {
    // coap_new_context() returns NULL on allocation/initialization failure;
    // bail out instead of freeing a null context.
    std::cerr << "failed to create CoAP context" << std::endl;
    coap_cleanup();
    return 1;
  }
  coap_free_context(ctx);
  coap_cleanup();
  std::cout << "stopping" << std::endl;
  return 0;
}
| 171 |
14,668 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chromeos/services/multidevice_setup/host_backend_delegate_impl.h"
#include <memory>
#include "ash/constants/ash_features.h"
#include "base/containers/flat_map.h"
#include "base/test/scoped_feature_list.h"
#include "base/timer/mock_timer.h"
#include "base/unguessable_token.h"
#include "chromeos/components/multidevice/remote_device_test_util.h"
#include "chromeos/components/multidevice/software_feature.h"
#include "chromeos/components/multidevice/software_feature_state.h"
#include "chromeos/services/device_sync/public/cpp/fake_device_sync_client.h"
#include "chromeos/services/multidevice_setup/fake_eligible_host_devices_provider.h"
#include "chromeos/services/multidevice_setup/fake_host_backend_delegate.h"
#include "components/sync_preferences/testing_pref_service_syncable.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
namespace chromeos {
namespace multidevice_setup {
namespace {
const char kPendingRequestHostIdPrefName[] =
"multidevice_setup.pending_request_host_id";
const char kPendingRemovalOfCurrentHost[] = "pendingRemovalOfCurrentHost";
const char kNoPendingRequest[] = "";
const size_t kNumTestDevices = 4;
} // namespace
// Test fixture for HostBackendDelegateImpl. The suite is parameterized on a
// bool (use_v1_devicesync): each test runs once with v1 DeviceSync enabled
// and once with it disabled (v2-only); see SetUp() and the
// INSTANTIATE_TEST_SUITE_P at the bottom of the file.
class MultiDeviceSetupHostBackendDelegateImplTest
: public ::testing::TestWithParam<bool> {
public:
MultiDeviceSetupHostBackendDelegateImplTest(
const MultiDeviceSetupHostBackendDelegateImplTest&) = delete;
MultiDeviceSetupHostBackendDelegateImplTest& operator=(
const MultiDeviceSetupHostBackendDelegateImplTest&) = delete;
protected:
MultiDeviceSetupHostBackendDelegateImplTest()
: test_devices_(
multidevice::CreateRemoteDeviceRefListForTest(kNumTestDevices)) {}
~MultiDeviceSetupHostBackendDelegateImplTest() override = default;
// testing::Test:
void SetUp() override {
SetFeatureFlags(GetParam() /* use_v1_devicesync */);
// Tests are run once to simulate when v1 DeviceSync is enabled and once to
// simulate when it is disabled, leaving only v2 DeviceSync operational. In
// the former case, only public keys are needed, and in the latter case,
// only Instance IDs are needed.
for (multidevice::RemoteDeviceRef device : test_devices_) {
if (features::ShouldUseV1DeviceSync())
GetMutableRemoteDevice(device)->instance_id.clear();
else
GetMutableRemoteDevice(device)->public_key.clear();
}
fake_eligible_host_devices_provider_ =
std::make_unique<FakeEligibleHostDevicesProvider>();
fake_eligible_host_devices_provider_->set_eligible_host_devices(
test_devices_);
test_pref_service_ =
std::make_unique<sync_preferences::TestingPrefServiceSyncable>();
HostBackendDelegateImpl::RegisterPrefs(test_pref_service_->registry());
fake_device_sync_client_ =
std::make_unique<device_sync::FakeDeviceSyncClient>();
fake_device_sync_client_->set_synced_devices(test_devices_);
}
void TearDown() override {
if (delegate_)
delegate_->RemoveObserver(observer_.get());
}
// Creates the delegate under test with |initial_host| already set as the
// host on the backend and |initial_pending_host_request| persisted in the
// pref store, then attaches a fake observer.
void CreateDelegate(
const absl::optional<multidevice::RemoteDeviceRef>& initial_host,
const std::string& initial_pending_host_request = kNoPendingRequest) {
SetHostInDeviceSyncClient(initial_host);
test_pref_service_->SetString(kPendingRequestHostIdPrefName,
initial_pending_host_request);
auto mock_timer = std::make_unique<base::MockOneShotTimer>();
mock_timer_ = mock_timer.get();
delegate_ = HostBackendDelegateImpl::Factory::Create(
fake_eligible_host_devices_provider_.get(), test_pref_service_.get(),
fake_device_sync_client_.get(), std::move(mock_timer));
EXPECT_EQ(initial_host, delegate_->GetMultiDeviceHostFromBackend());
observer_ = std::make_unique<FakeHostBackendDelegateObserver>();
delegate_->AddObserver(observer_.get());
}
// Returns the number of queued backend requests: SetSoftwareFeatureState()
// inputs when v1 DeviceSync is in use, SetFeatureStatus() inputs otherwise.
int GetSetHostNetworkRequestCallbackQueueSize() {
return features::ShouldUseV1DeviceSync()
? fake_device_sync_client_
->GetSetSoftwareFeatureStateInputsQueueSize()
: fake_device_sync_client_->GetSetFeatureStatusInputsQueueSize();
}
// Invokes the oldest pending backend callback with |result_code| and
// verifies the expected observer-notification and retry-timer side effects.
void InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult result_code,
bool expected_to_notify_observer_and_start_retry_timer) {
size_t num_failure_events_before_call =
observer_->num_failed_backend_requests();
if (features::ShouldUseV1DeviceSync()) {
fake_device_sync_client_->InvokePendingSetSoftwareFeatureStateCallback(
result_code);
} else {
fake_device_sync_client_->InvokePendingSetFeatureStatusCallback(
result_code);
}
if (expected_to_notify_observer_and_start_retry_timer) {
EXPECT_EQ(num_failure_events_before_call + 1u,
observer_->num_failed_backend_requests());
} else {
EXPECT_EQ(num_failure_events_before_call,
observer_->num_failed_backend_requests());
}
EXPECT_EQ(expected_to_notify_observer_and_start_retry_timer,
mock_timer_->IsRunning());
}
// Simulates a device sync which results in |host_device_after_sync| being
// the host on the backend, then verifies the observer saw the expected
// host-change and pending-request-change events.
void SimulateNewHostDevicesSynced(
const absl::optional<multidevice::RemoteDeviceRef>&
host_device_after_sync,
bool expected_to_fulfill_pending_request) {
absl::optional<multidevice::RemoteDeviceRef> host_device_before_call =
delegate_->GetMultiDeviceHostFromBackend();
bool host_changed = host_device_before_call != host_device_after_sync;
size_t num_host_change_events_before_call =
observer_->num_changes_on_backend();
size_t num_pending_host_request_change_events_before_call =
observer_->num_pending_host_request_changes();
SetHostInDeviceSyncClient(host_device_after_sync);
fake_device_sync_client_->NotifyNewDevicesSynced();
if (host_changed) {
EXPECT_EQ(num_host_change_events_before_call + 1u,
observer_->num_changes_on_backend());
} else {
EXPECT_EQ(num_host_change_events_before_call,
observer_->num_changes_on_backend());
}
if (expected_to_fulfill_pending_request) {
EXPECT_FALSE(delegate_->HasPendingHostRequest());
// Expected to change from a pending request to no request.
EXPECT_EQ(num_pending_host_request_change_events_before_call + 1u,
observer_->num_pending_host_request_changes());
} else {
EXPECT_EQ(num_pending_host_request_change_events_before_call,
observer_->num_pending_host_request_changes());
}
}
// Calls AttemptToSetMultiDeviceHostOnBackend() on the delegate and verifies
// the resulting pending-request state, observer notifications, and the
// parameters of the backend request that was issued.
void AttemptToSetMultiDeviceHostOnBackend(
const absl::optional<multidevice::RemoteDeviceRef>& host_device) {
absl::optional<multidevice::RemoteDeviceRef> host_before_call =
delegate_->GetMultiDeviceHostFromBackend();
bool attempting_to_set_host_which_already_exists =
host_device == host_before_call;
size_t num_pending_host_request_change_events_before_call =
observer_->num_pending_host_request_changes();
bool was_request_for_same_device_as_pending_request =
delegate_->HasPendingHostRequest() &&
delegate_->GetPendingHostRequest() == host_device;
delegate_->AttemptToSetMultiDeviceHostOnBackend(host_device);
// A new attempt means that any previous retry attempts should have been
// canceled.
EXPECT_FALSE(mock_timer_->IsRunning());
if (attempting_to_set_host_which_already_exists) {
EXPECT_FALSE(delegate_->HasPendingHostRequest());
return;
}
EXPECT_EQ(host_device, delegate_->GetPendingHostRequest());
if (was_request_for_same_device_as_pending_request) {
EXPECT_EQ(num_pending_host_request_change_events_before_call,
observer_->num_pending_host_request_changes());
} else {
EXPECT_EQ(num_pending_host_request_change_events_before_call + 1u,
observer_->num_pending_host_request_changes());
}
// Verify that the correct parameters were passed to
// SetSoftwareFeatureState() or SetFeatureStatus().
if (host_device) {
VerifyLatestSetHostNetworkRequest(*host_device, true /* should_enable */);
} else {
ASSERT_TRUE(host_before_call);
VerifyLatestSetHostNetworkRequest(*host_before_call,
false /* should_enable */);
}
}
// Marks |host_device| (matched by Instance ID or legacy device ID) as the
// kBetterTogetherHost in the fake DeviceSyncClient's device list; all other
// test devices are reset to kSupported.
void SetHostInDeviceSyncClient(
const absl::optional<multidevice::RemoteDeviceRef>& host_device) {
for (const auto& remote_device : test_devices_) {
bool should_be_host =
host_device != absl::nullopt &&
((!remote_device.instance_id().empty() &&
host_device->instance_id() == remote_device.instance_id()) ||
(!remote_device.GetDeviceId().empty() &&
host_device->GetDeviceId() == remote_device.GetDeviceId()));
GetMutableRemoteDevice(remote_device)
->software_features
[multidevice::SoftwareFeature::kBetterTogetherHost] =
should_be_host ? multidevice::SoftwareFeatureState::kEnabled
: multidevice::SoftwareFeatureState::kSupported;
}
}
FakeEligibleHostDevicesProvider* fake_eligible_host_devices_provider() {
return fake_eligible_host_devices_provider_.get();
}
device_sync::FakeDeviceSyncClient* fake_device_sync_client() {
return fake_device_sync_client_.get();
}
FakeHostBackendDelegateObserver* observer() { return observer_.get(); }
base::MockOneShotTimer* mock_timer() { return mock_timer_; }
HostBackendDelegate* delegate() { return delegate_.get(); }
const multidevice::RemoteDeviceRefList& test_devices() const {
return test_devices_;
}
private:
void SetFeatureFlags(bool use_v1_devicesync) {
std::vector<base::Feature> enabled_features;
std::vector<base::Feature> disabled_features;
// These flags have no direct effect on the host backend delegate;
// however, v2 Enrollment and DeviceSync must be enabled before v1
// DeviceSync can be disabled.
enabled_features.push_back(chromeos::features::kCryptAuthV2Enrollment);
enabled_features.push_back(chromeos::features::kCryptAuthV2DeviceSync);
if (use_v1_devicesync) {
disabled_features.push_back(
chromeos::features::kDisableCryptAuthV1DeviceSync);
} else {
enabled_features.push_back(
chromeos::features::kDisableCryptAuthV1DeviceSync);
}
scoped_feature_list_.InitWithFeatures(enabled_features, disabled_features);
}
// Checks that the most recent backend request targeted |expected_host| with
// the expected enable/disable semantics, for whichever API (v1 or v2) is in
// use for this test run.
void VerifyLatestSetHostNetworkRequest(
const multidevice::RemoteDeviceRef expected_host,
bool expected_should_enable) {
// Verify inputs to SetSoftwareFeatureState().
if (features::ShouldUseV1DeviceSync()) {
ASSERT_FALSE(
fake_device_sync_client_->set_software_feature_state_inputs_queue()
.empty());
const device_sync::FakeDeviceSyncClient::SetSoftwareFeatureStateInputs&
inputs = fake_device_sync_client_
->set_software_feature_state_inputs_queue()
.back();
EXPECT_EQ(expected_host.public_key(), inputs.public_key);
EXPECT_EQ(multidevice::SoftwareFeature::kBetterTogetherHost,
inputs.software_feature);
EXPECT_EQ(expected_should_enable, inputs.enabled);
EXPECT_EQ(expected_should_enable, inputs.is_exclusive);
return;
}
// Verify inputs to SetFeatureStatus().
ASSERT_FALSE(
fake_device_sync_client_->set_feature_status_inputs_queue().empty());
const device_sync::FakeDeviceSyncClient::SetFeatureStatusInputs& inputs =
fake_device_sync_client_->set_feature_status_inputs_queue().back();
EXPECT_EQ(expected_host.instance_id(), inputs.device_instance_id);
EXPECT_EQ(multidevice::SoftwareFeature::kBetterTogetherHost,
inputs.feature);
EXPECT_EQ(expected_should_enable
? device_sync::FeatureStatusChange::kEnableExclusively
: device_sync::FeatureStatusChange::kDisable,
inputs.status_change);
}
multidevice::RemoteDeviceRefList test_devices_;
std::unique_ptr<FakeEligibleHostDevicesProvider>
fake_eligible_host_devices_provider_;
std::unique_ptr<sync_preferences::TestingPrefServiceSyncable>
test_pref_service_;
std::unique_ptr<device_sync::FakeDeviceSyncClient> fake_device_sync_client_;
// Raw pointer retained after the unique_ptr is moved into the delegate
// factory in CreateDelegate(); not owned by this fixture.
base::MockOneShotTimer* mock_timer_;
std::unique_ptr<FakeHostBackendDelegateObserver> observer_;
std::unique_ptr<HostBackendDelegate> delegate_;
base::test::ScopedFeatureList scoped_feature_list_;
};
// Verifies the full happy path: set device 0 as host, remove it so no host
// remains, then set device 1 — each step succeeding and being confirmed by a
// subsequent device sync.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest, Success) {
CreateDelegate(absl::nullopt /* initial_host */);
// Set device 0.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[0]);
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetPendingHostRequest());
SimulateNewHostDevicesSynced(test_devices()[0] /* host_device_after_sync */,
true /* expected_to_fulfill_pending_request */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
// Remove device 0 such that there is no longer a host.
AttemptToSetMultiDeviceHostOnBackend(absl::nullopt);
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetPendingHostRequest());
SimulateNewHostDevicesSynced(absl::nullopt /* host_device_after_sync */,
true /* expected_to_fulfill_pending_request */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
// Set device 1.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[1]);
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[1], delegate()->GetPendingHostRequest());
SimulateNewHostDevicesSynced(test_devices()[1] /* host_device_after_sync */,
true /* expected_to_fulfill_pending_request */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[1], delegate()->GetMultiDeviceHostFromBackend());
}
// Verifies that failed backend requests keep the pending request alive,
// schedule a retry timer, and leave the backend host unchanged.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest, Failure) {
CreateDelegate(absl::nullopt /* initial_host */);
// Attempt to set device 0, but fail.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[0]);
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kOffline,
true /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
// A retry should have been scheduled, so fire the timer to start the retry.
mock_timer()->Fire();
// Simulate another failure.
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kOffline,
true /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
// Attempt to set device 1, but fail.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[1]);
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kOffline,
true /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[1], delegate()->GetPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
}
// Verifies behavior when several set-host requests for different devices are
// in flight at once: only the most recent request is "pending", and earlier
// callbacks resolve without clobbering it.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest,
StartWithDevice_SimultaneousRequests) {
// Start with device 0 as the active host.
CreateDelegate(test_devices()[0] /* initial_host */);
// Attempt to set device 1, but do not invoke the callback yet.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[1]);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[1], delegate()->GetPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
// Attempt to set device 2, but do not invoke device 1's callback yet.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[2]);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[2], delegate()->GetPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
// Attempt to set device 3.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[3]);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[3], delegate()->GetPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
// Note: Below, we assume that the feature setting requests are processed in
// the order they are called. This is an assumption made in the
// HostBackendDelegate implementation.
// Fire the callback for device 1, but have it fail. This is not expected to
// notify the observer or start the retry timer, since the failure was for
// device 1's request and device 3 is the pending host request.
EXPECT_EQ(3, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kOffline,
false /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[3], delegate()->GetPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
// Fire the callback for device 2, and have it succeed. This should affect the
// value of GetMultiDeviceHostFromBackend(), but there should still be a
// pending request for device 3.
EXPECT_EQ(2, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
SimulateNewHostDevicesSynced(test_devices()[2] /* host_device_after_sync */,
false /* expected_to_fulfill_pending_request */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[3], delegate()->GetPendingHostRequest());
EXPECT_EQ(test_devices()[2], delegate()->GetMultiDeviceHostFromBackend());
// Fire the callback for device 3, and have it succeed.
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
SimulateNewHostDevicesSynced(test_devices()[3] /* host_device_after_sync */,
true /* expected_to_fulfill_pending_request */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[3], delegate()->GetMultiDeviceHostFromBackend());
}
// Verifies that several concurrent requests for the SAME device coalesce:
// the first successful callback fulfills the request, and later callbacks
// (success or failure) are no-ops.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest,
SimultaneousRequestsToSameDevice) {
CreateDelegate(absl::nullopt /* initial_host */);
// Attempt to set device 0, but do not invoke the callback yet.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[0]);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
// Attempt to set device 0 again, and still do not invoke the callback.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[0]);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
// Attempt to set device 0 one more time.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[0]);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
// Fire the first callback, which should successfully transition the host.
EXPECT_EQ(3, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
SimulateNewHostDevicesSynced(test_devices()[0] /* host_device_after_sync */,
true /* expected_to_fulfill_pending_request */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
// Fire the second callback, but have it fail. No state should be affected.
EXPECT_EQ(2, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kOffline,
false /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
// Fire the third callback, and have it succeed. Still, no state should be
// affected.
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
}
// Verifies that re-calling AttemptToSetMultiDeviceHostOnBackend() after a
// failure triggers an immediate retry without waiting for the retry timer.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest,
MultipleRequestsToSameDevice_FirstFail_ThenSucceed) {
CreateDelegate(absl::nullopt /* initial_host */);
// Attempt to set device 0, but fail.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[0]);
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kOffline,
true /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
// The retry timer is running; however, instead of relying on that, call
// AttemptToSetMultiDeviceHostOnBackend() again to trigger an immediate retry
// without the timer.
AttemptToSetMultiDeviceHostOnBackend(test_devices()[0]);
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
EXPECT_TRUE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetPendingHostRequest());
SimulateNewHostDevicesSynced(test_devices()[0] /* host_device_after_sync */,
true /* expected_to_fulfill_pending_request */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
}
// Verifies that a pending host request persisted in prefs (device ID for v1,
// Instance ID for v2) is resumed automatically when the delegate is created.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest,
InitialPendingRequestButNoInitialDevice) {
CreateDelegate(
absl::nullopt /* initial_host */,
features::ShouldUseV1DeviceSync()
? test_devices()[0].GetDeviceId()
: test_devices()[0].instance_id() /* initial_pending_host_request */);
// The delegate should have started a request as soon as it was created.
// Simulate it succeeding.
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
SimulateNewHostDevicesSynced(test_devices()[0] /* host_device_after_sync */,
true /* expected_to_fulfill_pending_request */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(test_devices()[0], delegate()->GetMultiDeviceHostFromBackend());
}
// Verifies that a persisted pending-removal request is resumed on creation
// and removes the existing host once the backend confirms.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest,
InitialDeviceWithPendingRequestToRemoveIt) {
CreateDelegate(
test_devices()[0] /* initial_host */,
kPendingRemovalOfCurrentHost /* initial_pending_host_request */);
// The delegate should have started a request as soon as it was created.
// Simulate it succeeding.
EXPECT_EQ(1, GetSetHostNetworkRequestCallbackQueueSize());
InvokePendingSetHostNetworkRequestCallback(
device_sync::mojom::NetworkRequestResult::kSuccess,
false /* expected_to_notify_observer_and_start_retry_timer */);
SimulateNewHostDevicesSynced(absl::nullopt /* host_device_after_sync */,
true /* expected_to_fulfill_pending_request */);
EXPECT_FALSE(delegate()->HasPendingHostRequest());
EXPECT_EQ(absl::nullopt, delegate()->GetMultiDeviceHostFromBackend());
}
// Verifies that host changes originating from other devices (i.e. observed
// purely via device sync, with no local request) are reflected correctly.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest, ChangedFromOtherDevice) {
CreateDelegate(absl::nullopt /* initial_host */);
// The device changed from another device (i.e.,
// AttemptToSetMultiDeviceHostOnBackend() was not called).
SimulateNewHostDevicesSynced(test_devices()[0] /* host_device_after_sync */,
false /* expected_to_fulfill_pending_request */);
// One more change.
SimulateNewHostDevicesSynced(test_devices()[1] /* host_device_after_sync */,
false /* expected_to_fulfill_pending_request */);
}
// Verifies that a persisted request referencing an unknown device is dropped
// at startup rather than retried forever.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest,
PendingRequestCanceledIfDeviceToSetNoLongerExists) {
CreateDelegate(absl::nullopt /* initial_host */,
"nonexistentDeviceId" /* initial_pending_host_request */);
// An initial pending host request exists, but it is for a host that is not
// present in the DeviceSyncClient. Thus, the request should be canceled.
EXPECT_FALSE(delegate()->HasPendingHostRequest());
}
// Verifies that a persisted pending-removal request is dropped when there is
// no current host to remove.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest,
PendingRequestCanceledIfDeviceToRemoveNoLongerExists) {
CreateDelegate(
absl::nullopt /* initial_host */,
kPendingRemovalOfCurrentHost /* initial_pending_host_request */);
// An initial pending host request exists to remove the current host, but
// there actually is no current host. Thus, the request should be canceled.
EXPECT_FALSE(delegate()->HasPendingHostRequest());
}
// Verifies that attempting to set a device which is not an eligible host is
// ignored: no pending request is created and no observer event fires.
TEST_P(MultiDeviceSetupHostBackendDelegateImplTest, TryToSetNonEligibleHost) {
// Make all test devices ineligible.
fake_eligible_host_devices_provider()->set_eligible_host_devices(
multidevice::RemoteDeviceRefList());
CreateDelegate(absl::nullopt /* initial_host */);
delegate()->AttemptToSetMultiDeviceHostOnBackend(test_devices()[0]);
EXPECT_EQ(0u, observer()->num_pending_host_request_changes());
}
// Runs each test twice: once with v1 DeviceSync enabled and once with it
// disabled (v2-only).
// TODO(https://crbug.com/1019206): Remove when v1 DeviceSync is disabled,
// when all devices should have an Instance ID.
INSTANTIATE_TEST_SUITE_P(All,
MultiDeviceSetupHostBackendDelegateImplTest,
::testing::Bool());
} // namespace multidevice_setup
} // namespace chromeos
| 10,253 |
2,728 | #ifndef DATE_TIME_DATE_DURATION__
#define DATE_TIME_DATE_DURATION__
/* Copyright (c) 2002,2003 CrystalClear Software, Inc.
* Use, modification and distribution is subject to the
* Boost Software License, Version 1.0. (See accompanying
* file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
* Author: <NAME>, <NAME>
* $Date$
*/
#include <boost/operators.hpp>
#include <boost/date_time/special_defs.hpp>
#include <boost/date_time/compiler_config.hpp>
#include <boost/date_time/int_adapter.hpp>
namespace boost {
namespace date_time {
//! Duration type with date level resolution
// Privately inherits from Boost.Operators bases so that the full set of
// comparison (<, <=, >, >=, !=), additive (+, -), and division (/) operators
// is synthesized from the ==, <, +=, -=, and /= members defined below.
template<class duration_rep_traits>
class BOOST_SYMBOL_VISIBLE date_duration : private
boost::less_than_comparable1< date_duration< duration_rep_traits >
, boost::equality_comparable1< date_duration< duration_rep_traits >
, boost::addable1< date_duration< duration_rep_traits >
, boost::subtractable1< date_duration< duration_rep_traits >
, boost::dividable2< date_duration< duration_rep_traits >, int
> > > > >
{
public:
typedef typename duration_rep_traits::int_type duration_rep_type;
typedef typename duration_rep_traits::impl_type duration_rep;
//! Construct from a day count
BOOST_CXX14_CONSTEXPR explicit date_duration(duration_rep day_count) : days_(day_count) {}
/*! construct from special_values - only works when
* instantiated with duration_traits_adapted */
BOOST_CXX14_CONSTEXPR date_duration(special_values sv) :
days_(duration_rep::from_special(sv))
{}
//! returns days_ as it's instantiated type - used for streaming
BOOST_CXX14_CONSTEXPR duration_rep get_rep()const
{
return days_;
}
BOOST_CXX14_CONSTEXPR special_values as_special() const
{
return days_.as_special();
}
BOOST_CXX14_CONSTEXPR bool is_special()const
{
return days_.is_special();
}
//! returns days as value, not object.
BOOST_CXX14_CONSTEXPR duration_rep_type days() const
{
return duration_rep_traits::as_number(days_);
}
//! Returns the smallest duration -- used to calculate 'end'
static BOOST_CXX14_CONSTEXPR date_duration unit()
{
return date_duration<duration_rep_traits>(1);
}
//! Equality
BOOST_CXX14_CONSTEXPR bool operator==(const date_duration& rhs) const
{
return days_ == rhs.days_;
}
//! Less
BOOST_CXX14_CONSTEXPR bool operator<(const date_duration& rhs) const
{
return days_ < rhs.days_;
}
/* For shortcut operators (+=, -=, etc) simply using
* "days_ += days_" may not work. If instantiated with
* an int_adapter, shortcut operators are not present,
* so this will not compile */
//! Subtract another duration -- result is signed
BOOST_CXX14_CONSTEXPR date_duration& operator-=(const date_duration& rhs)
{
//days_ -= rhs.days_;
days_ = days_ - rhs.days_;
return *this;
}
//! Add a duration -- result is signed
BOOST_CXX14_CONSTEXPR date_duration& operator+=(const date_duration& rhs)
{
days_ = days_ + rhs.days_;
return *this;
}
//! unary- Allows for dd = -date_duration(2); -> dd == -2
BOOST_CXX14_CONSTEXPR date_duration operator-() const
{
return date_duration<duration_rep_traits>(get_rep() * (-1));
}
//! Division operations on a duration with an integer.
BOOST_CXX14_CONSTEXPR date_duration& operator/=(int divisor)
{
days_ = days_ / divisor;
return *this;
}
//! return sign information
BOOST_CXX14_CONSTEXPR bool is_negative() const
{
return days_ < 0;
}
private:
// Day count; either a plain long or an int_adapter<long>, depending on the
// traits the class was instantiated with.
duration_rep days_;
};
/*! Struct for instantiating date_duration with <b>NO</b> special values
* functionality. Allows for transparent implementation of either
* date_duration<long> or date_duration<int_adapter<long> > */
struct BOOST_SYMBOL_VISIBLE duration_traits_long
{
typedef long int_type;
typedef long impl_type;
static BOOST_CXX14_CONSTEXPR int_type as_number(impl_type i) { return i; }
};
/*! Struct for instantiating date_duration <b>WITH</b> special values
* functionality. Allows for transparent implementation of either
* date_duration<long> or date_duration<int_adapter<long> > */
struct BOOST_SYMBOL_VISIBLE duration_traits_adapted
{
typedef long int_type;
typedef boost::date_time::int_adapter<long> impl_type;
static BOOST_CXX14_CONSTEXPR int_type as_number(impl_type i) { return i.as_number(); }
};
} } // namespace date_time
#endif
| 1,848 |
303 | #include "tr.h"
#include "internal.h"
/* Allocates a new Range core object spanning first..last.
 * A non-zero `exclusive` marks the range as excluding its upper bound
 * (Ruby's `...` form). Returns the new object cast to OBJ. */
OBJ TrRange_new(VM, OBJ first, OBJ last, int exclusive) {
TrRange *r = TR_INIT_CORE_OBJECT(Range);
r->first = first;
r->last = last;
r->exclusive = exclusive;
return (OBJ)r;
}
/* Method implementations exposed to the VM: the range's lower bound, upper
 * bound, and whether the upper bound is excluded (Ruby's exclude_end?). */
static OBJ TrRange_first(VM, OBJ self) { return TR_CRANGE(self)->first; }
static OBJ TrRange_last(VM, OBJ self) { return TR_CRANGE(self)->last; }
static OBJ TrRange_exclude_end(VM, OBJ self) { return TR_BOOL(TR_CRANGE(self)->exclusive); }
//static OBJ TrRange_each(VM, OBJ self) { }
/* Registers the Range core class (subclass of Object) and binds its
 * zero-argument instance methods with the VM. */
void TrRange_init(VM) {
OBJ c = TR_INIT_CORE_CLASS(Range, Object);
tr_def(c, "first", TrRange_first, 0);
tr_def(c, "last", TrRange_last, 0);
tr_def(c, "exclude_end?", TrRange_exclude_end, 0);
//tr_def(c, "each", TrRange_each, 0);
}
| 318 |
766 | <gh_stars>100-1000
/*
* Copyright 2008-2017 <NAME>
*
* See LICENCE for the full copyright terms.
*/
#include <assert.h>
#include <stddef.h>
#include <fsm/fsm.h>
#include <adt/set.h>
#include <adt/dlist.h>
#include <adt/stateset.h>
#include <adt/edgeset.h>
#include "../internal.h"
/*
 * Iterative depth-first search over the states reachable from `start`
 * (including `start` itself), applying `predicate` to each one.
 *
 * If `any` is non-zero, returns 1 as soon as some reachable state satisfies
 * the predicate, and 0 if none does ("exists"). If `any` is zero, returns 0
 * as soon as some reachable state fails the predicate, and 1 if all satisfy
 * it ("for all"). Returns -1 on allocation failure.
 *
 * Temporarily uses the per-state `visited` marker; all markers are cleared
 * again before returning.
 */
static int
fsm_reachable(const struct fsm *fsm, fsm_state_t state,
	int any,
	int (*predicate)(const struct fsm *, fsm_state_t))
{
	int r, stopcond;
	fsm_state_t *q;
	size_t i, qtop;

	assert(state < fsm->statecount);
	assert(predicate != NULL);

	for (i = 0; i < fsm->statecount; i++) {
		fsm->states[i].visited = 0;
	}

	/* Each state is pushed at most once (it is marked visited when pushed),
	 * so a stack of statecount entries can never overflow. */
	q = f_malloc(fsm->opt->alloc, fsm->statecount * sizeof *q);
	if (q == NULL) {
		return -1;
	}

	/* TODO: write in terms of fsm_walk or some common iteration callback */
	q[0] = state;
	qtop = 1;
	fsm->states[state].visited = 1;

	/* The predicate value that short-circuits the search, and also the
	 * result when short-circuiting occurs. */
	stopcond = any ? 1 : 0;

	/* Default result if the whole reachable set is traversed without
	 * hitting the stop condition. */
	r = !stopcond;

	while (qtop > 0) {
		struct edge_iter it;
		struct fsm_edge e;
		fsm_state_t curr; /* renamed from `state`: no longer shadows the parameter */

		curr = q[--qtop];

		if (!!predicate(fsm, curr) == stopcond) {
			r = stopcond;
			goto cleanup;
		}

		for (edge_set_reset(fsm->states[curr].edges, &it); edge_set_next(&it, &e); ) {
			if (fsm->states[e.state].visited == 0) {
				assert(qtop < fsm->statecount);
				q[qtop++] = e.state;
				fsm->states[e.state].visited = 1;
			}
		}
	}

cleanup:

	for (i = 0; i < fsm->statecount; i++) {
		fsm->states[i].visited = 0;
	}

	f_free(fsm->opt->alloc, q);

	return r;
}
/* Returns 1 iff every state reachable from `state` satisfies `predicate`;
 * 0 otherwise; -1 on allocation failure. */
int
fsm_reachableall(const struct fsm *fsm, fsm_state_t state,
	int (*predicate)(const struct fsm *, fsm_state_t))
{
	return fsm_reachable(fsm, state, 0, predicate);
}
/* Returns 1 iff at least one state reachable from `state` satisfies
 * `predicate`; 0 otherwise; -1 on allocation failure. */
int
fsm_reachableany(const struct fsm *fsm, fsm_state_t state,
	int (*predicate)(const struct fsm *, fsm_state_t))
{
	return fsm_reachable(fsm, state, 1, predicate);
}
| 847 |
841 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.services.ejb.client;
import org.jbpm.kie.services.helper.CleanUpCommand;
import org.jbpm.kie.services.test.QueryServiceImplTest;
import org.jbpm.services.ejb.api.DefinitionServiceEJBRemote;
import org.jbpm.services.ejb.api.DeploymentServiceEJBRemote;
import org.jbpm.services.ejb.api.ProcessServiceEJBRemote;
import org.jbpm.services.ejb.api.RuntimeDataServiceEJBRemote;
import org.jbpm.services.ejb.api.UserTaskServiceEJBRemote;
import org.jbpm.services.ejb.api.query.QueryServiceEJBRemote;
import org.jbpm.services.ejb.client.helper.DeploymentServiceWrapper;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.scanner.KieMavenRepository;
/**
 * Runs the {@link QueryServiceImplTest} suite against the remote EJB
 * services of a deployed "sample-war-ejb-app", instead of locally wired
 * services. Tests that cannot work over remote EJB (custom query param
 * builders, real authentication) are {@code @Ignore}d below.
 */
public class ClientQueryServiceEJBTest extends QueryServiceImplTest {

    // Name of the deployed application the remote services are looked up from.
    private static final String application = "sample-war-ejb-app";

    @Before
    public void prepare() {
        super.prepare();
        // Clear leftover runtime data on the server before every test.
        userTaskService.execute(GROUP_ID +":" + ARTIFACT_ID +":" + VERSION, new CleanUpCommand());
    }

    @Override
    protected void close() {
        // do nothing
    }

    @Override
    protected void configureServices() {
        correctUser = "anonymous";
        try {
            // Look up each remote EJB facade from the deployed application and
            // inject it into the inherited test fixture.
            ClientServiceFactory factory = ServiceFactoryProvider.getProvider("JBoss");
            DeploymentServiceEJBRemote deploymentService = factory.getService(application, DeploymentServiceEJBRemote.class);
            ProcessServiceEJBRemote processService = factory.getService(application, ProcessServiceEJBRemote.class);
            RuntimeDataServiceEJBRemote runtimeDataService = factory.getService(application, RuntimeDataServiceEJBRemote.class);
            DefinitionServiceEJBRemote definitionService = factory.getService(application, DefinitionServiceEJBRemote.class);
            UserTaskServiceEJBRemote userTaskService = factory.getService(application, UserTaskServiceEJBRemote.class);
            QueryServiceEJBRemote queryService = factory.getService(application, QueryServiceEJBRemote.class);

            setBpmn2Service(definitionService);
            setProcessService(processService);
            setRuntimeDataService(runtimeDataService);
            setUserTaskService(userTaskService);
            setQueryService(queryService);
            // Wrap the deployment service so it matches the local API surface.
            setDeploymentService(new DeploymentServiceWrapper(deploymentService));
            setIdentityProvider(identityProvider);
        } catch (Exception e) {
            throw new RuntimeException("Unable to configure services", e);
        }
    }

    @Override
    protected void prepareJPAModule(KieServices ks, KieMavenRepository repository ) {
        // no op here
    }

    @Override
    protected String getDataSourceJNDI() {
        return "java:jboss/datasources/ExampleDS";
    }

    @Ignore("not supported for remote ejb")
    @Test
    @Override
    public void testGetTaskInstancesWithCustomVariables() throws Exception {
    }

    @Ignore("not supported for remote ejb")
    @Test
    @Override
    public void testGetProcessInstancesWithQueryParamBuilder() {
    }

    @Ignore("Requires actual authentication and users to be configured on remote server")
    @Test
    @Override
    public void testGetTaskInstancesAsPotOwners() {
    }

    @Ignore("Requires actual authentication and users to be configured on remote server")
    @Test
    @Override
    public void testGetTaskInstancesAsBA() {
    }
}
| 1,257 |
841 | /*
* Tencent is pleased to support the open source community by making Pebble available.
* Copyright (C) 2016 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
#ifndef _PEBBLE_COMMON_NET_MESSAGE_H_
#define _PEBBLE_COMMON_NET_MESSAGE_H_
#include <list>
#include "framework/message.h"
namespace pebble {
// Forward declarations; this header only uses pointers to these types.
class Epoll;
class NetIO;
class NetConnection;

// Sentinel handle value returned on failure.
// NOTE(review): the macro name is misspelled ("INVAILD"); renaming would
// break existing callers, so it is documented here rather than fixed.
#define INVAILD_HANDLE UINT64_MAX
/// @brief Callback used to learn a message's body length. After NetMessage
///        has received a complete header over TCP it asks the upper layer
///        for the body length so the full message can be received.
/// @param head buffer holding the message header
/// @param head_len length of the message header
/// @return length of the message body in bytes (header excluded);
///         < 0 indicates a decode error and the connection should be closed
typedef cxx::function<int32_t(const uint8_t* head, uint32_t head_len)> GetMsgDataLen;
/// @brief 封装tcp/udp收发消息功能,向上层用户提供基于消息的收发能力
/// @brief Wraps TCP/UDP send/receive and exposes a message-oriented
///        (rather than byte-stream) interface to the upper layer.
class NetMessage {
public:
    NetMessage();
    ~NetMessage();

    /// @brief Default per-connection send/receive buffer size (2 MB).
    static const int32_t DEFAULT_MSG_BUFF_LEN = 1024 * 1024 * 2;

    /// @param msg_head_len length of the TCP message header, fixed by the caller
    /// @param get_msg_data_len_func invoked once a complete header has been
    ///        received, to obtain the length of the message body
    /// @param msg_buff_len TCP receive buffer size, DEFAULT_MSG_BUFF_LEN by default
    int32_t Init(uint32_t msg_head_len, const GetMsgDataLen& get_msg_data_len_func,
                 uint32_t msg_buff_len = DEFAULT_MSG_BUFF_LEN);

    /// @return INVAILD_HANDLE on failure; otherwise success (the original
    ///         comment said "0 on success" — presumably the bound handle is
    ///         returned; verify against the implementation)
    uint64_t Bind(const std::string& ip, uint16_t port);

    /// @return INVAILD_HANDLE on failure; otherwise success (same caveat as Bind)
    uint64_t Connect(const std::string& ip, uint16_t port);

    /// @return 0 on success
    /// @return <0 on failure
    int32_t Send(uint64_t handle, const uint8_t* msg, uint32_t msg_len);

    /// @brief Scatter send: the message is supplied as msg_frag_num fragments.
    /// @return 0 on success
    /// @return <0 on failure
    int32_t SendV(uint64_t handle, uint32_t msg_frag_num,
                  const uint8_t* msg_frag[], uint32_t msg_frag_len[]);

    /// @return 0 on success
    /// @return <0 on failure
    int32_t Recv(uint64_t handle, uint8_t* buff, uint32_t* buff_len, MsgExternInfo* msg_info);

    /// @brief Like Recv but without consuming the message (see Pop).
    /// @return 0 on success
    /// @return <0 on failure
    int32_t Peek(uint64_t handle, const uint8_t** msg, uint32_t* msg_len, MsgExternInfo* msg_info);

    /// @return 0 on success
    /// @return <0 on failure
    int32_t Pop(uint64_t handle);

    /// @return 0 on success
    /// @return <0 on failure
    int32_t Close(uint64_t handle);

    /// @return 0 on success (an event is available)
    /// @return <0 on failure (no event, or a network fault)
    int32_t Poll(uint64_t* handle, int32_t* event, int32_t timeout_ms);

    /// @brief Whether the handle refers to a TCP connection.
    bool IsTcpTransport(uint64_t handle);

    /// @brief Set the maximum length of the pending-send buffer list.
    void SetMaxSendListSize(uint32_t max_send_list_size);

private:
    int32_t PollConnectionBuffer(uint64_t* handle);
    NetConnection* CreateConnection(uint64_t netaddr);
    NetConnection* GetConnection(uint64_t netaddr);
    void CloseConnection(uint64_t netaddr);
    void CloseAllConnections();
    void SendCacheData(uint64_t netaddr, NetConnection* connection);
    int32_t RecvTcpData(uint64_t netaddr);
    int32_t RecvUdpData(uint64_t netaddr);
    // Workaround for limitations of the message interface in point-to-point
    // mode; to be revisited later.
    uint64_t GetLocalHandle(uint64_t netaddr);
    void OnSocketError(uint64_t netaddr);

private:
    Epoll* m_epoll;
    NetIO* m_netio;
    uint8_t* m_send_buff; // scratch buffer used to assemble multi-fragment UDP messages
    uint32_t m_msg_buff_len;
    uint32_t m_max_send_list_size;
    uint32_t m_msg_head_len;
    GetMsgDataLen m_get_msg_data_len_func;

    // Per-connection state, keyed by network address.
    cxx::unordered_map<uint64_t, NetConnection*> m_connections;
    // UDP: <peer handle, local listen handle> map
    cxx::unordered_map<uint64_t, uint64_t> m_peer_handle_to_local;
};
} // namespace pebble
#endif // _PEBBLE_COMMON_NET_MESSAGE_H_
| 2,160 |
461 | <reponame>edmund-troche/esp32-ble2mqtt
#include "wifi.h"
#include <esp_err.h>
#include <esp_event.h>
#include <esp_log.h>
#include <esp_wifi.h>
#include <esp_wpa2.h>
#include <arpa/inet.h>
#include <string.h>
static const char *TAG = "WiFi"; /* esp log tag */
/* User-registered callbacks, invoked from the WiFi/IP event handler. */
static wifi_on_connected_cb_t on_connected_cb = NULL;
static wifi_on_disconnected_cb_t on_disconnected_cb = NULL;
/* Hostname to apply when the station interface starts (owned copy). */
static char *wifi_hostname = NULL;
/* Register the callback invoked once the station has obtained an IP address. */
void wifi_set_on_connected_cb(wifi_on_connected_cb_t cb)
{
    on_connected_cb = cb;
}

/* Register the callback invoked when the station disconnects from the AP. */
void wifi_set_on_disconnected_cb(wifi_on_disconnected_cb_t cb)
{
    on_disconnected_cb = cb;
}
uint8_t *wifi_mac_get(void)
{
static uint8_t mac[6] = {};
if (!mac[0])
esp_wifi_get_mac(ESP_IF_WIFI_STA, mac);
return mac;
}
/*
 * Remember the hostname to apply when the station interface starts.
 * An owned copy is stored; any previously stored name is released first.
 */
void wifi_hostname_set(const char *hostname)
{
    /* free(NULL) is a no-op, so no guard is required. */
    free(wifi_hostname);
    wifi_hostname = strdup(hostname);
}
/* Single dispatcher for both the WIFI_EVENT and IP_EVENT bases; registered
 * in wifi_initialize(). Drives (re)connection and the user callbacks. */
static void event_handler(void* arg, esp_event_base_t event_base,
    int32_t event_id, void* event_data)
{
    if (event_base == WIFI_EVENT)
    {
        switch(event_id) {
        case WIFI_EVENT_STA_START:
            /* Hostname must be applied before the first connect. */
            if (wifi_hostname)
                tcpip_adapter_set_hostname(TCPIP_ADAPTER_IF_STA, wifi_hostname);
            esp_wifi_connect();
            break;
        case WIFI_EVENT_STA_CONNECTED:
            ESP_LOGI(TAG, "Connected");
            break;
        case WIFI_EVENT_STA_DISCONNECTED:
            ESP_LOGI(TAG, "Disconnected");
            if (on_disconnected_cb)
                on_disconnected_cb();
            /* This is a workaround as ESP32 WiFi libs don't currently
             * auto-reassociate. */
            esp_wifi_connect();
            break;
        default:
            ESP_LOGD(TAG, "Unhandled WiFi event (%d)", event_id);
            break;
        }
    }
    else if (event_base == IP_EVENT)
    {
        switch(event_id) {
        case IP_EVENT_STA_GOT_IP:
        {
            ip_event_got_ip_t *event = (ip_event_got_ip_t *)event_data;

            ESP_LOGD(TAG, "Got IP address: %s",
                inet_ntoa(event->ip_info.ip));
            /* "Connected" from the user's point of view means having an IP. */
            if (on_connected_cb)
                on_connected_cb();
            break;
        }
        case IP_EVENT_STA_LOST_IP:
            ESP_LOGD(TAG, "Lost IP address");
            break;
        default:
            ESP_LOGD(TAG, "Unhandled IP event (%d)", event_id);
            break;
        }
    }
}
/*
 * Translate an EAP method name ("TLS", "PEAP", "TTLS") to its enum value.
 * Returns EAP_NONE for NULL or an unrecognized name.
 */
eap_method_t wifi_eap_atomethod(const char *method)
{
    static const struct {
        const char *name;
        int method;
    } table[] = {
        { "TLS", EAP_TLS },
        { "PEAP", EAP_PEAP },
        { "TTLS", EAP_TTLS },
    };
    size_t i;

    if (method == NULL)
        return EAP_NONE;

    for (i = 0; i < sizeof(table) / sizeof(table[0]); i++)
    {
        if (!strcmp(table[i].name, method))
            return table[i].method;
    }

    return EAP_NONE;
}
/**
 * Start WiFi in soft-AP mode with the given SSID.
 *
 * @param ssid      Network name (truncated to 32 bytes).
 * @param password  WPA2-PSK passphrase, or NULL for an open network.
 * @return 0 (driver errors abort via ESP_ERROR_CHECK).
 */
int wifi_start_ap(const char *ssid, const char *password)
{
    wifi_config_t wifi_config = { .ap = { .max_connection = 1 } };

    strncpy((char *)wifi_config.ap.ssid, ssid, 32);
    if (password)
    {
        /* Bug fix: the password belongs in the .ap member of the
         * wifi_config_t union, not .sta — the old code only worked by
         * coincidence of the two members' layout. */
        strncpy((char *)wifi_config.ap.password, password, 64);
        wifi_config.ap.authmode = WIFI_AUTH_WPA2_PSK;
    }
    else
        wifi_config.ap.authmode = WIFI_AUTH_OPEN;

    esp_netif_create_default_wifi_ap();
    ESP_ERROR_CHECK(esp_wifi_set_mode(WIFI_MODE_AP));
    ESP_ERROR_CHECK(esp_wifi_set_config(ESP_IF_WIFI_AP, &wifi_config));
    ESP_ERROR_CHECK(esp_wifi_start());

    return 0;
}
/**
 * Connect to an AP as a station, optionally with WPA2-Enterprise.
 *
 * @param ssid/password      AP credentials; password may be NULL (open AP).
 * @param eap_method         EAP_NONE disables enterprise auth entirely.
 * @param eap_identity       Outer identity (optional).
 * @param eap_username/eap_password  Phase-2 credentials; both required for
 *                           PEAP/TTLS.
 * @param ca_cert/client_cert/client_key  PEM blobs (optional, NUL-terminated).
 * @return 0 (driver errors abort via ESP_ERROR_CHECK).
 */
int wifi_connect(const char *ssid, const char *password,
    eap_method_t eap_method, const char *eap_identity,
    const char *eap_username, const char *eap_password,
    const char *ca_cert, const char *client_cert, const char *client_key)
{
    wifi_config_t wifi_config = {};

    strncpy((char *)wifi_config.sta.ssid, ssid, 32);
    if (password)
        strncpy((char *)wifi_config.sta.password, password, 64);

    ESP_ERROR_CHECK(esp_wifi_set_mode(WIFI_MODE_STA));
    ESP_ERROR_CHECK(esp_wifi_set_config(ESP_IF_WIFI_STA, &wifi_config));

    if (eap_method)
    {
        if (ca_cert)
        {
            ESP_ERROR_CHECK(esp_wifi_sta_wpa2_ent_set_ca_cert((uint8_t *)ca_cert,
                strlen(ca_cert)));
        }
        if (client_cert)
        {
            ESP_ERROR_CHECK(esp_wifi_sta_wpa2_ent_set_cert_key((uint8_t *)client_cert,
                strlen(client_cert), (uint8_t *)client_key,
                client_key ? strlen(client_key) : 0, NULL, 0));
        }
        if (eap_identity)
        {
            ESP_ERROR_CHECK(esp_wifi_sta_wpa2_ent_set_identity((uint8_t *)eap_identity,
                strlen(eap_identity)));
        }
        if (eap_method == EAP_PEAP || eap_method == EAP_TTLS)
        {
            /* Bug fix: both credentials are required here. The old check
             * used `||`, so supplying only one of them reached
             * strlen(NULL) below and crashed. */
            if (eap_username && eap_password)
            {
                ESP_ERROR_CHECK(esp_wifi_sta_wpa2_ent_set_username((uint8_t *)eap_username,
                    strlen(eap_username)));
                ESP_ERROR_CHECK(esp_wifi_sta_wpa2_ent_set_password((uint8_t *)eap_password,
                    strlen(eap_password)));
            }
            else
            {
                ESP_LOGE(TAG, "Username and password are required for "
                    "Tunneled TLS or Protected EAP");
            }
        }
        ESP_ERROR_CHECK(esp_wifi_sta_wpa2_ent_enable());
    }

    ESP_LOGI(TAG, "Connecting to SSID %s", wifi_config.sta.ssid);
    ESP_ERROR_CHECK(esp_wifi_start());
    ESP_ERROR_CHECK(esp_wifi_set_max_tx_power(78));

    return 0;
}
/* Force a reconnect: disconnecting raises WIFI_EVENT_STA_DISCONNECTED,
 * whose handler calls esp_wifi_connect() again. Returns the result of
 * the disconnect call. */
int wifi_reconnect(void)
{
    return esp_wifi_disconnect();
}
/* One-time WiFi stack setup: netif + default event loop + driver init,
 * and registration of event_handler for all WiFi/IP events. Call before
 * wifi_connect()/wifi_start_ap(). Always returns 0 (errors abort). */
int wifi_initialize(void)
{
    ESP_LOGD(TAG, "Initializing WiFi station");
    ESP_ERROR_CHECK(esp_netif_init());
    ESP_ERROR_CHECK(esp_event_loop_create_default());
    esp_netif_create_default_wifi_sta();
    wifi_init_config_t cfg = WIFI_INIT_CONFIG_DEFAULT();
    ESP_ERROR_CHECK(esp_wifi_init(&cfg));
    /* Keep WiFi credentials in RAM only; NVS is managed by the caller. */
    ESP_ERROR_CHECK(esp_wifi_set_storage(WIFI_STORAGE_RAM));
    ESP_ERROR_CHECK(esp_event_handler_register(WIFI_EVENT, ESP_EVENT_ANY_ID, &event_handler, NULL));
    ESP_ERROR_CHECK(esp_event_handler_register(IP_EVENT, ESP_EVENT_ANY_ID, &event_handler, NULL));
    return 0;
}
| 3,168 |
746 | <reponame>VishalS711/protege
package org.protege.editor.owl.ui.deprecation;
import com.google.common.base.Optional;
import org.protege.editor.core.ui.preferences.PreferencesLayoutPanel;
import org.protege.editor.owl.OWLEditorKit;
import org.protege.editor.owl.model.deprecation.DeprecationProfile;
import org.protege.editor.owl.ui.AbstractOWLWizardPanel;
import org.semanticweb.owlapi.model.IRI;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
/**
* <NAME>
* Stanford Center for Biomedical Informatics Research
* 29 Aug 2017
*/
/**
 * Wizard page that lets the user pick a {@link DeprecationProfile}.
 * The selected profile's description is shown below the combo box, and the
 * selection is written into the shared {@link DeprecateEntityWizardState}
 * when the page is hidden.
 */
public class DeprecationProfilePage extends AbstractOWLWizardPanel {

    public static final String ID = "DeprecationProfilePage";

    @Nonnull
    private final DeprecateEntityWizardState wizardState;

    private final JComboBox<DeprecationProfile> profilesCombo;

    private final JLabel descriptionLabel = new JLabel();

    private final List<DeprecationProfile> deprecationProfiles = new ArrayList<>();

    public DeprecationProfilePage(@Nonnull OWLEditorKit owlEditorKit,
                                  @Nonnull DeprecateEntityWizardState wizardState,
                                  @Nonnull List<DeprecationProfile> deprecationProfiles) {
        super(ID, "Deprecation Profile", owlEditorKit);
        this.deprecationProfiles.addAll(deprecationProfiles);
        this.wizardState = wizardState;
        setInstructions("<b>Please choose a deprecation profile</b>");
        profilesCombo = new JComboBox<>(deprecationProfiles.toArray(new DeprecationProfile [deprecationProfiles.size()]));
        profilesCombo.setRenderer(new DeprecationProfileRenderer());
        // Keep the description label in sync with the selection.
        profilesCombo.addActionListener(e -> setDescription());
        JPanel content = new JPanel(new BorderLayout(7, 7));
        content.add(profilesCombo, BorderLayout.NORTH);
        content.add(descriptionLabel, BorderLayout.SOUTH);
        descriptionLabel.setBorder(BorderFactory.createEmptyBorder(0, 7, 0, 0));
        JPanel holder = new JPanel(new BorderLayout());
        holder.add(content, BorderLayout.NORTH);
        setContent(holder);
    }

    /**
     * Pre-selects the profile whose "activated by" IRI matches the active
     * ontology's IRI (if any), then refreshes the description label.
     */
    private void setActiveSelected() {
        deprecationProfiles.forEach(profile -> {
            profile.getActivatedBy().ifPresent(activatedByIri -> {
                Optional<IRI> ontologyIRI = getOWLModelManager().getActiveOntology().getOntologyID().getOntologyIRI();
                if(ontologyIRI.isPresent()) {
                    if(ontologyIRI.get().equals(activatedByIri)) {
                        profilesCombo.setSelectedItem(profile);
                        setDescription();
                    }
                }
            });
        });
        setDescription();
    }

    // Renders the selected profile's (multi-line) description as HTML.
    // NOTE(review): assumes a non-null selection; appears to rely on the
    // profile list being non-empty — confirm for the empty-list case.
    private void setDescription() {
        String description = ((DeprecationProfile) profilesCombo.getSelectedItem()).getDescription();
        descriptionLabel.setText(String.format("<html><body>%s</body></html>",
                                               description.replace("\n", "<br>")));
    }

    @Nullable
    @Override
    public Object getNextPanelDescriptor() {
        return DeprecationReasonPage.ID;
    }

    @Override
    public void aboutToDisplayPanel() {
        setActiveSelected();
    }

    @Override
    public void aboutToHidePanel() {
        // Persist the user's choice for the following wizard pages.
        wizardState.setDeprecationProfile((DeprecationProfile) profilesCombo.getSelectedItem());
    }
}
| 1,395 |
14,668 | // Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chromeos/components/cdm_factory_daemon/mojom/decrypt_config_mojom_traits.h"
#include "media/base/encryption_pattern.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace chromeos {
// Round-trips a media::EncryptionPattern through the mojom serializer and
// checks that both block counts survive.
TEST(DecryptConfigStructTraitsTest, ConvertEncryptionPattern) {
  auto input = media::EncryptionPattern(22, 42);
  std::vector<uint8_t> data =
      chromeos::cdm::mojom::EncryptionPattern::Serialize(&input);

  media::EncryptionPattern output;
  EXPECT_TRUE(chromeos::cdm::mojom::EncryptionPattern::Deserialize(
      std::move(data), &output));
  EXPECT_EQ(input.crypt_byte_block(), output.crypt_byte_block());
  EXPECT_EQ(input.skip_byte_block(), output.skip_byte_block());
}

// Round-trips a media::SubsampleEntry and checks both byte counts.
TEST(DecryptConfigStructTraitsTest, ConvertSubsampleEntry) {
  auto input = media::SubsampleEntry(22, 42);
  std::vector<uint8_t> data =
      chromeos::cdm::mojom::SubsampleEntry::Serialize(&input);

  media::SubsampleEntry output;
  EXPECT_TRUE(chromeos::cdm::mojom::SubsampleEntry::Deserialize(std::move(data),
                                                                &output));
  EXPECT_EQ(input.clear_bytes, output.clear_bytes);
  EXPECT_EQ(input.cypher_bytes, output.cypher_bytes);
}

// Round-trips a full media::DecryptConfig (scheme, key id, IV, subsamples,
// pattern) and compares every field.
TEST(DecryptConfigStructTraitsTest, ConvertDecryptConfig) {
  std::unique_ptr<media::DecryptConfig> input =
      std::make_unique<media::DecryptConfig>(
          media::EncryptionScheme::kCbcs, "FAKEKEY",
          std::string(media::DecryptConfig::kDecryptionKeySize, '1'),
          std::vector<media::SubsampleEntry>({media::SubsampleEntry(1, 3)}),
          absl::make_optional<media::EncryptionPattern>(22, 42));
  std::vector<uint8_t> data =
      chromeos::cdm::mojom::DecryptConfig::Serialize(&input);

  std::unique_ptr<media::DecryptConfig> output;
  EXPECT_TRUE(chromeos::cdm::mojom::DecryptConfig::Deserialize(std::move(data),
                                                               &output));
  EXPECT_EQ(input->encryption_scheme(), output->encryption_scheme());
  EXPECT_EQ(input->key_id(), output->key_id());
  EXPECT_EQ(input->iv(), output->iv());
  EXPECT_EQ(input->subsamples().size(), output->subsamples().size());
  EXPECT_EQ(input->subsamples()[0].clear_bytes,
            output->subsamples()[0].clear_bytes);
  EXPECT_EQ(input->subsamples()[0].cypher_bytes,
            output->subsamples()[0].cypher_bytes);
  EXPECT_EQ(input->encryption_pattern(), output->encryption_pattern());
}
} // namespace chromeos | 1,065 |
5,169 | <gh_stars>1000+
{
"name": "LPKeyboard",
"version": "1.0",
  "summary": "LPKeyboard: a custom keyboard component for iOS.",
"description": "自定义组件:键盘,实现自定义键盘",
"homepage": "https://github.com/l819183457",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"ziqiang.liang": "<EMAIL>"
},
"platforms": {
"ios": "7.0"
},
"source": {
"git": "https://github.com/l819183457/LPKeyboard.git",
"tag": "v1.0"
},
"source_files": "keyboard/*.{h,m,png}",
"exclude_files": "Classes/Exclude"
}
| 273 |
// Requires Java 14+ (records).
// Generic record wrapping a single value.
// NOTE(review): implements Cloneable but does not override clone(), so
// Object.clone() remains protected and is not callable by clients — confirm
// whether a public clone() was intended.
public record Record<T>(T field) implements Cloneable {
}
353 | <filename>searchbackend/simhashsearchindex_test.cpp
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "gtest/gtest.h"
#include "searchbackend/simhashsearchindex.hpp"
// Constructing and destroying an index should create the backing file.
TEST(simhashsearchindex, initialize) {
  // Run the constructor.
  {
    SimHashSearchIndex index("./testindex.index", true, 28);
  }
  // Should've run the destructor.
  EXPECT_EQ(unlink("./testindex.index"), 0);
}

// After one insertion, GetIndexSetSize() should equal the 28 passed to the
// constructor.
TEST(simhashsearchindex, addfunction) {
  SimHashSearchIndex index("./testindex.index", true, 28);

  index.AddFunction(0xDEADBEEF0BADBABE, 0x0BADFEEDBA551055, 0x1,
    0x400000);
  EXPECT_EQ(index.GetIndexSetSize(), 28);
  EXPECT_EQ(unlink("./testindex.index"), 0);
}

// Data written by one instance must be visible to a second instance that
// re-opens the same file (create flag false).
TEST(simhashsearchindex, persistence) {
  {
    SimHashSearchIndex index("./testindex.index", true, 28);
    index.AddFunction(0xDEADBEEF0BADBABE, 0x0BADFEEDBA551055, 0x1,
      0x400000);
    EXPECT_EQ(index.GetIndexSetSize(), 28);
  }
  SimHashSearchIndex index2("./testindex.index", false, 28);
  EXPECT_EQ(index2.GetIndexSetSize(), 28);
  EXPECT_EQ(unlink("./testindex.index"), 0);
}
// Exact-match lookup: each inserted (hash_a, hash_b) pair must come back as
// the top result when queried unmodified.
TEST(simhashsearchindex, querytopn_precise) {
  SimHashSearchIndex index("./testindex.index", true, 28);
  std::array<uint64_t, 12> constantarray = {
    0xba5eba11bedabb1eUL,
    0xbe5077edb0a710adUL,
    0xb01dfacecab005e0UL,
    0xca11ab1eca55e77eUL,
    0xdeadbea700defec8UL,
    0xf01dab1ef005ba11UL,
    0x0ddba115ca1ab1e0UL,
    0x7e1eca57deadbeefUL,
    0xca5cadab1ef00d50UL,
    0x0b501e7edecea5edUL,
    0x7e55e118df00d500UL,
    0x0e1ec7edba11a575UL };
  // Insert adjacent pairs; the file id doubles as the expected result index.
  for (uint32_t i = 0; i < constantarray.size()-1; ++i) {
    index.AddFunction(constantarray[i], constantarray[i+1],
      static_cast<uint64_t>(i), static_cast<uint64_t>(i));
  }
  EXPECT_EQ(index.GetIndexSetSize(), 28 * (constantarray.size()-1));
  for (uint32_t i = 0; i < constantarray.size()-1; ++i) {
    uint64_t hash_a = constantarray[i];
    uint64_t hash_b = constantarray[i+1];
    std::vector<std::pair<float, SimHashSearchIndex::FileAndAddress>> results;
    index.QueryTopN(hash_a, hash_b, 5, &results);
    EXPECT_EQ(results[0].second.first, i);
  }
  EXPECT_EQ(unlink("./testindex.index"), 0);
}

// Approximate lookup: flipping a few bits of the query hashes (cumulative
// XOR distortions) must still return the original function as best match.
TEST(simhashsearchindex, querytopn) {
  SimHashSearchIndex index("./testindex.index", true, 28);
  std::array<uint64_t, 12> constantarray = {
    0xba5eba11bedabb1eUL,
    0xbe5077edb0a710adUL,
    0xb01dfacecab005e0UL,
    0xca11ab1eca55e77eUL,
    0xdeadbea700defec8UL,
    0xf01dab1ef005ba11UL,
    0x0ddba115ca1ab1e0UL,
    0x7e1eca57deadbeefUL,
    0xca5cadab1ef00d50UL,
    0x0b501e7edecea5edUL,
    0x7e55e118df00d500UL,
    0x0e1ec7edba11a575UL };
  for (uint32_t i = 0; i < constantarray.size()-1; ++i) {
    index.AddFunction(constantarray[i], constantarray[i+1],
      static_cast<uint64_t>(i), static_cast<uint64_t>(i));
  }
  EXPECT_EQ(index.GetIndexSetSize(), 28 * (constantarray.size()-1));
  std::array<uint64_t, 3> distortions = {
    0x0180018001800180UL,
    0x0101010101010101UL,
    0x8080808080808080UL };
  for (uint32_t i = 0; i < constantarray.size()-1; ++i) {
    uint64_t hash_a = constantarray[i];
    uint64_t hash_b = constantarray[i+1];
    for (uint32_t distortion_index = 0; distortion_index < distortions.size();
      ++distortion_index) {
      hash_a ^= distortions[distortion_index];
      hash_b ^= distortions[distortion_index];
      std::vector<std::pair<float, SimHashSearchIndex::FileAndAddress>> results;
      index.QueryTopN(hash_a, hash_b, 5, &results);
      EXPECT_EQ(results[0].second.first, i);
    }
  }
  EXPECT_EQ(unlink("./testindex.index"), 0);
}
| 1,741 |
1,545 | <reponame>pkumar-singh/bookkeeper
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from bookkeeper.common import protobuf_helpers
from bookkeeper.proto import common_pb2
from bookkeeper.proto import kv_pb2
from google.protobuf import any_pb2
from google.protobuf import message
from google.protobuf import source_context_pb2
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
from google.protobuf import type_pb2
def test_from_any_pb_success():
    """Unpacking an Any into the type it wraps yields an equal message."""
    in_message = common_pb2.Endpoint(port=5181)
    in_message_any = any_pb2.Any()
    in_message_any.Pack(in_message)
    out_message =\
        protobuf_helpers.from_any_pb(common_pb2.Endpoint, in_message_any)
    assert in_message == out_message
def test_from_any_pb_failure():
    """Unpacking an Any into a mismatched type raises TypeError."""
    in_message = any_pb2.Any()
    in_message.Pack(common_pb2.Endpoint(port=5181))
    with pytest.raises(TypeError):
        protobuf_helpers.from_any_pb(kv_pb2.KeyValue, in_message)
def test_check_protobuf_helpers_ok():
    """check_oneof accepts at most one non-None keyword argument."""
    assert protobuf_helpers.check_oneof() is None
    assert protobuf_helpers.check_oneof(foo='bar') is None
    assert protobuf_helpers.check_oneof(foo='bar', baz=None) is None
    assert protobuf_helpers.check_oneof(foo=None, baz='bacon') is None
    assert (protobuf_helpers.check_oneof(foo='bar', spam=None, eggs=None)
            is None)
def test_check_protobuf_helpers_failures():
    """check_oneof raises ValueError for two or more non-None arguments
    (0 counts as set: the check is against None, not truthiness)."""
    with pytest.raises(ValueError):
        protobuf_helpers.check_oneof(foo='bar', spam='eggs')
    with pytest.raises(ValueError):
        protobuf_helpers.check_oneof(foo='bar', baz='bacon', spam='eggs')
    with pytest.raises(ValueError):
        protobuf_helpers.check_oneof(foo='bar', spam=0, eggs=None)
def test_get_messages():
    """get_messages maps names to Message subclasses and nothing else."""
    kv = protobuf_helpers.get_messages(kv_pb2)
    # Ensure that KeyValue was exported properly.
    assert kv['KeyValue'] is kv_pb2.KeyValue
    # Ensure that no non-Message objects were exported.
    for value in kv.values():
        assert issubclass(value, message.Message)
def test_get_dict_absent():
    """get on a dict raises KeyError for a missing key with no default."""
    with pytest.raises(KeyError):
        assert protobuf_helpers.get({}, 'foo')
def test_get_dict_present():
    """get on a dict returns the value for a present key."""
    assert protobuf_helpers.get({'foo': 'bar'}, 'foo') == 'bar'
def test_get_dict_default():
    """get on a dict falls back to the provided default."""
    assert protobuf_helpers.get({}, 'foo', default='bar') == 'bar'
def test_get_dict_nested():
    """Dotted keys traverse nested dicts."""
    assert protobuf_helpers.get({'foo': {'bar': 'baz'}}, 'foo.bar') == 'baz'
def test_get_dict_nested_default():
    """Dotted keys fall back to the default at any missing level."""
    assert protobuf_helpers.get({}, 'foo.baz', default='bacon') == 'bacon'
    assert (
        protobuf_helpers.get({'foo': {}}, 'foo.baz', default='bacon') ==
        'bacon')
def test_get_msg_sentinel():
    """get on a protobuf message raises KeyError for an unknown field."""
    msg = timestamp_pb2.Timestamp()
    with pytest.raises(KeyError):
        assert protobuf_helpers.get(msg, 'foo')
def test_get_msg_present():
    """get on a protobuf message returns the field's value."""
    msg = timestamp_pb2.Timestamp(seconds=42)
    assert protobuf_helpers.get(msg, 'seconds') == 42
def test_get_msg_default():
    """get on a protobuf message falls back to the default for unknown fields."""
    msg = timestamp_pb2.Timestamp()
    assert protobuf_helpers.get(msg, 'foo', default='bar') == 'bar'
def test_invalid_object():
    """get rejects targets that are neither dicts nor messages."""
    with pytest.raises(TypeError):
        protobuf_helpers.get(object(), 'foo', 'bar')
def test_set_dict():
    """set writes a key into a dict."""
    mapping = {}
    protobuf_helpers.set(mapping, 'foo', 'bar')
    assert mapping == {'foo': 'bar'}
def test_set_msg():
    """set writes a field on a protobuf message."""
    msg = timestamp_pb2.Timestamp()
    protobuf_helpers.set(msg, 'seconds', 42)
    assert msg.seconds == 42
def test_set_dict_nested():
    """Dotted keys create intermediate dicts as needed."""
    mapping = {}
    protobuf_helpers.set(mapping, 'foo.bar', 'baz')
    assert mapping == {'foo': {'bar': 'baz'}}
def test_set_invalid_object():
    """set rejects targets that are neither dicts nor messages."""
    with pytest.raises(TypeError):
        protobuf_helpers.set(object(), 'foo', 'bar')
def test_setdefault_dict_unset():
    """setdefault fills in a missing key."""
    mapping = {}
    protobuf_helpers.setdefault(mapping, 'foo', 'bar')
    assert mapping == {'foo': 'bar'}
def test_setdefault_dict_falsy():
    """setdefault overwrites falsy values (unlike dict.setdefault)."""
    mapping = {'foo': None}
    protobuf_helpers.setdefault(mapping, 'foo', 'bar')
    assert mapping == {'foo': 'bar'}
def test_setdefault_dict_truthy():
    """setdefault leaves truthy values untouched."""
    mapping = {'foo': 'bar'}
    protobuf_helpers.setdefault(mapping, 'foo', 'baz')
    assert mapping == {'foo': 'bar'}
def test_field_mask_singular_field_diffs():
    """A singular field difference yields a mask of ['name'] in all four
    directions: set->unset, unset->set, and None on either side."""
    original = type_pb2.Type(name='name')
    modified = type_pb2.Type()
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['name'])
    # Fix: this case previously duplicated the one above verbatim; it is
    # meant to cover the unset -> set direction.
    original = type_pb2.Type()
    modified = type_pb2.Type(name='name')
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['name'])
    original = None
    modified = type_pb2.Type(name='name')
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['name'])
    original = type_pb2.Type(name='name')
    modified = None
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['name'])
def test_field_mask_message_diffs():
    """Sub-message diffs: a changed/added leaf yields the dotted leaf path;
    clearing the whole sub-message yields the sub-message path itself."""
    original = type_pb2.Type()
    modified = type_pb2.Type(source_context=source_context_pb2.SourceContext(
        file_name='name'))
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['source_context.file_name'])
    original = type_pb2.Type(source_context=source_context_pb2.SourceContext(
        file_name='name'))
    modified = type_pb2.Type()
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['source_context'])
    original = type_pb2.Type(source_context=source_context_pb2.SourceContext(
        file_name='name'))
    modified = type_pb2.Type(source_context=source_context_pb2.SourceContext(
        file_name='other_name'))
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['source_context.file_name'])
    original = None
    modified = type_pb2.Type(source_context=source_context_pb2.SourceContext(
        file_name='name'))
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['source_context.file_name'])
    original = type_pb2.Type(source_context=source_context_pb2.SourceContext(
        file_name='name'))
    modified = None
    assert (protobuf_helpers.field_mask(original, modified).paths ==
            ['source_context'])
def test_field_mask_repeated_diffs():
    """Repeated fields diff as a whole: any difference (including order)
    yields the repeated field's path."""
    original = struct_pb2.ListValue()
    modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
                                    struct_pb2.Value(number_value=2.0)])
    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
    original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
                                    struct_pb2.Value(number_value=2.0)])
    modified = struct_pb2.ListValue()
    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
    original = None
    modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
                                    struct_pb2.Value(number_value=2.0)])
    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
    original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
                                    struct_pb2.Value(number_value=2.0)])
    modified = None
    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
    original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
                                    struct_pb2.Value(number_value=2.0)])
    modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=2.0),
                                    struct_pb2.Value(number_value=1.0)])
    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
def test_field_mask_map_diffs():
    """Map fields also diff as a whole: any change in keys or values yields
    the map field's path."""
    original = struct_pb2.Struct()
    modified = struct_pb2.Struct(
        fields={'foo': struct_pb2.Value(number_value=1.0)})
    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
    original = struct_pb2.Struct(
        fields={'foo': struct_pb2.Value(number_value=1.0)})
    modified = struct_pb2.Struct()
    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
    original = None
    modified = struct_pb2.Struct(
        fields={'foo': struct_pb2.Value(number_value=1.0)})
    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
    original = struct_pb2.Struct(
        fields={'foo': struct_pb2.Value(number_value=1.0)})
    modified = None
    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
    original = struct_pb2.Struct(
        fields={'foo': struct_pb2.Value(number_value=1.0)})
    modified = struct_pb2.Struct(
        fields={'foo': struct_pb2.Value(number_value=2.0)})
    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
    original = struct_pb2.Struct(
        fields={'foo': struct_pb2.Value(number_value=1.0)})
    modified = struct_pb2.Struct(
        fields={'bar': struct_pb2.Value(number_value=1.0)})
    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
| 4,018 |
5,169 | {
"name": "IVBaseKit",
"version": "0.1.0",
"summary": "Base class used in iwown iOS developer team",
"license": "MIT",
"authors": {
"xuezou": "<EMAIL>"
},
"homepage": "https://github.com/xuezou/IVBaseKit",
"source": {
"git": "https://github.com/xuezou/IVBaseKit.git",
"tag": "0.1.0"
},
"platforms": {
"ios": "8.0"
},
"frameworks": "Foundation",
"vendored_frameworks": "IVBaseKit/IVBaseKit.framework",
"requires_arc": true
}
| 202 |
2,360 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyAgateDbf(PythonPackage):
    """agate-dbf adds read support for dbf files to agate."""

    # Upstream documentation site and the PyPI tarball Spack fetches from.
    homepage = "https://agate-dbf.readthedocs.io/en/latest/"
    pypi = "agate-dbf/agate-dbf-0.2.1.tar.gz"

    version('0.2.1', sha256='00c93c498ec9a04cc587bf63dd7340e67e2541f0df4c9a7259d7cb3dd4ce372f')

    # Build-only requirement (setup.py based install).
    depends_on('py-setuptools', type='build')
    # Needed both to build and at runtime.
    depends_on('[email protected]:', type=('build', 'run'))
    depends_on('[email protected]:', type=('build', 'run'))
| 290 |
2,329 | package annos;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Simple marker-style annotation with two optional elements, kept at runtime
 * retention so it can be read back through reflection (e.g. in tests).
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface SimpleAnnotation2 {
    /** Free-form text payload; defaults to "hello". */
    String value() default "hello";

    /** Numeric payload; defaults to 42. */
    int number() default 42;
}
| 73 |
5,169 | {
"name": "SouthgisLogger",
"version": "0.0.3",
  "summary": "Southgis Logger Utils. It monitors APP crash reasons and user operations.",
"description": " A longer description of SouthgisLogger in Markdown format.\n\n * Think: Why did you write this? What is the focus? What does it do?\n * CocoaPods will be using this to generate tags, and improve search results.\n * Try to keep it short, snappy and to the point.\n * Finally, don't worry about the indent, CocoaPods strips it!\n",
"homepage": "https://github.com/crash-wu/SouthgisLogger",
"license": {
"type": "MIT",
"file": "FILE_LICENSE"
},
"authors": {
"crash_wu": "<EMAIL>"
},
"platforms": {
"ios": "7.0"
},
"source": {
"git": "https://github.com/crash-wu/SouthgisLogger.git",
"tag": "0.0.3"
},
"source_files": [
"TianDituFramework/SouthgisLogger",
"*.{h,m}"
],
"requires_arc": true,
"dependencies": {
"YYModel": [
],
"YTKKeyValueStore": [
],
"FMDB": [
]
}
}
| 502 |
309 | <reponame>Mu-L/EFAK
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartloli.kafka.eagle.web.service.impl;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.smartloli.kafka.eagle.common.protocol.ConsumerInfo;
import org.smartloli.kafka.eagle.common.protocol.DisplayInfo;
import org.smartloli.kafka.eagle.common.protocol.OwnerInfo;
import org.smartloli.kafka.eagle.common.protocol.TopicConsumerInfo;
import org.smartloli.kafka.eagle.common.protocol.consumer.ConsumerGroupsInfo;
import org.smartloli.kafka.eagle.common.protocol.consumer.ConsumerSummaryInfo;
import org.smartloli.kafka.eagle.common.protocol.topic.TopicOffsetsInfo;
import org.smartloli.kafka.eagle.common.protocol.topic.TopicSummaryInfo;
import org.smartloli.kafka.eagle.common.util.KConstants;
import org.smartloli.kafka.eagle.common.util.KConstants.D3;
import org.smartloli.kafka.eagle.common.util.KConstants.Topic;
import org.smartloli.kafka.eagle.common.util.StrUtils;
import org.smartloli.kafka.eagle.core.factory.KafkaFactory;
import org.smartloli.kafka.eagle.core.factory.KafkaService;
import org.smartloli.kafka.eagle.web.dao.MBeanDao;
import org.smartloli.kafka.eagle.web.dao.TopicDao;
import org.smartloli.kafka.eagle.web.service.ConsumerService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.Map.Entry;
/**
* Kafka consumer data interface, and set up the return data set.
*
* @author smartloli.
* <p>
* Created by Aug 15, 2016.
* <p>
* Update by hexiang 20170216
*/
@Service
public class ConsumerServiceImpl implements ConsumerService {

    /** DAO for consumer/producer rate metrics; used by {@link #isConsumering}. */
    @Autowired
    private MBeanDao mbeanDao;

    /** DAO for paging consumer group / summary records out of the database. */
    @Autowired
    private TopicDao topicDao;

    /**
     * Kafka service interface.
     */
    private KafkaService kafkaService = new KafkaFactory().create();

    /**
     * Get active topic graph data from kafka cluster.
     * Result shape: {"active": "<d3 tree JSON>"}.
     */
    public String getActiveGraph(String clusterAlias) {
        JSONObject target = new JSONObject();
        target.put("active", getActiveGraphDatasets(clusterAlias));
        return target.toJSONString();
    }

    /**
     * Get active graph from zookeeper.
     * Builds a two-level d3 tree (group -> topics); both levels are capped
     * with a "..." placeholder node once D3.SIZE / D3.CHILD_SIZE is exceeded.
     */
    private String getActiveGraphDatasets(String clusterAlias) {
        Map<String, List<String>> activeTopics = kafkaService.getActiveTopic(clusterAlias);
        JSONObject target = new JSONObject();
        JSONArray targets = new JSONArray();
        target.put("name", "Active Topics");
        // NOTE(review): count starts at 0 here but at 1 in the sibling graph
        // builders (getKafkaConsumerGraph/getKafkaTopicGraph) — confirm the
        // off-by-one in the cap is intentional.
        int count = 0;
        for (Entry<String, List<String>> entry : activeTopics.entrySet()) {
            JSONObject subTarget = new JSONObject();
            JSONArray subTargets = new JSONArray();
            if (count > KConstants.D3.SIZE) {
                // Too many groups: emit a single "..." node and stop.
                subTarget.put("name", "...");
                JSONObject subInSubTarget = new JSONObject();
                subInSubTarget.put("name", "...");
                subTargets.add(subInSubTarget);
                subTarget.put("children", subTargets);
                targets.add(subTarget);
                break;
            } else {
                subTarget.put("name", entry.getKey());
                for (String str : entry.getValue()) {
                    JSONObject subInSubTarget = new JSONObject();
                    if (subTargets.size() > D3.CHILD_SIZE) {
                        // Too many topics under this group: cap with "...".
                        subInSubTarget.put("name", "...");
                        subTargets.add(subInSubTarget);
                        break;
                    } else {
                        subInSubTarget.put("name", str);
                        subTargets.add(subInSubTarget);
                    }
                }
            }
            count++;
            subTarget.put("children", subTargets);
            targets.add(subTarget);
        }
        target.put("children", targets);
        return target.toJSONString();
    }

    /**
     * Get kafka active number & storage offset in zookeeper.
     * Counts how many of the given topics have an active "group_topic" entry.
     */
    private int getActiveNumber(String clusterAlias, String group, List<String> topics) {
        Map<String, List<String>> activeTopics = kafkaService.getActiveTopic(clusterAlias);
        int sum = 0;
        for (String topic : topics) {
            if (activeTopics.containsKey(group + "_" + topic)) {
                sum++;
            }
        }
        return sum;
    }

    /**
     * Storage offset in kafka or zookeeper.
     * Dispatches on the offset-storage formatter ("kafka" vs zookeeper).
     */
    public String getActiveTopic(String clusterAlias, String formatter) {
        if ("kafka".equals(formatter)) {
            return getKafkaActiveTopic(clusterAlias);
        } else {
            return getActiveGraph(clusterAlias);
        }
    }

    /**
     * Judge consumers storage offset in kafka or zookeeper.
     * Zookeeper-based consumer listing has been removed; returns "" for it.
     */
    public String getConsumer(String clusterAlias, String formatter, DisplayInfo page) {
        if ("kafka".equals(formatter)) {
            return getKafkaConsumer(page, clusterAlias);
        } else {
            // remove old kafka metadata
            return "";
        }
    }

    /**
     * Get consumer size from kafka topic.
     *
     * @return number of consumer groups for the cluster.
     */
    public int getConsumerCount(String clusterAlias, String formatter) {
        if ("kafka".equals(formatter)) {
            return kafkaService.getKafkaConsumerGroups(clusterAlias);
        } else {
            return kafkaService.getConsumers(clusterAlias).size();
        }
    }

    /**
     * List the name of the topic in the consumer detail information.
     * Zookeeper-offset variant; marks each topic RUNNING/SHUTDOWN based on
     * the presence of an active "group_topic" entry. An empty search matches
     * everything; otherwise substring match in either direction.
     */
    private String getConsumerDetail(String clusterAlias, String group, String search) {
        Map<String, List<String>> consumers = kafkaService.getConsumers(clusterAlias);
        Map<String, List<String>> actvTopics = kafkaService.getActiveTopic(clusterAlias);
        List<TopicConsumerInfo> kafkaConsumerDetails = new ArrayList<TopicConsumerInfo>();
        int id = 0;
        for (String topic : consumers.get(group)) {
            if (StrUtils.isNull(search)) {
                TopicConsumerInfo consumerDetail = new TopicConsumerInfo();
                consumerDetail.setId(++id);
                consumerDetail.setTopic(topic);
                if (actvTopics.containsKey(group + "_" + topic)) {
                    consumerDetail.setConsumering(Topic.RUNNING);
                } else {
                    consumerDetail.setConsumering(Topic.SHUTDOWN);
                }
                kafkaConsumerDetails.add(consumerDetail);
            } else {
                if (search.contains(topic) || topic.contains(search)) {
                    TopicConsumerInfo consumerDetail = new TopicConsumerInfo();
                    consumerDetail.setId(++id);
                    consumerDetail.setTopic(topic);
                    if (actvTopics.containsKey(group + "_" + topic)) {
                        consumerDetail.setConsumering(Topic.RUNNING);
                    } else {
                        consumerDetail.setConsumering(Topic.SHUTDOWN);
                    }
                    kafkaConsumerDetails.add(consumerDetail);
                }
            }
        }
        return kafkaConsumerDetails.toString();
    }

    /**
     * Judge consumer storage offset in kafka or zookeeper.
     * Dispatches to the kafka- or zookeeper-offset detail builder.
     */
    public String getConsumerDetail(String clusterAlias, String formatter, String group, String search) {
        if ("kafka".equals(formatter)) {
            return getKafkaConsumerDetail(clusterAlias, group, search);
        } else {
            return getConsumerDetail(clusterAlias, group, search);
        }
    }

    /**
     * Get active graph data & storage offset in kafka topic.
     * Same d3 tree shape as {@link #getActiveGraphDatasets(String)}.
     * NOTE(review): declared as Object but always returns a String — consider
     * tightening the return type.
     */
    private Object getKafkaActive(String clusterAlias) {
        JSONArray consumerGroups = JSON.parseArray(kafkaService.getKafkaConsumer(clusterAlias));
        JSONObject target = new JSONObject();
        JSONArray targets = new JSONArray();
        target.put("name", "Active Topics");
        int count = 0;
        for (Object object : consumerGroups) {
            JSONObject consumerGroup = (JSONObject) object;
            JSONObject subTarget = new JSONObject();
            JSONArray subTargets = new JSONArray();
            if (count > KConstants.D3.SIZE) {
                // Cap the number of group nodes with a "..." placeholder.
                subTarget.put("name", "...");
                JSONObject subInSubTarget = new JSONObject();
                subInSubTarget.put("name", "...");
                subTargets.add(subInSubTarget);
                subTarget.put("children", subTargets);
                targets.add(subTarget);
                break;
            } else {
                subTarget.put("name", consumerGroup.getString("group"));
                for (String str : getKafkaTopicSets(clusterAlias, consumerGroup.getString("group"))) {
                    JSONObject subInSubTarget = new JSONObject();
                    if (subTargets.size() > D3.CHILD_SIZE) {
                        // Cap the number of topic nodes under a group.
                        subInSubTarget.put("name", "...");
                        subTargets.add(subInSubTarget);
                        break;
                    } else {
                        subInSubTarget.put("name", str);
                        subTargets.add(subInSubTarget);
                    }
                }
            }
            count++;
            subTarget.put("children", subTargets);
            targets.add(subTarget);
        }
        target.put("children", targets);
        return target.toJSONString();
    }

    /**
     * Get active topic from kafka cluster & storage offset in kafka topic.
     * Result shape: {"active": "<d3 tree JSON>"}.
     */
    private String getKafkaActiveTopic(String clusterAlias) {
        JSONObject target = new JSONObject();
        target.put("active", getKafkaActive(clusterAlias));
        return target.toJSONString();
    }

    /**
     * Get kafka consumer & storage offset in kafka topic.
     * Builds one ConsumerInfo row per group on the requested page; ids are
     * numbered from the page's display offset.
     */
    private String getKafkaConsumer(DisplayInfo page, String clusterAlias) {
        List<ConsumerInfo> kafkaConsumerPages = new ArrayList<ConsumerInfo>();
        JSONArray consumerGroups = JSON.parseArray(kafkaService.getKafkaConsumer(clusterAlias, page));
        int id = page.getiDisplayStart();
        for (Object object : consumerGroups) {
            JSONObject consumerGroup = (JSONObject) object;
            String group = consumerGroup.getString("group");
            ConsumerInfo consumer = new ConsumerInfo();
            consumer.setGroup(group);
            consumer.setId(++id);
            consumer.setNode(consumerGroup.getString("node"));
            OwnerInfo ownerInfo = kafkaService.getKafkaActiverNotOwners(clusterAlias, group);
            consumer.setTopics(ownerInfo.getTopicSets().size());
            consumer.setActiveTopics(getKafkaActiveTopicNumbers(clusterAlias, group));
            consumer.setActiveThreads(ownerInfo.getActiveSize());
            kafkaConsumerPages.add(consumer);
        }
        return kafkaConsumerPages.toString();
    }

    /**
     * Get kafka active topic by active graph.
     * A topic is considered active if it either has an active owner or the
     * rate metrics say it is RUNNING (see {@link #isConsumering}).
     */
    private Set<String> getKafkaTopicSets(String clusterAlias, String group) {
        Set<String> consumerTopics = kafkaService.getKafkaConsumerTopic(clusterAlias, group);
        Set<String> activerTopics = kafkaService.getKafkaActiverTopics(clusterAlias, group);
        for (String topic : consumerTopics) {
            if (isConsumering(clusterAlias, group, topic) == Topic.RUNNING) {
                activerTopics.add(topic);
            }
        }
        Set<String> activeTopicSets = new HashSet<>();
        for (String topic : consumerTopics) {
            if (activerTopics.contains(topic)) {
                activeTopicSets.add(topic);
            } else {
                // NOTE(review): activerTopics was already extended with every
                // RUNNING topic above, so this branch re-checks the same
                // condition — confirm the double evaluation is intended.
                if (isConsumering(clusterAlias, group, topic) == Topic.RUNNING) {
                    activeTopicSets.add(topic);
                }
            }
        }
        return activeTopicSets;
    }

    /**
     * Get kafka active topic total.
     * Same activity rule as {@link #getKafkaTopicSets}, but returns a count.
     */
    private int getKafkaActiveTopicNumbers(String clusterAlias, String group) {
        Set<String> consumerTopics = kafkaService.getKafkaConsumerTopic(clusterAlias, group);
        Set<String> activerTopics = kafkaService.getKafkaActiverTopics(clusterAlias, group);
        for (String topic : consumerTopics) {
            if (isConsumering(clusterAlias, group, topic) == Topic.RUNNING) {
                activerTopics.add(topic);
            }
        }
        int active = 0;
        for (String topic : consumerTopics) {
            if (activerTopics.contains(topic)) {
                active++;
            } else {
                if (isConsumering(clusterAlias, group, topic) == Topic.RUNNING) {
                    active++;
                }
            }
        }
        return active;
    }

    /**
     * Get consumer detail from kafka topic.
     * Kafka-offset variant of the per-group topic listing; an empty search
     * matches everything, otherwise substring match in either direction.
     */
    private String getKafkaConsumerDetail(String clusterAlias, String group, String search) {
        Set<String> consumerTopics = kafkaService.getKafkaConsumerTopic(clusterAlias, group);
        Set<String> activerTopics = kafkaService.getKafkaActiverTopics(clusterAlias, group);
        for (String topic : consumerTopics) {
            if (isConsumering(clusterAlias, group, topic) == Topic.RUNNING) {
                activerTopics.add(topic);
            }
        }
        List<TopicConsumerInfo> kafkaConsumerPages = new ArrayList<TopicConsumerInfo>();
        int id = 0;
        for (String topic : consumerTopics) {
            if (StrUtils.isNull(search)) {
                TopicConsumerInfo consumerDetail = new TopicConsumerInfo();
                consumerDetail.setId(++id);
                consumerDetail.setTopic(topic);
                if (activerTopics.contains(topic)) {
                    consumerDetail.setConsumering(Topic.RUNNING);
                } else {
                    consumerDetail.setConsumering(isConsumering(clusterAlias, group, topic));
                }
                kafkaConsumerPages.add(consumerDetail);
            } else {
                if (search.contains(topic) || topic.contains(search)) {
                    TopicConsumerInfo consumerDetail = new TopicConsumerInfo();
                    consumerDetail.setId(++id);
                    consumerDetail.setTopic(topic);
                    if (activerTopics.contains(topic)) {
                        consumerDetail.setConsumering(Topic.RUNNING);
                    } else {
                        consumerDetail.setConsumering(isConsumering(clusterAlias, group, topic));
                    }
                    kafkaConsumerPages.add(consumerDetail);
                }
            }
        }
        return kafkaConsumerPages.toString();
    }

    /**
     * Check if the application is consuming.
     * Uses the two most recent rate samples for (cluster, group, topic):
     * offset movement between samples means RUNNING; no movement with lag
     * means SHUTDOWN; no movement and no lag means PENDING.
     *
     * @return one of Topic.RUNNING / Topic.PENDING / Topic.SHUTDOWN.
     */
    public int isConsumering(String clusterAlias, String group, String topic) {
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("cluster", clusterAlias);
        params.put("group", group);
        params.put("topic", topic);
        List<TopicOffsetsInfo> topicOffsets = mbeanDao.getConsumerRateTopic(params);
        if (topicOffsets.size() == 2) {
            try {
                // Offset movement between the two samples.
                long resultOffsets = Math.abs(Long.parseLong(topicOffsets.get(0).getOffsets()) - Long.parseLong(topicOffsets.get(1).getOffsets()));
                // Lag of the most recent sample (logsize - offsets).
                long resultLogSize = Math.abs(Long.parseLong(topicOffsets.get(0).getLogsize()) - Long.parseLong(topicOffsets.get(0).getOffsets()));
                /**
                 * offset equal offset,maybe producer rate equal consumer rate.
                 */
                if (resultOffsets == 0) {
                    /**
                     * logsize equal offsets,follow two states.<br>
                     * 1. maybe application shutdown.<br>
                     * 2. maybe application run, but producer rate equal
                     * consumer rate.<br>
                     */
                    if (resultLogSize == 0) {
                        return Topic.PENDING;
                    } else {
                        return Topic.SHUTDOWN;
                    }
                } else {
                    return Topic.RUNNING;
                }
            } catch (Exception e) {
                // NOTE(review): parse failures fall through to SHUTDOWN below.
                e.printStackTrace();
            }
        } else if (topicOffsets.size() == 1) {
            long resultLogSize = Math.abs(Long.parseLong(topicOffsets.get(0).getLogsize()) - Long.parseLong(topicOffsets.get(0).getOffsets()));
            if (resultLogSize == 0) {
                return Topic.PENDING;
            } else {
                return Topic.SHUTDOWN;
            }
        }
        return Topic.SHUTDOWN;
    }

    /** Total number of consumer-group rows matching {@code params} (for paging). */
    @Override
    public long countConsumerGroupPages(Map<String, Object> params) {
        return topicDao.countConsumerGroupPages(params);
    }

    /** Total number of consumer-summary rows matching {@code params} (for paging). */
    @Override
    public long countConsumerSummaryPages(Map<String, Object> params) {
        return topicDao.countConsumerSummaryPages(params);
    }

    /** Page of consumer-group rows for one cluster/group plus search filter. */
    @Override
    public List<ConsumerGroupsInfo> getConsumerGroupPages(String clusterAlias, String group, DisplayInfo page) {
        Map<String, Object> params = new HashMap<>();
        params.put("cluster", clusterAlias);
        params.put("group", group);
        params.put("start", page.getiDisplayStart());
        params.put("size", page.getiDisplayLength());
        params.put("search", page.getSearch());
        return topicDao.getConsumerGroupPages(params);
    }

    /** Page of consumer-summary rows for one cluster plus search filter. */
    @Override
    public List<ConsumerSummaryInfo> getConsumerSummaryPages(String clusterAlias, DisplayInfo page) {
        Map<String, Object> params = new HashMap<>();
        params.put("cluster", clusterAlias);
        params.put("start", page.getiDisplayStart());
        params.put("size", page.getiDisplayLength());
        params.put("search", page.getSearch());
        return topicDao.getConsumerSummaryPages(params);
    }

    /**
     * Build the d3 "group -> topics" graph from database summaries
     * (as opposed to the live-Kafka variants above).
     * Result shape: {"active": "<d3 tree JSON>"}; "" when no summaries.
     */
    @Override
    public String getKafkaConsumerGraph(String clusterAlias) {
        Map<String, Object> params = new HashMap<>();
        params.put("cluster", clusterAlias);
        params.put("start", 0);
        params.put("size", D3.SIZE + 1); // 10 + 1
        List<ConsumerSummaryInfo> consumerSummarys = topicDao.getConsumerSummaryPages(params);
        if (consumerSummarys != null) {
            // {
            //   name : "Active Topics",
            //   children : targets
            // }
            JSONObject target = new JSONObject();
            // targets :
            // [
            //   {
            //     name : test-consumer-group2,
            //     children : [
            //       name : t1,
            //       name : t2
            //     ]
            //   },
            //   {
            //     name : consumer30,
            //     children : [
            //       name : t3,
            //       name : t4
            //     ]
            //   }
            // ]
            JSONArray targets = new JSONArray();
            target.put("name", "Active Topics");
            int count = 1;
            /**
             * cluster group topic_number coordinator active_topic active_thread_total
             * cluster1 consumer30 1 cul-tourism-0008:8085 1 1
             * cluster1 consumer4 1 cul-tourism-0008:8085 1 1
             * cluster1 test-consumer-group2 2 cul-tourism-0008:8085 2 2
             * cluster1 test-consumer-group5 1 cul-tourism-0008:8085 1 1
             */
            for (ConsumerSummaryInfo consumerSummary : consumerSummarys) {
                /**
                 * {
                 *   name : consumer30,
                 *   children : [
                 *     name : t1,
                 *     name : t2
                 *   ]
                 * }
                 */
                JSONObject subTarget = new JSONObject();
                JSONArray subTargets = new JSONArray();
                if (count > KConstants.D3.SIZE) {
                    // Cap the number of group nodes with a "..." placeholder.
                    subTarget.put("name", "...");
                    JSONObject subInSubTarget = new JSONObject();
                    subInSubTarget.put("name", "...");
                    subTargets.add(subInSubTarget);
                    subTarget.put("children", subTargets);
                    targets.add(subTarget);
                    break;
                } else {
                    subTarget.put("name", consumerSummary.getGroup());
                    Map<String, Object> paramChilds = new HashMap<>();
                    paramChilds.put("cluster", clusterAlias);
                    paramChilds.put("group", consumerSummary.getGroup());
                    paramChilds.put("start", 0);
                    paramChilds.put("size", D3.CHILD_SIZE + 1); // 5 + 1
                    paramChilds.put("status", "0");// running
                    /**
                     * cluster group topic status
                     * cluster1 test-consumer-group2 topic_1 0
                     * cluster1 test-consumer-group2 topic_2 0
                     */
                    List<ConsumerGroupsInfo> consumerGroups = topicDao.getConsumerGroupPages(paramChilds);
                    int child = 1;
                    /**
                     * cluster group topic status
                     * cluster1 test-consumer-group2 topic_1 0
                     */
                    for (ConsumerGroupsInfo consumerGroup : consumerGroups) {
                        JSONObject subInSubTarget = new JSONObject();
                        if (child > D3.CHILD_SIZE) {
                            // Cap the number of topic nodes under a group.
                            subInSubTarget.put("name", "...");
                            subTargets.add(subInSubTarget);
                            break;
                        } else {
                            subInSubTarget.put("name", consumerGroup.getTopic());
                            subTargets.add(subInSubTarget);
                        }
                        child++;
                    }
                }
                count++;
                subTarget.put("children", subTargets);
                targets.add(subTarget);
            }
            target.put("children", targets);
            // {
            //   active : target
            // }
            JSONObject result = new JSONObject();
            result.put("active", target.toJSONString());
            return result.toJSONString();
        } else {
            return "";
        }
    }

    /**
     * Get active topic : consumers .
     * Inverse of {@link #getKafkaConsumerGraph}: builds the d3
     * "topic -> groups" graph from database summaries.
     *
     * @param clusterAlias cluster whose summaries are queried.
     * @return {"active": "<d3 tree JSON>"}; "" when no summaries.
     */
    @Override
    public String getKafkaTopicGraph(String clusterAlias) {
        Map<String, Object> params = new HashMap<>();
        params.put("cluster", clusterAlias);
        params.put("start", 0);
        params.put("size", D3.SIZE + 1); // 10 + 1
        List<TopicSummaryInfo> topicSummarys = topicDao.getTopicSummaryPages(params);
        if (topicSummarys != null) {
            // {
            //   name : "Active Topics",
            //   children : targets
            // }
            JSONObject target = new JSONObject();
            // targets :
            // [
            //   {
            //     name : t1,
            //     children : [
            //       name : test-consumer-group2,
            //       name : consumer30
            //     ]
            //   },
            //   {
            //     name : t2,
            //     children : [
            //       name : consumer30,
            //       name : consumer2
            //     ]
            //   }
            // ]
            JSONArray targets = new JSONArray();
            target.put("name", "Active Topics");
            int count = 1;
            /**
             * cluster topic group_number active_group active_thread_total
             * cluster1 topic_1 2 2 2
             * cluster1 topic_2 3 3 3
             */
            for (TopicSummaryInfo topicSummary : topicSummarys) {
                /**
                 * {
                 *   name : 2,
                 *   children : [
                 *     name : t1,
                 *     name : t2
                 *   ]
                 * }
                 */
                JSONObject subTarget = new JSONObject();
                JSONArray subTargets = new JSONArray();
                if (count > KConstants.D3.SIZE) {
                    // Cap the number of topic nodes with a "..." placeholder.
                    subTarget.put("name", "...");
                    JSONObject subInSubTarget = new JSONObject();
                    subInSubTarget.put("name", "...");
                    subTargets.add(subInSubTarget);
                    subTarget.put("children", subTargets);
                    targets.add(subTarget);
                    break;
                } else {
                    subTarget.put("name", topicSummary.getTopic());
                    Map<String, Object> paramChilds = new HashMap<>();
                    paramChilds.put("cluster", clusterAlias);
                    paramChilds.put("topic", topicSummary.getTopic());
                    paramChilds.put("start", 0);
                    paramChilds.put("size", D3.CHILD_SIZE + 1); // 5 + 1
                    paramChilds.put("status", "0");// running
                    /**
                     * cluster group topic status
                     * cluster1 test-consumer-group2 topic_1 0
                     * cluster1 test-consumer-group2 topic_2 0
                     */
                    List<ConsumerGroupsInfo> consumerGroups = topicDao.getTopicPages(paramChilds);
                    int child = 1;
                    /**
                     * cluster group topic status
                     * cluster1 test-consumer-group2 topic_1 0
                     */
                    for (ConsumerGroupsInfo consumerGroup : consumerGroups) {
                        JSONObject subInSubTarget = new JSONObject();
                        if (child > D3.CHILD_SIZE) {
                            // Cap the number of group nodes under a topic.
                            subInSubTarget.put("name", "...");
                            subTargets.add(subInSubTarget);
                            break;
                        } else {
                            subInSubTarget.put("name", consumerGroup.getGroup());
                            subTargets.add(subInSubTarget);
                        }
                        child++;
                    }
                }
                count++;
                subTarget.put("children", subTargets);
                targets.add(subTarget);
            }
            target.put("children", targets);
            // {
            //   active : target
            // }
            JSONObject result = new JSONObject();
            result.put("active", target.toJSONString());
            return result.toJSONString();
        } else {
            return "";
        }
    }
}
| 13,732 |
3,426 | <gh_stars>1000+
/*
* Copyright (c) 2019-2021 GeyserMC. http://geysermc.org
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @author GeyserMC
* @link https://github.com/GeyserMC/Geyser
*/
package org.geysermc.connector.utils;
import com.github.steveice10.mc.protocol.data.game.entity.metadata.Position;
import com.nukkitx.math.vector.Vector3i;
import com.nukkitx.nbt.NbtMap;
import com.nukkitx.protocol.bedrock.packet.BlockEntityDataPacket;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import org.geysermc.connector.network.session.GeyserSession;
import org.geysermc.connector.network.translators.world.block.entity.BedrockOnlyBlockEntity;
import org.geysermc.connector.network.translators.world.block.entity.BlockEntityTranslator;
import org.geysermc.connector.network.translators.world.block.entity.FlowerPotBlockEntityTranslator;
import org.geysermc.connector.registry.Registries;
import javax.annotation.Nonnull;
import java.util.HashMap;
import java.util.Map;
public class BlockEntityUtils {

    /**
     * A list of all block entities that require the Java block state in order to fill out their block entity information.
     * This list will be smaller with cache chunks on as we don't need to double-cache data
     */
    public static final ObjectArrayList<BedrockOnlyBlockEntity> BEDROCK_ONLY_BLOCK_ENTITIES = new ObjectArrayList<>();

    /**
     * Contains a list of irregular block entity name translations that can't be fit into the regex
     */
    public static final Map<String, String> BLOCK_ENTITY_TRANSLATIONS = new HashMap<String, String>() {
        {
            // Bedrock/Java differences
            put("minecraft:enchanting_table", "EnchantTable");
            put("minecraft:jigsaw", "JigsawBlock");
            put("minecraft:piston_head", "PistonArm");
            put("minecraft:trapped_chest", "Chest");
            // There are some legacy IDs sent but as far as I can tell they are not needed for things to work properly
        }
    };

    // Fallback translator returned when no translator is registered for a name.
    private static final BlockEntityTranslator EMPTY_TRANSLATOR = Registries.BLOCK_ENTITIES.get("Empty");

    static {
        // Seeing as there are only two - and, hopefully, will only ever be two - we can hardcode this
        // NOTE(review): this line uses Registries.BLOCK_ENTITIES.get().get("Chest")
        // while EMPTY_TRANSLATOR above uses Registries.BLOCK_ENTITIES.get("Empty") —
        // confirm which accessor form is correct for this registry type.
        BEDROCK_ONLY_BLOCK_ENTITIES.add((BedrockOnlyBlockEntity) Registries.BLOCK_ENTITIES.get().get("Chest"));
        BEDROCK_ONLY_BLOCK_ENTITIES.add(new FlowerPotBlockEntityTranslator());
    }

    /**
     * Converts a Java block entity identifier (e.g. "minecraft:mob_spawner")
     * to the Bedrock PascalCase form (e.g. "MobSpawner"). Irregular mappings
     * are resolved through {@link #BLOCK_ENTITY_TRANSLATIONS} first.
     */
    public static String getBedrockBlockEntityId(String id) {
        // These are the only exceptions when it comes to block entity ids
        String value = BLOCK_ENTITY_TRANSLATIONS.get(id);
        if (value != null) {
            return value;
        }

        id = id.replace("minecraft:", "")
                .replace("_", " ");
        // Split at every space or capital letter - for the latter, some legacy Java block entity tags are the correct format already
        String[] words;
        if (!id.toUpperCase().equals(id)) { // Otherwise we get [S, K, U, L, L]
            words = id.split("(?=[A-Z])| "); // Split before every capital letter or at every space
        } else {
            words = id.split(" ");
        }

        for (int i = 0; i < words.length; i++) {
            words[i] = words[i].substring(0, 1).toUpperCase() + words[i].substring(1).toLowerCase();
        }
        return String.join("", words);
    }

    /**
     * Looks up the translator registered for the given Bedrock block entity
     * name, falling back to the "Empty" translator when none exists.
     */
    public static BlockEntityTranslator getBlockEntityTranslator(String name) {
        BlockEntityTranslator blockEntityTranslator = Registries.BLOCK_ENTITIES.get(name);
        if (blockEntityTranslator != null) {
            return blockEntityTranslator;
        }
        return EMPTY_TRANSLATOR;
    }

    /** Convenience overload converting a Java {@link Position} to a Bedrock vector. */
    public static void updateBlockEntity(GeyserSession session, @Nonnull NbtMap blockEntity, Position position) {
        updateBlockEntity(session, blockEntity, Vector3i.from(position.getX(), position.getY(), position.getZ()));
    }

    /** Sends the given block entity NBT to the client at the given position. */
    public static void updateBlockEntity(GeyserSession session, @Nonnull NbtMap blockEntity, Vector3i position) {
        BlockEntityDataPacket blockEntityPacket = new BlockEntityDataPacket();
        blockEntityPacket.setBlockPosition(position);
        blockEntityPacket.setData(blockEntity);
        session.sendUpstreamPacket(blockEntityPacket);
    }
}
| 1,846 |
1,543 | <reponame>charllie/hibernate-types
package com.vladmihalcea.hibernate.type.json.internal;
import com.vladmihalcea.hibernate.type.model.BaseEntity;
import org.junit.Test;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Objects;
import java.util.Set;
import static org.junit.Assert.assertTrue;
public class JsonTypeDescriptorTest {

    /**
     * If JSON serialization is used,
     * the {@link JsonTypeDescriptor#areEqual(Object, Object)} depends on the order of the elements.
     * <p>
     * If the first collection contains the all element of another collection,
     * then the two collection are equaled.
     * <p>
     * If the JSON object of the `theFirst` form would be :
     * {
     *   "formFields":[1, 2, 3]
     * }
     * <p>
     * And, the JSON object of the `theSecond` form would be:
     * {
     *   "formFields":[3, 2, 1]
     * }
     * <p>
     * The two JSON objects should be equal.
     */
    @Test
    public void testSetsAreEqual() {
        JsonTypeDescriptor descriptor = new JsonTypeDescriptor();

        // Same elements inserted in opposite order — equality must hold.
        Form theFirst = createForm(1, 2, 3);
        Form theSecond = createForm(3, 2, 1);

        assertTrue(descriptor.areEqual(theFirst, theSecond));
    }

    /**
     * Builds a Form whose field set contains one FormField per given number,
     * preserving insertion order via LinkedHashSet (so the two forms in the
     * test serialize their elements in different orders).
     */
    private Form createForm(Integer... numbers) {
        Form form = new Form();
        Set<FormField> formFields = new LinkedHashSet<>();
        Arrays.asList(numbers).forEach(o -> {
            FormField formField = new FormField();
            formField.setNumber(o);
            formFields.add(formField);
        });
        form.setFormFields(formFields);
        return form;
    }

    /** Value object with equality based solely on its number. */
    public static class FormField {

        private Integer number;

        public Integer getNumber() {
            return number;
        }

        public void setNumber(Integer number) {
            this.number = number;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            FormField formField = (FormField) o;
            return Objects.equals(number, formField.number);
        }

        @Override
        public int hashCode() {
            return Objects.hash(number);
        }
    }

    /** Entity holding a set of FormFields; equality based on that set. */
    public static class Form extends BaseEntity {

        private Set<FormField> formFields;

        public Set<FormField> getFormFields() {
            return formFields;
        }

        public void setFormFields(Set<FormField> formFields) {
            this.formFields = formFields;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof Form)) return false;
            Form form = (Form) o;
            return Objects.equals(formFields, form.formFields);
        }

        @Override
        public int hashCode() {
            return Objects.hash(formFields);
        }
    }
}
| 1,293 |
763 | package org.batfish.common.bdd;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.Arrays;
import java.util.stream.Stream;
import net.sf.javabdd.BDD;
import net.sf.javabdd.BDDFactory;
import net.sf.javabdd.BDDPairing;
import net.sf.javabdd.JFactory;
/** Various utility methods for working with {@link BDD}s. */
public class BDDUtils {
    /** Create a new {@link BDDFactory} object with {@code numVariables} boolean variables. */
    public static BDDFactory bddFactory(int numVariables) {
        // 10000 initial node table slots, 1000 cache slots.
        BDDFactory factory = JFactory.init(10000, 1000);
        factory.setCacheRatio(64);
        factory.setVarNum(numVariables); // allocate the requested boolean variables
        return factory;
    }

    /**
     * Swap the constraints on multiple {@link BDDInteger BDDIntegers} in a {@link BDD}. Usage:
     * swap(bdd, a1, a2, b1, b2, ...). Swaps a1 and a2, b1 and b2, etc.
     */
    public static BDD swap(BDD bdd, BDDInteger... vars) {
        return bdd.replace(swapPairing(vars));
    }

    /** Create a {@link BDDPairing} for swapping variables. */
    public static BDDPairing swapPairing(BDDInteger... vars) {
        checkArgument(vars.length > 0, "Requires at least 2 variables");
        checkArgument(vars.length % 2 == 0, "Requires an even number of variables");

        BDDFactory factory = vars[0].getFactory();

        // Partition the varargs into swap partners: even indices on the left,
        // odd indices on the right; partners must have equal bit widths.
        Stream.Builder<BDDInteger> left = Stream.builder();
        Stream.Builder<BDDInteger> right = Stream.builder();
        for (int i = 0; i < vars.length; i += 2) {
            checkArgument(
                vars[i].size() == vars[i + 1].size(),
                "Cannot swap variables with unequal number of bits");
            left.add(vars[i]);
            right.add(vars[i + 1]);
        }

        // Flatten each side into its underlying bit vectors; positions line up
        // because partners have identical sizes.
        BDD[] bv1 = left.build().flatMap(var -> Arrays.stream(var.getBitvec())).toArray(BDD[]::new);
        BDD[] bv2 = right.build().flatMap(var -> Arrays.stream(var.getBitvec())).toArray(BDD[]::new);

        // Map each bit to its partner in both directions (a true swap).
        BDDPairing pairing = factory.makePair();
        for (int i = 0; i < bv1.length; i++) {
            pairing.set(bv1[i].var(), bv2[i].var());
            pairing.set(bv2[i].var(), bv1[i].var());
        }
        return pairing;
    }
}
| 807 |
1,043 | <filename>micro-core/src/main/java/com/oath/micro/server/spring/SpringContextFactory.java
package com.oath.micro.server.spring;
import java.util.Arrays;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import com.oath.cyclops.types.persistent.PersistentSet;
import com.oath.cyclops.util.ExceptionSoftener;
import cyclops.reactive.ReactiveSeq;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import com.oath.micro.server.InternalErrorCode;
import com.oath.micro.server.Plugin;
import com.oath.micro.server.PluginLoader;
import com.oath.micro.server.config.Config;
import com.oath.micro.server.config.Microserver;
import lombok.AllArgsConstructor;
import lombok.experimental.Wither;
@AllArgsConstructor
public class SpringContextFactory {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Classes (config classes + annotated class + extras, minus any
    // Microserver-blacklisted classes) handed to Spring on startup.
    private final PersistentSet<Class> classes;
    private final Config config;

    // Pluggable Spring bootstrap strategy; @Wither allows deriving a copy
    // of this factory with a different builder.
    @Wither
    private final SpringBuilder springBuilder;

    /**
     * Builds the factory from config, the annotated root class and extra classes.
     * Classes blacklisted via @Microserver(blacklistedClasses=...) are removed.
     * The SpringBuilder is resolved from the first plugin that supplies one,
     * defaulting to {@link SpringApplicationConfigurator}.
     */
    public SpringContextFactory(Config config, Class<?> c, Set<Class<?>> classes) {
        PersistentSet<Class> s = config.getClasses()
                                       .plusAll(classes);
        s = s.plus(c);
        Microserver microserver = c.getAnnotation(Microserver.class);
        final PersistentSet<Class> immutableS = s;
        s = Optional.ofNullable(microserver)
                    .flatMap(ms -> Optional.ofNullable(ms.blacklistedClasses()))
                    .map(bl -> {
                        Set<Class> blacklistedClasses = Arrays.stream(bl)
                                                              .collect(Collectors.toSet());
                        return (PersistentSet<Class>) immutableS.stream()
                                                               .filter(clazz -> !blacklistedClasses.contains(clazz)).hashSet();
                    })
                    .orElse(immutableS);
        this.classes = s;
        this.config = config;
        springBuilder = ReactiveSeq.fromStream(PluginLoader.INSTANCE.plugins.get()
                                                                           .stream())
                                   .filter(m -> m.springBuilder() != null)
                                   .map(Plugin::springBuilder)
                                   .findFirst()
                                   .orElse(new SpringApplicationConfigurator());
    }

    /**
     * Same as the constructor above but with an explicitly supplied builder.
     * NOTE(review): the class-collection/blacklist logic is duplicated between
     * the two constructors — candidate for extraction into a shared helper.
     */
    public SpringContextFactory(SpringBuilder builder, Config config, Class<?> c, Set<Class<?>> classes) {
        PersistentSet<Class> s = config.getClasses();
        for (Class next : classes) {
            s = s.plus(next);
        }
        s = s.plus(c);
        Microserver microserver = c.getAnnotation(Microserver.class);
        final PersistentSet<Class> immutableS = s;
        s = Optional.ofNullable(microserver)
                    .flatMap(ms -> Optional.ofNullable(ms.blacklistedClasses()))
                    .map(bl -> {
                        Set<Class> blacklistedClasses = Arrays.stream(bl)
                                                              .collect(Collectors.toSet());
                        PersistentSet<Class> rs = immutableS.stream()
                                                            .filter(clazz -> !blacklistedClasses.contains(clazz))
                                                            .hashSet();
                        return rs;
                    })
                    .orElse(immutableS);
        this.classes = s;
        this.config = config;
        springBuilder = builder;
    }

    /**
     * Creates the Spring ApplicationContext from the collected classes.
     *
     * @return the context, or null if creation failed (the failure is logged
     *         and rethrown as a softened exception before reaching the return).
     */
    public ApplicationContext createSpringContext() {
        try {
            ApplicationContext springContext = springBuilder.createSpringApp(config, classes.stream().toArray(i -> new Class[classes.size()]));
            return springContext;
        } catch (Exception e) {
            logger.error(InternalErrorCode.STARTUP_FAILED_SPRING_INITIALISATION.toString(), e.getMessage());
            ExceptionSoftener.throwSoftenedException(e);
        }
        return null;
    }

    /** Exposes the class array the builder would configure Spring with. */
    public Class[] classes() {
        return springBuilder.classes(config, classes.stream().toArray(i -> new Class[classes.size()]));
    }
}
| 2,008 |
312 | //-----------------------------------------------------------------------------
// Copyright (c) 2015 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#ifndef _PLUGINS_SHARED_H
#include <plugins/plugins_shared.h>
#endif
#ifndef _SIM_OBJECT_H_
#include <sim/simObject.h>
#endif
#ifndef _BASE_COMPONENT_H_
#include <scene/components/baseComponent.h>
#endif
#ifndef _RENDER_CAMERA_H_
#include "rendering/renderCamera.h"
#endif
// Scene component that renders a procedurally scattered sky. Judging by the
// members, the sky is first generated into a cube map via six temporary views
// and framebuffers (generateSkyCube*), then drawn each frame through the
// Rendering::RenderHook interface -- TODO confirm against the .cpp.
class ScatterSkyComponent : public Scene::BaseComponent, public Rendering::RenderHook
{
   private:
      typedef Scene::BaseComponent Parent;

   protected:
      // Enable flag plus the GPU resources for drawing the finished sky.
      bool mEnabled;
      bgfx::TextureHandle mTexture;
      bgfx::ProgramHandle mShader;
      bgfx::UniformHandle mMatrixUniform;
      Graphics::ViewTableEntry* mView;

      // Cube-map generation state: request flag, readiness flag, and the
      // shader/uniforms used by the generation pass.
      bool mGenerateSkyCube;
      bool mSkyCubeReady;
      bgfx::ProgramHandle mGenerateSkyCubeShader;
      bgfx::UniformHandle mCubeParamsUniform;
      bgfx::UniformHandle mSkyParams1Uniform;
      bgfx::UniformHandle mSkyParams2Uniform;
      bgfx::UniformHandle mSkyParams3Uniform;
      bgfx::UniformHandle mSkyParams4Uniform;
      bgfx::UniformHandle mSkyParams5Uniform;

      // Scattering parameters; names suggest a Rayleigh/Mie atmospheric model
      // (exposed via initPersistFields) -- semantics defined by the shader.
      F32 mIntensity;
      F32 mSunBrightness;
      F32 mSurfaceHeight;
      F32 mScatterStrength;
      F32 mMieBrightness;
      F32 mMieDistribution;
      F32 mMieCollectionPower;
      F32 mMieStrength;
      F32 mRayleighBrightness;
      F32 mRayleighCollectionPower;
      F32 mRayleighStrength;
      F32 mStepCount;
      ColorF mAirColor;

      // One view + framebuffer per cube-map face used during generation.
      Graphics::ViewTableEntry* mTempSkyCubeView[6];
      bgfx::FrameBufferHandle mTempSkyCubeBuffers[6];

   public:
      ScatterSkyComponent();
      ~ScatterSkyComponent();

      // Scene lifecycle.
      virtual void onAddToScene();
      virtual void onRemoveFromScene();
      virtual void refresh();

      // RenderHook interface: invoked around each camera render.
      virtual void preRender(Rendering::RenderCamera* camera);
      virtual void render(Rendering::RenderCamera* camera);
      virtual void postRender(Rendering::RenderCamera* camera);

      // Cube-map generation pass (begin / per-face work / teardown).
      void generateSkyCubeBegin();
      void generateSkyCube();
      void generateSkyCubeEnd();

      static void initPersistFields();

      DECLARE_PLUGIN_CONOBJECT(ScatterSkyComponent);
};
1,590 | <reponame>DaManDOH/Simd<filename>src/Simd/SimdAvx1Float32.cpp
/*
* Simd Library (http://ermig1979.github.io/Simd).
*
* Copyright (c) 2011-2018 <NAME>.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#include "Simd/SimdMemory.h"
#include "Simd/SimdStore.h"
#include "Simd/SimdExtract.h"
namespace Simd
{
#ifdef SIMD_AVX_ENABLE
namespace Avx
{
        // Cosine distance between two float vectors:
        //     *distance = 1 - (a.b) / sqrt((a.a) * (b.b))
        // The three dot products are accumulated with AVX; when 'align' is
        // true both pointers are asserted to satisfy Aligned().
        template<bool align> void CosineDistance32f(const float * a, const float * b, size_t size, float * distance)
        {
            if (align)
                assert(Aligned(a) && Aligned(b));

            // F = floats per AVX register (project constant); DF = 2*F for the
            // loop unrolled by two register pairs.
            size_t partialAlignedSize = AlignLo(size, F);
            size_t fullAlignedSize = AlignLo(size, DF);
            size_t i = 0;
            // Two independent accumulators per product; folded together below.
            __m256 _aa[2] = { _mm256_setzero_ps(), _mm256_setzero_ps() };
            __m256 _ab[2] = { _mm256_setzero_ps(), _mm256_setzero_ps() };
            __m256 _bb[2] = { _mm256_setzero_ps(), _mm256_setzero_ps() };
            if (fullAlignedSize)
            {
                // Main loop: two register pairs (2*F floats) per iteration.
                for (; i < fullAlignedSize; i += DF)
                {
                    __m256 a0 = Load<align>(a + i + 0 * F);
                    __m256 b0 = Load<align>(b + i + 0 * F);
                    _aa[0] = _mm256_add_ps(_aa[0], _mm256_mul_ps(a0, a0));
                    _ab[0] = _mm256_add_ps(_ab[0], _mm256_mul_ps(a0, b0));
                    _bb[0] = _mm256_add_ps(_bb[0], _mm256_mul_ps(b0, b0));
                    __m256 a1 = Load<align>(a + i + 1 * F);
                    __m256 b1 = Load<align>(b + i + 1 * F);
                    _aa[1] = _mm256_add_ps(_aa[1], _mm256_mul_ps(a1, a1));
                    _ab[1] = _mm256_add_ps(_ab[1], _mm256_mul_ps(a1, b1));
                    _bb[1] = _mm256_add_ps(_bb[1], _mm256_mul_ps(b1, b1));
                }
                // Fold the second accumulator set into the first.
                _aa[0] = _mm256_add_ps(_aa[0], _aa[1]);
                _ab[0] = _mm256_add_ps(_ab[0], _ab[1]);
                _bb[0] = _mm256_add_ps(_bb[0], _bb[1]);
            }
            // Remaining whole registers (size rounded down to F).
            for (; i < partialAlignedSize; i += F)
            {
                __m256 a0 = Load<align>(a + i);
                __m256 b0 = Load<align>(b + i);
                _aa[0] = _mm256_add_ps(_aa[0], _mm256_mul_ps(a0, a0));
                _ab[0] = _mm256_add_ps(_ab[0], _mm256_mul_ps(a0, b0));
                _bb[0] = _mm256_add_ps(_bb[0], _mm256_mul_ps(b0, b0));
            }
            // Horizontal sums, then a scalar tail for the last size % F floats.
            float aa = ExtractSum(_aa[0]), ab = ExtractSum(_ab[0]), bb = ExtractSum(_bb[0]);
            for (; i < size; ++i)
            {
                float _a = a[i];
                float _b = b[i];
                aa += _a * _a;
                ab += _a * _b;
                bb += _b * _b;
            }
            *distance = 1.0f - ab / ::sqrt(aa*bb);
        }
void CosineDistance32f(const float * a, const float * b, size_t size, float * distance)
{
if (Aligned(a) && Aligned(b))
CosineDistance32f<true>(a, b, size, distance);
else
CosineDistance32f<false>(a, b, size, distance);
}
}
#endif// SIMD_AVX_ENABLE
}
| 2,085 |
636 | #!/usr/bin/env python3
# Provided by The Python Standard Library
import json
import argparse
import asyncio
import time
import urllib.request
import sys
from ctypes import *
def parse_args():
    """Build and parse the command line used for agent provisioning."""
    parser = argparse.ArgumentParser()

    # Required positional arguments.
    parser.add_argument("AGENCY_URL")
    parser.add_argument("WALLET_KEY")

    # Optional flags, all plain string-valued.
    optional_flags = [
        ("--wallet-name", "optional name for libindy wallet"),
        ("--wallet-type", "optional type of libindy wallet"),
        ("--agent-seed", "optional seed used to create enterprise->agent DID/VK"),
        ("--enterprise-seed", "optional seed used to create enterprise DID/VK"),
        ("--pool-config", "optional additional config for connection to pool nodes ({timeout: Opt<int>, extended_timeout: Opt<int>, preordered_nodes: Opt<array<string>>})"),
    ]
    for flag, help_text in optional_flags:
        parser.add_argument(flag, help=help_text)

    parser.add_argument("-v", "--verbose", action="store_true")
    return parser.parse_args()
def get_agency_info(agency_url):
    """Fetch the agency's DID and verkey from ``<agency_url>/agency``.

    Returns the decoded JSON dict on success. On network or parse failure,
    prints a JSON failure report to stdout, logs to stderr and exits with
    status 1 (this script is meant to be consumed by other tooling).
    """
    agency_resp = ''
    # Get agency's did and verkey:
    try:
        agency_req = urllib.request.urlopen('{}/agency'.format(agency_url))
    # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit are not swallowed.
    except Exception as exc:
        sys.stderr.write("Failed looking up agency did/verkey: '{}': {}\n".format(type(exc).__name__, exc))
        print(json.dumps({
            'provisioned': False,
            'provisioned_status': "Failed: Could not retrieve agency info from: {}/agency: '{}': {}".format(agency_url, type(exc).__name__, exc)
        }, indent=2))
        sys.exit(1)

    agency_resp = agency_req.read()
    try:
        agency_info = json.loads(agency_resp.decode())
    except Exception as exc:
        sys.stderr.write("Failed parsing response from agency endpoint: {}/agency: '{}': {}\n".format(agency_url, type(exc).__name__, exc))
        sys.stderr.write("RESPONSE: {}".format(agency_resp))
        print(json.dumps({
            'provisioned': False,
            'provisioned_status': "Failed: Could not parse response from agency endpoint from: {}/agency: '{}': {}".format(agency_url, type(exc).__name__, exc)
        }, indent=2))
        sys.exit(1)

    return agency_info
def register_agent(args):
    """Provision a cloud agent via libvcx and print the resulting config as JSON."""
    vcx = CDLL("/usr/lib/libvcx.so")
    if args.verbose:
        c_debug = c_char_p('debug'.encode('utf-8'))
        vcx.vcx_set_default_logger(c_debug)

    agency_info = get_agency_info(args.AGENCY_URL)

    json_str = json.dumps({'agency_url': args.AGENCY_URL,
                           'agency_did': agency_info['DID'],
                           'agency_verkey': agency_info['verKey'],
                           'wallet_key': args.WALLET_KEY,
                           'wallet_name': args.wallet_name,
                           'wallet_type': args.wallet_type,
                           'pool_config': args.pool_config,
                           'agent_seed': args.agent_seed,
                           'enterprise_seed': args.enterprise_seed})
    c_json = c_char_p(json_str.encode('utf-8'))

    # vcx_provision_agent returns a C string (NULL on failure). Without an
    # explicit restype, ctypes assumes a C int, which truncates the pointer on
    # 64-bit platforms before the old code cast it back via c_int -- a latent
    # crash/corruption. Declaring the restype makes ctypes hand us the bytes
    # (or None) directly.
    vcx.vcx_provision_agent.restype = c_char_p
    result = vcx.vcx_provision_agent(c_json)

    if not result:
        sys.stderr.write("could not register agent. Try again with '-v' for more details\n")
        print(json.dumps({
            'provisioned': False,
            'provisioned_status': "Failed: Could not register agent. Try again with '-v' for more details"
        }, indent=2))
    else:
        new_config = json.loads(result.decode('utf-8'))
        # Older agencies may omit payment_method; default it for consumers.
        if 'payment_method' not in new_config:
            new_config['payment_method'] = 'null'
        print(json.dumps(new_config, indent=2, sort_keys=True))
async def main():
    # Parsing and registration are synchronous; the coroutine wrapper only
    # exists so the script can run under the asyncio event loop below.
    args = parse_args()
    register_agent(args)

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
    # Brief pause, presumably to let libvcx background threads flush output
    # before interpreter shutdown -- TODO confirm.
    time.sleep(.1)
| 1,581 |
414 | <filename>ios-sealtalk/融云 Demo WatchKit Extension/RCWKExtension/Rows/SentLocationMessageRow.h
//
// SentLocationMessageRow.h
// RongIMWatchKit
//
// Created by litao on 15/4/29.
// Copyright (c) 2015年 RongCloud. All rights reserved.
//
#import "LocationMessageRow.h"
/// Row controller for an outgoing ("sent") location message in the watch UI.
/// All behavior is inherited unchanged from LocationMessageRow; the subclass
/// presumably exists so the WatchKit storyboard can bind a distinct row type
/// for the sent direction -- TODO confirm against the storyboard.
@interface SentLocationMessageRow : LocationMessageRow
@end
| 117 |
3,212 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard.relp.frame;
import org.apache.nifi.processors.standard.relp.response.RELPResponse;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
 * Tests for {@link RELPEncoder}: frames with data, without data, and a full
 * decode/respond round trip for the RELP "open" handshake.
 */
public class TestRELPEncoder {

    @Test
    public void testEncodingWithData() throws IOException {
        final RELPFrame frame = new RELPFrame.Builder()
                .txnr(1)
                .command("rsp")
                .dataLength(5)
                .data("12345".getBytes(StandardCharsets.UTF_8))
                .build();

        final RELPEncoder encoder = new RELPEncoder(StandardCharsets.UTF_8);
        final byte[] result = encoder.encode(frame);

        // RELP wire format: "<txnr> <command> <datalen> <data>\n"
        final String expected = "1 rsp 5 12345\n";
        Assert.assertEquals(expected, new String(result, StandardCharsets.UTF_8));
    }

    @Test
    public void testEncodingNoData() throws IOException {
        final RELPFrame frame = new RELPFrame.Builder()
                .txnr(1)
                .command("rsp")
                .dataLength(0)
                .data(new byte[0])
                .build();

        final RELPEncoder encoder = new RELPEncoder(StandardCharsets.UTF_8);
        final byte[] result = encoder.encode(frame);

        // With zero data the trailing " <data>" part is omitted entirely.
        final String expected = "1 rsp 0\n";
        Assert.assertEquals(expected, new String(result, StandardCharsets.UTF_8));
    }

    @Test
    public void testEncodingOpenResponse() {
        final String openFrameData = "relp_version=0\nrelp_software=librelp,1.2.7,http://librelp.adiscon.com\ncommands=syslog";
        final String openFrame = "1 open 85 " + openFrameData + "\n";

        final RELPDecoder decoder = new RELPDecoder(StandardCharsets.UTF_8);
        final RELPEncoder encoder = new RELPEncoder(StandardCharsets.UTF_8);

        // Feed the client's "open" frame byte-by-byte through the decoder.
        RELPFrame frame = null;
        for (byte b : openFrame.getBytes(StandardCharsets.UTF_8)) {
            if (decoder.process(b)) {
                frame = decoder.getFrame();
                break;
            }
        }
        Assert.assertNotNull(frame);

        final Map<String,String> offers = RELPResponse.parseOffers(frame.getData(), StandardCharsets.UTF_8);
        final RELPFrame responseFrame = RELPResponse.open(frame.getTxnr(), offers).toFrame(StandardCharsets.UTF_8);

        final byte[] response = encoder.encode(responseFrame);

        // Previously the encoded response was only printed; assert on it
        // instead: a server response echoes the txnr with the "rsp" command.
        final String responseText = new String(response, StandardCharsets.UTF_8);
        Assert.assertTrue("unexpected open response: " + responseText,
                responseText.startsWith("1 rsp "));
    }
}
| 1,307 |
325 | <reponame>am11/IL2C<filename>samples/Calculator/Calculator.UEFI/efi_main.c
#include <Calculator.Core.h>
#include <stdint.h>
//////////////////////////////////////////////////////////////////////////
// UEFI interface: code refer from
// UEFI entry point. Bridges the firmware handles into the IL2C runtime and
// runs the translated managed entry point.
uintptr_t efi_main(
    void* imageHandle,
    void* pSystemTable)
{
    // Hand the UEFI image handle and system table to the IL2C runtime so it
    // can reach firmware services during execution.
    il2c_initialize(imageHandle, pSystemTable);

    // Translated managed Main() of the Calculator.Core assembly.
    Calculator_PolishNotation_Main();

    il2c_shutdown();
    return 0;
}
| 156 |
852 | #########################
#Author: <NAME>
#Purpose: To investigate the AlCaPCCProducer input and output.
#########################
import FWCore.ParameterSet.Config as cms
process = cms.Process("ALCARECO")

# Input: Run2015D AlCaLumiPixels ALCARECO files (pixel clusters for lumi).
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('file:/eos/cms/store/data/Run2015D/AlCaLumiPixels/ALCARECO/LumiPixels-PromptReco-v4/000/260/039/00000/1CF2A210-5B7E-E511-8F4F-02163E014145.root','file:/eos/cms/store/data/Run2015D/AlCaLumiPixels/ALCARECO/LumiPixels-PromptReco-v4/000/260/039/00000/1E2B0829-707E-E511-B51B-02163E0145FE.root','file:/eos/cms/store/data/Run2015D/AlCaLumiPixels/ALCARECO/LumiPixels-PromptReco-v4/000/260/039/00000/2666E76A-707E-E511-92E4-02163E014689.root','file:/eos/cms/store/data/Run2015D/AlCaLumiPixels/ALCARECO/LumiPixels-PromptReco-v4/000/260/039/00000/2A1E3304-707E-E511-946C-02163E014241.root')
)

#Added process to select the appropriate events
process.OutALCARECOPromptCalibProdPCC = cms.PSet(
    SelectEvents = cms.untracked.PSet(
        SelectEvents = cms.vstring('pathALCARECOPromptCalibProdPCC')
    ),
    outputCommands = cms.untracked.vstring('drop *',
        'keep *_alcaPCCProducer_*_*',
        'keep *_MEtoEDMConvertSiStrip_*_*')
)

#Make sure that variables match in producer.cc and .h
process.alcaPCCProducer = cms.EDProducer("AlcaPCCProducer",
    pixelClusterLabel = cms.InputTag("siPixelClustersForLumi"),
    #Mod factor to count lumi and the string to specify output
    trigstring = cms.untracked.string("alcaPCCRand")
)

# Output event selection + kept branches for the LumiPixels stream.
process.OutALCARECOLumiPixels = cms.PSet(
    SelectEvents = cms.untracked.PSet(
        SelectEvents = cms.vstring('pathALCARECOLumiPixels')
    ),
    outputCommands = cms.untracked.vstring('drop *',
        'keep *_siPixelClustersForLumi_*_*',
        'keep *_TriggerResults_*_HLT')
)
process.OutALCARECOLumiPixels_noDrop = cms.PSet(
    SelectEvents = cms.untracked.PSet(
        SelectEvents = cms.vstring('pathALCARECOLumiPixels')
    ),
    outputCommands = cms.untracked.vstring('keep *_siPixelClustersForLumi_*_*',
        'keep *_TriggerResults_*_HLT')
)

# Pixel clusterizer configured for the lumi digis.
process.siPixelClustersForLumi = cms.EDProducer("SiPixelClusterProducer",
    ChannelThreshold = cms.int32(1000),
    ClusterThreshold = cms.double(4000.0),
    MissCalibrate = cms.untracked.bool(True),
    SeedThreshold = cms.int32(1000),
    SplitClusters = cms.bool(False),
    VCaltoElectronGain = cms.int32(65),
    VCaltoElectronOffset = cms.int32(-414),
    maxNumberOfClusters = cms.int32(-1),
    payloadType = cms.string('Offline'),
    src = cms.InputTag("siPixelDigisForLumi")
)

# Raw-to-digi for the HLT-selected lumi pixel FEDs.
process.siPixelDigisForLumi = cms.EDProducer("SiPixelRawToDigi",
    CablingMapLabel = cms.string(''),
    ErrorList = cms.vint32(29),
    IncludeErrors = cms.bool(True),
    InputLabel = cms.InputTag("hltFEDSelectorLumiPixels"),
    Regions = cms.PSet(

    ),
    UsePhase1 = cms.bool(False),
    UsePilotBlade = cms.bool(False),
    UseQualityInfo = cms.bool(False),
    UserErrorList = cms.vint32(40)
)

#HLT filter for PCC
process.ALCARECOHltFilterForPCC = cms.EDFilter("HLTHighLevel",
    HLTPaths = cms.vstring("*Random*"),
    eventSetupPathsKey = cms.string(""),
    TriggerResultsTag = cms.InputTag("TriggerResults","","HLT"),
    andOr = cms.bool(True),
    throw = cms.bool(False)
)

#From the end path, this is where we specify format for our output.
process.ALCARECOStreamPromptCalibProdPCC = cms.OutputModule("PoolOutputModule",
    SelectEvents = cms.untracked.PSet(
        SelectEvents = cms.vstring('pathALCARECOPromptCalibProdPCC')
    ),
    dataset = cms.untracked.PSet(
        dataTier = cms.untracked.string('ALCAPROMPT'),
        filterName = cms.untracked.string('PromptCalibProdPCC')
    ),
    eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
    fileName = cms.untracked.string('ProdPCC_Random_100.root'),
    outputCommands = cms.untracked.vstring('drop *',
        'keep *_alcaPCCProducer_*_*',
        'keep *_MEtoEDMConvertSiStrip_*_*')
)

#
process.alcaPCC = cms.Sequence(process.alcaPCCProducer)

#This is the key sequence that we are adding first...
process.seqALCARECOPromptCalibProdPCC = cms.Sequence(process.ALCARECOHltFilterForPCC+process.alcaPCCProducer)
process.pathALCARECOPromptCalibProdPCC = cms.Path(process.seqALCARECOPromptCalibProdPCC)

# Raw-to-digi + clustering sequence (not scheduled below).
process.seqALCARECOLumiPixels = cms.Sequence(process.siPixelDigisForLumi+process.siPixelClustersForLumi)
process.pathALCARECOLumiPixels = cms.Path(process.seqALCARECOLumiPixels)

process.ALCARECOStreamPromptCalibProdOutPath = cms.EndPath(process.ALCARECOStreamPromptCalibProdPCC)

# Throttle framework chatter; keep INFO-level summaries.
process.MessageLogger = cms.Service("MessageLogger",
    cerr = cms.untracked.PSet(
        FwkJob = cms.untracked.PSet(
            limit = cms.untracked.int32(0)
        ),
        FwkReport = cms.untracked.PSet(
            limit = cms.untracked.int32(10000000),
            reportEvery = cms.untracked.int32(100000)
        ),
        FwkSummary = cms.untracked.PSet(
            limit = cms.untracked.int32(10000000),
            reportEvery = cms.untracked.int32(1)
        ),
        INFO = cms.untracked.PSet(
            limit = cms.untracked.int32(0)
        ),
        Root_NoDictionary = cms.untracked.PSet(
            limit = cms.untracked.int32(0)
        ),
        default = cms.untracked.PSet(
            limit = cms.untracked.int32(10000000)
        ),
        noTimeStamps = cms.untracked.bool(False),
        threshold = cms.untracked.string('INFO'),
        enableStatistics = cms.untracked.bool(True),
        statisticsThreshold = cms.untracked.string('WARNING')
    ),
    debugModules = cms.untracked.vstring(),
    default = cms.untracked.PSet(

    ),
    suppressDebug = cms.untracked.vstring(),
    suppressInfo = cms.untracked.vstring(),
    suppressWarning = cms.untracked.vstring()
)

#added line for additional output summary `
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )

# Only the PCC path and its output end path are scheduled.
process.schedule = cms.Schedule(*[ process.pathALCARECOPromptCalibProdPCC, process.ALCARECOStreamPromptCalibProdOutPath ])
| 2,645 |
675 | <reponame>blico/intellij
package com.google.idea.sdkcompat.vcs;
import com.intellij.openapi.vcs.changes.VcsIgnoredFilesHolder;
/**
 * Compat for {@link VcsManagedFilesHolder}. VcsIgnoredFilesHolder was renamed to
 * VcsManagedFilesHolder starting with 2021.2, so plugin code extends this
 * interface instead of referencing either platform type directly.
 *
 * <p>#api211
 */
public interface VcsManagedFilesHolderCompat extends VcsIgnoredFilesHolder {}
| 131 |
14,668 | // Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/common/features/feature_flags.h"
#include <algorithm>
#include <array>
#include "base/check.h"
#include "base/feature_list.h"
namespace extensions {
namespace {
// Feature flags for extension features. These can be used to implement remote
// kill switches for extension features. Note any such feature flags must
// generally be removed once the API has been stable for a few releases.
constexpr std::array<base::Feature, 0> kFeatureFlags{};
const std::vector<base::Feature>* g_feature_flags_test_override = nullptr;
// Returns the base::Feature in [begin, end) whose name matches
// |feature_flag|, or nullptr if no entry matches.
template <typename T>
const base::Feature* GetFeature(T begin,
                                T end,
                                const std::string& feature_flag) {
  T it =
      std::find_if(begin, end, [&feature_flag](const base::Feature& feature) {
        return feature.name == feature_flag;
      });
  return it == end ? nullptr : &(*it);
}
// Looks up |feature_flag| in the test override list when one is installed,
// otherwise in the compiled-in kFeatureFlags table.
const base::Feature* GetFeature(const std::string& feature_flag) {
  const std::vector<base::Feature>* overrides = g_feature_flags_test_override;
  if (overrides)
    return GetFeature(overrides->begin(), overrides->end(), feature_flag);
  return GetFeature(std::begin(kFeatureFlags), std::end(kFeatureFlags),
                    feature_flag);
}
} // namespace
// Returns whether the named extension feature flag is enabled. The name must
// correspond to a known flag (kFeatureFlags or the test override list);
// unknown names are a programming error and CHECK-fail.
bool IsFeatureFlagEnabled(const std::string& feature_flag) {
  const base::Feature* feature = GetFeature(feature_flag);
  CHECK(feature) << feature_flag;
  return base::FeatureList::IsEnabled(*feature);
}
// Test-only: replaces the compiled-in flag list with |features| for the
// lifetime of the returned AutoReset, which restores the previous pointer
// when it goes out of scope.
ScopedFeatureFlagsOverride CreateScopedFeatureFlagsOverrideForTesting(
    const std::vector<base::Feature>* features) {
  return base::AutoReset<const std::vector<base::Feature>*>(
      &g_feature_flags_test_override, features);
}
} // namespace extensions
| 665 |
462 | <gh_stars>100-1000
{
"appDesc": {
"description": "App description.",
"message": "Rýchly e-mail s vyhľadávaním a bez spamu."
},
"appName": {
"description": "App name.",
"message": "Gmail"
}
}
| 111 |
1,144 | /*
* #%L
* de.metas.adempiere.adempiere.base
* %%
* Copyright (C) 2021 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
package de.metas.impexp.spreadsheet.process;
import de.metas.impexp.spreadsheet.csv.JdbcCSVExporter;
import de.metas.impexp.spreadsheet.excel.JdbcExcelExporter;
import de.metas.impexp.spreadsheet.service.SpreadsheetExporterService;
import de.metas.process.JavaProcess;
import de.metas.process.SpreadsheetExportOptions;
import de.metas.process.SpreadsheetFormat;
import org.adempiere.exceptions.AdempiereException;
import org.adempiere.exceptions.FillMandatoryException;
import org.adempiere.util.lang.impl.TableRecordReference;
import org.compiere.SpringContextHolder;
import org.compiere.util.Env;
import org.compiere.util.Evaluatee;
import org.compiere.util.Evaluatees;
import java.io.File;
import java.util.ArrayList;
/**
 * Exports the result of the process's SQL statement to a spreadsheet file
 * (Excel or CSV, depending on the configured {@link SpreadsheetExportOptions})
 * and attaches that file to the process result.
 */
public class ExportToSpreadsheetProcess extends JavaProcess
{
	final SpreadsheetExporterService spreadsheetExporterService = SpringContextHolder.instance.getBean(SpreadsheetExporterService.class);

	@Override
	protected String doIt()
	{
		final String sql = getSql();
		final Evaluatee evalCtx = getEvalContext();

		final SpreadsheetExportOptions spreadsheetExportOptions = getProcessInfo().getSpreadsheetExportOptions();
		final File resultFile = exportToFile(sql, evalCtx, spreadsheetExportOptions);

		getResult().setReportData(resultFile);

		return MSG_OK;
	}

	/**
	 * Runs the SQL and writes the result set to a file in the configured format.
	 * Replaces the previous if/else-if chain with a switch so a newly added
	 * {@link SpreadsheetFormat} value fails fast via the default branch.
	 */
	private File exportToFile(final String sql, final Evaluatee evalCtx, final SpreadsheetExportOptions spreadsheetExportOptions)
	{
		final SpreadsheetFormat spreadsheetFormat = spreadsheetExportOptions.getFormat();
		switch (spreadsheetFormat)
		{
			case Excel:
			{
				final JdbcExcelExporter jdbcExcelExporter = JdbcExcelExporter.builder()
						.ctx(getCtx())
						.translateHeaders(spreadsheetExportOptions.isTranslateHeaders())
						.applyFormatting(spreadsheetExportOptions.isExcelApplyFormatting())
						.build();

				spreadsheetExporterService.processDataFromSQL(sql, evalCtx, jdbcExcelExporter);
				return jdbcExcelExporter.getResultFile();
			}
			case CSV:
			{
				final JdbcCSVExporter jdbcCSVExporter = JdbcCSVExporter.builder()
						.adLanguage(Env.getAD_Language(getCtx()))
						.translateHeaders(spreadsheetExportOptions.isTranslateHeaders())
						.fieldDelimiter(spreadsheetExportOptions.getCsvFieldDelimiter())
						.build();

				spreadsheetExporterService.processDataFromSQL(sql, evalCtx, jdbcCSVExporter);
				return jdbcCSVExporter.getResultFile();
			}
			default:
				throw new AdempiereException("Unknown spreadsheet format: " + spreadsheetFormat);
		}
	}

	/**
	 * @return the SQL statement to export; fails if the process has none configured
	 */
	private String getSql()
	{
		return getProcessInfo().getSQLStatement().orElseThrow(() -> new FillMandatoryException("SQLStatement"));
	}

	/**
	 * Builds the evaluation context used to resolve variables inside the SQL:
	 * process parameters first, then the underlying record (if any), then the
	 * global context.
	 */
	private Evaluatee getEvalContext()
	{
		final ArrayList<Evaluatee> contexts = new ArrayList<>();

		//
		// 1: Add process parameters
		contexts.add(Evaluatees.ofRangeAwareParams(getParameterAsIParams()));

		//
		// 2: underlying record
		final String recordTableName = getTableName();
		final int recordId = getRecord_ID();
		if (recordTableName != null && recordId > 0)
		{
			final TableRecordReference recordRef = TableRecordReference.of(recordTableName, recordId);
			final Evaluatee evalCtx = Evaluatees.ofTableRecordReference(recordRef);
			if (evalCtx != null)
			{
				contexts.add(evalCtx);
			}
		}

		//
		// 3: global context
		contexts.add(Evaluatees.ofCtx(getCtx()));

		return Evaluatees.compose(contexts);
	}
}
| 1,324 |
435 | <filename>pybay-2017/videos/al-sweigart-stdio-game-jam-pybay-2017-lightning-talk.json
{
"description": "Oakland's video game museum will host a hackathon to create small, complete game programs that beginners can read. These text-based games only use stdio, so they're easy for beginners to read.",
"language": "eng",
"recorded": "2017-08-11",
"related_urls": [
{
"label": "talk slides",
"url": "https://speakerdeck.com/pybay/2017-al-sweigart-stdio-game-jam"
}
],
"speakers": [
"<NAME>"
],
"tags": [
"lightning talks"
],
"thumbnail_url": "https://i.ytimg.com/vi/bS2yRgN7Epg/hqdefault.jpg",
"title": "STDIO Game Jam",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=bS2yRgN7Epg"
}
]
}
| 325 |
1,235 | #include <iostream>
#include <set>
using namespace std;
// Demo of std::set basics: duplicate insertion, iteration, count() and
// membership testing. Output matches the expected transcript below.
int main() {
  std::set<int> values;

  // Insert 10,20,30,40 twice; std::set silently drops the duplicate pass.
  for (int pass = 0; pass < 2; ++pass) {
    for (int i = 1; i < 5; ++i) {
      values.insert(i * 10);
    }
  }

  std::cout << "myset size: " << values.size() << std::endl;
  std::cout << std::endl;

  // Elements iterate in ascending order.
  for (const int value : values) {
    std::cout << value << ' ';
  }
  std::cout << std::endl;
  std::cout << std::endl;

  // 1..4 were never inserted, so each count is 0.
  for (int n = 1; n < 5; ++n) {
    std::cout << n << " count: " << values.count(n) << std::endl;
  }
  std::cout << std::endl;

  // 10..40 are present exactly once each.
  for (int n = 1; n < 5; ++n) {
    std::cout << n * 10 << " count: " << values.count(n * 10) << std::endl;
  }
  std::cout << std::endl;

  // Membership check via count() instead of find().
  if (values.count(30) > 0) {
    std::cout << "30 is in the set";
  }
  std::cout << std::endl;
}
/*
myset size: 4
10 20 30 40
1 count: 0
2 count: 0
3 count: 0
4 count: 0
10 count: 1
20 count: 1
30 count: 1
40 count: 1
30 is in the set
*/
| 393 |
419 | #pragma once
#include "../_Module/API.h"
#include "System/Resource/ResourceLoader.h"
//-------------------------------------------------------------------------
namespace KRG::Render
{
    // Resource loader for render materials: deserializes a material record
    // from a binary archive and installs it once its dependencies are ready.
    class MaterialLoader : public Resource::ResourceLoader
    {
    public:

        MaterialLoader();

    private:

        // Reads the material resource record from the binary memory archive.
        virtual bool LoadInternal( ResourceID const& resID, Resource::ResourceRecord* pResourceRecord, Serialization::BinaryMemoryArchive& archive ) const final;

        // Resolves install-time dependencies and finalizes the resource.
        virtual Resource::InstallResult Install( ResourceID const& resID, Resource::ResourceRecord* pResourceRecord, Resource::InstallDependencyList const& installDependencies ) const final;
    };
} | 185 |
1,247 | <gh_stars>1000+
// Copyright 2019 <NAME>. See LICENSE file for terms.
#include "test.hpp"
// Path-sensitivity test driver: path_goal() must be reachable, while
// path_nongoal() is unreachable at runtime because 'ac' always equals 'argc'
// and the first branch already consumes the matching case.
int main(int argc, char *argv[]) {
  path_start();

  // volatile copy of argc: same value at runtime, but opaque to the
  // analyzer/compiler so the second branch cannot be trivially folded away.
  volatile int ac = argc;

  // CHAR_RAND is supplied by the test harness (see test.hpp).
  char x = CHAR_RAND;
  if (argc == 2 && argv[1][0] == x) {
    path_goal();
  }
  else if (ac == 2 && argv[1][0] == x) {
    path_nongoal();
  }
}
| 140 |
477 | <gh_stars>100-1000
import copy
import shlex
import toml as hjson
import io
import re
from django import forms
from django.template import Template, Context
from django.db.models import Sum
from django.utils.safestring import mark_safe
from django.utils.timezone import now
from django.contrib import messages
from django.urls import reverse
from django.core.validators import validate_slug
from pagedown.widgets import PagedownWidget
from django.conf import settings
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
from biostar.accounts.models import User, Profile, UserImage
from . import models, auth, factory, util
from .const import *
from .models import Project, Data, Analysis, Job, Access
from pprint import pprint
# Share the logger with models.
logger = models.logger

# Maximum number of characters accepted for text pasted into the upload form
# (used as max_length of DataUploadForm.input_text).
TEXT_UPLOAD_MAX = 10000

# Maximum file size that can be uploaded to recipe in megabytes.
MAX_RECIPE_FILE_MB = 5
def join(*args):
    """Join the given path components and return the absolute, normalized path."""
    combined = os.path.join(*args)
    return os.path.abspath(combined)
def ascii_only(text):
    """Raise a ValidationError unless ``text`` encodes cleanly as ASCII."""
    is_ascii = True
    try:
        text.encode('ascii')
    except Exception:
        is_ascii = False
    if not is_ascii:
        raise forms.ValidationError('Text may only contain plain text (ASCII) characters')
def check_size(fobj, maxsize=0.3, field=None):
    """Validate that an uploaded file object is at most ``maxsize`` megabytes.

    Returns ``fobj`` unchanged (including None/falsy values); raises a
    ValidationError when the file is too large or size inspection fails.
    """
    limit_bytes = maxsize * 1024 * 1024.0
    error_msg = ''
    try:
        if fobj and fobj.size > limit_bytes:
            size_mb = fobj.size / 1024 / 1024.0
            if field:
                prefix = f'{field} field : '.capitalize()
            else:
                prefix = ''
            error_msg = f"{prefix}file too large, {size_mb:0.1f}MB should be < {maxsize:0.1f}MB"
    except Exception as exc:
        error_msg = f"File size validation error: {exc}"

    if error_msg:
        raise forms.ValidationError(error_msg)

    return fobj
def check_upload_limit(file, user):
    """
    Raise a ValidationError if adding ``file`` would push ``user`` over their
    cumulative upload quota (Profile.upload_size, interpreted in megabytes).
    Returns the file unchanged otherwise.
    """
    # Existing data.
    data = Data.objects.filter(owner=user, method=Data.UPLOAD)

    # The current cumulative size of the current data.
    current_size = data.aggregate(Sum("size"))["size__sum"] or 0

    # Projected cumulative size in bytes (file.size + stored sizes).
    projected_size = file.size + current_size

    # Maximal cumulative size in bytes (quota is stored in MB).
    max_size = user.profile.upload_size * 1024 * 1024

    # Current file size in MB
    file_mb = file.size / 1024 / 1024

    # Verify projected data sizes.
    if projected_size > max_size:
        msg = f"You don't have enough storage space for data of size <b>{file_mb:.2f} MB</b>"
        raise forms.ValidationError(mark_safe(msg))

    return file
def clean_file(fobj, user, project, check_name=True):
    """Validate an uploaded file: size limit, per-user upload quota and
    (optionally) name uniqueness within the project. Returns the file, or
    passes a falsy value straight through."""
    if not fobj:
        return fobj

    check_size(fobj=fobj, maxsize=settings.MAX_FILE_SIZE_MB)
    check_upload_limit(file=fobj, user=user)

    # Check if this name already exists.
    if check_name and Data.objects.filter(name=fobj.name, project=project).exists():
        msg = "Name already exists. Upload another file or rename existing data."
        raise forms.ValidationError(msg)

    return fobj
def add_captcha_field(request, fields):
    """Mutate ``fields`` in place to add a reCAPTCHA field, unless the
    requesting user is trusted or no reCAPTCHA key is configured."""
    # Trusted users do not need a captcha check
    if request.user.is_authenticated and request.user.profile.trusted:
        return

    # Mutates the fields dict to add captcha field.
    if settings.RECAPTCHA_PRIVATE_KEY:
        fields["captcha"] = ReCaptchaField(widget=ReCaptchaWidget())

    return
class ProjectForm(forms.ModelForm):
    """ModelForm for creating/editing a Project. Requires the current request
    (for the user) and a ``create`` flag that tightens validation on creation."""
    image = forms.ImageField(required=False)
    text = forms.CharField(widget=PagedownWidget())

    # Should not edit uid because data directories get recreated
    def __init__(self, request, create=False, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.request = request
        self.create = create
        # choices = filter(lambda pri: pri[0] != Project.SHAREABLE, Project.PRIVACY_CHOICES)
        # Privacy rendered as a select; actual values validated in clean().
        self.fields["privacy"] = forms.CharField(widget=forms.Select(choices=Project.PRIVACY_CHOICES),
                                                 initial=self.instance.privacy,
                                                 required=False)

    class Meta:
        model = Project
        fields = ['name', 'text', 'privacy', 'rank', 'image', 'uid']

    def clean_image(self):
        # Enforce the default image size limit (see check_size).
        cleaned_data = super(ProjectForm, self).clean()
        image = cleaned_data.get('image')
        check_size(fobj=image)

        return image

    def clean_uid(self):
        # uid must be ASCII and unique across projects (excluding this one).
        cleaned_data = super(ProjectForm, self).clean()
        uid = cleaned_data['uid']
        project = Project.objects.filter(uid=uid).exclude(id=self.instance.id).first()
        # Validate uid only has ascii characters.
        ascii_only(uid)
        if project:
            raise forms.ValidationError("Project with this uid already exists.")

        return uid

    def clean(self):
        cleaned_data = super(ProjectForm, self).clean()
        user = self.request.user
        projects = Project.objects.filter(owner=user)
        privacy = cleaned_data.get("privacy") or 0

        # Trusted users are allowed everything.
        if user.is_authenticated and (user.is_staff or user.profile.trusted):
            return cleaned_data

        # Check project limit.
        if self.create and projects.count() > settings.MAX_PROJECTS:
            raise forms.ValidationError(
                f"You have exceeded the maximum number of projects allowed:{settings.MAX_PROJECTS}.")

        # Check privacy.
        if int(privacy) == Project.PUBLIC:
            raise forms.ValidationError(f"Only staff members can make public projects for now.")

        return cleaned_data

    def custom_save(self, owner):
        """Used to save on creation using custom function."""
        name = self.cleaned_data["name"]
        text = self.cleaned_data["text"]
        stream = self.cleaned_data["image"]
        project = auth.create_project(user=owner, name=name, text=text, stream=stream)
        project.save()

        return project
class DataUploadForm(forms.ModelForm):
    """
    Upload form for Data objects: accepts either a file upload or pasted text.
    """
    file = forms.FileField(required=False)
    input_text = forms.CharField(max_length=TEXT_UPLOAD_MAX, required=False)
    data_name = forms.CharField(required=False)
    type = forms.CharField(max_length=32, required=False)

    def __init__(self, user, project, *args, **kwargs):
        # `user` is the uploader; `project` is the destination project.
        self.user = user
        self.project = project
        super().__init__(*args, **kwargs)

    def save(self, **kwargs):
        """Create the Data object from the uploaded file or the pasted text."""
        text = self.cleaned_data["text"]
        stream = self.cleaned_data["file"]
        input_text = self.cleaned_data['input_text']
        type = self.cleaned_data["type"]
        name = self.cleaned_data['data_name']
        if stream:
            # Uploaded file: fall back to the file's own name.
            name = name or stream.name
        else:
            # No file given: wrap the pasted text in a stream.
            stream = io.StringIO(initial_value=input_text)
        data = auth.create_data(stream=stream, name=name, text=text, user=self.user,
                                project=self.project, type=type)
        # Mark text-only submissions so they can be distinguished later.
        if input_text and not self.cleaned_data["file"]:
            Data.objects.filter(pk=data.pk).update(method=Data.TEXTAREA)
        stream.close()
        return data

    class Meta:
        model = Data
        fields = ['data_name', 'input_text', 'text', "type"]

    def clean(self):
        """Require a file or text, validate it, and enforce the data quota."""
        cleaned_data = super(DataUploadForm, self).clean()
        upload = cleaned_data.get("file")
        text = cleaned_data.get("input_text")
        if not (upload or text):
            raise forms.ValidationError("Upload a file or write into the text field to create some data.")
        if upload:
            clean_file(fobj=upload, user=self.user,
                       project=self.project, check_name=False)
        else:
            # Text-only submissions must carry an explicit name.
            if not cleaned_data.get("data_name"):
                raise forms.ValidationError("Name is required with text inputs.")
        total_count = Data.objects.filter(owner=self.user, deleted=False).count()
        # Current data count is less than threshold
        valid = total_count < self.user.profile.data_threshold()
        if valid:
            return cleaned_data
        raise forms.ValidationError("Exceeded maximum amount of data.")

    def clean_type(self):
        """Derive the data type from the file extension or the submitted value."""
        cleaned_data = super(DataUploadForm, self).clean()
        fobj = cleaned_data.get('file')
        if fobj:
            name = fobj.name
        else:
            # Guard against a missing key so splitext never receives None.
            name = cleaned_data.get('data_name') or ""
        root, ext = os.path.splitext(name)
        ext = ext[1:]
        datatype = EXT_TO_TYPE.get(ext, cleaned_data.get('type'))
        # BUG FIX: `datatype.upper()` raised AttributeError when neither the
        # extension map nor the submitted form provided a type (datatype=None).
        datatype = (datatype or ext).upper()
        return datatype
class DataEditForm(forms.ModelForm):
    """
    Edit form for an existing Data object.

    Uploaded data gets a replacement-file field; textarea data gets an
    editable text field pre-filled with the current file contents.
    """
    type = forms.CharField(max_length=32, required=False)

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super().__init__(*args, **kwargs)
        if self.instance.method == Data.UPLOAD:
            self.fields["file"] = forms.FileField(required=False)
        elif self.instance.method == Data.TEXTAREA:
            # Read the current content with a context manager so the file
            # handle is closed promptly (the original version leaked it).
            with open(self.instance.get_files()[0], 'r') as handle:
                initial = handle.read()
            self.fields["input_text"] = forms.CharField(max_length=TEXT_UPLOAD_MAX,
                                                        required=True,
                                                        initial=initial)

    def save(self, commit=True):
        """Overwrite the backing file (if new content was given) and stamp the edit."""
        cleaned_data = super(DataEditForm, self).clean()
        fobj = cleaned_data.get('file')
        input_text = cleaned_data.get("input_text")
        current_file = self.instance.get_files()[0]
        if input_text:
            fobj = io.StringIO(initial_value=input_text)
        if fobj:
            util.write_stream(stream=fobj, dest=current_file)
        self.instance.lastedit_user = self.user
        # BUG FIX: the attribute was misspelled `lasedit_date`, so the edit
        # timestamp was never actually updated on the instance.
        self.instance.lastedit_date = now()
        # Propagate the edit stamp to the parent project as well.
        Project.objects.filter(uid=self.instance.project.uid).update(lastedit_user=self.user,
                                                                     lastedit_date=now())
        return super(DataEditForm, self).save(commit)

    class Meta:
        model = Data
        fields = ['name', 'text', "type"]

    def clean_file(self):
        # Reuse the shared upload validations; name collisions are allowed
        # here because we are replacing the file of an existing Data object.
        cleaned_data = super(DataEditForm, self).clean()
        return clean_file(fobj=cleaned_data.get('file'),
                          user=self.user,
                          project=self.instance.project,
                          check_name=False)

    def clean_type(self):
        # Normalize the submitted type (CharField yields "" when omitted).
        cleaned_data = super(DataEditForm, self).clean()
        datatype = cleaned_data.get('type')
        datatype = datatype.upper()
        return datatype
class RecipeForm(forms.ModelForm):
    """
    Fields that are not submitted are set to existing values.
    """
    image = forms.ImageField(required=False)
    uid = forms.CharField(max_length=32, validators=[validate_slug], required=False)
    json_text = forms.CharField(max_length=MAX_TEXT_LEN, initial="", required=False)
    template = forms.CharField(max_length=MAX_TEXT_LEN, initial="# code", required=False)
    name = forms.CharField(max_length=MAX_NAME_LEN, required=False)
    rank = forms.FloatField(required=False, initial=100)
    text = forms.CharField(initial="Recipe description", widget=PagedownWidget(), required=False)

    def __init__(self, user, project=None, *args, **kwargs):
        # `user` is the editor; `project` is the target project (the instance's
        # own project is used by validate_writable when editing).
        self.user = user
        self.project = project
        super().__init__(*args, **kwargs)
        # Build the security dropdown from the recipe's current state.
        authorized = self.instance.security
        choices = Analysis.SECURITY_STATES
        self.fields['security'] = forms.IntegerField(
            widget=forms.Select(attrs={'class': 'ui dropdown'}, choices=choices),
            initial=authorized, required=False)

    class Meta:
        model = Analysis
        fields = ["name", "rank", "text", "uid", "json_text", "template", "security"]

    def get_initial(self):
        """
        Returns the initial data to use for forms on this view.
        """
        initial = super(RecipeForm, self).get_initial()
        for field in self.Meta.fields:
            initial[field] = getattr(self.instance, field)
        return initial

    def validate_writable(self):
        # Check write access when editing
        is_writable = auth.writeable_recipe(user=self.user, source=self.instance, project=self.project)
        if not is_writable:
            raise forms.ValidationError('You need write access to the original recipe to edit.')

    def clean(self):
        """
        Applies security measures to recipe editing.
        """
        cleaned_data = super(RecipeForm, self).clean()
        # Anonymous users cannot edit.
        if self.user.is_anonymous:
            raise forms.ValidationError('You need to be logged in.')
        # Check to see if the recipe is writable.
        self.validate_writable()
        # Fill with default values: unsubmitted fields fall back to the
        # instance's current values.
        for field in self.Meta.fields:
            if cleaned_data.get(field) is None:
                cleaned_data[field] = getattr(self.instance, field)
        return cleaned_data

    def clean_image(self):
        # Enforce the maximum image upload size.
        cleaned_data = super(RecipeForm, self).clean()
        image = cleaned_data.get('image')
        check_size(fobj=image)
        return image

    def clean_uid(self):
        uid = self.cleaned_data['uid']
        # Ensure the correct uid gets set when given empty string.
        if not uid:
            uid = getattr(self.instance, 'uid')
        return uid

    def clean_json_text(self):
        cleaned_data = super(RecipeForm, self).clean()
        json_text = cleaned_data.get('json_text')
        # Ensure correct JSON syntax.
        try:
            hjson.loads(json_text)
        except Exception as exc:
            msg = util.toml_error(exp_msg=exc, text=json_text)
            raise forms.ValidationError(msg)
        return json_text

    def clean_security(self):
        # Decide the resulting security state of the recipe.
        cleaned_data = super(RecipeForm, self).clean()
        # Current template/JSON, falling back to the stored values.
        template = cleaned_data.get('template') or self.instance.template
        json_text = cleaned_data.get('json_text') or self.instance.json_text
        # Shortcuts to security conditions.
        template_changed = (template != self.instance.template)
        json_changed = (json_text != self.instance.json_text)
        # User is not superuser.
        superuser = self.user.is_superuser
        # The current state of authorization
        security = cleaned_data['security']
        # Only superusers may alter the security state directly.
        if security != self.instance.security and not superuser:
            raise forms.ValidationError("Only super users can change recipe security")
        # Recipe becomes un-authorized when the template or JSON are changed.
        if superuser:
            # Superusers keep whatever state they submitted.
            security = security
        elif (json_changed or template_changed):
            security = Analysis.NOT_AUTHORIZED
        else:
            security = self.instance.security
        return security

    def save(self, commit=True):
        # Stamp the edit and keep the previous image when none was uploaded.
        self.instance.lastedit_date = now()
        self.instance.lastedit_user = self.user
        image = self.cleaned_data['image']
        self.instance.image = image or self.instance.image
        return super().save(commit)
class JobEditForm(forms.ModelForm):
    """Edit form for a Job's name, image and description."""

    text = forms.CharField(initial='Results generated by running the recipe.', widget=PagedownWidget(), required=False)

    class Meta:
        model = Job
        fields = ['name', "image", 'text']

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super().__init__(*args, **kwargs)

    def save(self, commit=True):
        # Stamp the edit on the job and bubble it up to the parent project.
        self.instance.lastedit_user = self.user
        self.instance.lastedit_date = now()
        Project.objects.filter(uid=self.instance.project.uid).update(
            lastedit_user=self.user, lastedit_date=now())
        return super().save(commit)
def clean_text(textbox):
    """Return `textbox` shell-quoted so it is safe to embed in a command line."""
    quoted = shlex.quote(textbox)
    return quoted
class RecipeInterface(forms.Form):
    """
    Dynamic form whose fields are generated from a recipe's JSON spec.
    """
    # The name of results when running the recipe.
    # name = forms.CharField(max_length=256, label="Name", help_text="This is how you can identify the run.")

    def __init__(self, request, json_data, analysis=None, project=None, add_captcha=True, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The json data determines what fields does the form have.
        self.json_data = json_data
        # The project is required to select data from.
        self.analysis = analysis
        self.project = analysis.project if analysis else project
        # Get request specific information
        self.request = request
        self.user = self.request.user
        # Create the dynamic field from each key in the data.
        for name, data in self.json_data.items():
            field = factory.dynamic_field(data, self.project)
            # Insert only valid fields.
            if field:
                self.fields[name] = field

    def clean(self):
        # Validate default fields.
        super(RecipeInterface, self).clean()
        # Check that the user is allowed to run this recipe at all.
        valid, msg = auth.validate_recipe_run(user=self.user, recipe=self.analysis)
        if not valid:
            raise forms.ValidationError(msg)
        for field, item in self.json_data.items():
            # Only file-upload fields need the size check below.
            if field in self.cleaned_data and item.get('display') == UPLOAD:
                stream = self.request.FILES.get(field)
                if not stream:
                    continue
                # Validate the file size.
                check_size(stream, field=field, maxsize=MAX_RECIPE_FILE_MB)
        self.validate_text_fields()

    def validate_text_fields(self):
        """Validate Character fields """
        # NOTE(review): bool_map is currently unused in this method.
        bool_map = {'false': False, 'true': True}
        # Default pattern matches any alphanumeric string with a given length
        default_pattern = r"^\w{1,20}$"
        for field, item in self.json_data.items():
            val = self.cleaned_data.get(field)
            # Validate text fields
            if (val is None) or (item.get("display") != TEXTBOX):
                continue
            # Acceptable regex pattern (spec may override the default).
            regex_pattern = item.get("regex", default_pattern)
            if re.fullmatch(regex_pattern, val) is None:
                msg = f"{field} : contains invalid patterns. Valid pattern:{regex_pattern}."
                raise forms.ValidationError(msg)
| 7,653 |
543 | package com.riiablo.map2.random;
import com.badlogic.gdx.math.RandomXS128;
public class Random extends RandomXS128 {
  /**
   * Captures the generator's current 128-bit internal state.
   *
   * @return a Seed built from xorshift128+ state words 0 and 1
   */
  public Seed seed() {
    long seed0 = getState(0);
    long seed1 = getState(1);
    return Seed.from(seed0, seed1);
  }

  /**
   * Restores the generator's internal state from a previously captured seed.
   *
   * @param seed the state to restore
   */
  public void seed(Seed seed) {
    super.setState(seed.seed0, seed.seed1);
  }
}
| 129 |
852 | #include "TopQuarkAnalysis/TopEventProducers/interface/TopInitSubset.h"
// Reads the input GenParticle collection configured via "src" and registers
// the filtered output collection this producer will create.
TopInitSubset::TopInitSubset(const edm::ParameterSet& cfg)
    : srcToken_(consumes<reco::GenParticleCollection>(cfg.getParameter<edm::InputTag>("src"))) {
  produces<reco::GenParticleCollection>();
}

TopInitSubset::~TopInitSubset() {}
// Per-event entry point: copies selected mother particles from the source
// collection into a new collection (see fillOutput) and stores it in the event.
void TopInitSubset::produce(edm::Event& evt, const edm::EventSetup& setup) {
  edm::Handle<reco::GenParticleCollection> src;
  evt.getByToken(srcToken_, src);

  // NOTE(review): `ref` is obtained but never used below -- confirm whether
  // it can be removed or was meant for rebuilding mother/daughter refs.
  const reco::GenParticleRefProd ref = evt.getRefBeforePut<reco::GenParticleCollection>();
  std::unique_ptr<reco::GenParticleCollection> sel(new reco::GenParticleCollection);

  //fill output collection
  fillOutput(*src, *sel);

  evt.put(std::move(sel));
}
// Finds the first top quark in `src` that has no top-quark mother (i.e. the
// first "original" top rather than a status-chain copy) and copies all of its
// mothers -- the initial-state partons -- into `sel`.
void TopInitSubset::fillOutput(const reco::GenParticleCollection& src, reco::GenParticleCollection& sel) {
  for (const reco::GenParticle& t : src) {
    // Only top quarks are of interest.
    if (std::abs(t.pdgId()) != TopInitID::tID)
      continue;

    // Skip tops that descend from another top (radiation/status copies).
    bool hasTopMother = false;
    for (unsigned idx = 0; idx < t.numberOfMothers(); ++idx) {
      if (std::abs(t.mother(idx)->pdgId()) == TopInitID::tID) {
        hasTopMother = true;
        break;
      }
    }
    if (hasTopMother)
      continue;

    // Copy every mother of the original top into the output collection.
    for (unsigned idx = 0; idx < t.numberOfMothers(); ++idx) {
      const reco::Candidate* mother = t.mother(idx);
      // Construct in place: the original allocated with a naked `new`,
      // wrapped the pointer in a unique_ptr, then pushed a *copy* -- an
      // allocation and a copy per mother for no benefit.
      sel.emplace_back(mother->threeCharge(),
                       mother->p4(),
                       mother->vertex(),
                       mother->pdgId(),
                       mother->status(),
                       false);
    }
    break;
  }
}
| 921 |
879 | <reponame>luiz158/Hibernate-SpringBoot
package com.bookstore.repository;
import com.bookstore.entity.Author;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
@Repository
public interface AuthorRepository extends JpaRepository<Author, Long> {

    /**
     * Loads every author and indexes the results by primary key.
     */
    default Map<Long, Author> fetchIdAuthor() {
        return findAll()
                .stream()
                .collect(Collectors.toMap(author -> author.getId(), author -> author));
    }

    @Transactional(readOnly = true)
    @Query("SELECT a.genre AS genre, count(a) AS genreCount FROM Author a GROUP BY a.genre")
    List<Object[]> groupByGenreObj();

    /**
     * Reshapes the raw genre/count projection rows into a genre-to-count map.
     */
    default Map<String, Long> groupByGenre() {
        return groupByGenreObj()
                .stream()
                .collect(Collectors.toMap(
                        row -> (String) row[0],
                        row -> (Long) row[1]));
    }
}
| 530 |
5,169 | <reponame>Gantios/Specs<gh_stars>1000+
{
"name": "AutoInsetter",
"platforms": {
"ios": "9.0"
},
"requires_arc": true,
"swift_version": "4.2",
"version": "1.7.0",
"summary": "Provide auto insetting capabilities to view controllers.",
"description": "Auto Inset engine that can automatically handle custom insetting of view controllers.",
"homepage": "https://github.com/uias/AutoInsetter",
"license": "MIT",
"authors": {
"<NAME>": "<EMAIL>"
},
"social_media_url": "http://twitter.com/MerrickSapsford",
"source": {
"git": "https://github.com/uias/AutoInsetter.git",
"tag": "1.7.0"
},
"source_files": "Sources/AutoInsetter/**/*.{h,m,swift}"
}
| 271 |
1,038 | package org.dataalgorithms.chap11.statemodel;
import java.util.List;
import java.util.ArrayList;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;
import org.dataalgorithms.util.InputOutputUtil;
/**
* Class containing a number of utility methods for manipulating
* Hadoop's SequenceFiles.
*
*
* @author <NAME>
*
*/
public class ReadDataFromHDFS {

    private static final Logger THE_LOGGER =
            Logger.getLogger(ReadDataFromHDFS.class);

    /** Utility class: no instances. */
    private ReadDataFromHDFS() {
    }

    public static List<TableItem> readDirectory(String path) {
        return ReadDataFromHDFS.readDirectory(new Path(path));
    }

    /**
     * Reads every "part*" file under the given directory and concatenates
     * their parsed TableItem records.
     *
     * @param path HDFS directory containing reducer output files
     * @return all parsed items from all part files
     */
    public static List<TableItem> readDirectory(Path path) {
        FileSystem fs;
        try {
            fs = FileSystem.get(new Configuration());
        }
        catch (IOException e) {
            THE_LOGGER.error("Unable to access the hadoop file system!", e);
            // Preserve the cause so callers see the real failure.
            throw new RuntimeException("Unable to access the hadoop file system!", e);
        }

        List<TableItem> list = new ArrayList<TableItem>();
        try {
            FileStatus[] stat = fs.listStatus(path);
            for (int i = 0; i < stat.length; ++i) {
                // Only reducer output files ("part-...") carry data.
                if (stat[i].getPath().getName().startsWith("part")) {
                    List<TableItem> pairs = readFile(stat[i].getPath(), fs);
                    list.addAll(pairs);
                }
            }
        }
        catch (IOException e) {
            THE_LOGGER.error("Unable to access the hadoop file system!", e);
            throw new RuntimeException("Error reading the hadoop file system!", e);
        }
        return list;
    }

    /**
     * Parses one file of lines shaped as {@code <fromState>,<toState>\t<count>}
     * into TableItem records; malformed lines are skipped.
     */
    public static List<TableItem> readFile(Path path, FileSystem fs) {
        THE_LOGGER.info("path=" + path);
        List<TableItem> list = new ArrayList<TableItem>();
        // try-with-resources closes both streams even on failure
        // (the original used a manual finally block).
        try (FSDataInputStream stream = fs.open(path);
             BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
            String line;
            while ((line = reader.readLine()) != null) {
                // line = <fromState><,><toState><TAB><count>
                THE_LOGGER.info("line=" + line);
                String[] tokens = line.split("\t"); // TAB separator
                if (tokens.length == 2) {
                    String states = tokens[0];
                    int count = Integer.parseInt(tokens[1]);
                    String[] twoStates = states.split(",");
                    list.add(new TableItem(twoStates[0], twoStates[1], count));
                }
            }
        }
        catch (IOException e) {
            // BUG FIX: the original message referenced an unrelated method
            // ("readFileIntoCoxRegressionItem") -- a copy/paste leftover.
            THE_LOGGER.error("readFile() failed!", e);
            throw new RuntimeException("readFile() failed!", e);
        }
        return list;
    }

    public static void main(String[] args) throws Exception {
        String path = args[0];
        List<TableItem> list = readDirectory(path);
        THE_LOGGER.info("list=" + list.toString());
    }
}
| 1,261 |
619 | package com.enjoyshop.fragment;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.DividerItemDecoration;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.widget.TextView;
import com.baidu.location.BDAbstractLocationListener;
import com.baidu.location.BDLocation;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.cjj.MaterialRefreshLayout;
import com.enjoyshop.EnjoyshopApplication;
import com.enjoyshop.R;
import com.enjoyshop.activity.GoodsDetailsActivity;
import com.enjoyshop.adapter.CategoryAdapter;
import com.enjoyshop.adapter.SecondGoodsAdapter;
import com.enjoyshop.bean.Category;
import com.enjoyshop.bean.HotGoods;
import com.enjoyshop.bean.Weather;
import com.enjoyshop.contants.HttpContants;
import com.enjoyshop.service.LocationService;
import com.enjoyshop.utils.LogUtil;
import com.enjoyshop.utils.ToastUtils;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.sunfusheng.marqueeview.MarqueeView;
import com.zhy.http.okhttp.OkHttpUtils;
import com.zhy.http.okhttp.callback.StringCallback;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import butterknife.BindView;
import okhttp3.Call;
import static com.enjoyshop.EnjoyshopApplication.getApplication;
/**
* <pre>
* author : 高勤
* e-mail : <EMAIL>.com
* time : 2017/08/08
* desc : 分类fragment
* version: 1.0
* </pre>
*/
public class CategoryFragment extends BaseFragment {

    // Paging/refresh states for the goods list.
    private static final int STATE_NORMAL = 0;
    private static final int STATE_REFREH = 1;
    private static final int STATE_MORE = 2;
    private int state = STATE_NORMAL;  // current state (normal initial load)

    @BindView(R.id.recyclerview_category)
    RecyclerView mRecyclerView;
    @BindView(R.id.recyclerview_wares)
    RecyclerView mRecyclerviewWares;
    @BindView(R.id.refresh_layout)
    MaterialRefreshLayout mRefreshLaout;
    @BindView(R.id.vf_hotmessage)
    MarqueeView mVfHotMessage;
    @BindView(R.id.tv_city)
    TextView mCityName;
    @BindView(R.id.tv_day_weather)
    TextView mDayWeather;
    @BindView(R.id.tv_night_weather)
    TextView mNightWeather;

    private Gson mGson = new Gson();

    private List<Category> categoryFirst = new ArrayList<>();  // first-level menu entries
    private CategoryAdapter mCategoryAdapter;                  // first-level menu adapter
    private SecondGoodsAdapter mSecondGoodsAdapter;            // second-level goods adapter
    private List<HotGoods.ListBean> datas;
    private List<String> mVFMessagesList;                      // messages shown in the vertical marquee
    private String provinceName;                               // province
    private String cityName;                                   // city name
    private String dayWeather;
    private String nightWeather;
    private LocationService locationService;

    private int currPage = 1;   // current page index
    private int totalPage = 1;  // total number of pages
    private int pageSize = 10;  // items per page

    @Override
    protected int getContentResourseId() {
        return R.layout.fragment_category;
    }

    @Override
    protected void init() {
        mVFMessagesList = new ArrayList<>();
        requestCategoryData();  // hot-goods / category data
        requestMessageData();   // marquee message data
        getLocation();          // resolve the current city position
    }

    @Override
    public void onResume() {
        super.onResume();
        // Resume the marquee animation when the fragment becomes visible.
        mVfHotMessage.startFlipping();
    }

    // Starts the Baidu location SDK and registers the result listener.
    private void getLocation() {
        locationService = ((EnjoyshopApplication) getApplication()).locationService;
        locationService.registerListener(mListener);
        locationService.setLocationOption(locationService.getOption());
        locationService.start();// start the location SDK
    }

    // Fetches the first-level category list from the backend.
    private void requestCategoryData() {
        OkHttpUtils.get().url(HttpContants.CATEGORY_LIST).build()
                .execute(new StringCallback() {
                    @Override
                    public void onError(Call call, Exception e, int id) {
                        LogUtil.e("分类一级", e + "", true);
                    }

                    @Override
                    public void onResponse(String response, int id) {
                        LogUtil.e("分类一级", response + "", true);
                        Type collectionType = new TypeToken<Collection<Category>>() {
                        }.getType();
                        Collection<Category> enums = mGson.fromJson(response, collectionType);
                        Iterator<Category> iterator = enums.iterator();
                        while (iterator.hasNext()) {
                            Category bean = iterator.next();
                            categoryFirst.add(bean);
                        }
                        showCategoryData();
                        defaultClick();
                    }
                });
    }

    // Populates the marquee with hard-coded promotional messages.
    private void requestMessageData() {
        mVFMessagesList.add("开学季,凭录取通知书购手机6折起");
        mVFMessagesList.add("都世丽人内衣今晚20点最低10元开抢");
        mVFMessagesList.add("购联想手机达3000元以上即送赠电脑包");
        mVFMessagesList.add("秋老虎到来,轻松购为您准备了这些必备生活用品");
        mVFMessagesList.add("穿了幸福时光男装,帅呆呆,妹子马上来");

        if (!mVFMessagesList.isEmpty()) {
            mVfHotMessage.setVisibility(View.VISIBLE);
            mVfHotMessage.startWithList(mVFMessagesList);
        } else {
            mVfHotMessage.setVisibility(View.GONE);
        }
    }

    /**
     * Renders the first-level menu data.
     */
    private boolean isclick = false;  // true once the user has tapped a category

    private void showCategoryData() {
        mCategoryAdapter = new CategoryAdapter(categoryFirst);
        mCategoryAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
            @Override
            public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
                Category category = (Category) adapter.getData().get(position);
                int id = category.getId();
                // NOTE(review): `name` is unused below.
                String name = category.getName();
                isclick = true;
                defaultClick();
                requestWares(id);
            }
        });
        mRecyclerView.setAdapter(mCategoryAdapter);
        mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        mRecyclerView.setItemAnimator(new DefaultItemAnimator());
        mRecyclerView.addItemDecoration(new DividerItemDecoration(getActivity(),
                DividerItemDecoration.VERTICAL));
    }

    private void defaultClick() {
        // Select item 0 by default (only before the first user click).
        if (!isclick) {
            Category category = categoryFirst.get(0);
            int id = category.getId();
            requestWares(id);
        }
    }

    /**
     * Fetches second-level (goods) data.
     *
     * @param firstCategorId the selected first-level category id
     */
    private void requestWares(int firstCategorId) {
        String url = HttpContants.WARES_LIST + "?categoryId=" + firstCategorId + "&curPage=" +
                currPage + "&pageSize=" + pageSize;

        OkHttpUtils.get().url(url).build().execute(new StringCallback() {
            @Override
            public void onError(Call call, Exception e, int id) {
                LogUtil.e("二级菜单", e + "", true);
            }

            @Override
            public void onResponse(String response, int id) {
                LogUtil.e("二级菜单", response + "", true);
                HotGoods hotGoods = mGson.fromJson(response, HotGoods.class);
                totalPage = hotGoods.getTotalPage();
                currPage = hotGoods.getCurrentPage();
                datas = hotGoods.getList();
                showData();
            }
        });
    }

    /**
     * Renders the second-level (goods) data according to the current state.
     */
    private void showData() {
        switch (state) {
            case STATE_NORMAL:
                mSecondGoodsAdapter = new SecondGoodsAdapter(datas);
                mSecondGoodsAdapter.setOnItemClickListener(new BaseQuickAdapter
                        .OnItemClickListener() {
                    @Override
                    public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
                        // Open the details page for the tapped goods item.
                        HotGoods.ListBean listBean = (HotGoods.ListBean) adapter.getData().get
                                (position);
                        Intent intent = new Intent(getContext(), GoodsDetailsActivity.class);
                        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                        intent.setFlags(Intent.FLAG_ACTIVITY_NO_ANIMATION);
                        Bundle bundle = new Bundle();
                        bundle.putSerializable("itemClickGoods", (Serializable) listBean);
                        intent.putExtras(bundle);
                        startActivity(intent);
                    }
                });
                mRecyclerviewWares.setAdapter(mSecondGoodsAdapter);
                mRecyclerviewWares.setLayoutManager(new GridLayoutManager(getContext(), 2));
                mRecyclerviewWares.setItemAnimator(new DefaultItemAnimator());
                mRecyclerviewWares.addItemDecoration(new DividerItemDecoration(getContext(),
                        DividerItemDecoration.HORIZONTAL));
                break;

            // case STATE_REFREH:
            //     mAdatper.clearData();
            //     mAdatper.addData(datas);
            //     mRecyclerView.scrollToPosition(0);
            //     mRefreshLaout.finishRefresh();
            //     break;
            //
            // case STATE_MORE:
            //     mAdatper.addData(mAdatper.getDatas().size(), datas);
            //     mRecyclerView.scrollToPosition(mAdatper.getDatas().size());
            //     mRefreshLaout.finishRefreshLoadMore();
            //     break;
        }
    }

    /*****
     *
     * Location result callback. Override onReceiveLocation; this can be
     * copied into other projects and adapted as needed.
     *
     */
    private BDAbstractLocationListener mListener = new BDAbstractLocationListener() {
        @Override
        public void onReceiveLocation(BDLocation location) {
            if (null != location && location.getLocType() != BDLocation.TypeServerError) {
                cityName = location.getCity();
                provinceName = location.getProvince();
                if (cityName != null) {
                    // Strip the trailing character ("市") from the city name.
                    mCityName.setText(cityName.substring(0, cityName.length() - 1));
                } else {
                    mCityName.setText("上海");
                }
                getCityWeather();
            } else {
                getCityWeather();
            }
        }
    };

    /**
     * Queries the weather data for the located city.
     */
    private void getCityWeather() {
        String city;  // the lookup may fail (no fix / network error), so default to 湖北/武汉
        String province;
        if (cityName != null && provinceName != null) {
            city = cityName.substring(0, cityName.length() - 1);
            province = provinceName.substring(0, provinceName.length() - 1);
        } else {
            city = "武汉";
            province = "湖北";
        }

        String url = HttpContants.requestWeather + "?key=201f8a7a91c30&city=" + city +
                "&province=" + province;

        OkHttpUtils.get().url(url).build().execute(new StringCallback() {
            @Override
            public void onError(Call call, Exception e, int id) {

            }

            @Override
            public void onResponse(String response, int id) {
                try {
                    Weather weather = mGson.fromJson(response, Weather.class);
                    List<Weather.ResultBean> result = weather.getResult();
                    // Only one city is queried, so there is a single result entry.
                    List<Weather.ResultBean.FutureBean> future = result.get(0).getFuture();
                    dayWeather = future.get(0).getDayTime();
                    nightWeather = future.get(0).getNight();
                    showWeather();
                } catch (Exception e) {
                    ToastUtils.showSafeToast(getContext(), e.getMessage());
                }
            }
        });
    }

    /**
     * Renders the weather data.
     */
    private void showWeather() {
        mDayWeather.setText("白天: " + dayWeather);
        mNightWeather.setText("晚间: " + nightWeather);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        locationService.unregisterListener(mListener);  // remove the listener
        locationService.stop();                         // stop the location service
    }

    @Override
    public void onPause() {
        super.onPause();
        // Pause the marquee animation while the fragment is hidden.
        mVfHotMessage.stopFlipping();
    }
}
| 6,760 |
394 | package com.xyoye.common_component.utils;
import android.content.Context;
import com.xyoye.common_component.base.app.BaseApplication;
/**
* Created by xyoye on 2021/1/6.
*/
/**
 * Singleton facade over the native "security" JNI library that resolves
 * API keys and computes request hashes.
 */
public class SecurityHelper {
    // Sentinel returned by the native layer when a key lookup fails.
    private static final String ERROR_RESULT = "error";
    // Native-side key identifiers.
    private static final int KEY_DANDAN = 0xC1000001;
    private static final int KEY_BUGLY = 0xC1000002;

    private final Context appContext;

    static {
        // Load the JNI library that implements getKey/buildHash.
        System.loadLibrary("security");
    }

    private SecurityHelper() {
        appContext = BaseApplication.Companion.getAppContext();
    }

    /** Lazy singleton holder (initialization-on-demand idiom). */
    private static class Holder {
        static SecurityHelper instance = new SecurityHelper();
    }

    public static SecurityHelper getInstance() {
        return Holder.instance;
    }

    /** Returns the Bugly app id resolved by the native library. */
    public String getBuglyId() {
        return getKey(KEY_BUGLY, appContext);
    }

    /** Returns the app id resolved by the native library. */
    public String getAppId() {
        return getKey(KEY_DANDAN, appContext);
    }

    /** Computes the request hash for the given info via the native library. */
    public String buildHash(String hashInfo) {
        return buildHash(hashInfo, appContext);
    }

    /**
     * True when the native layer returns a real app id rather than the
     * "error" sentinel (i.e. the build/signature is recognized).
     */
    public Boolean isOfficialApplication() {
        return !ERROR_RESULT.equals(getAppId());
    }

    private static native String getKey(int position, Context context);

    private static native String buildHash(String hashInfo, Context context);
}
| 443 |
407 | package com.elasticsearch.cloud.monitor.metric.common.uti;
import lombok.extern.slf4j.Slf4j;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
@Slf4j
public class PropertiesUtil {
private static Properties props;
static{
loadProps();
}
synchronized static private void loadProps(){
log.info("开始加载properties文件内容.......");
props = new Properties();
InputStream in = null;
try {
in = PropertiesUtil.class.getClassLoader().getResourceAsStream("common.properties");
props.load(in);
} catch (FileNotFoundException e) {
log.error("jdbc.properties文件未找到");
} catch (IOException e) {
log.error("出现IOException");
} finally {
try {
if(null != in) {
in.close();
}
} catch (IOException e) {
log.error("jdbc.properties文件流关闭出现异常");
}
}
log.info("加载properties文件内容完成...........");
log.info("properties文件内容:" + props);
}
public static String getProperty(String key){
if(null == props) {
loadProps();
}
return props.getProperty(key);
}
public static String getProperty(String key, String defaultValue) {
if(null == props) {
loadProps();
}
return props.getProperty(key, defaultValue);
}
}
| 747 |
2,027 | <filename>primitive/src/main/java/io/atomix/primitive/log/LogSession.java
/*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.primitive.log;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import io.atomix.primitive.PrimitiveState;
import io.atomix.primitive.partition.PartitionId;
import io.atomix.primitive.session.SessionId;
import io.atomix.utils.concurrent.ThreadContext;
/**
* Log session.
*/
/**
 * Log session: client-side handle to a single log partition, exposing a
 * producer/consumer pair plus session lifecycle and state callbacks.
 */
public interface LogSession {

  /**
   * Returns the session partition ID.
   *
   * @return the session partition ID
   */
  PartitionId partitionId();

  /**
   * Returns the session identifier.
   *
   * @return the session identifier
   */
  SessionId sessionId();

  /**
   * Returns the partition thread context.
   *
   * @return the partition thread context
   */
  ThreadContext context();

  /**
   * Returns the log producer.
   *
   * @return the log producer
   */
  LogProducer producer();

  /**
   * Returns the log consumer.
   *
   * @return the log consumer
   */
  LogConsumer consumer();

  /**
   * Returns the current session state.
   *
   * @return the current session state
   */
  PrimitiveState getState();

  /**
   * Registers a session state change listener.
   *
   * @param listener The callback to call when the session state changes.
   */
  void addStateChangeListener(Consumer<PrimitiveState> listener);

  /**
   * Removes a state change listener.
   *
   * @param listener the state change listener to remove
   */
  void removeStateChangeListener(Consumer<PrimitiveState> listener);

  /**
   * Connects the log session.
   *
   * @return a future to be completed once the log session has been connected
   */
  CompletableFuture<LogSession> connect();

  /**
   * Closes the log session.
   *
   * @return a future to be completed once the log session has been closed
   */
  CompletableFuture<Void> close();

  /**
   * Log session builder.
   */
  abstract class Builder implements io.atomix.utils.Builder<LogSession> {
  }
}
| 764 |
575 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ui/views/test/views_test_helper.h"
#include "ui/views/test/test_views_delegate.h"
namespace views {
// Returns the delegate used when a test does not install its own; this
// base implementation simply creates a plain TestViewsDelegate.
std::unique_ptr<TestViewsDelegate>
ViewsTestHelper::GetFallbackTestViewsDelegate() {
  return std::make_unique<TestViewsDelegate>();
}
// Installs |factory| on |delegate| when one was supplied; otherwise the
// delegate keeps whatever native-widget factory it already had.
void ViewsTestHelper::SetUpTestViewsDelegate(
    TestViewsDelegate* delegate,
    base::Optional<ViewsDelegate::NativeWidgetFactory> factory) {
  if (factory)
    delegate->set_native_widget_factory(*factory);
}
// No-op by default; platform-specific helpers may perform per-test setup.
void ViewsTestHelper::SetUp() {}
// Base implementation provides no native window context.
gfx::NativeWindow ViewsTestHelper::GetContext() {
  return nullptr;
}
} // namespace views
| 256 |
2,151 | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef NGBaseFragmentBuilder_h
#define NGBaseFragmentBuilder_h
#include "base/memory/scoped_refptr.h"
#include "third_party/blink/renderer/core/core_export.h"
#include "third_party/blink/renderer/core/layout/ng/ng_style_variant.h"
#include "third_party/blink/renderer/platform/text/text_direction.h"
#include "third_party/blink/renderer/platform/text/writing_mode.h"
#include "third_party/blink/renderer/platform/wtf/allocator.h"
namespace blink {
class ComputedStyle;
// Base class shared by NG fragment builders; stores the computed style,
// style variant, writing mode and text direction.
class CORE_EXPORT NGBaseFragmentBuilder {
  STACK_ALLOCATED();
 public:
  virtual ~NGBaseFragmentBuilder();
  // Requires a style to have been set (via a constructor or SetStyle);
  // enforced by the DCHECK below.
  const ComputedStyle& Style() const {
    DCHECK(style_);
    return *style_;
  }
  NGBaseFragmentBuilder& SetStyleVariant(NGStyleVariant);
  NGBaseFragmentBuilder& SetStyle(scoped_refptr<const ComputedStyle>,
                                  NGStyleVariant);
  WritingMode GetWritingMode() const { return writing_mode_; }
  TextDirection Direction() const { return direction_; }
 protected:
  // The style-less overload presumably exists for builders that set the
  // style later via SetStyle() — confirm at call sites.
  NGBaseFragmentBuilder(scoped_refptr<const ComputedStyle>,
                        WritingMode,
                        TextDirection);
  NGBaseFragmentBuilder(WritingMode, TextDirection);
 private:
  scoped_refptr<const ComputedStyle> style_;
  WritingMode writing_mode_;
  TextDirection direction_;
 protected:
  NGStyleVariant style_variant_;
};
} // namespace blink
#endif // NGBaseFragmentBuilder
| 572 |
779 | <gh_stars>100-1000
package org.everit.json.schema;
import static org.everit.json.schema.JSONMatcher.sameJsonAs;
import static org.hamcrest.MatcherAssert.assertThat;
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@code ConditionalSchema}: every combination of present/absent
 * if/then/else subschemas with the relevant subschema passing or failing,
 * plus {@code toString()} serialization.
 *
 * Fixtures: {@code PATTERN_STRING_SCHEMA} matches {@code f.*o} (located at
 * {@code #/if}), {@code MIN_LENGTH_STRING_SCHEMA} requires length &gt;= 6
 * ({@code #/then}), and {@code MAX_LENGTH_STRING_SCHEMA} requires length
 * &lt;= 4 ({@code #/else}); the tests mix them freely across branches.
 */
public class ConditionalSchemaTest {
    static final StringSchema MAX_LENGTH_STRING_SCHEMA = StringSchema.builder().maxLength(4).schemaLocation("#/else").build();
    static final StringSchema MIN_LENGTH_STRING_SCHEMA = StringSchema.builder().minLength(6).schemaLocation("#/then").build();
    static final StringSchema PATTERN_STRING_SCHEMA = StringSchema.builder().pattern("f.*o").schemaLocation("#/if").build();
    private static final ResourceLoader LOADER = new ResourceLoader("/org/everit/jsonvalidator/tostring/");
    // Builds a schema with all three branches populated; used by the
    // toString serialization tests below.
    private static ConditionalSchema.Builder initCompleteSchema() {
        return ConditionalSchema.builder()
                .ifSchema(TrueSchema.builder().build())
                .thenSchema(ObjectSchema.builder()
                        .requiresObject(true)
                        .addRequiredProperty("prop").build())
                .elseSchema(EmptySchema.builder().build());
    }
    // only if
    @Test
    public void onlyIfSuccessEvenIfDataIsInvalidAgainstSubschema() {
        ConditionalSchema.builder().ifSchema(StringSchema.builder().maxLength(2).build()).build().validate("foo");
    }
    @Test
    public void onlyIfSuccess() {
        ConditionalSchema.builder().ifSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("foo");
    }
    // only then
    @Test
    public void onlyThenSuccessEvenIfDataIsInvalidAgainstSubschema() {
        ConditionalSchema.builder().thenSchema(StringSchema.builder().maxLength(2).build()).build().validate("foo");
    }
    @Test
    public void onlyThenSuccess() {
        ConditionalSchema.builder().thenSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("foo");
    }
    // only else
    @Test
    public void onlyElseSuccessEvenIfDataIsInvalidAgainstSubschema() {
        ConditionalSchema.builder().elseSchema(StringSchema.builder().maxLength(1).build()).build().validate("foo");
    }
    @Test
    public void onlyElseSuccess() {
        ConditionalSchema.builder().elseSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("foo");
    }
    // if-then
    @Test
    public void ifSubschemaSuccessThenSubschemaFailure() {
        ConditionalSchema.Builder subject = ConditionalSchema.builder().ifSchema(MAX_LENGTH_STRING_SCHEMA)
                .thenSchema(PATTERN_STRING_SCHEMA);
        TestSupport.failureOf(subject)
                .expectedKeyword("then")
                .expectedPointer("#")
                .input("bar")
                .expect();
    }
    @Test
    public void ifSubschemaFailureThenSubschemaFailure() {
        ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("barbar");
    }
    @Test
    public void ifSubschemaSuccessThenSubschemaSuccess() {
        ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("foo");
    }
    @Test
    public void ifSubschemaFailureThenSubschemaSuccess() {
        ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("bar");
    }
    // if-else
    @Test
    public void ifSubschemaSuccessElseSubschemaFailure() {
        ConditionalSchema.builder().ifSchema(MAX_LENGTH_STRING_SCHEMA).elseSchema(PATTERN_STRING_SCHEMA).build().validate("bar");
    }
    @Test
    public void ifSubschemaFailureElseSubschemaFailure() {
        ConditionalSchema.Builder subject = ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA)
                .elseSchema(MAX_LENGTH_STRING_SCHEMA);
        TestSupport.failureOf(subject)
                .expectedKeyword("else")
                .expectedPointer("#")
                .input("barbar")
                .expect();
    }
    @Test
    public void ifSubschemaSuccessElseSubschemaSuccess() {
        ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).elseSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("foo");
    }
    @Test
    public void ifSubschemaFailureElseSubschemaSuccess() {
        ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).elseSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("bar");
    }
    // then-else
    @Test
    public void thenSubschemaSuccessElseSubschemaFailure() {
        ConditionalSchema.builder().thenSchema(MAX_LENGTH_STRING_SCHEMA).elseSchema(PATTERN_STRING_SCHEMA).build().validate("bar");
    }
    @Test
    public void thenSubschemaFailureElseSubschemaFailure() {
        ConditionalSchema.builder().thenSchema(PATTERN_STRING_SCHEMA).elseSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("barbar");
    }
    @Test
    public void thenSubschemaSuccessElseSubschemaSuccess() {
        ConditionalSchema.builder().thenSchema(PATTERN_STRING_SCHEMA).elseSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("foo");
    }
    @Test
    public void thenSubschemaFailureElseSubschemaSuccess() {
        ConditionalSchema.builder().thenSchema(PATTERN_STRING_SCHEMA).elseSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("bar");
    }
    // if-then-else
    @Test
    public void ifSubschemaSuccessThenSubschemaSuccessElseSubSchemaSuccess() {
        ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MAX_LENGTH_STRING_SCHEMA)
                .elseSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("foo");
    }
    @Test
    public void ifSubschemaSuccessThenSubschemaSuccessElseSubSchemaFailure() {
        ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MAX_LENGTH_STRING_SCHEMA)
                .elseSchema(MIN_LENGTH_STRING_SCHEMA).build().validate("foo");
    }
    @Test
    public void ifSubschemaSuccessThenSubschemaFailureElseSubSchemaSuccess() {
        ConditionalSchema.Builder subject = ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MAX_LENGTH_STRING_SCHEMA)
                .elseSchema(MIN_LENGTH_STRING_SCHEMA);
        TestSupport.failureOf(subject)
                .expectedKeyword("then")
                .expectedPointer("#")
                .input("foobar")
                .expect();
    }
    @Test
    public void ifSubschemaSuccessThenSubschemaFailureElseSubSchemaFailure() {
        ConditionalSchema.Builder subject = ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MAX_LENGTH_STRING_SCHEMA)
                .elseSchema(MIN_LENGTH_STRING_SCHEMA);
        TestSupport.failureOf(subject)
                .expectedKeyword("then")
                .expectedPointer("#")
                .input("foooo")
                .expect();
    }
    @Test
    public void ifSubschemaFailureThenSubschemaSuccessElseSubSchemaSuccess() {
        ConditionalSchema.builder().ifSchema(MAX_LENGTH_STRING_SCHEMA).thenSchema(PATTERN_STRING_SCHEMA)
                .elseSchema(MIN_LENGTH_STRING_SCHEMA).build().validate("foobar");
    }
    @Test
    public void ifSubschemaFailureThenSubschemaSuccessElseSubSchemaFailure() {
        ConditionalSchema.Builder subject = ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MAX_LENGTH_STRING_SCHEMA)
                .elseSchema(MIN_LENGTH_STRING_SCHEMA);
        TestSupport.failureOf(subject)
                .expectedKeyword("else")
                .expectedPointer("#")
                .input("bar")
                .expect();
    }
    @Test
    public void ifSubschemaFailureThenSubschemaFailureElseSubSchemaSuccess() {
        ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MIN_LENGTH_STRING_SCHEMA)
                .elseSchema(MAX_LENGTH_STRING_SCHEMA).build().validate("bar");
    }
    @Test
    public void ifSubschemaFailureThenSubschemaFailureElseSubSchemaFailure() {
        ConditionalSchema.Builder subject = ConditionalSchema.builder().ifSchema(PATTERN_STRING_SCHEMA).thenSchema(MIN_LENGTH_STRING_SCHEMA)
                .elseSchema(MAX_LENGTH_STRING_SCHEMA);
        TestSupport.failureOf(subject)
                .expectedKeyword("else")
                .expectedPointer("#")
                .input("barbarbar")
                .expect();
    }
    @Test
    public void toStringTest() {
        ConditionalSchema subject = initCompleteSchema().build();
        JSONObject actual = new JSONObject(subject.toString());
        assertThat(actual, sameJsonAs(LOADER.readObj("conditionalschema.json")));
    }
    @Test
    public void toString_noIf() {
        ConditionalSchema subject = initCompleteSchema().ifSchema(null).build();
        JSONObject expectedSchemaJson = LOADER.readObj("conditionalschema.json");
        expectedSchemaJson.remove("if");
        JSONObject actual = new JSONObject(subject.toString());
        assertThat(actual, sameJsonAs(expectedSchemaJson));
    }
    @Test
    public void toString_noThen() {
        ConditionalSchema subject = initCompleteSchema().thenSchema(null).build();
        JSONObject expectedSchemaJson = LOADER.readObj("conditionalschema.json");
        expectedSchemaJson.remove("then");
        JSONObject actual = new JSONObject(subject.toString());
        assertThat(actual, sameJsonAs(expectedSchemaJson));
    }
    @Test
    public void toString_noElse() {
        ConditionalSchema subject = initCompleteSchema().thenSchema(null).elseSchema(null).build();
        JSONObject expectedSchemaJson = LOADER.readObj("conditionalschema.json");
        expectedSchemaJson.remove("then");
        expectedSchemaJson.remove("else");
        JSONObject actual = new JSONObject(subject.toString());
        assertThat(actual, sameJsonAs(expectedSchemaJson));
    }
}
| 4,058 |
2,661 | /*
* Copyright 2017 Google Inc.
* Copyright 2020 The Open GEE Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Class for determination of connectedness of groups of assets.
#ifndef GEO_EARTH_ENTERPRISE_SRC_FUSION_KHGDAL_RASTERCLUSTERANALYZER_H_
#define GEO_EARTH_ENTERPRISE_SRC_FUSION_KHGDAL_RASTERCLUSTERANALYZER_H_
#include <string>
#include <vector>
#include <list>
#include "common/khExtents.h"
class khGDALDataset;
class khVirtualRaster;
// Cluster
// A Cluster is a set of connected insets in tile space.
// Note: Connected here means that the bounding boxes(BB) which encompasses
// each inset touch or intersect. Multiple insets may combine to produce a
// cluster in such a way that the aggregate cluster BB contains empty tiles.
// These empty tiles could intersect a new insets BB making the new inset
// part of the cluster without actually touching or intersecting any other
// individual inset BB in the cluster.
class Cluster {
 public:
  // Builds a single-inset cluster covering |extent| for |file|.
  // insets_area is initialized in the member-init list (it follows extents
  // in declaration order, so Area() is safe to call here); the original
  // assigned it in the constructor body, leaving it momentarily
  // uninitialized.
  Cluster(const khExtents<std::uint32_t> &extent, const std::string &file)
      : extents(extent),
        file_list(1, file),
        is_new(true),
        // Set is_updated to true for unification - allows updating of a flag
        // is_updated in Cluster::Join().
        is_updated(true),
        insets_area(Area()) {}
  void Print() const;
  // Whether this cluster's bounding box touches or intersects other's.
  bool Connects(const Cluster& other) const {
    // Note: Connected here means that the bounding box(BB) which encompasses
    // the new(other) inset touches or intersects the cluster BB.
    // The new(other) inset may connect with the cluster without actually
    // touching or intersecting any other individual inset BB in the cluster.
    return extents.connects(other.extents);
  }
  // Area of the aggregate cluster bounding box, in tiles.
  std::uint64_t Area() const {
    return extents.width() * extents.height();
  }
  // Sum of the individual inset bounding-box areas accumulated via Join().
  std::uint64_t InsetsArea() const {
    return insets_area;
  }
  // Merges tojoin into this cluster: grows the bounding box, accumulates
  // the inset area, and appends tojoin's file list.
  void Join(const Cluster& tojoin) {
    is_updated = true;
    extents.grow(tojoin.extents);
    insets_area += tojoin.insets_area;
    file_list.insert(file_list.end(), tojoin.file_list.begin(),
                     tojoin.file_list.end());
  }
  // Updates status - marks cluster new if cluster has been updated, and
  // resets is_updated flag.
  // Returns whether cluster's status has been updated (whether cluster is new).
  // Note: it is used in ClusterAnalyzer for algorithm optimization.
  bool UpdateStatus() {
    is_new = is_updated;
    is_updated = false;
    return is_new;
  }
  khExtents<std::uint32_t> extents;
  std::vector<std::string> file_list;
  // Note: the is_new, is_updated flags are used in ClusterAnalyzer for
  // algorithm optimization.
  bool is_new;
  bool is_updated;
  std::uint64_t insets_area;
};
// Class ClusterAnalyzer analyzes virtual raster for:
// - insets clustering in raster product tile space;
// - insets coverage in mosaiced area;
class ClusterAnalyzer {
 public:
  // Compute clusters of insets to check if insets of a virtual raster
  // intersect in a tilespace.
  // The infile is a *.khvr file, override_srs - use given SRS,
  // is_mercator - whether projection is Mercator.
  void CalcAssetClusters(const std::string &infile,
                         const std::string &override_srs,
                         const bool is_mercator);
  // The virtraster is a virtual raster, toplevel - normalized top resolution
  // level, is_mercator - whether projection is Mercator.
  void CalcAssetClusters(const khVirtualRaster &virtraster,
                         const size_t toplevel,
                         const bool is_mercator);
  // Print inset clusters.
  void PrintClusters() const;
  // Calculates sum of inset areas, virtual raster area in tile space and
  // their ratio.
  // The srcDS is a GDAL dataset built based on a virtual raster.
  void CalcAssetAreas(const khGDALDataset &srsDS);
  // The infile is a *.khvr file, is_mercator - whether projection is Mercator.
  void CalcAssetAreas(const std::string &infile,
                      const bool is_mercator);
  // The infile is a *.khvr file, override_srs - use given SRS,
  // is_mercator - whether projection is Mercator.
  void CalcAssetAreas(const std::string &infile,
                      const std::string &override_srs,
                      const bool is_mercator);
  // The virtraster is a virtual raster, toplevel - normalized top resolution
  // level, is_mercator - whether projection is Mercator.
  void CalcAssetAreas(const khVirtualRaster &virtraster,
                      const size_t toplevel,
                      const bool is_mercator);
 private:
  // Area ratio threshold specifies a threshold when to report a warning.
  static const double area_ratio_threshold;
  typedef std::list<Cluster> ClusterList;
  ClusterList clusters_;
  // Triggers update of status for all clusters.
  // Returns whether any of clusters has been updated.
  bool UpdateClustersStatus();
  // Appends a cluster to the working list.
  void AddCluster(const Cluster& cluster) {
    clusters_.push_back(cluster);
  }
  // Number of clusters currently held.
  size_t NumClusters() const {
    return clusters_.size();
  }
  // Calculates sum of insets areas based on information in clusters.
  std::uint64_t CalcRasterInsetsArea() const;
  // Calculates virtual raster (mosaic) area based on information in clusters.
  std::uint64_t CalcRasterArea() const;
  // Prints virtual raster info: sum of inset areas, raster area, their ratio.
  void PrintRasterInfo(const std::uint64_t raster_inset_area,
                       const std::uint64_t raster_area,
                       const bool is_area_check) const;
};
#endif // GEO_EARTH_ENTERPRISE_SRC_FUSION_KHGDAL_RASTERCLUSTERANALYZER_H_
| 2,143 |
1,253 | <filename>graph/c++/Segment_tree.cpp<gh_stars>1000+
#include <bits/stdc++.h>
using namespace std;
// limit for array size
const int N = 100000;

int n; // array size

// Flat storage for the iterative segment tree: leaves live in [n, 2n),
// internal nodes in [1, n); node i has children 2i and 2i + 1.
int tree[2 * N];

// Builds the sum segment tree from arr[0..n-1] (n is the global size).
void build( int arr[])
{
    // Copy the input values into the leaf slots.
    for (int leaf = 0; leaf < n; ++leaf)
        tree[n + leaf] = arr[leaf];

    // Fill each internal node with the sum of its two children, walking
    // from the last internal node down to the root.
    int node = n - 1;
    while (node > 0)
    {
        tree[node] = tree[node << 1] + tree[node << 1 | 1];
        --node;
    }
}

// Overwrites the element at index p with value and refreshes every
// ancestor sum on the path to the root.
void updateTreeNode(int p, int value)
{
    int pos = p + n; // leaf slot for index p
    tree[pos] = value;

    // Re-aggregate each parent from its two children (pos ^ 1 is the
    // sibling of pos).
    while (pos > 1)
    {
        tree[pos >> 1] = tree[pos] + tree[pos ^ 1];
        pos >>= 1;
    }
}

// Returns the sum over the half-open index interval [l, r).
int query(int l, int r)
{
    int total = 0;
    l += n;
    r += n;

    // Climb both interval boundaries toward the root, accumulating any
    // node that lies entirely inside the interval.
    while (l < r)
    {
        if (l & 1)
            total += tree[l++];
        if (r & 1)
            total += tree[--r];
        l >>= 1;
        r >>= 1;
    }
    return total;
}
// driver program to test the above function
int main()
{
    int a[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
    // n is global
    n = sizeof(a)/sizeof(a[0]);
    // build tree
    build(a);
    // print the sum over the half-open range [1, 3), i.e. a[1] + a[2]
    cout << query(1, 3)<<endl;
    // overwrite the element at index 2 with the value 1
    updateTreeNode(2, 1);
    // print the updated sum over [1, 3)
    cout << query(1, 3)<<endl;
    return 0;
}
| 832 |
405 | // Copyright (c) 2019, QuantStack and Mamba Contributors
//
// Distributed under the terms of the BSD 3-Clause License.
//
// The full license is in the file LICENSE, distributed with this software.
#ifndef MAMBA_CORE_POOL_HPP
#define MAMBA_CORE_POOL_HPP
#include <list>
#include "repo.hpp"
extern "C"
{
#include "solv/pooltypes.h"
}
namespace spdlog
{
class logger;
}
namespace mamba
{
    // RAII owner of a libsolv Pool together with the MRepos added to it.
    // Explicitly non-copyable and non-movable.
    class MPool
    {
    public:
        MPool();
        ~MPool();
        MPool(const MPool&) = delete;
        MPool& operator=(const MPool&) = delete;
        MPool(MPool&&) = delete;
        MPool& operator=(MPool&&) = delete;
        void set_debuglevel();
        void create_whatprovides();
        // Implicit conversion to the underlying libsolv pool handle.
        operator Pool*();
        // Takes ownership of |repo|; returns a reference to the stored repo.
        MRepo& add_repo(MRepo&& repo);
        void remove_repo(Id repo_id);
    private:
        std::pair<spdlog::logger*, std::string> m_debug_logger;
        Pool* m_pool;
        std::list<MRepo> m_repo_list;
    };
} // namespace mamba
#endif // MAMBA_POOL_HPP
| 454 |
4,697 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* License); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright (c) 2021, OPEN AI LAB
* Author: <EMAIL>
* original model: https://github.com/WXinlong/SOLO
*/
#include <vector>
#include <string>
#include <algorithm>
#include <cmath>
#include <stdlib.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <set>
#include <map>
#include "common.h"
#include "tengine/c_api.h"
#include "tengine_operations.h"
#include "graph/graph.h"
#include "graph/subgraph.h"
#include "graph/node.h"
#include "graph/tensor.h"
#include "operator/prototype/convolution_param.h"
typedef int (*common_test)(graph_t, const char* input_name, const char* node_name, int data_type, int layout, int n, int c, int h, int w, int outc);
// Creates a graph input node named |node_name| whose output tensor of
// |data_type| is shaped from { n, c, h, w } according to |layout| and
// |dims_count|. Returns 0 on success, -1 on failure.
int create_input_node(graph_t graph, const char* node_name, int data_type, int layout, int n, int c, int h, int w, int dims_count = 4)
{
    // A zero leading dimension shrinks the requested rank (n==0 -> 3D, etc.).
    if (0 == n) dims_count = 3;
    if (0 == c) dims_count = 2;
    if (0 == h) dims_count = 1;
    if (0 == w)
    {
        fprintf(stderr, "Dim of input node is not allowed. { n, c, h, w } = {%d, %d, %d, %d}.\n", n, c, h, w);
        return -1;
    }
    node_t node = create_graph_node(graph, node_name, "InputOp");
    if (NULL == node)
    {
        fprintf(stderr, "Create %d dims node(%s) failed. ", dims_count, node_name);
        return -1;
    }
    tensor_t tensor = create_graph_tensor(graph, node_name, data_type);
    if (NULL == tensor)
    {
        release_graph_node(node);
        fprintf(stderr, "Create %d dims tensor for node(%s) failed. ", dims_count, node_name);
        return -1;
    }
    int ret = set_node_output_tensor(node, 0, tensor, TENSOR_TYPE_INPUT);
    if (0 != ret)
    {
        release_graph_tensor(tensor);
        release_graph_node(node);
        fprintf(stderr, "Set %d dims output tensor for node(%s) failed. ", dims_count, node_name);
        return -1;
    }
    // NOTE(review): cases 3-5 below have no trailing break after the two
    // layout branches, so an unrecognized layout falls through into the next
    // case. Harmless while layout is always NCHW or NHWC, but worth
    // confirming if additional layouts are ever added.
    switch (dims_count)
    {
    case 1:
    {
        int dims_array[1] = {w};
        set_tensor_shape(tensor, dims_array, dims_count);
        break;
    }
    case 2:
    {
        int dims_array[2] = {h, w};
        set_tensor_shape(tensor, dims_array, dims_count);
        break;
    }
    case 3:
    {
        if (TENGINE_LAYOUT_NCHW == layout)
        {
            int dims_array[3] = {c, h, w};
            set_tensor_shape(tensor, dims_array, dims_count);
            break;
        }
        if (TENGINE_LAYOUT_NHWC == layout)
        {
            int dims_array[3] = {h, w, c};
            set_tensor_shape(tensor, dims_array, dims_count);
            break;
        }
    }
    case 4:
    {
        if (TENGINE_LAYOUT_NCHW == layout)
        {
            int dims_array[4] = {n, c, h, w};
            set_tensor_shape(tensor, dims_array, dims_count);
            break;
        }
        if (TENGINE_LAYOUT_NHWC == layout)
        {
            int dims_array[4] = {n, h, w, c};
            set_tensor_shape(tensor, dims_array, dims_count);
            break;
        }
    }
    case 5:
    {
        if (TENGINE_LAYOUT_NCHW == layout)
        {
            int dims_array[5] = {1, n, c, h, w};
            set_tensor_shape(tensor, dims_array, dims_count);
            break;
        }
        if (TENGINE_LAYOUT_NHWC == layout)
        {
            int dims_array[5] = {1, n, h, w, c};
            set_tensor_shape(tensor, dims_array, dims_count);
            break;
        }
    }
    default:
        // NOTE(review): the error is reported but the function still
        // returns 0 below - confirm whether this should return -1.
        fprintf(stderr, "Cannot support %d dims tensor.\n", dims_count);
    }
    release_graph_tensor(tensor);
    release_graph_node(node);
    return 0;
}
// Builds a minimal test graph: one input node (shaped per n/c/h/w/layout)
// feeding one node created by |test_func|, with the graph input/output node
// lists set. Returns the graph, or NULL on failure.
// NOTE(review): on the failure paths below the partially built graph is not
// destroyed - confirm whether destroy_graph should be called before return.
graph_t create_common_test_graph(const char* test_node_name, int data_type, int layout, int n, int c, int h, int w, int outc, common_test test_func, int dims_num = 4)
{
    graph_t graph = create_graph(NULL, NULL, NULL);
    if (NULL == graph)
    {
        fprintf(stderr, "get graph failed.\n");
        return NULL;
    }
    if (set_graph_layout(graph, layout) < 0)
    {
        fprintf(stderr, "set layout failed.\n");
        return NULL;
    }
    const char* input_name = "input_node";
    if (create_input_node(graph, input_name, data_type, layout, n, c, h, w, dims_num) < 0)
    {
        fprintf(stderr, "create input node failed.\n");
        return NULL;
    }
    if (test_func(graph, input_name, test_node_name, data_type, layout, n, c, h, w, outc) < 0)
    {
        fprintf(stderr, "create test node failed.\n");
        return NULL;
    }
    /* set input/output node */
    const char* inputs[] = {input_name};
    const char* outputs[] = {test_node_name};
    if (set_graph_input_node(graph, inputs, sizeof(inputs) / sizeof(char*)) < 0)
    {
        fprintf(stderr, "set inputs failed.\n");
        return NULL;
    }
    if (set_graph_output_node(graph, outputs, sizeof(outputs) / sizeof(char*)) < 0)
    {
        fprintf(stderr, "set outputs failed.\n");
        return NULL;
    }
    return graph;
}
// One detected instance produced by generate_res.
struct Object
{
    int cx;       // mask centroid x, in original-image pixels
    int cy;       // mask centroid y, in original-image pixels
    int label;    // 1-based class index (0 is background in class_names)
    float prob;   // final confidence: mask score * category score
    cv::Mat mask; // 8-bit single-channel mask, 255 inside the instance
};
// Approximate intersection area of two instance masks, sampled on a 4x4
// pixel grid: counts sample positions where both masks are foreground (255).
static inline float intersection_area(const Object& a, const Object& b, int img_w, int img_h)
{
    float area = 0.f;
    for (int y = 0; y < img_h; y = y + 4)
    {
        // Hoist the per-row pointers out of the column loop; the original
        // re-fetched both for every sampled column.
        const uchar* mp1 = a.mask.ptr(y);
        const uchar* mp2 = b.mask.ptr(y);
        for (int x = 0; x < img_w; x = x + 4)
        {
            if (mp1[x] == 255 && mp2[x] == 255) area += 1.f;
        }
    }
    return area;
}
// Approximate area of an instance mask, sampled on a 4x4 pixel grid:
// counts sample positions where the mask is foreground (255).
static inline float area(const Object& a, int img_w, int img_h)
{
    float area = 0.f;
    for (int y = 0; y < img_h; y = y + 4)
    {
        // Fetch the row pointer once per row instead of once per sample.
        const uchar* mp = a.mask.ptr(y);
        for (int x = 0; x < img_w; x = x + 4)
        {
            if (mp[x] == 255) area += 1.f;
        }
    }
    return area;
}
// Hoare-partition quicksort of objects[left..right] (inclusive) by
// descending prob; the two recursive halves may run as parallel OpenMP
// sections.
static void qsort_descent_inplace(std::vector<Object>& objects, int left, int right)
{
    int i = left;
    int j = right;
    // Pivot on the probability of the middle element.
    float p = objects[(left + right) / 2].prob;
    while (i <= j)
    {
        while (objects[i].prob > p)
            i++;
        while (objects[j].prob < p)
            j--;
        if (i <= j)
        {
            // swap
            std::swap(objects[i], objects[j]);
            i++;
            j--;
        }
    }
#pragma omp parallel sections
    {
#pragma omp section
        {
            if (left < j) qsort_descent_inplace(objects, left, j);
        }
#pragma omp section
        {
            if (i < right) qsort_descent_inplace(objects, i, right);
        }
    }
}
// Sorts all objects by descending confidence; no-op for an empty vector.
static void qsort_descent_inplace(std::vector<Object>& objects)
{
    if (!objects.empty())
        qsort_descent_inplace(objects, 0, objects.size() - 1);
}
// Greedy non-maximum suppression over instance masks using mask IoU.
// |objects| is expected to be sorted by descending prob (see
// qsort_descent_inplace); indices of the kept objects are appended to
// |picked|.
static void nms_sorted_segs(const std::vector<Object>& objects, std::vector<int>& picked, float nms_threshold, int img_w, int img_h)
{
    picked.clear();
    const int n = objects.size();
    // Precompute each mask's (sampled) area once.
    std::vector<float> areas(n);
    for (int i = 0; i < n; i++)
    {
        areas[i] = area(objects[i], img_w, img_h);
    }
    for (int i = 0; i < n; i++)
    {
        const Object& a = objects[i];
        int keep = 1;
        // Drop |a| if it overlaps any already-kept object above threshold.
        for (int j = 0; j < (int)picked.size(); j++)
        {
            const Object& b = objects[picked[j]];
            // intersection over union
            float inter_area = intersection_area(a, b, img_w, img_h);
            float union_area = areas[i] + areas[picked[j]] - inter_area;
            // float IoU = inter_area / union_area
            if (inter_area / union_area > nms_threshold)
                keep = 0;
        }
        if (keep)
            picked.push_back(i);
    }
}
// Appends the flattened grid indices whose category score reaches
// cate_thresh. Indices are collected per class, so the same grid cell can
// appear once for every class that activates it.
static void kernel_pick(const float* cate_pred, int w, int h, std::vector<int>& picked, int num_class, float cate_thresh)
{
    const int plane = h * w;
    for (int cls = 0; cls < num_class; cls++)
    {
        const float* scores = cate_pred + cls * plane;
        for (int idx = 0; idx < plane; idx++)
        {
            // Keep the cell unless its score is strictly below the
            // threshold (same "< thresh rejects" comparison as before).
            if (!(scores[idx] < cate_thresh))
                picked.push_back(idx);
        }
    }
}
// Prints the command-line usage banner to stderr.
void show_usage()
{
    fprintf(
        stderr,
        "[Usage]: [-h]\n [-m model_file] [-i image_file] [-r repeat_count] [-t thread_count]\n");
}
// Loads |image_file|, letterboxes it to letterbox_rows x letterbox_cols
// with gray (114) padding, normalizes each channel with |mean|/|scale|,
// and writes the result into |input_data| in NCHW order. |wpad| and |hpad|
// receive the total horizontal/vertical padding added.
void get_input_data(const char* image_file, float* input_data, int letterbox_rows, int letterbox_cols, const float* mean, const float* scale, int& wpad, int& hpad)
{
    cv::Mat sample = cv::imread(image_file, 1);
    cv::Mat img;
    if (sample.channels() == 1)
        cv::cvtColor(sample, img, cv::COLOR_GRAY2RGB);
    else
        cv::cvtColor(sample, img, cv::COLOR_BGR2RGB);
    /* letterbox process to support different letterbox size */
    // Scale so the image fits inside the letterbox while keeping aspect.
    float scale_letterbox;
    int resize_rows;
    int resize_cols;
    if ((letterbox_rows * 1.0 / img.rows) < (letterbox_cols * 1.0 / img.cols))
    {
        scale_letterbox = letterbox_rows * 1.0 / img.rows;
    }
    else
    {
        scale_letterbox = letterbox_cols * 1.0 / img.cols;
    }
    resize_cols = int(scale_letterbox * img.cols);
    resize_rows = int(scale_letterbox * img.rows);
    cv::resize(img, img, cv::Size(resize_cols, resize_rows));
    img.convertTo(img, CV_32FC3);
    // Generate a gray image for letterbox using opencv
    // NOTE(review): cv::Mat takes (rows, cols); the arguments here look
    // swapped, but this Mat is fully reallocated by copyMakeBorder below,
    // so it is harmless - confirm if rows != cols is ever used.
    cv::Mat img_new(letterbox_cols, letterbox_rows, CV_32FC3, cv::Scalar(0, 0, 0));
    int top = (letterbox_rows - resize_rows) / 2;
    int bot = (letterbox_rows - resize_rows + 1) / 2;
    int left = (letterbox_cols - resize_cols) / 2;
    int right = (letterbox_cols - resize_cols + 1) / 2;
    hpad = letterbox_rows - resize_rows;
    wpad = letterbox_cols - resize_cols;
    // Letterbox filling
    cv::copyMakeBorder(img, img_new, top, bot, left, right, cv::BORDER_CONSTANT, cv::Scalar(114.f, 114.f, 114.f));
    float* img_data = (float*)img_new.data;
    /* nhwc to nchw */
    for (int h = 0; h < letterbox_rows; h++)
    {
        for (int w = 0; w < letterbox_cols; w++)
        {
            for (int c = 0; c < 3; c++)
            {
                int in_index = h * letterbox_cols * 3 + w * 3 + c;
                int out_index = c * letterbox_rows * letterbox_cols + h * letterbox_cols + w;
                input_data[out_index] = (img_data[in_index] - mean[c]) * scale[c];
            }
        }
    }
}
// Appends a 1x1 Convolution node named |node_name| to the graph: |outc|
// filters over |c| input channels, stride 1, no padding, no activation.
// Creates fresh weight/bias const tensors and wires them plus |input_name|
// as the node's inputs. Returns 0 on success, -1 on failure.
int create_test_conv_node(graph_t graph, const char* input_name, const char* node_name, int data_type, int layout, int n, int c, int h, int w, int outc)
{
    (void)layout;
    (void)n;
    (void)c;
    (void)h;
    (void)w;
    (void)outc;
    /* create the test node */
    struct node* test_node = (struct node*)create_graph_node(graph, node_name, "Convolution");
    tensor_t input_tensor = get_graph_tensor(graph, input_name);
    if (nullptr == input_tensor)
    {
        fprintf(stderr, "create test node failed.\n");
        return -1;
    }
    /* create the sub node to product another input tensors which the test node is needed, such as weight/bias/slope tensor. */
    /* weight */
    node_t weight_node = create_graph_node(graph, "weight", "Const");
    tensor_t weight_tensor = create_graph_tensor(graph, "weight", TENGINE_DT_FP32);
    set_node_output_tensor(weight_node, 0, weight_tensor, TENSOR_TYPE_CONST);
    int weight_dims[4] = {outc, c, 1, 1}; // channel num
    set_tensor_shape(weight_tensor, weight_dims, 4);
    /* bias */
    node_t bias_node = create_graph_node(graph, "bias", "Const");
    tensor_t bias_tensor = create_graph_tensor(graph, "bias", TENGINE_DT_FP32);
    set_node_output_tensor(bias_node, 0, bias_tensor, TENSOR_TYPE_CONST);
    int bias_dims[1] = {outc}; // channel num
    set_tensor_shape(bias_tensor, bias_dims, 1);
    /* input tensors of test node */
    set_node_input_tensor(test_node, 0, input_tensor);
    set_node_input_tensor(test_node, 1, weight_tensor);
    set_node_input_tensor(test_node, 2, bias_tensor);
    /* output tensors of test node */
    tensor_t output_tensor = create_graph_tensor(graph, node_name, data_type);
    set_node_output_tensor(test_node, 0, output_tensor, TENSOR_TYPE_VAR);
    /* set params */
    // NOTE(review): the intermediate (struct node*) cast below is
    // redundant - param_mem is cast straight to conv_param anyway.
    struct conv_param* conv_param = (struct conv_param*)(struct node*)test_node->op.param_mem;
    conv_param->kernel_h = 1;
    conv_param->kernel_w = 1;
    conv_param->stride_h = 1;
    conv_param->stride_w = 1;
    conv_param->pad_h0 = 0;
    conv_param->pad_h1 = 0;
    conv_param->pad_w0 = 0;
    conv_param->pad_w1 = 0;
    conv_param->dilation_h = 1;
    conv_param->dilation_w = 1;
    conv_param->input_channel = c;
    conv_param->output_channel = outc;
    conv_param->group = 1;
    conv_param->activation = -1;
    return 0;
}
// Turns the picked mask kernels into instance mask predictions by running a
// dynamic 1x1 convolution: builds a temporary Tengine graph whose conv
// weights are the deduplicated picked rows of |kernel_pred| (bias zero),
// feeds |feature_pred| (c_in x 112 x 112) as input, and appends one
// 112x112 flattened mask per picked kernel to |ins_pred|. |kernel_map|
// records picked-index -> output-channel. Returns 0 on success (including
// when nothing was picked), -1 on failure.
static int ins_decode(float* kernel_pred, float* feature_pred,
                      std::vector<int>& kernel_picked, std::map<int, int>& kernel_map, std::vector<std::vector<float> >& ins_pred, int c_in)
{
    // Deduplicate picked kernel indices (kernel_pick can push duplicates
    // across classes).
    std::set<int> kernel_pick_set;
    kernel_pick_set.insert(kernel_picked.begin(), kernel_picked.end());
    int c_out = kernel_pick_set.size();
    int ret = 0;
    if (c_out > 0)
    {
        std::vector<float> bias_data(c_out, 0);
        //init graph
        // NOTE(review): a non-zero ret here is only logged; execution
        // continues - confirm whether this should return -1.
        ret = init_tengine();
        if (0 != ret)
            fprintf(stderr, "Tengine init failed.\n");
        // create
        graph_t graph = create_common_test_graph("conv", TENGINE_DT_FP32, TENGINE_LAYOUT_NCHW, 1, c_in, 112, 112, c_out, &create_test_conv_node);
        if (nullptr == graph)
            return -1;
        //set_log_level(LOG_INFO);
        //dump_graph(graph);
        /* fill test data */
        // set quantize params
        struct tensor* input_tensor = (struct tensor*)get_graph_tensor(graph, "input_node");
        struct tensor* weight_tensor = (struct tensor*)get_graph_tensor(graph, "weight");
        struct tensor* bias_tensor = (struct tensor*)get_graph_tensor(graph, "bias");
        struct tensor* output_tensor = (struct tensor*)get_graph_tensor(graph, "conv");
        // set input data
        set_tensor_buffer(input_tensor, feature_pred, c_in * 112 * 112 * sizeof(float));
        // Copy each picked kernel row into the weight buffer, recording its
        // output-channel position in kernel_map.
        std::vector<float> weights(c_in * c_out);
        std::set<int>::iterator pick_c;
        int count_c = 0;
        for (pick_c = kernel_pick_set.begin(); pick_c != kernel_pick_set.end(); pick_c++)
        {
            kernel_map[*pick_c] = count_c;
            for (int j = 0; j < c_in; j++)
            {
                weights[count_c * c_in + j] = kernel_pred[c_in * (*pick_c) + j];
            }
            count_c++;
        }
        // set weight data
        set_tensor_buffer(weight_tensor, weights.data(), c_in * c_out * sizeof(float));
        // set bias data
        set_tensor_buffer(bias_tensor, bias_data.data(), c_out * sizeof(float));
        // graph run
        if (prerun_graph(graph) < 0)
        {
            fprintf(stderr, "Pre-run graph failed.\n");
            return -1;
        }
        if (0 != run_graph(graph, 1))
        {
            fprintf(stderr, "Run graph error.\n");
            postrun_graph(graph);
            destroy_graph(graph);
            release_tengine();
            return -1;
        }
        /* get output*/
        // NOTE(review): output_size is computed but never used.
        int output_size = output_tensor->elem_num;
        float* output_fp32 = (float*)output_tensor->data;
        // One flattened h*w mask per output channel.
        for (int i = 0; i < output_tensor->dims[1]; i++)
        {
            std::vector<float> tmp;
            for (int j = 0; j < output_tensor->dims[2] * output_tensor->dims[3]; j++)
                tmp.push_back(output_fp32[i * output_tensor->dims[2] * output_tensor->dims[3] + j]);
            ins_pred.push_back(tmp);
        }
        // exit
        postrun_graph(graph);
        destroy_graph(graph);
        release_tengine();
    }
    return 0;
}
// Logistic sigmoid: maps x to (0, 1). Computed in double precision (exp
// promotes the float argument) and cast back to float, as before.
static inline float sigmoid(float x)
{
    const double denom = 1.0 + exp(-x);
    return static_cast<float>(1.0 / denom);
}
// Converts one FPN level's raw category scores plus the decoded instance
// masks into Object detections: thresholds category scores, binarizes each
// 112x112 mask at 0.5, rescales it (minus letterbox padding) to the
// original image, computes the mask centroid, and appends the result to
// objects[class]. (The local names "cate_socre"/"socre" are existing
// misspellings of "score".)
void generate_res(float* cate_pred, std::vector<std::vector<float> > ins_pred, std::map<int, int>& kernel_map,
                  std::vector<std::vector<Object> >& objects, float cate_thresh,
                  float conf_thresh, int img_w, int img_h, int num_class, float stride, int wpad, int hpad,
                  int cate_pred_w, int cate_pred_h, int cate_pred_c)
{
    int w = cate_pred_w;
    int h = cate_pred_h;
    int w_ins = 112;
    int h_ins = 112;
    for (int q = 0; q < num_class; q++)
    {
        // Category score plane for class q.
        const float* cate_ptr = cate_pred + q * w * h;
        for (int i = 0; i < h; i++)
        {
            for (int j = 0; j < w; j++)
            {
                int index = i * w + j;
                float cate_socre = cate_ptr[index];
                if (cate_socre < cate_thresh)
                {
                    continue;
                }
                // Mask predicted by the kernel picked at this grid cell.
                const float* ins_ptr = ins_pred[kernel_map[index]].data();
                cv::Mat mask(h_ins, w_ins, CV_32FC1);
                float sum_mask = 0.f;
                int count_mask = 0;
                {
                    // Binarize at sigmoid(mask) > 0.5, accumulating the
                    // foreground score sum and pixel count.
                    mask = cv::Scalar(0.f);
                    float* mp = (float*)mask.data;
                    for (int m = 0; m < w_ins * h_ins; m++)
                    {
                        float mask_score = sigmoid(ins_ptr[m]);
                        if (mask_score > 0.5)
                        {
                            mp[m] = mask_score;
                            sum_mask += mask_score;
                            count_mask++;
                        }
                    }
                }
                // Reject masks smaller than the level's stride.
                if (count_mask < stride)
                {
                    continue;
                }
                float mask_score = sum_mask / (float(count_mask) + 1e-6);
                // Final confidence combines mask quality and category score.
                float socre = mask_score * cate_socre;
                if (socre < conf_thresh)
                {
                    continue;
                }
                // Crop away the letterbox padding (mask is at 1/8 of the
                // letterboxed input), then scale to the original image.
                cv::Mat mask_cut;
                cv::Rect rect(wpad / 8, hpad / 8, w_ins - wpad / 4, h_ins - hpad / 4);
                mask_cut = mask(rect);
                cv::Mat mask2;
                cv::resize(mask_cut, mask2, cv::Size(img_w, img_h));
                Object obj;
                obj.mask = cv::Mat(img_h, img_w, CV_8UC1);
                float sum_mask_y = 0.f;
                float sum_mask_x = 0.f;
                int area = 0;
                {
                    // Binarize the rescaled mask to 0/255 and accumulate
                    // coordinate sums for the centroid.
                    obj.mask = cv::Scalar(0);
                    for (int y = 0; y < img_h; y++)
                    {
                        const float* mp2 = mask2.ptr<const float>(y);
                        uchar* bmp = obj.mask.ptr<uchar>(y);
                        for (int x = 0; x < img_w; x++)
                        {
                            if (mp2[x] > 0.5f)
                            {
                                bmp[x] = 255;
                                sum_mask_y += (float)y;
                                sum_mask_x += (float)x;
                                area++;
                            }
                            else
                                bmp[x] = 0;
                        }
                    }
                }
                // Drop tiny instances.
                if (area < 100) continue;
                obj.cx = int(sum_mask_x / area);
                obj.cy = int(sum_mask_y / area);
                obj.label = q + 1;
                obj.prob = socre;
                objects[q].push_back(obj);
            }
        }
    }
}
// Render detection results on a copy of `bgr` and write it to `save_path`.
// For each object above a 0.15 confidence floor this draws the instance mask
// (alpha-blended with a per-object color) and a class/score label anchored at
// the mask centroid.  Fix vs. original: label text is formatted with
// snprintf, bounding the write to the buffer size (sprintf could overflow).
static void draw_objects(const cv::Mat& bgr, const std::vector<Object>& objects, const char* save_path)
{
    // COCO class names; index 0 is background (obj.label is 1-based).
    static const char* class_names[] = {"background",
                                        "person", "bicycle", "car", "motorcycle", "airplane", "bus",
                                        "train", "truck", "boat", "traffic light", "fire hydrant",
                                        "stop sign", "parking meter", "bench", "bird", "cat", "dog",
                                        "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe",
                                        "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
                                        "skis", "snowboard", "sports ball", "kite", "baseball bat",
                                        "baseball glove", "skateboard", "surfboard", "tennis racket",
                                        "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl",
                                        "banana", "apple", "sandwich", "orange", "broccoli", "carrot",
                                        "hot dog", "pizza", "donut", "cake", "chair", "couch",
                                        "potted plant", "bed", "dining table", "toilet", "tv", "laptop",
                                        "mouse", "remote", "keyboard", "cell phone", "microwave", "oven",
                                        "toaster", "sink", "refrigerator", "book", "clock", "vase",
                                        "scissors", "teddy bear", "hair drier", "toothbrush"};
    // Fixed palette cycled per drawn object.
    static const unsigned char colors[81][3] = {
        {56, 0, 255},
        {226, 255, 0},
        {0, 94, 255},
        {0, 37, 255},
        {0, 255, 94},
        {255, 226, 0},
        {0, 18, 255},
        {255, 151, 0},
        {170, 0, 255},
        {0, 255, 56},
        {255, 0, 75},
        {0, 75, 255},
        {0, 255, 169},
        {255, 0, 207},
        {75, 255, 0},
        {207, 0, 255},
        {37, 0, 255},
        {0, 207, 255},
        {94, 0, 255},
        {0, 255, 113},
        {255, 18, 0},
        {255, 0, 56},
        {18, 0, 255},
        {0, 255, 226},
        {170, 255, 0},
        {255, 0, 245},
        {151, 255, 0},
        {132, 255, 0},
        {75, 0, 255},
        {151, 0, 255},
        {0, 151, 255},
        {132, 0, 255},
        {0, 255, 245},
        {255, 132, 0},
        {226, 0, 255},
        {255, 37, 0},
        {207, 255, 0},
        {0, 255, 207},
        {94, 255, 0},
        {0, 226, 255},
        {56, 255, 0},
        {255, 94, 0},
        {255, 113, 0},
        {0, 132, 255},
        {255, 0, 132},
        {255, 170, 0},
        {255, 0, 188},
        {113, 255, 0},
        {245, 0, 255},
        {113, 0, 255},
        {255, 188, 0},
        {0, 113, 255},
        {255, 0, 0},
        {0, 56, 255},
        {255, 0, 113},
        {0, 255, 188},
        {255, 0, 94},
        {255, 0, 18},
        {18, 255, 0},
        {0, 255, 132},
        {0, 188, 255},
        {0, 245, 255},
        {0, 169, 255},
        {37, 255, 0},
        {255, 0, 151},
        {188, 0, 255},
        {0, 255, 37},
        {0, 255, 0},
        {255, 0, 170},
        {255, 0, 37},
        {255, 75, 0},
        {0, 0, 255},
        {255, 207, 0},
        {255, 0, 226},
        {255, 245, 0},
        {188, 255, 0},
        {0, 255, 18},
        {0, 255, 75},
        {0, 255, 151},
        {255, 56, 0},
        {245, 255, 0}};
    cv::Mat image = bgr.clone();
    int color_index = 0;
    for (size_t i = 0; i < objects.size(); i++)
    {
        const Object& obj = objects[i];
        // Skip low-confidence detections.
        if (obj.prob < 0.15)
            continue;
        fprintf(stderr, "%d = %.5f at %.2d %.2d\n", obj.label, obj.prob,
                obj.cx, obj.cy);
        const unsigned char* color = colors[color_index % 81];
        color_index++;
        // Label text "class score%"; snprintf bounds the write.
        char text[256];
        snprintf(text, sizeof(text), "%s %.1f%%", class_names[obj.label], obj.prob * 100);
        int baseLine = 0;
        cv::Size label_size = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine);
        // Anchor the label at the mask centroid.
        int x = obj.cx;
        int y = obj.cy;
        // White background box behind the text, then the text itself.
        cv::rectangle(image, cv::Rect(cv::Point(x, y), cv::Size(label_size.width, label_size.height + baseLine)),
                      cv::Scalar(255, 255, 255), -1);
        cv::putText(image, text, cv::Point(x, y + label_size.height),
                    cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 0));
        // Alpha-blend the instance mask (50/50) into the image.
        for (int y = 0; y < image.rows; y++)
        {
            const uchar* mp = obj.mask.ptr(y);
            uchar* p = image.ptr(y);
            for (int x = 0; x < image.cols; x++)
            {
                if (mp[x] == 255)
                {
                    p[0] = cv::saturate_cast<uchar>(p[0] * 0.5 + color[0] * 0.5);
                    p[1] = cv::saturate_cast<uchar>(p[1] * 0.5 + color[1] * 0.5);
                    p[2] = cv::saturate_cast<uchar>(p[2] * 0.5 + color[2] * 0.5);
                }
                p += 3; // 3-channel BGR image
            }
        }
    }
    cv::imwrite(save_path, image);
}
int main(int argc, char* argv[])
{
const char* model_file = nullptr;
const char* image_file = nullptr;
int img_c = 3;
const float mean[3] = {123.68f, 116.78f, 103.94f};
const float scale[3] = {1.0 / 58.40f, 1.0 / 57.12f, 1.0 / 57.38f};
// allow none square letterbox, set default letterbox size
int letterbox_rows = 448;
int letterbox_cols = 448;
int repeat_count = 1;
int num_thread = 1;
int res;
while ((res = getopt(argc, argv, "m:i:r:t:h:")) != -1)
{
switch (res)
{
case 'm':
model_file = optarg;
break;
case 'i':
image_file = optarg;
break;
case 'r':
repeat_count = std::strtoul(optarg, nullptr, 10);
break;
case 't':
num_thread = std::strtoul(optarg, nullptr, 10);
break;
case 'h':
show_usage();
return 0;
default:
break;
}
}
/* check files */
if (nullptr == model_file)
{
fprintf(stderr, "Error: Tengine model file not specified!\n");
show_usage();
return -1;
}
if (nullptr == image_file)
{
fprintf(stderr, "Error: Image file not specified!\n");
show_usage();
return -1;
}
if (!check_file_exist(model_file) || !check_file_exist(image_file))
return -1;
cv::Mat img = cv::imread(image_file, 1);
if (img.empty())
{
fprintf(stderr, "cv::imread %s failed\n", image_file);
return -1;
}
/* set runtime options */
struct options opt;
opt.num_thread = num_thread;
opt.cluster = TENGINE_CLUSTER_ALL;
opt.precision = TENGINE_MODE_FP32;
opt.affinity = 0;
/* inital tengine */
if (init_tengine() != 0)
{
fprintf(stderr, "Initial tengine failed.\n");
return -1;
}
fprintf(stderr, "tengine-lite library version: %s\n", get_tengine_version());
/* create graph, load tengine model xxx.tmfile */
graph_t graph = create_graph(nullptr, "tengine", model_file);
if (graph == nullptr)
{
fprintf(stderr, "Create graph failed.\n");
return -1;
}
int img_size = letterbox_rows * letterbox_cols * img_c;
int dims[] = {1, 3, int(letterbox_rows), int(letterbox_cols)};
int dims3[] = {1, 2, int(letterbox_rows / 8), int(letterbox_cols / 8)};
int dims4[] = {1, 2, int(letterbox_rows / 16), int(letterbox_cols / 16)};
int dims5[] = {1, 2, int(letterbox_rows / 32), int(letterbox_cols / 32)};
std::vector<float> input_data(img_size);
std::vector<float> input_data3(2 * 56 * 56);
std::vector<float> input_data4(2 * 28 * 28);
std::vector<float> input_data5(2 * 14 * 14);
tensor_t input_tensor = get_graph_tensor(graph, "input");
tensor_t p3_input_tensor = get_graph_tensor(graph, "p3_input");
tensor_t p4_input_tensor = get_graph_tensor(graph, "p4_input");
tensor_t p5_input_tensor = get_graph_tensor(graph, "p5_input");
if (input_tensor == nullptr || p3_input_tensor == nullptr || p4_input_tensor == nullptr || p5_input_tensor == nullptr)
{
fprintf(stderr, "Get input tensor failed\n");
return -1;
}
if (set_tensor_shape(input_tensor, dims, 4) < 0 || set_tensor_shape(p3_input_tensor, dims3, 4) < 0 || set_tensor_shape(p4_input_tensor, dims4, 4) < 0 || set_tensor_shape(p5_input_tensor, dims5, 4) < 0)
{
fprintf(stderr, "Set input tensor shape failed\n");
return -1;
}
if (set_tensor_buffer(input_tensor, input_data.data(), img_size * 4) < 0 || set_tensor_buffer(p3_input_tensor, input_data3.data(), 2 * 56 * 56 * 4) < 0 || set_tensor_buffer(p4_input_tensor, input_data4.data(), 2 * 28 * 28 * 4) < 0 || set_tensor_buffer(p5_input_tensor, input_data5.data(), 2 * 14 * 14 * 4) < 0)
{
fprintf(stderr, "Set input tensor buffer failed\n");
return -1;
}
/* prerun graph, set work options(num_thread, cluster, precision) */
if (prerun_graph_multithread(graph, opt) < 0)
{
fprintf(stderr, "Prerun multithread graph failed.\n");
return -1;
}
int wpad, hpad;
/* prepare process input data, set the data mem to input tensor */
get_input_data(image_file, input_data.data(), letterbox_rows, letterbox_cols, mean, scale, wpad, hpad);
int pw = int(letterbox_cols / 8);
int ph = int(letterbox_rows / 8);
float step_h = 2.f / (ph - 1);
float step_w = 2.f / (pw - 1);
for (int h = 0; h < ph; h++)
{
for (int w = 0; w < pw; w++)
{
input_data3[0 + h * pw + w] = -1.f + step_w * (float)w;
input_data3[ph * pw + h * pw + w] = -1.f + step_h * (float)h;
}
}
pw = int(letterbox_cols / 16);
ph = int(letterbox_rows / 16);
step_h = 2.f / (ph - 1);
step_w = 2.f / (pw - 1);
for (int h = 0; h < ph; h++)
{
for (int w = 0; w < pw; w++)
{
input_data4[0 + h * pw + w] = -1.f + step_w * (float)w;
input_data4[ph * pw + h * pw + w] = -1.f + step_h * (float)h;
}
}
pw = int(letterbox_cols / 32);
ph = int(letterbox_rows / 32);
step_h = 2.f / (ph - 1);
step_w = 2.f / (pw - 1);
for (int h = 0; h < ph; h++)
{
for (int w = 0; w < pw; w++)
{
input_data5[0 + h * pw + w] = -1.f + step_w * (float)w;
input_data5[ph * pw + h * pw + w] = -1.f + step_h * (float)h;
}
}
/* run graph */
double min_time = DBL_MAX;
double max_time = DBL_MIN;
double total_time = 0.;
for (int i = 0; i < repeat_count; i++)
{
double start = get_current_time();
if (run_graph(graph, 1) < 0)
{
fprintf(stderr, "Run graph failed\n");
return -1;
}
double end = get_current_time();
double cur = end - start;
total_time += cur;
min_time = (std::min)(min_time, cur);
max_time = (std::max)(max_time, cur);
}
fprintf(stderr, "Repeat %d times, thread %d, avg time %.2f ms, max_time %.2f ms, min_time %.2f ms\n", repeat_count, num_thread,
total_time / repeat_count, max_time, min_time);
fprintf(stderr, "--------------------------------------\n");
tensor_t feature_pred = get_graph_tensor(graph, "feature_pred");
tensor_t cate_pred1 = get_graph_tensor(graph, "cate_pred1");
tensor_t cate_pred2 = get_graph_tensor(graph, "cate_pred2");
tensor_t cate_pred3 = get_graph_tensor(graph, "cate_pred3");
tensor_t cate_pred4 = get_graph_tensor(graph, "cate_pred4");
tensor_t cate_pred5 = get_graph_tensor(graph, "cate_pred5");
tensor_t kernel_pred1 = get_graph_tensor(graph, "kernel_pred1");
tensor_t kernel_pred2 = get_graph_tensor(graph, "kernel_pred2");
tensor_t kernel_pred3 = get_graph_tensor(graph, "kernel_pred3");
tensor_t kernel_pred4 = get_graph_tensor(graph, "kernel_pred4");
tensor_t kernel_pred5 = get_graph_tensor(graph, "kernel_pred5");
float* feature_pred_data = (float*)get_tensor_buffer(feature_pred);
float* cate_pred1_data = (float*)get_tensor_buffer(cate_pred1);
float* cate_pred2_data = (float*)get_tensor_buffer(cate_pred2);
float* cate_pred3_data = (float*)get_tensor_buffer(cate_pred3);
float* cate_pred4_data = (float*)get_tensor_buffer(cate_pred4);
float* cate_pred5_data = (float*)get_tensor_buffer(cate_pred5);
float* kernel_pred1_data = (float*)get_tensor_buffer(kernel_pred1);
float* kernel_pred2_data = (float*)get_tensor_buffer(kernel_pred2);
float* kernel_pred3_data = (float*)get_tensor_buffer(kernel_pred3);
float* kernel_pred4_data = (float*)get_tensor_buffer(kernel_pred4);
float* kernel_pred5_data = (float*)get_tensor_buffer(kernel_pred5);
int tensor_dims1[] = {0, 0, 0, 0};
int tensor_dims2[] = {0, 0, 0, 0};
int tensor_dims3[] = {0, 0, 0, 0};
int tensor_dims4[] = {0, 0, 0, 0};
int tensor_dims5[] = {0, 0, 0, 0};
get_tensor_shape(cate_pred1, tensor_dims1, 4);
get_tensor_shape(cate_pred2, tensor_dims2, 4);
get_tensor_shape(cate_pred3, tensor_dims3, 4);
get_tensor_shape(cate_pred4, tensor_dims4, 4);
get_tensor_shape(cate_pred5, tensor_dims5, 4);
const int target_size = 448;
const float cate_thresh = 0.3f;
const float confidence_thresh = 0.3f;
const float nms_threshold = 0.3f;
const int keep_top_k = 200;
int num_class = tensor_dims1[1];
std::vector<int> kernel_picked1, kernel_picked2, kernel_picked3, kernel_picked4, kernel_picked5;
kernel_pick(cate_pred1_data, tensor_dims1[2], tensor_dims1[3], kernel_picked1, num_class, cate_thresh);
kernel_pick(cate_pred2_data, tensor_dims2[2], tensor_dims2[3], kernel_picked2, num_class, cate_thresh);
kernel_pick(cate_pred3_data, tensor_dims3[2], tensor_dims3[3], kernel_picked3, num_class, cate_thresh);
kernel_pick(cate_pred4_data, tensor_dims4[2], tensor_dims4[3], kernel_picked4, num_class, cate_thresh);
kernel_pick(cate_pred5_data, tensor_dims5[2], tensor_dims5[3], kernel_picked5, num_class, cate_thresh);
int feature_pred_tensor_dim[] = {0, 0, 0, 0};
get_tensor_shape(feature_pred, feature_pred_tensor_dim, 4);
int c_in = feature_pred_tensor_dim[1];
std::map<int, int> kernel_map1, kernel_map2, kernel_map3, kernel_map4, kernel_map5;
std::vector<std::vector<float> > ins_pred1, ins_pred2, ins_pred3, ins_pred4, ins_pred5;
ins_decode(kernel_pred1_data, feature_pred_data, kernel_picked1, kernel_map1, ins_pred1, c_in);
ins_decode(kernel_pred2_data, feature_pred_data, kernel_picked2, kernel_map2, ins_pred2, c_in);
ins_decode(kernel_pred3_data, feature_pred_data, kernel_picked3, kernel_map3, ins_pred3, c_in);
ins_decode(kernel_pred4_data, feature_pred_data, kernel_picked4, kernel_map4, ins_pred4, c_in);
ins_decode(kernel_pred5_data, feature_pred_data, kernel_picked5, kernel_map5, ins_pred5, c_in);
std::vector<std::vector<Object> > class_candidates;
class_candidates.resize(num_class);
generate_res(cate_pred1_data, ins_pred1, kernel_map1, class_candidates, cate_thresh, confidence_thresh, img.cols, img.rows,
num_class, 8.f, wpad, hpad, tensor_dims1[3], tensor_dims1[2], tensor_dims1[1]);
generate_res(cate_pred2_data, ins_pred2, kernel_map2, class_candidates, cate_thresh, confidence_thresh, img.cols, img.rows,
num_class, 8.f, wpad, hpad, tensor_dims2[3], tensor_dims2[2], tensor_dims2[1]);
generate_res(cate_pred3_data, ins_pred3, kernel_map3, class_candidates, cate_thresh, confidence_thresh, img.cols, img.rows,
num_class, 16.f, wpad, hpad, tensor_dims3[3], tensor_dims3[2], tensor_dims3[1]);
generate_res(cate_pred4_data, ins_pred4, kernel_map4, class_candidates, cate_thresh, confidence_thresh, img.cols, img.rows,
num_class, 32.f, wpad, hpad, tensor_dims4[3], tensor_dims4[2], tensor_dims4[1]);
generate_res(cate_pred5_data, ins_pred5, kernel_map5, class_candidates, cate_thresh, confidence_thresh, img.cols, img.rows,
num_class, 32.f, wpad, hpad, tensor_dims5[3], tensor_dims5[2], tensor_dims5[1]);
std::vector<Object> objects;
objects.clear();
for (int i = 0; i < (int)class_candidates.size(); i++)
{
std::vector<Object>& candidates = class_candidates[i];
qsort_descent_inplace(candidates);
std::vector<int> picked;
nms_sorted_segs(candidates, picked, nms_threshold, img.cols, img.rows);
for (int j = 0; j < (int)picked.size(); j++)
{
int z = picked[j];
objects.push_back(candidates[z]);
}
}
qsort_descent_inplace(objects);
// keep_top_k
if (keep_top_k < (int)objects.size())
{
objects.resize(keep_top_k);
}
draw_objects(img, objects, "solov2_result.jpg");
/* release tengine */
postrun_graph(graph);
destroy_graph(graph);
release_tengine();
}
| 18,972 |
5,903 | <filename>engine/src/it/java/org/pentaho/di/job/entries/copyfiles/JobEntryCopyFilesIT.java
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by <NAME> : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.copyfiles;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for {@link JobEntryCopyFiles}. Each test copies files from
 * a fresh temporary source directory into a fresh temporary destination
 * directory and verifies the job entry's {@link Result}.
 */
public class JobEntryCopyFilesIT {
  // Empty wildcard: the job entry treats it as "match every file".
  private final String EMPTY = "";
  // Job entry under test; re-created before every test in setUp().
  private JobEntryCopyFiles entry;
  // Temporary directory the entry copies from.
  private Path source;
  // Temporary directory the entry copies into.
  private Path destination;
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // The job entry logs through Kettle, so the log store must exist first.
    KettleLogStore.init();
  }
  @Before
  public void setUp() throws Exception {
    entry = new JobEntryCopyFiles( "Job entry copy files" );
    entry.setParentJob( new Job() );
    entry.setParentJobMeta( new JobMeta() );
    source = Files.createTempDirectory( "src" );
    destination = Files.createTempDirectory( "dest" );
    entry.source_filefolder = new String[] { source.toString() };
    entry.destination_filefolder = new String[] { destination.toString() };
    entry.wildcard = new String[] { EMPTY };
  }
  @After
  public void tearDown() throws Exception {
    FileUtils.forceDelete( source.toFile() );
    FileUtils.forceDelete( destination.toFile() );
  }
  /** A single file in the source root is copied without errors. */
  @Test
  public void copyFile() throws Exception {
    Files.createTempFile( source, "file", "" );
    Result result = entry.execute( new Result(), 0 );
    assertTrue( result.getResult() );
    assertEquals( 0, result.getNrErrors() );
  }
  /** With "include subfolders" enabled, nested files are copied too. */
  @Test
  public void copyFileFromSubDirectory() throws Exception {
    entry.setIncludeSubfolders( true );
    Path subDirectory = Files.createTempDirectory( source, "sub" );
    Files.createTempFile( subDirectory, "file", "" );
    Result result = entry.execute( new Result(), 0 );
    assertTrue( result.getResult() );
    assertEquals( 0, result.getNrErrors() );
  }
  /** With overwrite disabled, a pre-existing destination file is left untouched. */
  @Test
  public void copyFileWithoutOverwrite() throws Exception {
    entry.setoverwrite_files( false );
    Path pathToFile = Files.createTempFile( source, "file", "" );
    // Pre-populate the destination so the entry would have to overwrite.
    FileUtils.copyDirectory( source.toFile(), destination.toFile() );
    String path = destination.resolve( pathToFile.getFileName() ).toString();
    File file = new File( path );
    // Compare modification stamps before/after execution to detect an overwrite.
    long createTime = file.lastModified();
    Result result = entry.execute( new Result(), 0 );
    long copyTime = file.lastModified();
    assertTrue( result.getResult() );
    assertEquals( 0, result.getNrErrors() );
    assertTrue( "File shouldn't be overwritten", createTime == copyTime );
  }
  /** Same as {@link #copyFileWithoutOverwrite()}, but for a nested file. */
  @Test
  public void copyFileFromSubDirectoryWithoutOverwrite() throws Exception {
    entry.setIncludeSubfolders( true );
    entry.setoverwrite_files( false );
    Path pathToSub = Files.createTempDirectory( source, "sub" );
    Path pathToFile = Files.createTempFile( pathToSub, "file", "" );
    FileUtils.copyDirectory( source.toFile(), destination.toFile() );
    String path = destination.resolve( pathToSub.getFileName() ).resolve( pathToFile.getFileName() ).toString();
    File file = new File( path );
    long createTime = file.lastModified();
    Result result = entry.execute( new Result(), 0 );
    long copyTime = file.lastModified();
    assertTrue( result.getResult() );
    assertEquals( 0, result.getNrErrors() );
    assertTrue( "File shouldn't be overwritten", createTime == copyTime );
  }
}
| 1,384 |
338 | package com.camnter.newlife.bean.ratingrank;
import java.util.List;
/**
 * Response model for the rating-rank API: a status code, a message and a
 * paged payload of rated funds.
 *
 * Created by:CaMnter
 */
public class RatingRankResponse {

    /** Business status code. */
    private int code;
    /** Human-readable status message. */
    private String msg;
    /** Response payload. */
    private DataEntity data;

    public int getCode() {
        return this.code;
    }

    public void setCode(int code) {
        this.code = code;
    }

    public String getMsg() {
        return this.msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public DataEntity getData() {
        return this.data;
    }

    public void setData(DataEntity data) {
        this.data = data;
    }

    /** Paged list of rated funds. */
    public static class DataEntity {

        private int totalPage;
        private int totalSize;
        private List<RatingFund> funds;

        public int getTotalPage() {
            return this.totalPage;
        }

        public void setTotalPage(int totalPage) {
            this.totalPage = totalPage;
        }

        public int getTotalSize() {
            return this.totalSize;
        }

        public void setTotalSize(int totalSize) {
            this.totalSize = totalSize;
        }

        public List<RatingFund> getFunds() {
            return this.funds;
        }

        public void setFunds(List<RatingFund> funds) {
            this.funds = funds;
        }
    }
}
| 397 |
458 | /* MemManagerGLES.h
* WhirlyGlobeLib
*
* Created by <NAME> on 2/1/11.
* Copyright 2011-2021 mousebird consulting
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#import "WrapperGLES.h"
#import "ChangeRequest.h"
#import <mutex>
#import <unordered_set>
namespace WhirlyKit
{
/// We'll only keep this many buffers or textures around for reuse
#define WhirlyKitOpenGLMemCacheMax 32
/// Number of buffers we allocate at once
#define WhirlyKitOpenGLMemCacheAllocUnit 32
// Maximum of 8 textures for the moment
#define WhirlyKitMaxTextures 8
/// Used to manage OpenGL buffer IDs and such.
/// They're expensive to create and delete, so we try to do it
/// outside the renderer.
/// NOTE(review): the idLock member suggests the ID caches are accessed from
/// more than one thread — confirm all public methods take it.
class OpenGLMemManager
{
public:
    OpenGLMemManager();
    ~OpenGLMemManager();
    /// Pick a buffer ID off the list or ask OpenGL for one
    GLuint getBufferID(unsigned int size=0,GLenum drawType=GL_STATIC_DRAW);
    /// Toss the given buffer ID back on the list for reuse
    void removeBufferID(GLuint bufID);
    /// Pick a texture ID off the list or ask OpenGL for one
    GLuint getTexID();
    /// Toss the given texture ID back on the list for reuse
    void removeTexID(GLuint texID);
    /// Clear out any and all buffer IDs that we may have sitting around
    void clearBufferIDs();
    /// Clear out any and all texture IDs that we have sitting around
    void clearTextureIDs();
    /// Print out stats about what's in the cache
    void dumpStats();
    /// Clean up resources, don't cache anything else
    void teardown();
    /// Globally enable/disable buffer reuse, 0 to disable
    static void setBufferReuse(int maxBuffers);
    /// Globally enable/disable texture reuse, 0 to disable
    static void setTextureReuse(int maxTextures);
protected:
    /// Mutex presumably guarding the caches below — confirm in the .cpp.
    std::mutex idLock;
    /// Buffer IDs available for reuse.
    std::unordered_set<GLuint> buffIDs;
    /// Texture IDs available for reuse.
    std::unordered_set<GLuint> texIDs;
    /// Once set (by teardown), no further caching happens.
    bool shutdown = false;
    /// Global caps set via setBufferReuse / setTextureReuse.
    static int maxCachedBuffers;
    static int maxCachedTextures;
};
/** This is the configuration info passed to setupGL for each
    drawable. Sometimes this will be render thread side, sometimes
    layer thread side. The defaults should be valid.
 */
class RenderSetupInfoGLES : public RenderSetupInfo
{
public:
    RenderSetupInfoGLES();
    RenderSetupInfoGLES(Scene *scene);
    /// If we're using drawOffset, this is the units
    float minZres;
    /// Version of OpenGL ES we're using
    int glesVersion;
    /// GL memory manager
    /// NOTE(review): stored as a raw pointer — ownership presumably lives
    /// elsewhere (renderer/scene); confirm lifetime expectations.
    OpenGLMemManager *memManager;
};
}
| 964 |
14,668 | <gh_stars>1000+
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/webauthn/authenticator_request_scheduler.h"
#include "base/memory/weak_ptr.h"
#include "base/supports_user_data.h"
#include "chrome/browser/webauthn/chrome_authenticator_request_delegate.h"
#include "content/public/browser/web_contents.h"
namespace {
// Holds a weak pointer to the active request in a WebContents, if any.
// Attached to the WebContents as user data, so its lifetime matches the tab;
// the weak pointer goes null automatically when the delegate is destroyed.
class ActiveRequestWeakHolder : public base::SupportsUserData::Data {
 public:
  ActiveRequestWeakHolder() = default;
  ActiveRequestWeakHolder(const ActiveRequestWeakHolder&) = delete;
  ActiveRequestWeakHolder& operator=(const ActiveRequestWeakHolder&) = delete;
  ~ActiveRequestWeakHolder() override = default;
  // Returns the holder attached to |web_contents|, creating and attaching a
  // fresh one on first use.
  static ActiveRequestWeakHolder* EnsureForWebContents(
      content::WebContents* web_contents) {
    static constexpr char kActiveRequestDataKey[] =
        "ActiveAuthenticatorRequestKey";
    if (!web_contents->GetUserData(kActiveRequestDataKey)) {
      web_contents->SetUserData(kActiveRequestDataKey,
                                std::make_unique<ActiveRequestWeakHolder>());
    }
    return static_cast<ActiveRequestWeakHolder*>(
        web_contents->GetUserData(kActiveRequestDataKey));
  }
  // Mutable accessor for the weak reference to the active request delegate.
  base::WeakPtr<ChromeAuthenticatorRequestDelegate>& request() {
    return request_;
  }
 private:
  base::WeakPtr<ChromeAuthenticatorRequestDelegate> request_;
};
} // namespace
// static
std::unique_ptr<ChromeAuthenticatorRequestDelegate>
AuthenticatorRequestScheduler::CreateRequestDelegate(
    content::RenderFrameHost* render_frame_host) {
  // At most one authenticator request may be active per WebContents; refuse
  // to create a second delegate while another is still alive.
  content::WebContents* contents =
      content::WebContents::FromRenderFrameHost(render_frame_host);
  ActiveRequestWeakHolder* holder =
      ActiveRequestWeakHolder::EnsureForWebContents(contents);
  if (holder->request())
    return nullptr;
  auto delegate =
      std::make_unique<ChromeAuthenticatorRequestDelegate>(render_frame_host);
  holder->request() = delegate->AsWeakPtr();
  return delegate;
}
// static
ChromeAuthenticatorRequestDelegate*
AuthenticatorRequestScheduler::GetRequestDelegate(
    content::WebContents* web_contents) {
  // Null when no request is currently active for |web_contents|.
  ActiveRequestWeakHolder* holder =
      ActiveRequestWeakHolder::EnsureForWebContents(web_contents);
  return holder->request().get();
}
| 782 |
409 | package com.bugsnagreactnativeexample;
import android.app.Application;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import com.facebook.react.ReactApplication;
import com.microsoft.codepush.react.CodePush;
import com.bugsnag.BugsnagReactNative;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.react.shell.MainReactPackage;
import com.facebook.soloader.SoLoader;
import com.bugsnag.BugsnagReactNative;
import java.util.Arrays;
import java.util.List;
/** Application entry point wiring React Native, CodePush and Bugsnag together. */
public class MainApplication extends Application implements ReactApplication {
  private final ReactNativeHost mReactNativeHost = new ReactNativeHost(this) {
    @Override
    protected String getJSBundleFile() {
      // Let CodePush decide which (possibly hot-updated) JS bundle to load.
      return CodePush.getJSBundleFile();
    }
    @Override
    public boolean getUseDeveloperSupport() {
      return BuildConfig.DEBUG;
    }
    @Override
    protected List<ReactPackage> getPackages() {
      Context appContext = getApplicationContext();
      // Read the CodePush deployment key from the manifest's <meta-data>.
      String codePushDeploymentKey = null;
      try {
        ApplicationInfo ai = appContext.getPackageManager().getApplicationInfo(appContext.getPackageName(), PackageManager.GET_META_DATA);
        Bundle data = ai.metaData;
        codePushDeploymentKey = data.getString("com.microsoft.codepush.DEPLOYMENT_KEY");
      } catch (Exception ignore) {
        // Best-effort: fall back to a null deployment key if the metadata is
        // missing or the package lookup fails.
      }
      return Arrays.<ReactPackage>asList(
          new MainReactPackage(),
          new CodePush(codePushDeploymentKey, appContext, BuildConfig.DEBUG),
          new CrashyPackage(),
          BugsnagReactNative.getPackage()
      );
    }
  };
  @Override
  public ReactNativeHost getReactNativeHost() {
    return mReactNativeHost;
  }
  @Override
  public void onCreate() {
    super.onCreate();
    // Start crash reporting before anything else can fail.
    BugsnagReactNative.start(this);
    SoLoader.init(this, /* native exopackage */ false);
  }
}
| 672 |
317 | /**
* Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved.
*
* You may not modify, use, reproduce, or distribute this software except in
* compliance with the terms of the License at:
* https://github.com/javaee/tutorial-examples/LICENSE.txt
*/
package javaeetutorial.web.websocketbot.messages;
/**
 * Chat message announcing that a user has joined the conversation.
 */
public class JoinMessage extends Message {

    private final String name;

    /**
     * @param name screen name of the user who joined
     */
    public JoinMessage(String name) {
        this.name = name;
    }

    /** @return the joining user's name */
    public String getName() {
        return name;
    }

    /** Human-readable form used for logging. */
    @Override
    public String toString() {
        return "[JoinMessage] " + name;
    }
}
| 273 |
778 | <filename>kratos/elements/distance_calculation_element_simplex.h
// | / |
// ' / __| _` | __| _ \ __|
// . \ | ( | | ( |\__ `
// _|\_\_| \__,_|\__|\___/ ____/
// Multi-Physics
//
// License: BSD License
// Kratos default license: kratos/license.txt
//
// Main authors: <NAME>
//
//
#if !defined(KRATOS_DISTANCE_CALCULATION_ELEMENT_H_INCLUDED )
#define KRATOS_DISTANCE_CALCULATION_ELEMENT_H_INCLUDED
// System includes
#include <string>
#include <iostream>
// External includes
// Project includes
#include "containers/array_1d.h"
#include "includes/define.h"
#include "includes/element.h"
#include "includes/serializer.h"
#include "geometries/geometry.h"
#include "utilities/math_utils.h"
#include "utilities/geometry_utilities.h"
// Application includes
#include "includes/variables.h"
#include "includes/kratos_flags.h"
namespace Kratos
{
///@addtogroup FluidDynamicsApplication
///@{
///@name Kratos Globals
///@{
///@}
///@name Type Definitions
///@{
///@}
///@name Enum's
///@{
///@}
///@name Functions
///@{
///@}
///@name Kratos Classes
///@{
/// A stabilized element for the incompressible Navier-Stokes equations.
/**
*/
template< unsigned int TDim >
class DistanceCalculationElementSimplex : public Element
{
public:
///@name Type Definitions
///@{
/// Pointer definition of DistanceCalculationElementSimplex
KRATOS_CLASS_INTRUSIVE_POINTER_DEFINITION(DistanceCalculationElementSimplex);
/// Node type (default is: Node<3>)
typedef Node <3> NodeType;
/// Geometry type (using with given NodeType)
typedef Geometry<NodeType> GeometryType;
/// Definition of nodes container type, redefined from GeometryType
typedef Geometry<NodeType>::PointsArrayType NodesArrayType;
/// Vector type for local contributions to the linear system
typedef Vector VectorType;
/// Matrix type for local contributions to the linear system
typedef Matrix MatrixType;
typedef std::size_t IndexType;
typedef std::size_t SizeType;
typedef std::vector<std::size_t> EquationIdVectorType;
typedef std::vector< Dof<double>::Pointer > DofsVectorType;
typedef PointerVectorSet<Dof<double>, IndexedObject> DofsArrayType;
/// Type for shape function values container
typedef Kratos::Vector ShapeFunctionsType;
/// Type for a matrix containing the shape function gradients
typedef Kratos::Matrix ShapeFunctionDerivativesType;
/// Type for an array of shape function gradient matrices
typedef GeometryType::ShapeFunctionsGradientsType ShapeFunctionDerivativesArrayType;
///@}
///@name Life Cycle
///@{
    //Constructors.
    /// Default constructor.
    /**
     * @param NewId Index number of the new element (optional)
     */
    DistanceCalculationElementSimplex(IndexType NewId = 0) :
        Element(NewId)
    {}
    /// Constructor using an array of nodes.
    /**
     * @param NewId Index of the new element
     * @param ThisNodes An array containing the nodes of the new element
     */
    DistanceCalculationElementSimplex(IndexType NewId, const NodesArrayType& ThisNodes) :
        Element(NewId, ThisNodes)
    {}
    /// Constructor using a geometry object.
    /**
     * @param NewId Index of the new element
     * @param pGeometry Pointer to a geometry object
     */
    DistanceCalculationElementSimplex(IndexType NewId, GeometryType::Pointer pGeometry) :
        Element(NewId, pGeometry)
    {}
    /// Constructor using geometry and properties.
    /**
     * @param NewId Index of the new element
     * @param pGeometry Pointer to a geometry object
     * @param pProperties Pointer to the element's properties
     */
    DistanceCalculationElementSimplex(IndexType NewId, GeometryType::Pointer pGeometry, PropertiesType::Pointer pProperties) :
        Element(NewId, pGeometry, pProperties)
    {}
    /// Destructor.
    ~DistanceCalculationElementSimplex() override
    {}
///@}
///@name Operators
///@{
///@}
///@name Operations
///@{
/// Create a new element of this type
/**
 * Returns a pointer to a new DistanceCalculationElementSimplex element, created using given input.
 * The geometry is produced by this element's own geometry prototype from the given nodes.
 * @param NewId the ID of the new element
 * @param ThisNodes the nodes of the new element
 * @param pProperties the properties assigned to the new element
 * @return a Pointer to the new element
 */
Element::Pointer Create(IndexType NewId, NodesArrayType const& ThisNodes,
                        PropertiesType::Pointer pProperties) const override
{
    return Element::Pointer(new DistanceCalculationElementSimplex(NewId, GetGeometry().Create(ThisNodes), pProperties));
}
/// Create a new element of this type
/**
 * Returns a pointer to a new DistanceCalculationElementSimplex element, created using given input.
 * @param NewId the ID of the new element
 * @param pGeom the geometry to be employed
 * @param pProperties the properties assigned to the new element
 * @return a Pointer to the new element
 */
Element::Pointer Create(IndexType NewId,
                        GeometryType::Pointer pGeom,
                        PropertiesType::Pointer pProperties) const override
{
    return Element::Pointer(new DistanceCalculationElementSimplex(NewId, pGeom, pProperties));
}
/// Calculate the element's local contribution to the system for the current step.
/**
 * Two-phase redistancing driven by rCurrentProcessInfo[FRACTIONAL_STEP]:
 *  - step == 1: solve a Poisson problem with a +/-1 heat source whose sign
 *    is taken from the current distance at the Gauss point (smoothing phase).
 *  - otherwise: an optimization step that drives ||grad(distance)|| towards 1.
 * @param rLeftHandSideMatrix Elemental left hand side matrix (output, resized to (TDim+1)x(TDim+1))
 * @param rRightHandSideVector Elemental right hand side vector (output, resized to TDim+1)
 * @param rCurrentProcessInfo ProcessInfo instance; FRACTIONAL_STEP selects the phase
 */
void CalculateLocalSystem(MatrixType& rLeftHandSideMatrix,
                          VectorType& rRightHandSideVector,
                          const ProcessInfo& rCurrentProcessInfo) override
{
    const unsigned int number_of_points = TDim+1;
    BoundedMatrix<double, TDim+1, TDim > DN_DX;
    array_1d<double, TDim+1 > N;
    if (rLeftHandSideMatrix.size1() != number_of_points)
        rLeftHandSideMatrix.resize(number_of_points, number_of_points, false);
    if (rRightHandSideVector.size() != number_of_points)
        rRightHandSideVector.resize(number_of_points, false);
    //getting data for the given geometry (shape function values, gradients and area/volume)
    double Area;
    GeometryUtils::CalculateGeometryData(GetGeometry(), DN_DX, N, Area);
    //get distances at the nodes
    array_1d<double, TDim+1 > distances;
    for(unsigned int i=0; i<number_of_points; i++)
    {
        distances[i] = GetGeometry()[i].FastGetSolutionStepValue(DISTANCE);
        // if (distances[i] >= 0.0 && distances[i] < 1e-30) distances[i] = 1e-30;
    }
    // distance interpolated at the (single) Gauss point
    const double dgauss = inner_prod(N,distances);
    const unsigned int step = rCurrentProcessInfo[FRACTIONAL_STEP];
    if(step == 1) //solve a poisson problem with a positive/negative heat source depending on the sign of the existing distance function
    {
        //compute distance on gauss point
        this->SetValue(DISTANCE,dgauss); //saving the distance, to see if it changed sign between iterations
        //compute LHS (Laplacian stiffness matrix)
        noalias(rLeftHandSideMatrix) = Area*prod(DN_DX,trans(DN_DX));
        //compute RHS: unit source with the sign of the current distance
        double source = 1.0;
        if(dgauss < 0.0) source=-1.0;
        noalias(rRightHandSideVector) = source*Area*N;
        noalias(rRightHandSideVector) -= prod(rLeftHandSideMatrix,distances);
        //impose that the normal gradient is 1 on outer faces: an element with
        //TDim boundary nodes has exactly one face lying on the outer boundary
        unsigned int nboundary = 0;
        for(unsigned int i=0; i<TDim+1; i++)
            if(GetGeometry()[i].Is(BOUNDARY)) nboundary +=1;
        if(nboundary == TDim)
        {
            // gradient of the shape function of the single interior node,
            // which is normal to the boundary face
            array_1d<double,TDim> DN_out(TDim, 0.0);
            for(unsigned int i=0; i<TDim+1; i++)
                if(GetGeometry()[i].IsNot(BOUNDARY))
                {
                    noalias(DN_out) = row(DN_DX,i);
                    break;
                }
            double normDn = norm_2(DN_out);
            for(unsigned int i=0; i<TDim+1; i++)
                if(GetGeometry()[i].Is(BOUNDARY))
                {
                    rRightHandSideVector[i] += 0.01*source*normDn*Area*(TDim-1); //TODO: check this! it should be TDim*(TDim-1)*N[i] with N[i] on the face and then equal to 1/TDim
                    // using the area as weighting factor is excessive. Reduced it to get a closer to constant gradient between the regions close and far away from the interface
                }
        }
    }
    else //solve an optimization problem with the goal of achievieng a gradient of one for the distance function
    {
        //for debuggin purposes: warn if the interface moved between iterations
        if( dgauss * (this->GetValue(DISTANCE)) < 0.0 )
            std::cout << "Element " << this->Id() << " changed sign while redistancing!!" << std::endl;
        //compute the gradient of the distance
        const array_1d<double,TDim> grad = prod(trans(DN_DX),distances);
        double grad_norm = norm_2(grad);
        //compute RHS ad grad N_i \cdot ( 1/norm_grad * grad - grad)
        //and multiply everything by grad_norm
        noalias(rRightHandSideVector) = Area*(1.0 - grad_norm)* prod(DN_DX,grad);
        //compute the LHS as an approximation of the tangent.
        //such approximation is taken as a laplacian, which comes from the assumption that the
        //direction of n does not change when d changes
        //
        //
        //note that the exact tangent could be computed as "P1+P2" with
        //n = grad/grad_norm
        //P1 = (1.0 - 1.0 / (grad_norm + eps) ) * DN_DX * DN_DX.transpose()
        //P2 = 1.0/(grad_norm + eps) * dot(DN_DX * outer(n,n) * DN_DX.transpose() )
        //unfortunately the numerical experiments tell that this in too unstable to be used unless a very
        //good initial approximation is used
        // noalias(rLeftHandSideMatrix) = (Area*(grad_norm - 1.0))*rod(DN_DX,trans(DN_DX) ); //RISKY!!
        //the 0.1 floor on grad_norm avoids a singular LHS where the gradient vanishes
        noalias(rLeftHandSideMatrix) = Area*std::max(grad_norm,0.1)*prod( DN_DX,trans(DN_DX) );
    }
}
/// Provides the global indices for each one of this element's local rows
/**
 * Fills rResult with the global equation id of the DISTANCE degree of
 * freedom of every node of the element, in local node order.
 * @param rResult A vector containing the global Id of each row (output)
 * @param rCurrentProcessInfo the current process info object (unused)
 */
void EquationIdVector(EquationIdVectorType& rResult,
                      const ProcessInfo& rCurrentProcessInfo) const override
{
    const unsigned int num_nodes = TDim + 1;
    if (rResult.size() != num_nodes)
        rResult.resize(num_nodes, false);
    const GeometryType& r_geometry = this->GetGeometry();
    for (unsigned int i_node = 0; i_node < num_nodes; ++i_node)
        rResult[i_node] = r_geometry[i_node].GetDof(DISTANCE).EquationId();
}
/// Returns a list of the element's Dofs
/**
 * Fills rElementalDofList with a pointer to the DISTANCE degree of freedom
 * of every node of the element, in local node order.
 * @param rElementalDofList the list of DOFs (output)
 * @param rCurrentProcessInfo the current process info instance (unused)
 */
void GetDofList(DofsVectorType& rElementalDofList,
                const ProcessInfo& rCurrentProcessInfo) const override
{
    const unsigned int num_nodes = TDim + 1;
    if (rElementalDofList.size() != num_nodes)
        rElementalDofList.resize(num_nodes);
    const GeometryType& r_geometry = this->GetGeometry();
    for (unsigned int i_node = 0; i_node < num_nodes; ++i_node)
        rElementalDofList[i_node] = r_geometry[i_node].pGetDof(DISTANCE);
}
///@}
///@name Access
///@{
///@}
///@name Elemental Data
///@{
/// Checks the input and that all required Kratos variables have been registered.
/**
* This function provides the place to perform checks on the completeness of the input.
* It is designed to be called only once (or anyway, not often) typically at the beginning
* of the calculations, so to verify that nothing is missing from the input
* or that no common error is found.
* @param rCurrentProcessInfo The ProcessInfo of the ModelPart that contains this element.
* @return 0 if no errors were found.
*/
int Check(const ProcessInfo& rCurrentProcessInfo) const override
{
KRATOS_TRY
// Perform basic element checks
int ErrorCode = Kratos::Element::Check(rCurrentProcessInfo);
if(ErrorCode != 0) return ErrorCode;
if(this->GetGeometry().size() != TDim+1)
KRATOS_THROW_ERROR(std::invalid_argument,"wrong number of nodes for element",this->Id());
// Checks on nodes
// Check that the element's nodes contain all required SolutionStepData and Degrees of freedom
for(unsigned int i=0; i<this->GetGeometry().size(); ++i)
{
if(this->GetGeometry()[i].SolutionStepsDataHas(DISTANCE) == false)
KRATOS_THROW_ERROR(std::invalid_argument,"missing DISTANCE variable on solution step data for node ",this->GetGeometry()[i].Id());
}
return 0;
KRATOS_CATCH("");
}
/// Not supported: this element only assembles LHS and RHS together.
/**
 * DistanceCalculationElementSimplex is meant to be driven through
 * CalculateLocalSystem; a standalone RHS evaluation is a usage error,
 * so this method always throws.
 * @param rRightHandSideVector unused
 * @param rCurrentProcessInfo unused
 */
void CalculateRightHandSide(VectorType& rRightHandSideVector,
                            const ProcessInfo& rCurrentProcessInfo) override
{
    // KRATOS_ERROR throws, so the resize() that used to follow this line
    // was unreachable dead code and has been removed.
    KRATOS_ERROR << "should not enter here" << std::endl;
}
///@}
///@name Inquiry
///@{
///@}
///@name Input and output
///@{
/// Turn back information as a string.
/**
 * @return A short text identifying the element type and its Id.
 */
std::string Info() const override
{
    std::stringstream buffer;
    buffer << "DistanceCalculationElementSimplex #" << Id();
    return buffer.str();
}
/// Print information about this object.
/**
 * Writes the element type name and its dimension (e.g. "...2D") to rOStream.
 * @param rOStream Output stream to print to
 */
void PrintInfo(std::ostream& rOStream) const override
{
    rOStream << "DistanceCalculationElementSimplex" << TDim << "D";
}
// /// Print object's data.
// virtual void PrintData(std::ostream& rOStream) const;
///@}
///@name Friends
///@{
///@}
protected:
///@name Protected static Member Variables
///@{
///@}
///@name Protected member Variables
///@{
///@}
///@name Protected Operators
///@{
///@}
///@name Protected Operations
///@{
///@}
///@name Protected Access
///@{
///@}
///@name Protected Inquiry
///@{
///@}
///@name Protected LifeCycle
///@{
///@}
private:
///@name Static Member Variables
///@{
///@}
///@name Member Variables
///@{
///@}
///@name Serialization
///@{
friend class Serializer;
/// Serializes the element by delegating to the Element base class.
void save(Serializer& rSerializer) const override
{
    KRATOS_SERIALIZE_SAVE_BASE_CLASS(rSerializer, Element );
}
/// Restores the element by delegating to the Element base class.
void load(Serializer& rSerializer) override
{
    KRATOS_SERIALIZE_LOAD_BASE_CLASS(rSerializer, Element);
}
///@}
///@name Private Operators
///@{
///@}
///@name Private Operations
///@{
///@}
///@name Private Access
///@{
///@}
///@name Private Inquiry
///@{
///@}
///@name Un accessible methods
///@{
/// Assignment operator.
DistanceCalculationElementSimplex & operator=(DistanceCalculationElementSimplex const& rOther);
/// Copy constructor.
DistanceCalculationElementSimplex(DistanceCalculationElementSimplex const& rOther);
///@}
}; // Class DistanceCalculationElementSimplex
///@}
///@name Type Definitions
///@{
///@}
///@name Input and output
///@{
/// input stream function
// NOTE(review): reading an element from a stream is not implemented; this
// operator is a placeholder that leaves rThis untouched and returns the stream.
template< unsigned int TDim >
inline std::istream& operator >>(std::istream& rIStream,
                                 DistanceCalculationElementSimplex<TDim>& rThis)
{
    return rIStream;
}
/// output stream function
// Prints the element info header followed by its data (PrintData is
// inherited from the Element base class).
template< unsigned int TDim >
inline std::ostream& operator <<(std::ostream& rOStream,
                                 const DistanceCalculationElementSimplex<TDim>& rThis)
{
    rThis.PrintInfo(rOStream);
    rOStream << std::endl;
    rThis.PrintData(rOStream);
    return rOStream;
}
///@}
///@} // Fluid Dynamics Application group
} // namespace Kratos.
#endif // KRATOS_DISTANCE_CALCULATION_ELEMENT_H_INCLUDED defined
| 6,600 |
391 | <gh_stars>100-1000
from ..config.config_settings import ConfigSettings, NoteSortingMethods
class NoteSorter:
    """Sorts exported notes according to the configured sort methods."""

    # Maps each NoteSortingMethods member to a key-extraction callable.
    # NO_SORTING is a special case which should_sort() filters out; it is
    # kept here for edge cases and for ease of testing. Its extractor
    # returns a constant so sorting stays stable, leaving the notes in
    # their previous order.
    sorting_definitions = {
        NoteSortingMethods.NO_SORTING: lambda note: 1,
        NoteSortingMethods.GUID: lambda note: note.anki_object.guid,
        NoteSortingMethods.FLAG: lambda note: note.anki_object.flags,
        NoteSortingMethods.TAG: lambda note: note.anki_object.tags,
        NoteSortingMethods.NOTE_MODEL_NAME: lambda note: note.anki_object._model["name"],
        NoteSortingMethods.NOTE_MODEL_ID: lambda note: note.anki_object._model["crowdanki_uuid"],
        NoteSortingMethods.FIELD1: lambda note: note.anki_object.fields[0],
        NoteSortingMethods.FIELD2: lambda note: note.anki_object.fields[1],
    }

    def __init__(self, config: ConfigSettings):
        # Ordered list of sort methods, plus whether to reverse the result.
        self.sort_methods = config.formatted_export_note_sort_methods
        self.is_reversed = config.export_notes_reverse_order

    def should_sort(self):
        """Sorting is performed unless the first method is NO_SORTING."""
        return self.sort_methods[0] != NoteSortingMethods.NO_SORTING

    def sort_notes(self, notes):
        """Return notes sorted per configuration.

        Reversal is applied independently of whether sorting happened.
        """
        if self.should_sort():
            notes = sorted(notes, key=self.get_sort_key)
        if self.is_reversed:
            notes = list(reversed(notes))
        return notes

    def get_sort_key(self, note):
        """Build a tuple key by applying each configured method's extractor."""
        return tuple(
            self.sorting_definitions[method](note)
            for method in self.sort_methods
        )
| 723 |
1,043 | <reponame>JeffMuchine/micro-server<filename>micro-error-codes/src/main/java/com/oath/micro/server/errors/FormattedErrorCode.java
package com.oath.micro.server.errors;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Immutable wrapper around an {@link ErrorCode}. Lombok generates a
 * package-private all-args constructor and a public getter for the field.
 */
@AllArgsConstructor(access = AccessLevel.PACKAGE)
@Getter
public class FormattedErrorCode {
    // The wrapped error code; exposed via the Lombok-generated getter.
    private final ErrorCode code;
}
| 129 |
1,543 | <reponame>linh22/Object-Detection-and-Tracking
import os

# Directory holding the labelme annotation files and their extension.
JSON_DIR = "/home/cai/Desktop/TableCapturer/json/"
JSON_EXT = ".json"

# Convert each zero-padded annotation file (000000.json .. 000098.json) to a
# labelme dataset by shelling out to the labelme_json_to_dataset tool.
# The old trailing `i += 1` inside the loop was a no-op (the `for` statement
# already advances `i` on every iteration) and has been removed.
for i in range(99):
    stem = str(i).zfill(6)
    os.system("labelme_json_to_dataset " + JSON_DIR + stem + JSON_EXT)
| 110 |
14,668 | <reponame>chromium/chromium
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ash/policy/enrollment/device_cloud_policy_initializer.h"
#include <memory>
#include <utility>
#include "ash/constants/ash_switches.h"
#include "base/bind.h"
#include "base/logging.h"
#include "base/values.h"
#include "chrome/browser/ash/login/login_pref_names.h"
#include "chrome/browser/ash/policy/core/device_cloud_policy_client_factory_ash.h"
#include "chrome/browser/ash/policy/core/device_cloud_policy_manager_ash.h"
#include "chrome/browser/ash/policy/core/device_cloud_policy_store_ash.h"
#include "chrome/browser/ash/policy/enrollment/enrollment_config.h"
#include "chrome/browser/ash/policy/server_backed_state/server_backed_device_state.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/net/system_network_context_manager.h"
#include "chrome/browser/policy/enrollment_status.h"
#include "chrome/common/chrome_content_client.h"
#include "chrome/common/pref_names.h"
#include "chromeos/cryptohome/cryptohome_parameters.h"
#include "chromeos/system/statistics_provider.h"
#include "chromeos/tpm/install_attributes.h"
#include "components/policy/core/common/cloud/cloud_policy_core.h"
#include "components/policy/core/common/cloud/device_management_service.h"
#include "components/prefs/pref_service.h"
namespace policy {
namespace {
// Returns the string stored under `key` in `dict`, or an empty string when
// the key is absent. `dict` must hold a dictionary Value.
std::string GetString(const base::Value& dict, base::StringPiece key) {
  DCHECK(dict.is_dict());
  if (const std::string* value = dict.FindStringKey(key))
    return *value;
  return std::string();
}
} // namespace
// Stores the injected dependencies; no work happens until Init() is called.
// All pointers are unowned and must outlive this object.
DeviceCloudPolicyInitializer::DeviceCloudPolicyInitializer(
    PrefService* local_state,
    DeviceManagementService* enterprise_service,
    chromeos::InstallAttributes* install_attributes,
    ServerBackedStateKeysBroker* state_keys_broker,
    DeviceCloudPolicyStoreAsh* policy_store,
    DeviceCloudPolicyManagerAsh* policy_manager,
    chromeos::system::StatisticsProvider* statistics_provider)
    : local_state_(local_state),
      enterprise_service_(enterprise_service),
      install_attributes_(install_attributes),
      state_keys_broker_(state_keys_broker),
      policy_store_(policy_store),
      policy_manager_(policy_manager),
      statistics_provider_(statistics_provider) {}
// Overrides the URL loader factory used by CreateClient(); test-only hook.
void DeviceCloudPolicyInitializer::SetSystemURLLoaderFactoryForTesting(
    scoped_refptr<network::SharedURLLoaderFactory> system_url_loader_factory) {
  system_url_loader_factory_for_testing_ = system_url_loader_factory;
}
// Callers must pair Init() with Shutdown() before destruction.
DeviceCloudPolicyInitializer::~DeviceCloudPolicyInitializer() {
  DCHECK(!is_initialized_);
}
// Starts observing the policy store and state key updates, then immediately
// attempts to start the cloud policy connection in case everything is
// already available.
void DeviceCloudPolicyInitializer::Init() {
  DCHECK(!is_initialized_);
  is_initialized_ = true;
  policy_store_->AddObserver(this);
  state_keys_update_subscription_ = state_keys_broker_->RegisterUpdateCallback(
      base::BindRepeating(&DeviceCloudPolicyInitializer::TryToStartConnection,
                          base::Unretained(this)));
  TryToStartConnection();
}
// Undoes Init(): stops observing the store and drops the state keys
// subscription. Must be called before destruction.
void DeviceCloudPolicyInitializer::Shutdown() {
  DCHECK(is_initialized_);
  policy_store_->RemoveObserver(this);
  state_keys_update_subscription_ = {};
  is_initialized_ = false;
}
// Computes the enrollment configuration prescribed for this device by
// combining the zero-touch command-line setting, OOBE completion state,
// install attributes, server-backed device state, local prefs and OEM
// machine flags. Forced enrollment modes take precedence over advertised
// ones; recovery is reported when registration was lost after enrollment.
EnrollmentConfig DeviceCloudPolicyInitializer::GetPrescribedEnrollmentConfig()
    const {
  EnrollmentConfig config;
  // Authentication through the attestation mechanism is controlled by a
  // command line switch that either enables it or forces it (meaning that
  // interactive authentication is disabled).
  switch (DeviceCloudPolicyManagerAsh::GetZeroTouchEnrollmentMode()) {
    case ZeroTouchEnrollmentMode::DISABLED:
      // Only use interactive authentication.
      config.auth_mechanism = EnrollmentConfig::AUTH_MECHANISM_INTERACTIVE;
      break;
    case ZeroTouchEnrollmentMode::ENABLED:
      // Use the best mechanism, which may include attestation if available.
      config.auth_mechanism = EnrollmentConfig::AUTH_MECHANISM_BEST_AVAILABLE;
      break;
    case ZeroTouchEnrollmentMode::FORCED:
      // Only use attestation to authenticate since zero-touch is forced.
      config.auth_mechanism = EnrollmentConfig::AUTH_MECHANISM_ATTESTATION;
      break;
    case ZeroTouchEnrollmentMode::HANDS_OFF:
      // Hands-off implies the same authentication method as Forced.
      config.auth_mechanism = EnrollmentConfig::AUTH_MECHANISM_ATTESTATION;
      break;
  }
  // If OOBE is done and we are not enrolled, make sure we only try interactive
  // enrollment.
  const bool oobe_complete =
      local_state_->GetBoolean(ash::prefs::kOobeComplete);
  if (oobe_complete &&
      config.auth_mechanism == EnrollmentConfig::AUTH_MECHANISM_BEST_AVAILABLE)
    config.auth_mechanism = EnrollmentConfig::AUTH_MECHANISM_INTERACTIVE;
  // If OOBE is done and we are enrolled, check for need to recover enrollment.
  // Enrollment recovery is not implemented for Active Directory.
  if (oobe_complete && install_attributes_->IsCloudManaged()) {
    // Regardless what mode is applicable, the enrollment domain is fixed.
    config.management_domain = install_attributes_->GetDomain();
    // Enrollment has completed previously and installation-time attributes
    // are in place. Enrollment recovery is required when the server
    // registration gets lost.
    if (local_state_->GetBoolean(prefs::kEnrollmentRecoveryRequired)) {
      LOG(WARNING) << "Enrollment recovery required according to pref.";
      if (statistics_provider_->GetEnterpriseMachineID().empty())
        LOG(WARNING) << "Postponing recovery because machine id is missing.";
      else
        config.mode = EnrollmentConfig::MODE_RECOVERY;
    }
    return config;
  }
  // OOBE is still running, or it is complete but the device hasn't been
  // enrolled yet. In either case, enrollment should take place if there's a
  // signal present that indicates the device should enroll.
  // Gather enrollment signals from various sources.
  const base::Value* device_state =
      local_state_->GetDictionary(prefs::kServerBackedDeviceState);
  std::string device_state_mode;
  std::string device_state_management_domain;
  absl::optional<bool> is_license_packaged_with_device;
  std::string license_type;
  if (device_state) {
    device_state_mode = GetString(*device_state, kDeviceStateMode);
    device_state_management_domain =
        GetString(*device_state, kDeviceStateManagementDomain);
    is_license_packaged_with_device =
        device_state->FindBoolPath(kDeviceStatePackagedLicense);
    license_type = GetString(*device_state, kDeviceStateLicenseType);
  }
  if (is_license_packaged_with_device) {
    config.is_license_packaged_with_device =
        is_license_packaged_with_device.value();
  } else {
    config.is_license_packaged_with_device = false;
  }
  // Translate the server-provided license type string into the enum.
  if (license_type == kDeviceStateLicenseTypeEnterprise) {
    config.license_type = EnrollmentConfig::LicenseType::kEnterprise;
  } else if (license_type == kDeviceStateLicenseTypeEducation) {
    config.license_type = EnrollmentConfig::LicenseType::kEducation;
  } else if (license_type == kDeviceStateLicenseTypeTerminal) {
    config.license_type = EnrollmentConfig::LicenseType::kTerminal;
  } else {
    config.license_type = EnrollmentConfig::LicenseType::kNone;
  }
  const bool pref_enrollment_auto_start_present =
      local_state_->HasPrefPath(prefs::kDeviceEnrollmentAutoStart);
  const bool pref_enrollment_auto_start =
      local_state_->GetBoolean(prefs::kDeviceEnrollmentAutoStart);
  const bool pref_enrollment_can_exit_present =
      local_state_->HasPrefPath(prefs::kDeviceEnrollmentCanExit);
  const bool pref_enrollment_can_exit =
      local_state_->GetBoolean(prefs::kDeviceEnrollmentCanExit);
  const bool oem_is_managed =
      GetMachineFlag(chromeos::system::kOemIsEnterpriseManagedKey, false);
  const bool oem_can_exit_enrollment = GetMachineFlag(
      chromeos::system::kOemCanExitEnterpriseEnrollmentKey, true);
  // Decide enrollment mode. Give precedence to forced variants.
  if (device_state_mode == kDeviceStateRestoreModeReEnrollmentEnforced) {
    config.mode = EnrollmentConfig::MODE_SERVER_FORCED;
    config.management_domain = device_state_management_domain;
  } else if (device_state_mode == kDeviceStateInitialModeEnrollmentEnforced) {
    config.mode = EnrollmentConfig::MODE_INITIAL_SERVER_FORCED;
    config.management_domain = device_state_management_domain;
  } else if (device_state_mode ==
             kDeviceStateRestoreModeReEnrollmentZeroTouch) {
    config.mode = EnrollmentConfig::MODE_ATTESTATION_SERVER_FORCED;
    config.auth_mechanism = EnrollmentConfig::AUTH_MECHANISM_BEST_AVAILABLE;
    config.management_domain = device_state_management_domain;
  } else if (device_state_mode == kDeviceStateInitialModeEnrollmentZeroTouch) {
    config.mode = EnrollmentConfig::MODE_ATTESTATION_INITIAL_SERVER_FORCED;
    config.auth_mechanism = EnrollmentConfig::AUTH_MECHANISM_BEST_AVAILABLE;
    config.management_domain = device_state_management_domain;
  } else if (pref_enrollment_auto_start_present && pref_enrollment_auto_start &&
             pref_enrollment_can_exit_present && !pref_enrollment_can_exit) {
    config.mode = EnrollmentConfig::MODE_LOCAL_FORCED;
  } else if (oem_is_managed && !oem_can_exit_enrollment) {
    config.mode = EnrollmentConfig::MODE_LOCAL_FORCED;
  } else if (oobe_complete) {
    // If OOBE is complete, don't return advertised modes as there's currently
    // no way to make sure advertised enrollment only gets shown once.
    config.mode = EnrollmentConfig::MODE_NONE;
  } else if (device_state_mode ==
             kDeviceStateRestoreModeReEnrollmentRequested) {
    config.mode = EnrollmentConfig::MODE_SERVER_ADVERTISED;
    config.management_domain = device_state_management_domain;
  } else if (pref_enrollment_auto_start_present && pref_enrollment_auto_start) {
    config.mode = EnrollmentConfig::MODE_LOCAL_ADVERTISED;
  } else if (oem_is_managed) {
    config.mode = EnrollmentConfig::MODE_LOCAL_ADVERTISED;
  }
  return config;
}
// CloudPolicyStore::Observer: a freshly loaded store may unblock the
// connection, so retry.
void DeviceCloudPolicyInitializer::OnStoreLoaded(CloudPolicyStore* store) {
  TryToStartConnection();
}
// CloudPolicyStore::Observer: store errors are ignored here; the connection
// attempt simply stays pending.
void DeviceCloudPolicyInitializer::OnStoreError(CloudPolicyStore* store) {
  // Do nothing.
}
// Builds the CloudPolicyClient used for the device policy connection, using
// the test URL loader factory when one has been injected.
std::unique_ptr<CloudPolicyClient> DeviceCloudPolicyInitializer::CreateClient(
    DeviceManagementService* device_management_service) {
  // DeviceDMToken callback is empty here because for device policies this
  // DMToken is already provided in the policy fetch requests.
  return CreateDeviceCloudPolicyClientAsh(
      statistics_provider_, device_management_service,
      system_url_loader_factory_for_testing_
          ? system_url_loader_factory_for_testing_
          : g_browser_process->shared_url_loader_factory(),
      CloudPolicyClient::DeviceDMTokenCallback());
}
// Starts the cloud policy connection once all prerequisites are met: the
// store is initialized with policy and (except on reven) state keys are
// available. Safe to call repeatedly; no-op for Active Directory devices.
void DeviceCloudPolicyInitializer::TryToStartConnection() {
  if (install_attributes_->IsActiveDirectoryManaged()) {
    // This will go away once ChromeAd deprecation is completed.
    return;
  }
  // Currently reven devices don't support sever-backed state keys, but they
  // also don't support FRE/AutoRE so don't block initialization of device
  // policy on state keys being available on reven.
  // TODO(b/208705225): Remove this special case when reven supports state keys.
  const bool allow_init_without_state_keys = ash::switches::IsRevenBranding();
  // TODO(b/181140445): If we had a separate state keys upload request to DM
  // Server we could drop the `state_keys_broker_->available()` requirement.
  if (policy_store_->is_initialized() && policy_store_->has_policy() &&
      (allow_init_without_state_keys || state_keys_broker_->available())) {
    StartConnection(CreateClient(enterprise_service_));
  }
}
// Hands the client to the policy manager unless a connection already exists.
void DeviceCloudPolicyInitializer::StartConnection(
    std::unique_ptr<CloudPolicyClient> client) {
  if (!policy_manager_->IsConnected()) {
    policy_manager_->StartConnection(std::move(client), install_attributes_);
  }
}
// Reads a boolean machine flag from the statistics provider; falls back to
// `default_value` when the flag is absent or cannot be read.
bool DeviceCloudPolicyInitializer::GetMachineFlag(const std::string& key,
                                                  bool default_value) const {
  bool value = default_value;
  if (statistics_provider_->GetMachineFlag(key, &value))
    return value;
  return default_value;
}
} // namespace policy
| 4,061 |
356 | <filename>app/src/main/java/com/idrv/coach/bean/Visitor.java
package com.idrv.coach.bean;
/**
* time:2016/5/23
* description:
*
* @author sunjianfei
*/
/**
 * Plain data holder for a visitor profile: a display name and an avatar
 * image URL. Field names mirror the upstream payload keys.
 */
public class Visitor {
    // Visitor's display name.
    String nickname;
    // URL of the visitor's avatar image.
    String headimgurl;
    public String getNickname() {
        return nickname;
    }
    public void setNickname(String nickname) {
        this.nickname = nickname;
    }
    public String getHeadimgurl() {
        return headimgurl;
    }
    public void setHeadimgurl(String headimgurl) {
        this.headimgurl = headimgurl;
    }
}
| 224 |
5,938 | <gh_stars>1000+
#!/usr/bin/env python
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START sendgrid-imp]
import sendgrid
from sendgrid.helpers.mail import Mail
# [END sendgrid-imp]
import webapp2
# make a secure connection to SendGrid
# [START sendgrid-config]
# Placeholder credentials; replace with real values before deploying.
SENDGRID_API_KEY = 'your-sendgrid-api-key'
SENDGRID_SENDER = 'your-sendgrid-sender'
# [END sendgrid-config]
def send_simple_message(recipient):
    """Send a fixed test email to ``recipient`` via the SendGrid API.

    Returns the SendGrid API response object (status_code, body, headers).
    """
    # [START sendgrid-send]
    # NOTE(review): '{},'.format(recipient) leaves a trailing comma in the
    # recipient string — presumably tolerated by the Mail helper, but verify;
    # plain `recipient` looks like the intended value.
    message = Mail(
        from_email=SENDGRID_SENDER,
        to_emails='{},'.format(recipient),
        subject='This is a test email',
        html_content='<strong>Example</strong> message.')

    sg = sendgrid.SendGridAPIClient(SENDGRID_API_KEY)
    response = sg.send(message)
    return response
    # [END sendgrid-send]
class MainPage(webapp2.RequestHandler):
    """Serves a minimal HTML form that posts a recipient address to /send."""

    def get(self):
        # Serve the form as raw HTML; the form target matches the /send route.
        self.response.content_type = 'text/html'
        self.response.write("""
<!doctype html>
<html><body>
<form action="/send" method="POST">
<input type="text" name="recipient" placeholder="Enter recipient email">
<input type="submit" name="submit" value="Send simple email">
</form>
</body></html>
""")
class SendEmailHandler(webapp2.RequestHandler):
    """Handles the form POST: sends the email and mirrors SendGrid's reply."""

    def post(self):
        recipient = self.request.get('recipient')
        sg_response = send_simple_message(recipient)
        # Propagate SendGrid's status code and body back to the browser.
        self.response.set_status(sg_response.status_code)
        self.response.write(sg_response.body)
# WSGI application wiring: form page at /, send action at /send.
app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/send', SendEmailHandler)
], debug=True)
| 727 |
455 | <reponame>lebrice/RoBO
import logging
import numpy as np
from scipy.stats import norm
from robo.acquisition_functions.base_acquisition import BaseAcquisitionFunction
logger = logging.getLogger(__name__)
class PI(BaseAcquisitionFunction):

    def __init__(self, model, par=0.0):
        r"""
        Probability of Improvement acquisition function. It solves

        :math:`PI(X) := \mathbb{P}\left( f(\mathbf{X^+}) - f_{t+1}(\mathbf{X}) > \xi\right)`,

        where :math:`f(X^+)` is the best input found so far.

        Parameters
        ----------
        model: Model object
            Current belief of your objective function.
            If you want to calculate derivatives the model should also
            support
            - predictive_gradients(X_test)
        par: float
            Controls the balance between exploration
            and exploitation of the acquisition function.
        """
        super(PI, self).__init__(model)
        self.par = par

    def compute(self, X_test, derivative=False):
        """
        Computes the PI value and, optionally, its derivatives.

        Parameters
        ----------
        X_test: np.ndarray(1, D)
            Input point at which to evaluate the acquisition function;
            shape (N, D) with N points of dimensionality D.
        derivative: Boolean
            If True, the derivative at X_test is returned as well.

        Returns
        -------
        np.ndarray(1,1)
            Probability of Improvement of X_test
        np.ndarray(1,D)
            Derivative of Probability of Improvement at X_test
            (only if derivative=True)
        """
        mean, variance = self.model.predict(X_test)
        _, incumbent_value = self.model.get_incumbent()

        std = np.sqrt(variance)
        # Standardized improvement over the incumbent, shifted by par.
        z = (incumbent_value - mean - self.par) / std
        acquisition_value = norm.cdf(z)

        if not derivative:
            return acquisition_value

        dmdx, ds2dx = self.model.predictive_gradients(X_test)
        dmdx = dmdx[0]
        ds2dx = ds2dx[0][:, None]
        dsdx = ds2dx / (2 * std)
        # Chain rule through the standard normal CDF.
        gradient = ((-norm.pdf(z) / std) * (dmdx + dsdx * z)).T
        return acquisition_value, gradient
| 1,044 |
428 | <gh_stars>100-1000
/**
* Copyright 2008 - 2019 The Loon Game Engine Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loon
* @author cping
* @email:<EMAIL>
* @version 0.5
*/
package org.test;
import loon.Stage;
import loon.action.ActionBind;
import loon.action.ActionListener;
import loon.action.FollowTo;
import loon.action.MoveTo;
import loon.action.collision.CollisionWorld;
import loon.action.map.Field2D;
import loon.action.sprite.AnimatedEntity;
import loon.action.sprite.AnimatedEntity.PlayIndex;
import loon.events.Touched;
import loon.action.sprite.Entity;
public class CollisionWorldTest extends Stage {
    @Override
    public void create() {
        // Create a collision world (this class takes care of position updates)
        final CollisionWorld world = new CollisionWorld(this);
        // Build the sprite by splitting the image into 32x32 frames, placed
        // at (200, 60), displayed 32 wide by 32 high
        final AnimatedEntity hero = new AnimatedEntity("assets/rpg/hero.gif", 32, 32, 200, 60, 32, 32);
        // Play the animation at 220ms per frame
        final long[] frames = { 220, 220, 220 };
        // Frame playback order for the four directions left/right/down/up
        // (i.e. the concrete frames to play for each direction)
        final int[] leftIds = { 3, 4, 5 };
        final int[] rightIds = { 6, 7, 8 };
        final int[] downIds = { 0, 1, 2 };
        final int[] upIds = { 9, 10, 11 };
        // Bind a name to each frame-index sequence
        hero.setPlayIndex("left", PlayIndex.at(frames, leftIds));
        hero.setPlayIndex("right", PlayIndex.at(frames, rightIds));
        hero.setPlayIndex("down", PlayIndex.at(frames, downIds));
        hero.setPlayIndex("up", PlayIndex.at(frames, upIds));
        // Play the animation frames bound to "down"
        hero.animate("down");
        // Add the hero to the collision world
        world.add(hero);
        // Add balls to the collision world
        world.add(new Entity("ball.png", 66, 66));
        world.add(new Entity("ball.png", 266, 166));
        world.add(new Entity("ball.png", 155, 100));
        Entity follow = new Entity("ball.png", 266, 266);
        FollowTo followeve = new FollowTo(hero);
        followeve.setCollisionWorld(world);
        // Make this entity follow the hero
        follow.selfAction().event(followeve).start();
        world.add(follow);
        up(new Touched() {
            @Override
            public void on(final float x, final float y) {
                // Move to the touched position, 8-direction movement, speed 8
                final MoveTo move = new MoveTo(x, y, true, 8);
                // No pathfinding, plain movement
                move.setMoveByMode(true);
                // Inject the collision world so the tween animation resolves
                // collisions automatically
                move.setCollisionWorld(world);
                // Listen for movement events
                move.setActionListener(new ActionListener() {
                    @Override
                    public void stop(ActionBind o) {
                    }
                    @Override
                    public void start(ActionBind o) {
                    }
                    @Override
                    public void process(ActionBind o) {
                        // Switch the walking animation whenever the movement
                        // direction changes
                        if (move.isDirectionUpdate()) {
                            switch (move.getDirection()) {
                            case Field2D.TUP:
                            case Field2D.UP:
                                hero.animate("up");
                                break;
                            default:
                            case Field2D.TDOWN:
                            case Field2D.DOWN:
                                hero.animate("down");
                                break;
                            case Field2D.TLEFT:
                            case Field2D.LEFT:
                                hero.animate("left");
                                break;
                            case Field2D.TRIGHT:
                            case Field2D.RIGHT:
                                hero.animate("right");
                                break;
                            }
                        }
                    }
                });
                // Start the character's tween action (begin moving)
                hero.selfAction().event(move).start();
            }
        });
        // Dispose of the collision world when this Screen is closed
        putRelease(world);
        add(MultiScreenTest.getBackButton(this, 1));
    }
}
| 1,785 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.jumpto.quicksearch;
import org.netbeans.modules.jumpto.symbol.GoToSymbolAction;
import org.netbeans.spi.jumpto.symbol.SymbolDescriptor;
import org.netbeans.spi.quicksearch.SearchProvider;
import org.netbeans.spi.quicksearch.SearchRequest;
import org.netbeans.spi.quicksearch.SearchResponse;
import org.openide.filesystems.FileObject;
import org.openide.util.NbBundle;
/**
*
* @author <NAME>
*/
/**
 * Quick Search provider that matches the query against Java symbol names
 * (via {@link GoToSymbolWorker}) and offers each hit as a navigable result.
 */
public class GoToSymbolProvider implements SearchProvider {

    /** Worker computing the current query; replaced (and cancelled) on every new request. */
    private GoToSymbolWorker worker;

    /**
     * Evaluates a Quick Search request. Any previously running worker is
     * cancelled so that only the most recent query is served.
     */
    public void evaluate(SearchRequest request, SearchResponse response) {
        String text = removeNonJavaChars(request.getText());
        if (text.isEmpty()) {
            return;
        }
        GoToSymbolWorker local;
        synchronized (this) {
            if (worker != null) {
                worker.cancel();
            }
            worker = new GoToSymbolWorker(text);
            local = worker;
        }
        local.run();
        for (SymbolDescriptor td : local.getTypes()) {
            String displayHint = td.getFileDisplayPath();
            String htmlDisplayName = escapeLtGt(td.getSymbolName()) + " "
                    + NbBundle.getMessage(GoToSymbolAction.class, "MSG_DeclaredIn", escapeLtGt(td.getOwnerName()));
            final String projectName = td.getProjectName();
            if (projectName != null && !projectName.isEmpty()) {
                htmlDisplayName = String.format(
                        "%s [%s]", //NOI18N
                        htmlDisplayName,
                        projectName);
            }
            // addResult returns false once the response cannot accept more results.
            if (!response.addResult(new GoToSymbolCommand(td),
                    htmlDisplayName,
                    displayHint,
                    null)) {
                break;
            }
        }
    }

    /**
     * Escapes '&lt;' and '&gt;' so symbol names render literally inside the HTML
     * display name. Bug fix: the previous implementation replaced the characters
     * with themselves ({@code replaceAll("<", "<")}), i.e. it was a no-op and raw
     * angle brackets leaked into the HTML. Uses {@code String.replace} (literal)
     * instead of {@code replaceAll} (regex) — no pattern compilation needed.
     */
    private static String escapeLtGt(String input) {
        return input.replace("<", "&lt;").replace(">", "&gt;"); // NOI18N
    }

    /** Opens the selected symbol in the editor when the user picks the result. */
    private static class GoToSymbolCommand implements Runnable {
        private final SymbolDescriptor command;

        public GoToSymbolCommand(SymbolDescriptor command) {
            this.command = command;
        }

        public void run() {
            command.open();
        }
    }

    /**
     * Keeps only characters valid in Java identifiers, dropping everything
     * else, normalizing free-form Quick Search input into a symbol prefix.
     */
    private static String removeNonJavaChars(String text) {
        StringBuilder sb = new StringBuilder(text.length());
        for (int i = 0; i < text.length(); i++) {
            char c = text.charAt(i);
            if (Character.isJavaIdentifierPart(c)) {
                sb.append(c);
            }
        }
        return sb.toString();
    }
}
| 1,492 |
2,754 | <filename>CheckTool/feature_detector.h<gh_stars>1000+
/*
* Copyright (c) 2020 Intel Corporation
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef HAXM_CHECK_FEATURE_DETECTOR_H_
#define HAXM_CHECK_FEATURE_DETECTOR_H_
#include <string>
#include "common.h"
#include "cpuid.h"
#include "os.h"
namespace haxm {
namespace check_util {
// Detects whether the host CPU and OS satisfy HAXM's hardware-acceleration
// requirements (Intel CPU, VMX/EPT support, NX, Hyper-V disabled, etc.).
class FeatureDetector {
public:
    FeatureDetector();
    ~FeatureDetector();
    // Runs all individual checks below and folds them into one overall result.
    CheckResult Detect() const;
    // Prints a human-readable report of each check's outcome.
    void Print() const;
private:
    // Each Check* method returns a CheckResult and, where an out-parameter is
    // declared, optionally reports the detected detail through it.
    CheckResult CheckCpuVendor(std::string* vendor = nullptr) const;
    CheckResult CheckLongModeSupported() const;  // Long Mode = Intel64
    CheckResult CheckVmxSupported() const;
    CheckResult CheckVmxEnabled() const;
    CheckResult CheckEptSupported() const;
    CheckResult CheckNxSupported() const;
    CheckResult CheckNxEnabled() const;
    CheckResult CheckHyperVDisabled() const;
    CheckResult CheckOsVersion(OsVersion* os_ver = nullptr) const;
    CheckResult CheckOsArchitecture(OsArchitecture* os_arch = nullptr) const;
    CheckResult CheckGuestOccupied(uint32_t* occupied_count = nullptr) const;
    // Pretty-printers used by Print().
    static std::string ToString(CheckResult res);
    static std::string ToString(OsType os_type);
    static std::string ToString(OsArchitecture os_arch);
    Cpuid cpuid_;
    Os* os_;  // NOTE(review): raw pointer — ownership presumably managed by the ctor/dtor pair; confirm in the .cpp.
};
} // namespace check_util
} // namespace haxm
#endif // HAXM_CHECK_FEATURE_DETECTOR_H_
| 909 |
425 | <reponame>jjzhang166/doubanfm-qt<gh_stars>100-1000
#include "controlpanel.h"
#include "ui_controlpanel.h"
#include <QLinearGradient>
#include <QPainter>
#include <QPolygon>
#include <QRegion>
#include <QDebug>
#include <QTime>
#include <QSettings>
#include <QNetworkReply>
#include "mainwidget.h"
#include <QDesktopServices>
#include "libs/douban_types.h"
#include "settingdialog.h"
// Builds the main control panel: sets up the UI, restores saved settings,
// shapes the two edge toggle buttons, and wires the album-art fetcher,
// Douban player signals and lyric retrieval.
ControlPanel::ControlPanel(QWidget *parent) :
    QWidget(parent),
    ui(new Ui::ControlPanel),
    doubanfm(DoubanFM::getInstance()),
    player(DoubanPlayer::getInstance()),
    imgmgr(new QNetworkAccessManager(this)),
    lyricGetter(new LyricGetter(this)),
    settingDialog(new SettingDialog(this))
{
    ui->setupUi(this);
    loadConfig();
    // Shape of channelButton and lyricButton: trapezoid masks so the two
    // buttons form the slanted top/bottom edges of the panel.
    QPolygon polygonTop, polygonBottom;
    polygonTop.setPoints(4, 0, 0, 131, 0, 115, 16, 16, 16);
    polygonBottom.setPoints(4, 16, 0, 115, 0, 131, 16, 0, 16);
    QRegion regionTop(polygonTop);
    QRegion regionBottom(polygonBottom);
    ui->channelButton->setMask(regionTop);
    ui->lyricButton->setMask(regionBottom);
    // Album-art download finished: decode the bytes and hand them to the album widget.
    connect(imgmgr, &QNetworkAccessManager::finished, [this] (QNetworkReply *reply) {
        if (QNetworkReply::NoError != reply->error()) {
            qDebug() << "Err: Album image receive error";
            reply->deleteLater();
            return;
        }
        const QByteArray data(reply->readAll());
        if (!data.size())
            qDebug() << Q_FUNC_INFO << "received pixmap looks like nothing";
        QImage image = QImage::fromData(data);
        /*
        if (player.playlist()->currentIndex() >= 0) {
            int index = player.playlist()->currentIndex();
            if (notify) {
                notify->close();
                delete notify;
                notify = nullptr;
            }
            notify = new Notification(songs[index].artist, songs[index].title);
            if (data.size() > 0) {
                iiibiiay notify_icon_data = iiibiiay::fromImage(image);
                notify->setHint("icon_data",
                    QVariant(qDBusRegisterMetaType<iiibiiay>(), &notify_icon_data));
            }
            notify->setAutoDelete(false);
            notify->show();
        }
        */
        ui->albumImg->setAlbumImage(image);
        reply->deleteLater();
    });
    //player.setPlaylist(new QMediaPlaylist(&player));
    // New track started: refresh the labels, request the large ("lpic")
    // cover variant, and sync the like-button state.
    connect(&player, &DoubanPlayer::currentSongChanged, [=] (const DoubanFMSong& song) {
        setArtistName(song.artist);
        setSongName(song.title);
        setAlbumName(song.albumtitle);
        QString mod_url = song.picture;
        mod_url.replace("mpic", "lpic");
        imgmgr->get(QNetworkRequest(QUrl(mod_url)));
        if (song.like) {
            ui->likeButton->setChecked(true);
        }
        else {
            ui->likeButton->setChecked(false);
        }
    });
    setArtistName("Loading");
    connect(&player, SIGNAL(positionChanged(qint64)), ui->volumeTime, SLOT(setTick(qint64)));
    // Keep the seek bar in sync with playback progress.
    connect(&player, &DoubanPlayer::positionChanged, [this] (qint64 tick) {
        ui->seeker->setValue((qreal) tick / player.duration() * ui->seeker->maximum());
    });
    //connect(player, &QMediaPlayer::volumeChanged, [this] (int vol) {
    //    qDebug() << vol;
    //});
    // Server acknowledged a rate/unrate request: reflect the new state on the button.
    connect(&player, &DoubanPlayer::receivedRateSong, [this] (bool succeed) {
        if (!succeed) return;
        if (player.currentSong().like) {
            ui->likeButton->setChecked(true);
        }
        else {
            ui->likeButton->setChecked(false);
        }
    });
    connect(ui->volumeTime, &VolumeTimePanel::volumeChanged, [this] (int value) {
        this->player.setVolume(value);
    });
    /*if (player->channel() == -3) {
        if (!doubanfm->getUser())
            player->setChannel(1);
    }*/
    connect(lyricGetter, &LyricGetter::gotLyricError, [this] (const QString& ) {
        /*if (ui->lyricWidget->isVisible())
            emit ui->lyricWidgetTriggerRight->enter();*/
    });
    //ui->lyricWidget->setVisible(false);
    // Clicking the album art opens the album's page on douban.com.
    connect(ui->albumImg, &AlbumWidget::clicked, [this] () {
        QDesktopServices::openUrl(QUrl("http://www.douban.com" + player.currentSong().album));
    });
    // Enable/disable the song-control buttons as the player allows.
    connect(&player, &DoubanPlayer::canControlChanged, [=] (bool can) {
        ui->nextButton->setEnabled(can);
        ui->trashButton->setEnabled(can);
        ui->likeButton->setEnabled(can);
    });
    /*
    connect(ui->lyricWidgetTriggerLeft, &LyricWidgetTriggerLeft::enter, [this] () {
        QPropertyAnimation *anim = new QPropertyAnimation(ui->albumImg, "geometry");
        anim->setDuration(400);
        anim->setStartValue(ui->albumImg->geometry());
        QRect endval(this->geometry().width() - ui->albumImg->geometry().width(),
            ui->albumImg->geometry().y(),
            ui->albumImg->geometry().width(),
            ui->albumImg->geometry().height());
        anim->setEndValue(endval);
        anim->setEasingCurve(QEasingCurve::OutCubic);
        ui->album->setVisible(false);
        ui->artist->setVisible(false);
        ui->volumeTime->setVisible(false);
        ui->userLogin->setVisible(false);
        ui->trashButton->setVisible(false);
        ui->songName->setVisible(false);
        ui->seeker->setVisible(false);
        ui->pauseButton->setVisible(false);
        ui->nextButton->setVisible(false);
        ui->likeButton->setVisible(false);
        connect(anim, &QPropertyAnimation::finished, [this] () {
            ui->lyricWidget->setVisible(true);
        });
        anim->start(QPropertyAnimation::DeleteWhenStopped);
    });
    connect(ui->lyricWidgetTriggerRight, &LyricWidgetTriggerRight::enter, [this] () {
        QPropertyAnimation *anim = new QPropertyAnimation(ui->albumImg, "geometry");
        anim->setDuration(400);
        anim->setStartValue(ui->albumImg->geometry());
        QRect endval(0,
            ui->albumImg->geometry().y(),
            ui->albumImg->geometry().width(),
            ui->albumImg->geometry().height());
        anim->setEndValue(endval);
        anim->setEasingCurve(QEasingCurve::OutCubic);
        ui->lyricWidget->setVisible(false);
        connect(anim, &QPropertyAnimation::finished, [this] () {
            ui->album->setVisible(true);
            ui->artist->setVisible(true);
            ui->volumeTime->setVisible(true);
            ui->userLogin->setVisible(true);
            ui->trashButton->setVisible(true);
            ui->songName->setVisible(true);
            ui->seeker->setVisible(true);
            ui->pauseButton->setVisible(true);
            ui->nextButton->setVisible(true);
            ui->likeButton->setVisible(true);
        });
        anim->start(QPropertyAnimation::DeleteWhenStopped);
    });
    */
}
// Persists user settings and releases the manually-managed helpers.
// NOTE(review): saveConfig() runs after `ui` is already deleted; it only
// touches QSettings and the player, so this works, but calling it first
// would be safer — confirm and consider reordering.
ControlPanel::~ControlPanel()
{
    delete ui;
    saveConfig();
    delete lyricGetter;
    delete settingDialog;
}
void ControlPanel::setSongName(const QString &name) {
ui->songName->setText(QString("<font color='#04aaa1'>")
+ name + QString("</font>"));
}
void ControlPanel::setArtistName(const QString &name) {
ui->artist->setText(QString("<font color='grey'>")
+ name + QString("</font>"));
}
// Restores volume, bitrate and (conditionally) the channel from QSettings.
void ControlPanel::loadConfig() {
    QSettings settings("QDoubanFM", "QDoubanFM");
    settings.beginGroup("General");
    player.setVolume(settings.value("volume", 100).toInt());
    qint32 _channel = settings.value("channel", 1).toInt();
    player.setKbps(settings.value("kbps", 64).toInt());
    settings.endGroup();
    // Only the private channel (-3) is restored, and only when the user is
    // logged in.
    // NOTE(review): ordinary saved channels are never re-applied here —
    // confirm whether another component restores them, or whether the
    // condition was meant to be `_channel != -3 || doubanfm.hasLogin()`.
    if (_channel == -3 && doubanfm.hasLogin()) {
        player.setChannel(_channel);
    }
}
void ControlPanel::saveConfig() {
QSettings settings("QDoubanFM", "QDoubanFM");
settings.beginGroup("General");
settings.setValue("channel", player.channel());
settings.setValue("volume", player.volume());
settings.setValue("kbps", player.kbps());
settings.endGroup();
}
// "Next" pressed: reset the progress seeker and ask the player to skip ahead.
void ControlPanel::on_nextButton_clicked()
{
    ui->seeker->setValue(0);
    player.next();
}
// "Pause" pressed: delegate to the player.
// NOTE(review): this handler only ever pauses — confirm that resuming is
// handled elsewhere (e.g. the button being re-wired or toggled in the .ui).
void ControlPanel::on_pauseButton_clicked()
{
    player.pause();
}
// Toggles the "like" state of the currently playing track: unrate it when it
// is already liked, otherwise rate it. The button's checked state is updated
// later, when the player emits receivedRateSong.
void ControlPanel::on_likeButton_clicked()
{
    if (player.currentSong().like) {
        player.unrateCurrentSong();
    } else {
        player.rateCurrentSong();
    }
}
// "Trash" pressed: forwards to DoubanPlayer::trashCurrentSong().
void ControlPanel::on_trashButton_clicked()
{
    player.trashCurrentSong();
}
void ControlPanel::setAlbumName(const QString &name) {
ui->album->setText(QString("<font color=grey>< ") + name + QString(" ></font>"));
}
// Resumes playback. The commented-out block is an abandoned volume fade-in
// effect, kept for reference.
void ControlPanel::play() {
    /*QPropertyAnimation *fadein = new QPropertyAnimation(&player, "volume");
    fadein->setDuration(1000);
    fadein->setStartValue(player.volume());
    player.play();
    fadein->setEndValue(volume);
    fadein->start(QPropertyAnimation::DeleteWhenStopped);
    isPaused = false;*/
    player.play();
}
// Pauses playback. The commented-out block is the matching abandoned volume
// fade-out effect, kept for reference.
void ControlPanel::pause() {
    /*QPropertyAnimation *fadeout = new QPropertyAnimation(&player, "volume");
    fadeout->setDuration(1000);
    fadeout->setStartValue(player.volume());
    volume = player.volume();
    fadeout->setEndValue(0);
    connect(fadeout, &QPropertyAnimation::finished, [this] () {
        player.pause();
    });
    fadeout->start(QPropertyAnimation::DeleteWhenStopped);
    isPaused = true;*/
    player.pause();
}
// "Settings" pressed: shows the settings dialog.
void ControlPanel::on_settingButton_clicked()
{
    settingDialog->show();
}
// Relays the channel-button toggle state to whoever owns the channel panel:
// checked -> openChannelPanel, unchecked -> closeChannelPanel.
void ControlPanel::on_channelButton_clicked(bool checked)
{
    if (!checked) {
        emit closeChannelPanel();
        return;
    }
    emit openChannelPanel();
}
// Relays the lyric-button toggle state to whoever owns the lyric panel:
// checked -> openLyricPanel, unchecked -> closeLyricPanel.
void ControlPanel::on_lyricButton_clicked(bool checked)
{
    if (!checked) {
        emit closeLyricPanel();
        return;
    }
    emit openLyricPanel();
}
| 4,238 |
1,641 | import pytest
from eth._utils.numeric import (
get_highest_bit_index,
)
@pytest.mark.parametrize(
    'value,expected',
    (
        (1, 0),      # 0b1        -> bit 0
        (2, 1),      # 0b10       -> bit 1
        (3, 1),      # 0b11       -> highest set bit is still bit 1
        (255, 7),    # 0b11111111 -> bit 7
        (256, 8),    # 0b100000000 -> bit 8
    )
)
def test_get_highest_bit_index(value, expected):
    # get_highest_bit_index returns the zero-based index of the most
    # significant set bit, i.e. floor(log2(value)) for positive integers.
    actual = get_highest_bit_index(value)
    assert actual == expected
| 168 |
322 | <reponame>phenixmzy/apache-eagle-0.5<filename>eagle-core/eagle-query/eagle-client-base/src/main/java/org/apache/eagle/service/client/impl/BatchSender.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.service.client.impl;
import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;
import org.apache.eagle.service.client.IEagleServiceClient;
import org.apache.eagle.service.client.EagleServiceClientException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
public class BatchSender implements Closeable {
private final static Logger LOG = LoggerFactory.getLogger(BatchSender.class);
private final List<TaggedLogAPIEntity> entityBucket;
private final IEagleServiceClient client;
protected int getBatchSize() {
return batchSize;
}
protected void setBatchSize(int batchSize) {
if(batchSize<0) throw new IllegalArgumentException("batch size should be "+batchSize);
this.batchSize = batchSize;
}
private int batchSize;
public BatchSender(IEagleServiceClient client, int batchSize){
this.setBatchSize(batchSize);
this.client = client;
this.entityBucket = new LinkedList<TaggedLogAPIEntity>();
}
public BatchSender send(TaggedLogAPIEntity entity) throws IOException, EagleServiceClientException {
this.entityBucket.add(entity);
if(this.entityBucket.size()>=this.batchSize){
flush();
}
return this;
}
public BatchSender send(List<TaggedLogAPIEntity> entities) throws IOException, EagleServiceClientException {
this.entityBucket.addAll(entities);
if(this.entityBucket.size()>= this.batchSize){
flush();
}
return this;
}
public void flush() throws IOException, EagleServiceClientException {
if(this.entityBucket.size() == 0 && LOG.isDebugEnabled()){
LOG.debug("No entities to flush");
return;
}
LOG.info("Writing "+this.entityBucket.size()+" entities");
GenericServiceAPIResponseEntity<String> response = this.client.create(this.entityBucket);
if(!response.isSuccess()){
LOG.error("Got service exception: "+response.getException());
throw new IOException("Service exception"+response.getException());
}else{
this.entityBucket.clear();
}
}
@Override
public void close() throws IOException {
try {
flush();
} catch (EagleServiceClientException e) {
throw new IOException(e);
}
}
} | 1,259 |
399 | <reponame>ytai/ioio<filename>IOIOLibCore/src/main/java/ioio/lib/impl/Board.java
/*
* Copyright 2011 <NAME>. All rights reserved.
*
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <NAME>HI OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied.
*/
package ioio.lib.impl;
// Enumerates the supported IOIO board SKUs and ties each one to the hardware
// capability description (pin functions, module counts, special pins) of the
// board revision it ships with.
enum Board {
    SPRK0015(Hardware.IOIO0002),
    SPRK0016(Hardware.IOIO0003),
    MINT0010(Hardware.IOIO0003),
    SPRK0020(Hardware.IOIO0004);

    // Capability description for this board revision.
    public final Hardware hardware;

    Board(Hardware hw) {
        hardware = hw;
    }

    // Describes one hardware revision: which pins support peripheral output,
    // peripheral input and analog input; how many PWM/UART/SPI modules exist;
    // and which pins carry the TWI and ICSP signals.
    static class Hardware {
        // Per-pin capability map for IOIO0002/IOIO0003 boards.
        // Columns: peripheral out, peripheral in, analog in. Row index = pin number.
        private static final boolean[][] MAP_IOIO0002_IOIO0003 = {
                // p. out p. in analog
                {true, true, false}, // 0
                {false, false, false}, // 1
                {false, false, false}, // 2
                {true, true, false}, // 3
                {true, true, false}, // 4
                {true, true, false}, // 5
                {true, true, false}, // 6
                {true, true, false}, // 7
                {false, false, false}, // 8
                {false, true, false}, // 9
                {true, true, false}, // 10
                {true, true, false}, // 11
                {true, true, false}, // 12
                {true, true, false}, // 13
                {true, true, false}, // 14
                {false, false, false}, // 15
                {false, false, false}, // 16
                {false, false, false}, // 17
                {false, false, false}, // 18
                {false, false, false}, // 19
                {false, false, false}, // 20
                {false, false, false}, // 21
                {false, false, false}, // 22
                {false, false, false}, // 23
                {false, false, false}, // 24
                {false, false, false}, // 25
                {false, false, false}, // 26
                {true, true, false}, // 27
                {true, true, false}, // 28
                {true, true, false}, // 29
                {true, true, false}, // 30
                {true, true, true}, // 31
                {true, true, true}, // 32
                {false, false, true}, // 33
                {true, true, true}, // 34
                {true, true, true}, // 35
                {true, true, true}, // 36
                {true, true, true}, // 37
                {true, true, true}, // 38
                {true, true, true}, // 39
                {true, true, true}, // 40
                {false, false, true}, // 41
                {false, false, true}, // 42
                {false, false, true}, // 43
                {false, false, true}, // 44
                {true, true, true}, // 45
                {true, true, true}, // 46
                {true, true, false}, // 47
                {true, true, false} // 48
        };
        // Per-pin capability map for IOIO0004 boards (two fewer pins; pin 0
        // unavailable, pins 1-2 usable instead of 47-48).
        private static final boolean[][] MAP_IOIO0004 = {
                // p. out p. in analog
                {false, false, false}, // 0
                {true, true, false}, // 1
                {true, true, false}, // 2
                {true, true, false}, // 3
                {true, true, false}, // 4
                {true, true, false}, // 5
                {true, true, false}, // 6
                {true, true, false}, // 7
                {false, false, false}, // 8
                {false, true, false}, // 9
                {true, true, false}, // 10
                {true, true, false}, // 11
                {true, true, false}, // 12
                {true, true, false}, // 13
                {true, true, false}, // 14
                {false, false, false}, // 15
                {false, false, false}, // 16
                {false, false, false}, // 17
                {false, false, false}, // 18
                {false, false, false}, // 19
                {false, false, false}, // 20
                {false, false, false}, // 21
                {false, false, false}, // 22
                {false, false, false}, // 23
                {false, false, false}, // 24
                {false, false, false}, // 25
                {false, false, false}, // 26
                {true, true, false}, // 27
                {true, true, false}, // 28
                {true, true, false}, // 29
                {true, true, false}, // 30
                {true, true, true}, // 31
                {true, true, true}, // 32
                {false, false, true}, // 33
                {true, true, true}, // 34
                {true, true, true}, // 35
                {true, true, true}, // 36
                {true, true, true}, // 37
                {true, true, true}, // 38
                {true, true, true}, // 39
                {true, true, true}, // 40
                {false, false, true}, // 41
                {false, false, true}, // 42
                {false, false, true}, // 43
                {false, false, true}, // 44
                {true, true, true}, // 45
                {true, true, true} // 46
        };
        // Ctor args: map, #PWM, #UART, #SPI, incap-double modules,
        // incap-single modules, TWI pin pairs, ICSP pins.
        static final Hardware IOIO0002 = new Hardware(MAP_IOIO0002_IOIO0003,
                9, 4, 3, new int[]{0, 2, 4}, new int[]{6, 7, 8},
                new int[][]{{4, 5}, {47, 48}, {26, 25}},
                new int[]{36, 37, 38});
        // IOIO0003 is electrically identical to IOIO0002.
        static final Hardware IOIO0003 = IOIO0002;
        static final Hardware IOIO0004 = new Hardware(MAP_IOIO0004,
                9, 4, 3, new int[]{0, 2, 4}, new int[]{6, 7, 8},
                new int[][]{{4, 5}, {1, 2}, {26, 25}},
                new int[]{36, 37, 38});

        // Column indices into the per-pin capability map.
        private enum Function {
            PERIPHERAL_OUT,
            PERIPHERAL_IN,
            ANALOG_IN
        }

        private final boolean[][] map_;
        private final int numPwmModules_;
        private final int numUartModules_;
        private final int numSpiModules_;
        private final int[] incapSingleModules_;
        private final int[] incapDoubleModules_;
        private final int[][] twiPins_;
        private final int[] icspPins_;

        private Hardware(boolean[][] map, int numPwmModules,
                         int numUartModules, int numSpiModules,
                         int[] incapDoubleModules, int[] incapSingleModules,
                         int[][] twiPins, int[] icspPins) {
            if (map == null)
                throw new IllegalArgumentException("WTF");
            map_ = map;
            numPwmModules_ = numPwmModules;
            numUartModules_ = numUartModules;
            numSpiModules_ = numSpiModules;
            incapSingleModules_ = incapSingleModules;
            incapDoubleModules_ = incapDoubleModules;
            twiPins_ = twiPins;
            icspPins_ = icspPins;
        }

        // Total number of pins on this revision (rows in the capability map).
        int numPins() {
            return map_.length;
        }

        // Number of pins whose analog-in column is set.
        int numAnalogPins() {
            int result = 0;
            for (boolean[] b : map_) {
                if (b[Function.ANALOG_IN.ordinal()]) {
                    ++result;
                }
            }
            return result;
        }

        int numPwmModules() {
            return numPwmModules_;
        }

        int numUartModules() {
            return numUartModules_;
        }

        int numSpiModules() {
            return numSpiModules_;
        }

        // One TWI module per TWI pin pair.
        int numTwiModules() {
            return twiPins().length;
        }

        int[] incapSingleModules() {
            return incapSingleModules_;
        }

        int[] incapDoubleModules() {
            return incapDoubleModules_;
        }

        int[][] twiPins() {
            return twiPins_;
        }

        int[] icspPins() {
            return icspPins_;
        }

        // The check* methods validate a pin number against the capability map
        // and throw IllegalArgumentException on an unsupported request.
        void checkSupportsAnalogInput(int pin) {
            checkValidPin(pin);
            if (!map_[pin][Function.ANALOG_IN.ordinal()]) {
                throw new IllegalArgumentException("Pin " + pin
                        + " does not support analog input");
            }
        }

        void checkSupportsPeripheralInput(int pin) {
            checkValidPin(pin);
            if (!map_[pin][Function.PERIPHERAL_IN.ordinal()]) {
                throw new IllegalArgumentException("Pin " + pin
                        + " does not support peripheral input");
            }
        }

        void checkSupportsPeripheralOutput(int pin) {
            checkValidPin(pin);
            if (!map_[pin][Function.PERIPHERAL_OUT.ordinal()]) {
                throw new IllegalArgumentException("Pin " + pin
                        + " does not support peripheral output");
            }
        }

        void checkValidPin(int pin) {
            if (pin < 0 || pin >= map_.length) {
                throw new IllegalArgumentException("Illegal pin: " + pin);
            }
        }

        void checkSupportsCapSense(int pin) {
            checkValidPin(pin);
            // Currently, all analog pins are also cap-sense.
            if (!map_[pin][Function.ANALOG_IN.ordinal()]) {
                throw new IllegalArgumentException("Pin " + pin
                        + " does not support cap-sense");
            }
        }
    }
}
| 5,391 |
364 | <filename>hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java
package ca.uhn.fhir.validation;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.util.ClasspathUtil;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.w3c.dom.ls.LSInput;
import org.w3c.dom.ls.LSResourceResolver;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXParseException;
import javax.xml.XMLConstants;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringReader;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Validator module that checks a FHIR resource against the official FHIR XML
 * Schema ("fhir-single.xsd"). Non-XML resources are re-encoded to XML first.
 * Compiled schemas are cached per key; classpath lookups resolve the XSD files
 * bundled with the FHIR version in use.
 */
public class SchemaBaseValidator implements IValidatorModule {
	public static final String RESOURCES_JAR_NOTE = "Note that as of HAPI FHIR 1.2, DSTU2 validation files are kept in a separate JAR (hapi-fhir-validation-resources-XXX.jar) which must be added to your classpath. See the HAPI FHIR download page for more information.";

	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SchemaBaseValidator.class);
	// Whitelist of schema files the resource resolver is allowed to serve.
	private static final Set<String> SCHEMA_NAMES;
	// Whether the runtime's JAXP implementation honours the JAXP 1.5
	// secure-processing properties (ACCESS_EXTERNAL_*).
	private static boolean ourJaxp15Supported;

	static {
		HashSet<String> sn = new HashSet<>();
		sn.add("xml.xsd");
		sn.add("xhtml1-strict.xsd");
		sn.add("fhir-single.xsd");
		sn.add("fhir-xhtml.xsd");
		sn.add("tombstone.xsd");
		sn.add("opensearch.xsd");
		sn.add("opensearchscore.xsd");
		sn.add("xmldsig-core-schema.xsd");
		SCHEMA_NAMES = Collections.unmodifiableSet(sn);
	}

	// Cache of compiled Schema objects; all access is synchronized on the map.
	private final Map<String, Schema> myKeyToSchema = new HashMap<>();
	private FhirContext myCtx;

	public SchemaBaseValidator(FhirContext theContext) {
		myCtx = theContext;
	}

	/**
	 * Validates the resource in theContext against the FHIR schema, adding one
	 * validation message per schema violation (FATAL on unparseable input).
	 */
	private void doValidate(IValidationContext<?> theContext) {
		Schema schema = loadSchema();

		try {
			Validator validator = schema.newValidator();
			MyErrorHandler handler = new MyErrorHandler(theContext);
			validator.setErrorHandler(handler);
			String encodedResource;
			if (theContext.getResourceAsStringEncoding() == EncodingEnum.XML) {
				encodedResource = theContext.getResourceAsString();
			} else {
				// Non-XML input (e.g. JSON): re-encode as XML so the XSD applies.
				encodedResource = theContext.getFhirContext().newXmlParser().encodeResourceToString((IBaseResource) theContext.getResource());
			}

			try {
				/*
				 * See https://github.com/hapifhir/hapi-fhir/issues/339
				 * https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
				 */
				validator.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
				validator.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
			} catch (SAXNotRecognizedException ex) {
				ourLog.debug("Jaxp 1.5 Support not found.", ex);
			}

			validator.validate(new StreamSource(new StringReader(encodedResource)));
		} catch (SAXParseException e) {
			// Document was not even well-formed XML: report as a single FATAL message.
			SingleValidationMessage message = new SingleValidationMessage();
			message.setLocationLine(e.getLineNumber());
			message.setLocationCol(e.getColumnNumber());
			message.setMessage(e.getLocalizedMessage());
			message.setSeverity(ResultSeverityEnum.FATAL);
			theContext.addValidationMessage(message);
		} catch (SAXException | IOException e) {
			// Catch all
			throw new ConfigurationException("Could not load/parse schema file", e);
		}
	}

	/**
	 * Returns the compiled "fhir-single.xsd" schema, compiling and caching it
	 * on first use. Thread-safe via synchronization on the cache map.
	 */
	private Schema loadSchema() {
		String key = "fhir-single.xsd";

		synchronized (myKeyToSchema) {
			Schema schema = myKeyToSchema.get(key);
			if (schema != null) {
				return schema;
			}

			Source baseSource = loadXml("fhir-single.xsd");

			SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
			// Referenced schemas (xhtml, xmldsig, ...) are served from the classpath.
			schemaFactory.setResourceResolver(new MyResourceResolver());

			try {
				try {
					/*
					 * See https://github.com/hapifhir/hapi-fhir/issues/339
					 * https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
					 */
					schemaFactory.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
					ourJaxp15Supported = true;
				} catch (SAXNotRecognizedException e) {
					ourJaxp15Supported = false;
					ourLog.warn("Jaxp 1.5 Support not found.", e);
				}
				schema = schemaFactory.newSchema(new Source[]{baseSource});
			} catch (SAXException e) {
				throw new ConfigurationException("Could not load/parse schema file: " + "fhir-single.xsd", e);
			}
			myKeyToSchema.put(key, schema);
			return schema;
		}
	}

	// Loads a schema file from the FHIR-version-specific classpath location.
	Source loadXml(String theSchemaName) {
		String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theSchemaName;
		ourLog.debug("Going to load resource: {}", pathToBase);

		String contents = ClasspathUtil.loadResource(pathToBase, ClasspathUtil.withBom());
		return new StreamSource(new StringReader(contents), null);
	}

	@Override
	public void validateResource(IValidationContext<IBaseResource> theContext) {
		doValidate(theContext);
	}

	/**
	 * Resolves schema includes/imports against the bundled classpath copies,
	 * refusing anything outside the SCHEMA_NAMES whitelist.
	 */
	private final class MyResourceResolver implements LSResourceResolver {
		private MyResourceResolver() {
		}

		@Override
		public LSInput resolveResource(String theType, String theNamespaceURI, String thePublicId, String theSystemId, String theBaseURI) {
			if (theSystemId != null && SCHEMA_NAMES.contains(theSystemId)) {
				LSInputImpl input = new LSInputImpl();
				input.setPublicId(thePublicId);
				input.setSystemId(theSystemId);
				input.setBaseURI(theBaseURI);
				String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theSystemId;

				ourLog.debug("Loading referenced schema file: " + pathToBase);

				byte[] bytes = ClasspathUtil.loadResourceAsByteArray(pathToBase);
				input.setByteStream(new ByteArrayInputStream(bytes));
				return input;
			}

			// Anything not whitelisted is a misconfiguration, not a remote fetch.
			throw new ConfigurationException("Unknown schema: " + theSystemId);
		}
	}

	/**
	 * Converts SAX warnings/errors/fatals into SingleValidationMessages on the
	 * validation context, preserving line/column location.
	 */
	private static class MyErrorHandler implements org.xml.sax.ErrorHandler {

		private IValidationContext<?> myContext;

		MyErrorHandler(IValidationContext<?> theContext) {
			myContext = theContext;
		}

		private void addIssue(SAXParseException theException, ResultSeverityEnum theSeverity) {
			SingleValidationMessage message = new SingleValidationMessage();
			message.setLocationLine(theException.getLineNumber());
			message.setLocationCol(theException.getColumnNumber());
			message.setMessage(theException.getLocalizedMessage());
			message.setSeverity(theSeverity);
			myContext.addValidationMessage(message);
		}

		@Override
		public void error(SAXParseException theException) {
			addIssue(theException, ResultSeverityEnum.ERROR);
		}

		@Override
		public void fatalError(SAXParseException theException) {
			addIssue(theException, ResultSeverityEnum.FATAL);
		}

		@Override
		public void warning(SAXParseException theException) {
			addIssue(theException, ResultSeverityEnum.WARNING);
		}

	}

	public static boolean isJaxp15Supported() {
		return ourJaxp15Supported;
	}

}
| 2,765 |
1,293 | <reponame>manifold-systems/manifold<filename>manifold-core-parent/manifold/src/main/java/manifold/internal/javac/ManResolve.java
/*
* Copyright (c) 2018 - Manifold Systems LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package manifold.internal.javac;
import com.sun.tools.javac.api.JavacTrees;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.comp.Annotate;
import com.sun.tools.javac.comp.Attr;
import com.sun.tools.javac.comp.AttrContext;
import com.sun.tools.javac.comp.Check;
import com.sun.tools.javac.comp.DeferredAttr;
import com.sun.tools.javac.comp.Env;
import com.sun.tools.javac.comp.Flow;
import com.sun.tools.javac.comp.Infer;
import com.sun.tools.javac.comp.LambdaToMethod;
import com.sun.tools.javac.comp.Lower;
import com.sun.tools.javac.comp.Resolve;
import com.sun.tools.javac.comp.TransTypes;
import com.sun.tools.javac.jvm.Gen;
import com.sun.tools.javac.model.JavacElements;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.util.Context;
import javax.tools.JavaFileObject;
import manifold.util.JreUtil;
import manifold.util.ReflectUtil;
import manifold.util.concurrent.LocklessLazyVar;
/**
 * Manifold's replacement for javac's {@link Resolve}.  It installs itself in the
 * compiler {@link Context} and overrides the {@code isAccessible()} checks so that:
 * <ul>
 *   <li>{@code @Jailbreak}-annotated types/expressions may access otherwise
 *       inaccessible classes and members, and</li>
 *   <li>generated stub sources (augmented classes) may access modules as if
 *       declared in both the extended class' module and the extension class'
 *       module (Java 9+).</li>
 * </ul>
 */
public class ManResolve extends Resolve
{
  // Name of the field in which most javac components cache their Resolve reference
  private static final String RESOLVE_FIELD = "rs";

  // Loaded reflectively; resolves to null when manifold-ext is not on the classpath
  private static final LocklessLazyVar<Class<?>> EXTENSION_TRANSFORMER = LocklessLazyVar.make(
    () -> ReflectUtil.type( "manifold.ext.ExtensionTransformer" )
  );

  private final Attr _attr;

  /**
   * Returns the {@code ManResolve} registered in {@code ctx}, replacing any
   * previously registered plain {@code Resolve} instance.
   */
  public static Resolve instance( Context ctx )
  {
    Resolve resolve = ctx.get( resolveKey );
    if( !(resolve instanceof ManResolve) )
    {
      // clear the cached instance so the superclass constructor can re-register under resolveKey
      ctx.put( resolveKey, (Resolve)null );
      resolve = new ManResolve( ctx );
    }
    return resolve;
  }

  @SuppressWarnings("ConstantConditions")
  private ManResolve( Context context )
  {
    super( context );
    _attr = Attr.instance( context );
    // share Attr's Log instance so diagnostics are reported consistently
    ReflectUtil.field( this, "log" ).set( ReflectUtil.field( _attr, "log" ).get() );

    if( JreUtil.isJava8() )
    {
      reassignEarlyHolders8( context );
    }
    else
    {
      reassignEarlyHolders( context );
    }
  }

  /**
   * Java 8: javac components created before this Resolve instance cache the old
   * Resolve in their {@code rs}/{@code resolve} fields; overwrite them via
   * reflection so every component uses this instance.
   */
  private void reassignEarlyHolders8( Context context )
  {
    ReflectUtil.field( Attr.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( DeferredAttr.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Check.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Infer.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Flow.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Lower.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Gen.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Annotate.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( JavacTrees.instance( context ), "resolve" ).set( this );
    ReflectUtil.field( TransTypes.instance( context ), "resolve" ).set( this );
  }

  /**
   * Java 9+: same as {@link #reassignEarlyHolders8}, but covers the components
   * added/changed in later JDKs (LambdaToMethod, StringConcat, JavacElements).
   */
  private void reassignEarlyHolders( Context context )
  {
    ReflectUtil.field( _attr, RESOLVE_FIELD ).set( this );
    ReflectUtil.field( DeferredAttr.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Check.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Infer.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Flow.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( LambdaToMethod.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Lower.instance( context ), RESOLVE_FIELD ).set( this );
    ReflectUtil.field( Gen.instance( context ), RESOLVE_FIELD ).set( this );
    // StringConcat is not exported; access it fully reflectively
    ReflectUtil.field(
      ReflectUtil.method(
        ReflectUtil.type( "com.sun.tools.javac.jvm.StringConcat" ), "instance", Context.class )
        .invokeStatic( context ), RESOLVE_FIELD )
      .set( this );
    ReflectUtil.field( JavacTrees.instance( context ), "resolve" ).set( this );
    ReflectUtil.field( Annotate.instance( context ), "resolve" ).set( this );
    ReflectUtil.field( TransTypes.instance( context ), "resolve" ).set( this );
    ReflectUtil.field( JavacElements.instance( context ), "resolve" ).set( this );

    if( JreUtil.isJava11orLater() )
    {
      // Allow @var to work with properties.
      // Note, this is not as scary as it looks. Setting allowLocalVariableTypeInference to false only turns off
      // unnecessary name checking so we can use @var annotation type, which should be allowed because `@` effectively
      // escapes the name, so there really isn't any conflict with Java's 'var' construct. Just sayin'
      ReflectUtil.field( this, "allowLocalVariableTypeInference" ).set( false );
    }
  }

  /**
   * Allow augmented classes to access modules as if defined in both the extended class' module and
   * the extension class' module.
   */
  @Override
  public boolean isAccessible( Env<AttrContext> env, Symbol.TypeSymbol typeSymbol, boolean checkInner )
  {
    boolean accessible = super.isAccessible( env, typeSymbol, checkInner );
    if( accessible )
    {
      return true;
    }

    if( isJailbreakOnType() )
    {
      // handle the case where the class itself is inaccessible:
      //
      // // the *type* must be @Jailbreak as well as the constructor
      // com.foo.@Jailbreak PrivateClass privateThing = new com.foo.@Jailbreak PrivateClass();
      // privateThing.privateMethod();
      // ...
      return true;
    }

    if( JreUtil.isJava8() )
    {
      // no module system on Java 8; nothing more to relax
      return false;
    }

    // Java 9 +
    JavaFileObject sourceFile = env.toplevel.getSourceFile();
    if( sourceFile instanceof GeneratedJavaStubFileObject )
    {
      // Allow augmented classes to access modules as if defined in both the extended class' module and
      // the extension class' module.
      accessible = true;
    }

    return accessible;
  }

  /**
   * True when the annotated type currently being attributed (tracked by ManAttr)
   * carries a @Jailbreak annotation.
   */
  private boolean isJailbreakOnType()
  {
    JCTree.JCAnnotatedType annotatedType = ((ManAttr)_attr).peekAnnotatedType();
    if( annotatedType != null )
    {
      return annotatedType.toString().contains( "@Jailbreak" );
    }
    return false;
  }

  /**
   * Allow @Jailbreak to expose otherwise inaccessible features
   */
  @Override
  public boolean isAccessible( Env<AttrContext> env, Type site, Symbol sym, boolean checkInner )
  {
    boolean accessible = super.isAccessible( env, site, sym, checkInner );
    if( accessible )
    {
      return true;
    }

    if( isJailbreak( sym ) )
    {
      return true;
    }
    return isJailbreak( env.tree );
  }

  // Delegates to ExtensionTransformer.isJailbreakSymbol() when manifold-ext is present.
  private boolean isJailbreak( Symbol sym )
  {
    Class<?> extensionTransformer = EXTENSION_TRANSFORMER.get();
    if( extensionTransformer == null )
    {
      return false;
    }

    return (boolean)ReflectUtil.method( extensionTransformer, "isJailbreakSymbol", Symbol.class )
      .invokeStatic( sym );
  }

  // Delegates to ExtensionTransformer.isJailbreakReceiver() for the tree and,
  // failing that, for the field-access expression currently being attributed.
  private boolean isJailbreak( JCTree tree )
  {
    // only these node kinds (or a pending select tracked by ManAttr) can have a @Jailbreak receiver
    if( !(tree instanceof JCTree.JCMethodInvocation) &&
        !(tree instanceof JCTree.JCFieldAccess) &&
        !(tree instanceof JCTree.JCAssign) &&
        !(tree instanceof JCTree.JCNewClass) &&
        !(tree instanceof JCTree.JCVariableDecl) &&
        ((ManAttr)_attr).peekSelect() == null )
    {
      return false;
    }

    Class<?> extensionTransformer = EXTENSION_TRANSFORMER.get();
    if( extensionTransformer == null )
    {
      return false;
    }

    boolean isJailbreak = (boolean)ReflectUtil.method( extensionTransformer, "isJailbreakReceiver", JCTree.class )
      .invokeStatic( tree );
    if( !isJailbreak )
    {
      JCTree.JCFieldAccess select = ((ManAttr)_attr).peekSelect();
      if( select != null && select != tree )
      {
        isJailbreak = (boolean)ReflectUtil.method( extensionTransformer, "isJailbreakReceiver", JCTree.JCFieldAccess.class )
          .invokeStatic( select );
      }
    }
    return isJailbreak;
  }
}
| 3,051 |
304 | <filename>iPhoneOS11.2.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARConfiguration.h<gh_stars>100-1000
//
// ARConfiguration.h
// ARKit
//
// Copyright © 2016-2017 Apple Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
 Enum constants for indicating the world alignment.
 */
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, watchos, tvos)
typedef NS_ENUM(NSInteger, ARWorldAlignment) {
    /** Aligns the world with gravity that is defined by vector (0, -1, 0). */
    ARWorldAlignmentGravity,

    /** Aligns the world with gravity that is defined by the vector (0, -1, 0)
     and heading (w.r.t. True North) that is given by the vector (0, 0, -1). */
    ARWorldAlignmentGravityAndHeading,

    /** Aligns the world with the camera’s orientation. */
    ARWorldAlignmentCamera
} NS_SWIFT_NAME(ARConfiguration.WorldAlignment); // imported into Swift as ARConfiguration.WorldAlignment
/**
 Option set indicating the type of planes to detect.
 */
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, watchos, tvos)
typedef NS_OPTIONS(NSUInteger, ARPlaneDetection) {
    /** No plane detection is run. */
    ARPlaneDetectionNone = 0,

    /** Plane detection determines horizontal planes in the scene. */
    ARPlaneDetectionHorizontal = (1 << 0), // bit mask: options may be OR-ed together
} NS_SWIFT_NAME(ARWorldTrackingConfiguration.PlaneDetection);
/**
 An object to describe and configure the Augmented Reality techniques to be used in an ARSession.

 Abstract base class: init/new are unavailable here — instantiate one of the
 concrete subclasses (e.g. ARWorldTrackingConfiguration) instead.
 */
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, watchos, tvos)
@interface ARConfiguration : NSObject <NSCopying>

/**
 Determines whether this device supports the ARConfiguration.
 */
@property(class, nonatomic, readonly) BOOL isSupported;

/**
 Determines how the coordinate system should be aligned with the world.
 @discussion The default is ARWorldAlignmentGravity.
 */
@property (nonatomic, readwrite) ARWorldAlignment worldAlignment;

/**
 Enable or disable light estimation.
 @discussion Enabled by default.
 */
@property (nonatomic, readwrite, getter=isLightEstimationEnabled) BOOL lightEstimationEnabled;

/**
 Determines whether to capture and provide audio data.
 @discussion Disabled by default.
 */
@property (nonatomic, readwrite) BOOL providesAudioData;

/** Unavailable */
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;

@end
/**
 A configuration for running world tracking.

 @discussion World tracking provides 6 degrees of freedom tracking of the device.
 By finding feature points in the scene, world tracking enables performing hit-tests against the frame.
 Tracking can no longer be resumed once the session is paused.
 */
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, watchos, tvos)
@interface ARWorldTrackingConfiguration : ARConfiguration

/**
 Type of planes to detect in the scene.
 @discussion If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
 ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
 */
@property (nonatomic, readwrite) ARPlaneDetection planeDetection;

// Unlike the abstract base class, this subclass can be instantiated directly.
- (instancetype)init;
+ (instancetype)new NS_SWIFT_UNAVAILABLE("Use init() instead");

@end
/**
 A configuration for running orientation tracking.

 @discussion Orientation tracking provides 3 degrees of freedom tracking of the device.
 */
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, watchos, tvos)
@interface AROrientationTrackingConfiguration : ARConfiguration

// Concrete subclass: direct instantiation is allowed.
- (instancetype)init;
+ (instancetype)new NS_SWIFT_UNAVAILABLE("Use init() instead");

@end
/**
 A configuration for running face tracking.

 @discussion Face tracking uses the front facing camera to track the face in 3D providing details on the topology and expression of the face.
 A detected face will be added to the session as an ARFaceAnchor object which contains information about head pose, mesh, eye pose, and blend shape
 coefficients. If light estimation is enabled the detected face will be treated as a light probe and used to estimate the direction of incoming light.
 */
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, watchos, tvos)
@interface ARFaceTrackingConfiguration : ARConfiguration

// Concrete subclass: direct instantiation is allowed.
- (instancetype)init;
+ (instancetype)new NS_SWIFT_UNAVAILABLE("Use init() instead");

@end
NS_ASSUME_NONNULL_END
| 1,343 |
6,660 | <reponame>jerrykcode/kkFileView
package cn.keking.utils;
import io.mola.galimatias.GalimatiasParseException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
/**
 * URL helper methods: normalization, query-parameter extraction and
 * file-name/suffix handling.
 *
 * @author : kl
 * created : 2020-12-27 01:30 AM
 **/
public class WebUtils {

    /**
     * Parses a URL string leniently (via galimatias) and converts it to a standard URL.
     *
     * @param urlStr the raw url string
     * @return the normalized {@link URL}
     * @throws GalimatiasParseException if the string cannot be parsed as a URL
     * @throws MalformedURLException    if the parsed URL cannot be converted to java.net.URL
     */
    public static URL normalizedURL(String urlStr) throws GalimatiasParseException, MalformedURLException {
        return io.mola.galimatias.URL.parse(urlStr).toJavaURL();
    }

    /**
     * Extracts a query parameter value from a URL.
     *
     * @param url  the url
     * @param name the parameter name
     * @return the parameter value; the empty string when the url has no query string,
     *         or {@code null} when a query exists but the parameter is absent
     */
    public static String getUrlParameterReg(String url, String name) {
        Map<String, String> mapRequest = new HashMap<>();
        String strUrlParam = truncateUrlPage(url);
        if (strUrlParam == null) {
            return "";
        }
        // each key=value pair is separated by '&'
        String[] arrSplit = strUrlParam.split("[&]");
        for (String strSplit : arrSplit) {
            String[] arrSplitEqual = strSplit.split("[=]");
            if (arrSplitEqual.length > 1) {
                // parameter with a value
                mapRequest.put(arrSplitEqual[0], arrSplitEqual[1]);
            } else if (!arrSplitEqual[0].isEmpty()) {
                // parameter present without a value
                mapRequest.put(arrSplitEqual[0], "");
            }
        }
        return mapRequest.get(name);
    }

    /**
     * Strips the path portion of a URL, keeping only the query string.
     *
     * @param strURL url
     * @return the query string, or {@code null} when the url has none
     */
    private static String truncateUrlPage(String strURL) {
        String strAllParam = null;
        strURL = strURL.trim();
        String[] arrSplit = strURL.split("[?]");
        // require a non-trivial url with something after the first '?'
        if (strURL.length() > 1 && arrSplit.length > 1 && arrSplit[1] != null) {
            strAllParam = arrSplit[1];
        }
        return strAllParam;
    }

    /**
     * Extracts the file name from a URL, e.g.
     * {@code http://host/20171113164107_report.xls?Expires=...&Signature=...} → {@code 20171113164107_report.xls}
     *
     * @param url the url
     * @return the file name portion of the url
     */
    public static String getFileNameFromURL(String url) {
        // Query parameters may themselves contain '/', so cut the url at the first '?'
        // before taking the substring after the last '/'.
        String noQueryUrl = url.substring(0, url.contains("?") ? url.indexOf("?") : url.length());
        return noQueryUrl.substring(noQueryUrl.lastIndexOf("/") + 1);
    }

    /**
     * Extracts the file suffix (extension) from a URL.
     *
     * @param url url
     * @return the file suffix
     */
    public static String suffixFromUrl(String url) {
        String nonPramStr = url.substring(0, url.contains("?") ? url.indexOf("?") : url.length());
        String fileName = nonPramStr.substring(nonPramStr.lastIndexOf("/") + 1);
        return KkFileUtils.suffixFromFileName(fileName);
    }

    /**
     * UTF-8 percent-encodes the file name portion of a URL, leaving the path,
     * the extension and the query string untouched.
     *
     * @param url the url
     * @return the url with its file name encoded, or {@code null} if UTF-8 is
     *         unsupported (cannot happen on a conforming JVM)
     */
    public static String encodeUrlFileName(String url) {
        String noQueryUrl = url.substring(0, url.contains("?") ? url.indexOf("?") : url.length());
        int fileNameStartIndex = noQueryUrl.lastIndexOf('/') + 1;
        int fileNameEndIndex = noQueryUrl.lastIndexOf('.');
        // Fix: when the file name has no extension (no '.' at all, or the last '.'
        // belongs to a directory segment before the final '/'), lastIndexOf('.')
        // yields an index before the file name and substring() used to throw
        // StringIndexOutOfBoundsException. Encode up to the end of the path instead.
        if (fileNameEndIndex < fileNameStartIndex) {
            fileNameEndIndex = noQueryUrl.length();
        }
        String encodedFileName;
        try {
            encodedFileName = URLEncoder.encode(noQueryUrl.substring(fileNameStartIndex, fileNameEndIndex), "UTF-8");
        } catch (UnsupportedEncodingException e) {
            return null;
        }
        return url.substring(0, fileNameStartIndex) + encodedFileName + url.substring(fileNameEndIndex);
    }
}
| 2,034 |
941 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
"""Django settings for unit test project."""
# Fix: the module docstring must precede the __future__ import; previously it
# followed the import and was therefore a no-op expression, not __doc__.
from __future__ import unicode_literals

import os

DEBUG = True

# Project root: two directories above this settings module.
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'test.sqlite',
    },
}

SITE_ID = 1

ROOT_URLCONF = 'server.urls'

# Test-only key; a real deployment must never hard-code SECRET_KEY.
SECRET_KEY = 'secret'

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.admin',
    'django.contrib.staticfiles',
    'easy_thumbnails',
    'sekizai',
    'djng',
    'server',
]

USE_L10N = True

# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'

# Absolute path to the directory that holds static files.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.environ.get('DJANGO_STATIC_ROOT', '')

# URL that handles the static files served from STATIC_ROOT.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'client', 'src'),
)

# FORM_RENDERER = 'djng.forms.renderers.DjangoAngularTemplates'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.template.context_processors.request',
                'django.contrib.messages.context_processors.messages',
                'server.context_processors.global_context',
            ],
        },
    },
]

TIME_ZONE = 'Europe/Berlin'

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'simple': {
            'format': '[%(asctime)s %(module)s] %(levelname)s: %(message)s'
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'level': 'INFO',
            'propagate': True,
        },
    },
}

# if package django-websocket-redis is installed, some more tests can be be added
try:
    import ws4redis
    INSTALLED_APPS.append('ws4redis')
    for template in TEMPLATES:
        template["OPTIONS"]["context_processors"].append('ws4redis.context_processors.default')
    # This setting is required to override the Django's main loop, when running in
    # development mode, such as ./manage runserver
    WSGI_APPLICATION = 'ws4redis.django_runserver.application'
    # URL that distinguishes websocket connections from normal requests
    WEBSOCKET_URL = '/ws/'
    # Set the number of seconds each message shall persist
    WS4REDIS_EXPIRE = 3600
    WS4REDIS_HEARTBEAT = '--heartbeat--'
    WS4REDIS_PREFIX = 'djangular'
except ImportError:
    # ws4redis is optional; silently skip its settings when absent
    pass
| 1,581 |
2,757 | <reponame>CEOALT1/RefindPlusUDK
/** @file
Functions declarations to make Xen hypercalls.
Copyright (C) 2014, Citrix Ltd.
This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution. The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
**/
#ifndef __XEN_HYPERCALL_LIB_H__
#define __XEN_HYPERCALL_LIB_H__

/**
  Check if the Xen Hypercall library is able to make calls to the Xen
  hypervisor.

  Client code should call further functions in this library only if, and after,
  this function returns TRUE.

  @retval TRUE   Hypercalls are available.
  @retval FALSE  Hypercalls are not available.
**/
BOOLEAN
EFIAPI
XenHypercallIsAvailable (
  VOID
  );

/**
  This function will put the two arguments in the right place (registers) and
  invoke the hypercall identified by HypercallID.

  @param HypercallID    The symbolic ID of the hypercall to be invoked
  @param Arg1           First argument.
  @param Arg2           Second argument.

  @return Return 0 on success; otherwise an errno-style error code.
**/
INTN
EFIAPI
XenHypercall2 (
  IN     UINTN HypercallID,
  IN OUT INTN Arg1,
  IN OUT INTN Arg2
  );

/**
  Return the value of the HVM parameter Index.

  @param Index  The parameter to get, e.g. HVM_PARAM_STORE_EVTCHN.

  @return The value of the asked parameter or 0 in case of error.
**/
UINT64
EFIAPI
XenHypercallHvmGetParam (
  UINT32 Index
  );

/**
  Hypercall to do different operation on the memory.

  @param Operation  The operation number, e.g. XENMEM_add_to_physmap.
  @param Arguments  The arguments associated to the operation.

  @return Return the return value from the hypercall, 0 in case of success
          otherwise, an error code.
**/
INTN
EFIAPI
XenHypercallMemoryOp (
  IN     UINTN Operation,
  IN OUT VOID *Arguments
  );

/**
  Do an operation on the event channels.

  @param Operation  The operation number, e.g. EVTCHNOP_send.
  @param Arguments  The argument associated to the operation.

  @return Return the return value from the hypercall, 0 in case of success
          otherwise, an error code.
**/
INTN
EFIAPI
XenHypercallEventChannelOp (
  IN     INTN Operation,
  IN OUT VOID *Arguments
  );

#endif // __XEN_HYPERCALL_LIB_H__
| 926 |
2,816 | <reponame>leoYY/duckdb
//===----------------------------------------------------------------------===//
// DuckDB
//
// duckdb/function/function.hpp
//
//
//===----------------------------------------------------------------------===//
#pragma once
#include "duckdb/common/types/data_chunk.hpp"
#include "duckdb/common/unordered_map.hpp"
#include "duckdb/common/unordered_set.hpp"
#include "duckdb/parser/column_definition.hpp"
namespace duckdb {

// Forward declarations: keep this widely-included header light by avoiding the
// full definitions of catalog, execution and function types.
class CatalogEntry;
class Catalog;
class ClientContext;
class Expression;
class ExpressionExecutor;
class Transaction;

class AggregateFunction;
class AggregateFunctionSet;
class CopyFunction;
class PragmaFunction;
class ScalarFunctionSet;
class ScalarFunction;
class TableFunctionSet;
class TableFunction;

struct PragmaInfo;
//! Base class for bind-time data attached to a function; concrete functions
//! subclass this to carry state between binding and execution.
struct FunctionData {
	DUCKDB_API virtual ~FunctionData();

	DUCKDB_API virtual unique_ptr<FunctionData> Copy();
	DUCKDB_API virtual bool Equals(FunctionData &other);
	//! Null-safe equality: also true when both pointers are null.
	DUCKDB_API static bool Equals(FunctionData *left, FunctionData *right);
};

//! Bind data specialized for table functions.
struct TableFunctionData : public FunctionData {
	// used to pass on projections to table functions that support them. NB, can contain COLUMN_IDENTIFIER_ROW_ID
	vector<idx_t> column_ids;
};

//! Positional values and named parameters supplied to a function call.
struct FunctionParameters {
	vector<Value> values;
	unordered_map<string, Value> named_parameters;
};
//! Function is the base class used for any type of function (scalar, aggregate or simple function)
class Function {
public:
	DUCKDB_API explicit Function(string name);
	DUCKDB_API virtual ~Function();

	//! The name of the function
	string name;

public:
	//! Returns the formatted string name(arg1, arg2, ...)
	DUCKDB_API static string CallToString(const string &name, const vector<LogicalType> &arguments);
	//! Returns the formatted string name(arg1, arg2..) -> return_type
	DUCKDB_API static string CallToString(const string &name, const vector<LogicalType> &arguments,
	                                      const LogicalType &return_type);
	//! Returns the formatted string name(arg1, arg2.., np1=a, np2=b, ...)
	DUCKDB_API static string CallToString(const string &name, const vector<LogicalType> &arguments,
	                                      const unordered_map<string, LogicalType> &named_parameters);

	// Each BindFunction comes in two overloads: one matching against concrete
	// argument types, one matching against (possibly unresolved) expressions.

	//! Bind a scalar function from the set of functions and input arguments. Returns the index of the chosen function,
	//! returns DConstants::INVALID_INDEX and sets error if none could be found
	DUCKDB_API static idx_t BindFunction(const string &name, vector<ScalarFunction> &functions,
	                                     vector<LogicalType> &arguments, string &error);
	DUCKDB_API static idx_t BindFunction(const string &name, vector<ScalarFunction> &functions,
	                                     vector<unique_ptr<Expression>> &arguments, string &error);
	//! Bind an aggregate function from the set of functions and input arguments. Returns the index of the chosen
	//! function, returns DConstants::INVALID_INDEX and sets error if none could be found
	DUCKDB_API static idx_t BindFunction(const string &name, vector<AggregateFunction> &functions,
	                                     vector<LogicalType> &arguments, string &error);
	DUCKDB_API static idx_t BindFunction(const string &name, vector<AggregateFunction> &functions,
	                                     vector<unique_ptr<Expression>> &arguments, string &error);
	//! Bind a table function from the set of functions and input arguments. Returns the index of the chosen
	//! function, returns DConstants::INVALID_INDEX and sets error if none could be found
	DUCKDB_API static idx_t BindFunction(const string &name, vector<TableFunction> &functions,
	                                     vector<LogicalType> &arguments, string &error);
	DUCKDB_API static idx_t BindFunction(const string &name, vector<TableFunction> &functions,
	                                     vector<unique_ptr<Expression>> &arguments, string &error);
	//! Bind a pragma function from the set of functions and input arguments
	DUCKDB_API static idx_t BindFunction(const string &name, vector<PragmaFunction> &functions, PragmaInfo &info,
	                                     string &error);
};
//! A function with a fixed argument list, optionally accepting trailing varargs.
class SimpleFunction : public Function {
public:
	DUCKDB_API SimpleFunction(string name, vector<LogicalType> arguments,
	                          LogicalType varargs = LogicalType(LogicalTypeId::INVALID));
	DUCKDB_API ~SimpleFunction() override;

	//! The set of arguments of the function
	vector<LogicalType> arguments;
	//! The type of varargs to support, or LogicalTypeId::INVALID if the function does not accept variable length
	//! arguments
	LogicalType varargs;

public:
	DUCKDB_API virtual string ToString();

	DUCKDB_API bool HasVarArgs() const;
};
//! A SimpleFunction that additionally declares named parameters.
class SimpleNamedParameterFunction : public SimpleFunction {
public:
	DUCKDB_API SimpleNamedParameterFunction(string name, vector<LogicalType> arguments,
	                                        LogicalType varargs = LogicalType(LogicalTypeId::INVALID));
	DUCKDB_API ~SimpleNamedParameterFunction() override;

	//! The named parameters of the function
	unordered_map<string, LogicalType> named_parameters;

public:
	DUCKDB_API string ToString() override;
	DUCKDB_API bool HasNamedParameters();
	//! Splits the call's child expressions into positional values and named parameters.
	DUCKDB_API void EvaluateInputParameters(vector<LogicalType> &arguments, vector<Value> &parameters,
	                                        unordered_map<string, Value> &named_parameters,
	                                        vector<unique_ptr<ParsedExpression>> &children);
};
//! Common base for scalar(-like) functions: adds a return type and a
//! side-effect flag on top of SimpleFunction.
class BaseScalarFunction : public SimpleFunction {
public:
	DUCKDB_API BaseScalarFunction(string name, vector<LogicalType> arguments, LogicalType return_type,
	                              bool has_side_effects, LogicalType varargs = LogicalType(LogicalTypeId::INVALID));
	DUCKDB_API ~BaseScalarFunction() override;

	//! Return type of the function
	LogicalType return_type;
	//! Whether or not the function has side effects (e.g. sequence increments, random() functions, NOW()). Functions
	//! with side-effects cannot be constant-folded.
	bool has_side_effects;

public:
	DUCKDB_API hash_t Hash() const;

	//! Cast a set of expressions to the arguments of this function
	DUCKDB_API void CastToFunctionArguments(vector<unique_ptr<Expression>> &children);

	DUCKDB_API string ToString() override;
};
//! Registers every built-in function, aggregate, collation and pragma into a catalog.
class BuiltinFunctions {
public:
	BuiltinFunctions(ClientContext &transaction, Catalog &catalog);

	//! Initialize a catalog with all built-in functions
	void Initialize();

public:
	// Overloads for adding each kind of function object to the catalog.
	void AddFunction(AggregateFunctionSet set);
	void AddFunction(AggregateFunction function);
	void AddFunction(ScalarFunctionSet set);
	void AddFunction(PragmaFunction function);
	void AddFunction(const string &name, vector<PragmaFunction> functions);
	void AddFunction(ScalarFunction function);
	void AddFunction(const vector<string> &names, ScalarFunction function);
	void AddFunction(TableFunctionSet set);
	void AddFunction(TableFunction function);
	void AddFunction(CopyFunction function);

	void AddCollation(string name, ScalarFunction function, bool combinable = false,
	                  bool not_required_for_equality = false);

private:
	ClientContext &context;
	Catalog &catalog;

private:
	//! Helper: delegates registration of a function group to T::RegisterFunction(*this).
	template <class T>
	void Register() {
		T::RegisterFunction(*this);
	}

	// table-producing functions
	void RegisterSQLiteFunctions();
	void RegisterReadFunctions();
	void RegisterTableFunctions();
	void RegisterArrowFunctions();

	// aggregates
	void RegisterAlgebraicAggregates();
	void RegisterDistributiveAggregates();
	void RegisterNestedAggregates();
	void RegisterHolisticAggregates();
	void RegisterRegressiveAggregates();

	// scalar functions
	void RegisterDateFunctions();
	void RegisterEnumFunctions();
	void RegisterGenericFunctions();
	void RegisterMathFunctions();
	void RegisterOperators();
	void RegisterStringFunctions();
	void RegisterNestedFunctions();
	void RegisterSequenceFunctions();
	void RegisterTrigonometricsFunctions();

	// pragmas
	void RegisterPragmaFunctions();
};

} // namespace duckdb
| 2,748 |
1,778 | <filename>include/common/util/error_codes.h<gh_stars>1000+
// Copyright 2013 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Note: This is based on Google's util/task/codes.proto, but converted to
// simple enum to avoid pulling in protobufs as a dependency.
#pragma once
#include <string>
// NOTE: unlike the canonical google codes, these values are negative macros so
// they can double as error returns from functions whose success values are >= 0.

// Not an error; returned on success
#define STATUS_OK 0

// The operation was cancelled (typically by the caller).
#define STATUS_CANCELLED -101

// Unknown error.
#define STATUS_UNKNOWN -102

// Client specified an invalid argument.  Note that this differs
// from FAILED_PRECONDITION.  INVALID_ARGUMENT indicates arguments
// that are problematic regardless of the state of the system
// (e.g., a malformed file name).
#define STATUS_INVALID_ARGUMENT -103

// Deadline expired before operation could complete.
#define STATUS_DEADLINE_EXCEEDED -104

// Some requested entity (e.g., file or directory) was not found.
#define STATUS_NOT_FOUND -105

// Some entity that we attempted to create (e.g., file or directory)
// already exists.
#define STATUS_ALREADY_EXISTS -106

// The caller does not have permission to execute the specified
// operation.
#define STATUS_PERMISSION_DENIED -107

// Some resource has been exhausted, perhaps a per-user quota, or
// perhaps the entire file system is out of space.
#define STATUS_RESOURCE_EXHAUSTED -108

// Operation was rejected because the system is not in a state
// required for the operation's execution.  For example, directory
// to be deleted may be non-empty, an rmdir operation is applied to
// a non-directory, etc.
//
// A litmus test that may help a service implementor in deciding
// between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
//  (a) Use UNAVAILABLE if the client can retry just the failing call.
//  (b) Use ABORTED if the client should retry at a higher-level
//      (e.g., restarting a read-modify-write sequence).
//  (c) Use FAILED_PRECONDITION if the client should not retry until
//      the system state has been explicitly fixed.  E.g., if an "rmdir"
//      fails because the directory is non-empty, FAILED_PRECONDITION
//      should be returned since the client should not retry unless
//      they have first fixed up the directory by deleting files from it.
#define STATUS_FAILED_PRECONDITION -109

// The operation was aborted, typically due to a concurrency issue
// like sequencer check failures, transaction aborts, etc.
//
// See litmus test above for deciding between FAILED_PRECONDITION,
// ABORTED, and UNAVAILABLE.
#define STATUS_ABORTED -110

// Operation was attempted past the valid range.  E.g., seeking or
// reading past end of file.
//
// Unlike INVALID_ARGUMENT, this error indicates a problem that may
// be fixed if the system state changes. For example, a 32-bit file
// system will generate INVALID_ARGUMENT if asked to read at an
// offset that is not in the range [0,2^32-1], but it will generate
// OUT_OF_RANGE if asked to read from an offset past the current
// file size.
#define STATUS_OUT_OF_RANGE -111

// Operation is not implemented or not supported/enabled in this service.
#define STATUS_UNIMPLEMENTED -112

// Internal errors.  Means some invariants expected by underlying
// system has been broken.  If you see one of these errors,
// something is very broken.
#define STATUS_INTERNAL -113

// The service is currently unavailable.  This is a most likely a
// transient condition and may be corrected by retrying with
// a backoff.
//
// See litmus test above for deciding between FAILED_PRECONDITION,
// ABORTED, and UNAVAILABLE.
#define STATUS_UNAVAILABLE -114

// Unrecoverable data loss or corruption.
#define STATUS_DATA_LOSS -115

// Converts one of the STATUS_* codes above to a human-readable string.
std::string StatusToString(int error_code);
| 1,240 |
1,893 | package com.liuguangqiang.swiplebacksample;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
/**
* Created by Eric on 15/3/1.
*/
/**
 * Simple fragment that displays a title supplied via its arguments bundle.
 *
 * Created by Eric on 15/3/1.
 */
public class TestFragment extends Fragment {

    /** Arguments-bundle key under which the title is stored. */
    private static final String EXTRA_TITLE = "EXTRA_TITLE";

    /**
     * Preferred way to construct this fragment: packs {@code title} into the
     * arguments bundle so it survives fragment re-creation.
     */
    public static TestFragment newInstance(String title) {
        TestFragment testFragment = new TestFragment();
        Bundle bundle = new Bundle();
        bundle.putString(EXTRA_TITLE, title);
        testFragment.setArguments(bundle);
        return testFragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_test, container, false);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        initViews();
    }

    /** Binds the title from the arguments bundle to the TextView, if present. */
    private void initViews() {
        View root = getView();
        if (root == null) {
            // View hierarchy not available; nothing to bind.
            return;
        }
        TextView tvTitle = (TextView) root.findViewById(R.id.tv_test);
        Bundle bundle = getArguments();
        // Fix: getArguments() is null when the fragment was instantiated directly
        // instead of via newInstance(); guard to avoid a NullPointerException.
        if (bundle != null && bundle.containsKey(EXTRA_TITLE)) {
            tvTitle.setText(bundle.getString(EXTRA_TITLE));
        }
    }
}
| 488 |
356 | #include "renderer.h"
#include <components/camera.h>
#include <components/model.h>
#include <components/light.h>
#include <components/ambient.h>
#include <components/node.h>
#include <components/name.h>
#include <systems/window.h>
#include <systems/render_device.h>
#include <systems/editmode.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <candle.h>
#include <utils/noise.h>
#include <utils/nk.h>
#include <utils/material.h>
#include <GLFW/glfw3.h>
static int renderer_update_screen_texture(renderer_t *self);
static void bind_pass(pass_t *pass, shader_t *shader);
#define FIRST_TEX 8
/* Unbind every texture unit this pass bound (starting at FIRST_TEX) so
 * stale bindings do not leak into the next pass. */
static void pass_unbind_textures(pass_t *pass)
{
	uint32_t unit;
	for (unit = 0; unit < pass->bound_textures; unit++)
	{
		glActiveTexture(GL_TEXTURE0 + FIRST_TEX + unit);
		glBindTexture(GL_TEXTURE_2D, 0);
	}
}
/* Bind every ready layer of a texture bind to consecutive texture units
 * (starting at FIRST_TEX) and point the matching sampler uniforms at them.
 * Returns 1 on success (currently the only outcome). */
static int pass_bind_buffer(pass_t *pass, bind_t *bind, shader_t *shader)
{
	int t;
	hash_bind_t *sb = &bind->vs_uniforms;
	texture_t *buffer;
	/* A getter, when present, resolves the texture dynamically each frame. */
	if(bind->getter)
	{
		bind->buffer = ((tex_getter)bind->getter)(pass, pass->usrptr);
	}
	buffer = bind->buffer;
	for(t = 0; t < buffer->bufs_size; t++)
	{
		/* -1 means the shader has no sampler for this layer; skip it. */
		if(((int)sb->u.buffer.u_tex[t]) != -1)
		{
			if(buffer->bufs[t].ready)
			{
				/* Allocate the next free unit for this pass. */
				int i = pass->bound_textures++;
				glActiveTexture(GL_TEXTURE0 + FIRST_TEX + i);
				texture_bind(buffer, t);
				glUniform1i(shader_cached_uniform(shader, sb->u.buffer.u_tex[t]),
				            FIRST_TEX + i);
				glerr();
			}
		}
	}
	return 1;
}
/* Resolve and cache the uniform name hashes for one bind.
 * OPT_TEX binds get one sampler hash per texture layer ("name.layer");
 * every other bind type hashes the bind name directly.
 * Marks the bind cached so this runs only once per bind. */
void bind_get_uniforms(bind_t *bind, hash_bind_t *sb, pass_t *pass)
{
	int t;
	switch(bind->type)
	{
		case OPT_NONE:
			printf("Empty bind??\n");
			break;
		case OPT_TEX:
			if(bind->getter)
			{
				bind->buffer = ((tex_getter)bind->getter)(pass, pass->usrptr);
			}
			for(t = 0; t < bind->buffer->bufs_size; t++)
			{
				char buffer[256];
				/* snprintf rather than sprintf: bind and layer names come
				 * from callers and could overflow a fixed buffer. */
				snprintf(buffer, sizeof(buffer), "%s.%s", bind->name,
				         bind->buffer->bufs[t].name);
				sb->u.buffer.u_tex[t] = ref(buffer);
			}
			break;
		default:
			sb->u.number.u = ref(bind->name);
			break;
	}
	sb->cached = true;
}
#ifdef __EMSCRIPTEN__
#include <emscripten.h>
/* WebGL has no native glGetBufferSubData; emulate it by calling the JS
 * getBufferSubData on the PIXEL_PACK_BUFFER binding directly into the
 * emscripten heap. Note: `target` and `offset` are ignored — the shim
 * always reads from offset 0 of the bound PIXEL_PACK_BUFFER, which
 * matches every call site in this file. */
void glGetBufferSubData(GLenum target, GLintptr offset, GLsizeiptr size, GLvoid *data)
{
	EM_ASM_(
	{
		Module.ctx.getBufferSubData(Module.ctx.PIXEL_PACK_BUFFER, 0, HEAPU8.subarray($0, $0 + $1));
	}, data, size);
}
#endif
/* Upload every bind (textures, scalar/vector uniforms, callbacks) attached
 * to `pass` into the currently bound program. `shader` may be NULL, in
 * which case only OPT_CALLBACK binds do anything useful. */
static void bind_pass(pass_t *pass, shader_t *shader)
{
	/* this function allows null shader for CALLBACK only */
	int i;
	if (!pass) return;
	/* if(self->shader->frame_bind == self->frame) return; */
	/* self->shader->frame_bind = self->frame; */
	if (shader)
	{
		if (!shader->ready)
			return;
		/* Non-2D outputs (e.g. cubemaps) carry one size; 2D outputs track a
		 * size per framebuffer/mip level. */
		if (pass->output->target != GL_TEXTURE_2D)
		{
			glUniform2f(shader_cached_uniform(shader, ref("screen_size")),
				pass->output->width, pass->output->height);
		}
		else
		{
			uvec2_t size = pass->output->sizes[pass->framebuffer_id];
			glUniform2f(shader_cached_uniform(shader, ref("screen_size")),
				size.x, size.y);
		}
	}
	glerr();
	pass->bound_textures = 0;
	for(i = 0; i < pass->binds_size; i++)
	{
		bind_t *bind = &pass->binds[i];
		hash_bind_t *sb = &bind->vs_uniforms;
		/* Uniform hashes are resolved lazily, once per bind. */
		if (!sb->cached && shader)
		{
			bind_get_uniforms(bind, sb, pass);
		}
		/* For each bind: run its getter (if any) to refresh the value, then
		 * upload it with the matching glUniform* call. */
		switch(bind->type)
		{
			case OPT_NONE: printf("error\n"); break;
			case OPT_TEX:
				if(!pass_bind_buffer(pass, bind, shader)) return;
				break;
			case OPT_NUM:
				if(bind->getter)
				{
					bind->number = ((number_getter)bind->getter)(pass, pass->usrptr);
				}
				glUniform1f(shader_cached_uniform(shader, sb->u.number.u), (GLfloat)bind->number); glerr();
				glerr();
				break;
			case OPT_INT:
				if(bind->getter)
				{
					bind->integer = ((integer_getter)bind->getter)(pass, pass->usrptr);
				}
				glUniform1i(shader_cached_uniform(shader, sb->u.integer.u), (GLint)bind->integer); glerr();
				glerr();
				break;
			case OPT_UINT:
				if(bind->getter)
				{
					bind->uinteger = ((integer_getter)bind->getter)(pass, pass->usrptr);
				}
				glUniform1ui(shader_cached_uniform(shader, sb->u.uinteger.u), (GLuint)bind->uinteger); glerr();
				glerr();
				break;
			case OPT_UVEC2:
				if(bind->getter)
				{
					bind->uvec2 = ((uvec2_getter)bind->getter)(pass, pass->usrptr);
				}
				glUniform2ui(shader_cached_uniform(shader, sb->u.uvec2.u), (GLuint)bind->uvec2.x, (GLuint)bind->uvec2.y);
				glerr();
				break;
			case OPT_IVEC2:
				if(bind->getter)
				{
					bind->ivec2 = ((ivec2_getter)bind->getter)(pass, pass->usrptr);
				}
				glUniform2i(shader_cached_uniform(shader, sb->u.ivec2.u), (GLint)bind->ivec2.x, (GLint)bind->ivec2.y);
				glerr();
				break;
			case OPT_VEC2:
				if(bind->getter)
				{
					bind->vec2 = ((vec2_getter)bind->getter)(pass, pass->usrptr);
				}
				glUniform2f(shader_cached_uniform(shader, sb->u.vec2.u), (GLfloat)bind->vec2.x, (GLfloat)bind->vec2.y);
				glerr();
				break;
			case OPT_VEC3:
				if(bind->getter)
				{
					bind->vec3 = ((vec3_getter)bind->getter)(pass, pass->usrptr);
				}
				glUniform3f(shader_cached_uniform(shader, sb->u.vec3.u), _vec3(bind->vec3));
				glerr();
				break;
			case OPT_VEC4:
				if(bind->getter)
				{
					bind->vec4 = ((vec4_getter)bind->getter)(pass, pass->usrptr);
				}
				glUniform4f(shader_cached_uniform(shader, sb->u.vec4.u), _vec4(bind->vec4));
				glerr();
				break;
			case OPT_CALLBACK:
				/* Callback binds just invoke the user function; no uniform. */
				bind->getter(pass, pass->usrptr);
				glerr();
				break;
			default:
				printf("error\n");
		}
	}
	/* ct_t *ambients = ecm_get(ct_ambient); */
	/* c_ambient_t *ambient = (c_ambient_t*)ct_get_at(ambients, 0, 0); */
	/* if(ambient) */
	/* { */
	/* 	c_probe_t *probe = c_probe(ambient); */
	/* 	if(probe) shader_bind_ambient(shader, probe->map); */
	/* } */
}
/* Select the texture the renderer presents as its final output, and flag
 * the renderer so the screen-texture setup is redone on the next draw. */
void renderer_set_output(renderer_t *self, texture_t *tex)
{
	self->ready = 0;
	self->output = tex;
}
/* Look up a named output texture by its name hash.
 * Returns NULL for a zero hash or when no output matches. */
texture_t *renderer_tex(renderer_t *self, unsigned int hash)
{
	int idx;
	if (hash == 0)
		return NULL;
	for (idx = 0; idx < self->outputs_num; idx++)
	{
		if (self->outputs[idx].hash == hash)
			return self->outputs[idx].buffer;
	}
	return NULL;
}
/* Register `buffer` as a named renderer output. `resolution` is the scale
 * relative to the renderer's screen size used when resizing (0 = fixed).
 * The name is hashed for lookup and also copied into the texture itself. */
void renderer_add_tex(renderer_t *self, const char *name,
                      float resolution, texture_t *buffer)
{
	pass_output_t *slot = &self->outputs[self->outputs_num++];
	slot->hash = ref(name);
	slot->buffer = buffer;
	slot->resolution = resolution;
	strncpy(buffer->name, name, sizeof(buffer->name) - 1);
}
/* Create (on first use) and refresh the per-camera uniform buffer object
 * that mirrors self->glvars[camid] on the GPU. No-op unless the CPU-side
 * copy was flagged dirty via ubo_changed. */
static void update_ubo(renderer_t *self, int32_t camid)
{
	if(!self->ubo_changed[camid]) return;
	self->ubo_changed[camid] = false;
	/* Lazy allocation: the UBO is created the first time this camera is used. */
	if(!self->ubos[camid])
	{
		glGenBuffers(1, &self->ubos[camid]); glerr();
		glBindBuffer(GL_UNIFORM_BUFFER, self->ubos[camid]); glerr();
		glBufferData(GL_UNIFORM_BUFFER, sizeof(self->glvars[camid]),
		             &self->glvars[camid], GL_DYNAMIC_DRAW); glerr();
	}
	glBindBuffer(GL_UNIFORM_BUFFER, self->ubos[camid]);
	/* void *p = glMapBuffer(GL_UNIFORM_BUFFER, GL_WRITE_ONLY); */
	/* memcpy(p, &self->glvars[camid], sizeof(self->glvars[camid])); */
	glBufferSubData(GL_UNIFORM_BUFFER, 0, sizeof(self->glvars[camid]),
	                &self->glvars[camid]); glerr();
	/* glUnmapBuffer(GL_UNIFORM_BUFFER); glerr(); */
}
/* Set the camera model matrix for one camera (`camid`) or for every camera
 * (`camid == ~0`, e.g. the six faces of a cubemap probe). Stores the
 * previous inverse view once per frame so shaders can do reprojection,
 * and marks the affected UBOs dirty. */
void renderer_set_model(renderer_t *self, uint32_t camid, mat4_t *model)
{
	self->render_device_frame = 0;
	if(camid == ~0)
	{
		int32_t i;
		for(i = 0; i < self->camera_count; i++)
		{
			struct gl_camera *var = &self->glvars[i];
			var->model = mat4_mul(*model, self->relative_transform[i]);
			var->inv_model = mat4_invert(var->model);
			var->pos = vec4_xyz(mat4_mul_vec4(var->model, vec4(0.0f, 0.0f, 0.0f, 1.0f)));
			/* Latch previous_view only on the first update of this frame. */
			if (self->stored_camera_frame[i] != self->frame)
			{
				self->glvars[i].previous_view = self->glvars[i].inv_model;
				self->stored_camera_frame[i] = self->frame;
			}
			self->ubo_changed[i] = true;
		}
	}
	else
	{
		struct gl_camera *var = &self->glvars[camid];
		var->model = mat4_mul(*model, self->relative_transform[camid]);
		var->pos = vec4_xyz(mat4_mul_vec4(var->model, vec4(0.0f, 0.0f, 0.0f, 1.0f)));
		/* NOTE(review): this branch inverts the raw *model, while the
		 * camid == ~0 branch above inverts var->model (which includes
		 * relative_transform). Confirm the asymmetry is intended. */
		var->inv_model = mat4_invert(*model);
		if (self->stored_camera_frame[camid] != self->frame)
		{
			var->previous_view = var->inv_model;
			self->stored_camera_frame[camid] = self->frame;
		}
		self->ubo_changed[camid] = true;
	}
}
/* Append a Kawase-style blur chain: downsample t1 from `from_mip` into t2
 * at `to_mip` (plain copy when the mips match), then ping-pong three blur
 * taps with growing distance between t2 and t1. The blurred result ends
 * up in t1 at `to_mip`. */
void renderer_add_kawase(renderer_t *self, texture_t *t1, texture_t *t2,
                         int from_mip, int to_mip)
{
	renderer_add_pass(self, "kawase_p",
			to_mip == from_mip ? "copy" : "candle:downsample",
			ref("quad"), 0, t2, NULL, to_mip, ~0, 2,
		opt_tex("buf", t1, NULL),
		opt_int("level", from_mip, NULL)
	);
	renderer_add_pass(self, "kawase_0", "candle:kawase", ref("quad"), 0,
			t1, NULL, to_mip, ~0, 3,
		opt_tex( "buf", t2, NULL),
		opt_int( "distance", 0, NULL),
		opt_int( "level", to_mip, NULL)
	);
	renderer_add_pass(self, "kawase_1", "candle:kawase", ref("quad"), 0,
			t2, NULL, to_mip, ~0, 3,
		opt_tex("buf", t1, NULL),
		opt_int("distance", 1, NULL),
		opt_int("level", to_mip, NULL)
	);
	renderer_add_pass(self, "kawase_2", "candle:kawase", ref("quad"), 0,
			t1, NULL, to_mip, ~0, 3,
		opt_tex("buf", t2, NULL),
		opt_int("distance", 2, NULL),
		opt_int("level", to_mip, NULL)
	);
	/* renderer_add_pass(self, "kawase_3", "candle:kawase", ref("quad"), 0, */
	/* 		t2, NULL, to_mip, ~0, */
	/* 	(bind_t[]){ */
	/* 		{TEX, "buf", .buffer = t1}, */
	/* 		{INT, "distance", .integer = 2}, */
	/* 		{INT, "level", .integer = to_mip}, */
	/* 		{NONE} */
	/* 	} */
	/* ); */
	/* renderer_add_pass(self, "kawase_4", "candle:kawase", ref("quad"), 0, */
	/* 		t1, NULL, to_mip, ~0, */
	/* 	(bind_t[]){ */
	/* 		{TEX, "buf", .buffer = t2}, */
	/* 		{INT, "distance", .integer = 3}, */
	/* 		{INT, "level", .integer = to_mip}, */
	/* 		{NONE} */
	/* 	} */
	/* ); */
}
/* Asynchronously read back the four tile-query attachments of the output
 * texture through pixel-pack buffers (PBOs), then request the virtual
 * texture tiles those queries reference.
 *
 * First call: allocates the four PBOs and only kicks off the async
 * glReadPixels. Later calls: first drain last frame's PBO contents with
 * glGetBufferSubData (one frame of latency), then start the next read.
 * Always returns NULL (callback signature). */
void *pass_process_query_mips(pass_t *self)
{
	uint32_t size;
	texture_t *tex = self->output;
	bool_t second_stage = true;
	renderer_t *renderer = self->renderer;
	/* Each texel packs two 16-bit tile ids. */
	struct{
		uint16_t _[2];
	} *mips[4];
	if (!tex->framebuffer_ready) return NULL;
	size = tex->width * tex->height * 4;
	if (!tex->bufs[1].pbo)
	{
		/* First invocation: create one PBO per query attachment. */
		glGenBuffers(1, &tex->bufs[1].pbo);
		glGenBuffers(1, &tex->bufs[2].pbo);
		glGenBuffers(1, &tex->bufs[3].pbo);
		glGenBuffers(1, &tex->bufs[4].pbo);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[1].pbo); glerr();
		glBufferData(GL_PIXEL_PACK_BUFFER, size, 0, GL_STREAM_READ);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[2].pbo); glerr();
		glBufferData(GL_PIXEL_PACK_BUFFER, size, 0, GL_STREAM_READ);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[3].pbo); glerr();
		glBufferData(GL_PIXEL_PACK_BUFFER, size, 0, GL_STREAM_READ);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[4].pbo); glerr();
		glBufferData(GL_PIXEL_PACK_BUFFER, size, 0, GL_STREAM_READ);
		glerr();
		second_stage = false;
	}
	/* Grow the CPU-side staging buffer to hold all four attachments. */
	if (renderer->mips_buffer_size < size * 4)
	{
		if (renderer->mips)
		{
			free(renderer->mips);
		}
		renderer->mips = malloc(size * 4);
		renderer->mips_buffer_size = size * 4;
	}
	mips[0] = (void*)&renderer->mips[size * 0];
	mips[1] = (void*)&renderer->mips[size * 1];
	mips[2] = (void*)&renderer->mips[size * 2];
	mips[3] = (void*)&renderer->mips[size * 3];
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0); glerr();
	glBindFramebuffer(GL_READ_FRAMEBUFFER, tex->frame_buffer[0]); glerr();
	glPixelStorei(GL_UNPACK_ALIGNMENT, 1); glerr();
	/* For each attachment: drain last frame's result, then queue this
	 * frame's read into the same PBO. */
	glReadBuffer(GL_COLOR_ATTACHMENT0); glerr();
	glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[1].pbo); glerr();
	if (second_stage)
	{
		glGetBufferSubData(GL_PIXEL_PACK_BUFFER, 0, size, mips[0]);
	}
	glReadPixels(0, 0, tex->width, tex->height, tex->bufs[1].format,
	             GL_UNSIGNED_BYTE, NULL); glerr();
	glReadBuffer(GL_COLOR_ATTACHMENT1); glerr();
	glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[2].pbo);
	if (second_stage)
	{
		glGetBufferSubData(GL_PIXEL_PACK_BUFFER, 0, size, mips[1]);
	}
	glReadPixels(0, 0, tex->width, tex->height, tex->bufs[2].format,
	             GL_UNSIGNED_BYTE, NULL); glerr();
	glReadBuffer(GL_COLOR_ATTACHMENT2); glerr();
	glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[3].pbo);
	if (second_stage)
	{
		glGetBufferSubData(GL_PIXEL_PACK_BUFFER, 0, size, mips[2]);
	}
	glReadPixels(0, 0, tex->width, tex->height, tex->bufs[3].format,
	             GL_UNSIGNED_BYTE, NULL); glerr();
	glReadBuffer(GL_COLOR_ATTACHMENT3); glerr();
	glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[4].pbo);
	if (second_stage)
	{
		glGetBufferSubData(GL_PIXEL_PACK_BUFFER, 0, size, mips[3]);
	}
	glReadPixels(0, 0, tex->width, tex->height, tex->bufs[4].format,
	             GL_UNSIGNED_BYTE, NULL); glerr();
	load_tile_frame_inc();
	glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
	if (second_stage)
	{
		/* Budget of tile loads per frame; bail out once exhausted. */
		uint32_t max_loads = 64;
		int32_t y, x, i, c;
		for (y = 0; y < tex->height; y++) {
			for (x = 0; x < tex->width; x++) {
				i = y * tex->width + x;
				for (c = 0; c < 4; c++)
				{
					/* Zero means "no tile requested at this texel". */
					if (*((uint32_t*)&mips[c][i]) == 0) continue;
					max_loads -= load_tile_by_id(mips[c][i]._[0], max_loads);
					if (max_loads == 0) goto end;
					max_loads -= load_tile_by_id(mips[c][i]._[1], max_loads);
					if (max_loads == 0) goto end;
				}
			}
		}
	}
end:
	glBindBuffer(GL_PIXEL_PACK_BUFFER, 0); glerr();
	return NULL;
}
/* Estimate the average brightness of the output texture for auto-exposure.
 * Generates mipmaps, then asynchronously reads back the smallest mip
 * (MAX_MIPS - 1) through a PBO; results arrive with one frame of latency.
 * The average of the sampled R/G/B bytes, normalized to [0,1], is stored
 * in tex->brightness. Always returns NULL (callback signature). */
void *pass_process_brightness(pass_t *self)
{
	uint32_t mip, size, count, i;
	texture_t *tex = self->output;
	if (tex->width == 0 || tex->height == 0 || tex->bufs[0].ready == 0)
		return NULL;
	if (!tex->framebuffer_ready) return NULL;
	texture_bind(tex, 0);
	glGenerateMipmap(tex->target); glerr();
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0); glerr();
	mip = MAX_MIPS - 1;
	size = tex->sizes[mip].x * tex->sizes[mip].y * 4 * sizeof(uint8_t);
	if (!tex->bufs[0].pbo)
	{
		/* First call: create the PBO; no data to consume yet. */
		glGenBuffers(1, &tex->bufs[0].pbo);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[0].pbo); glerr();
		glBufferData(GL_PIXEL_PACK_BUFFER, size, 0, GL_STREAM_READ);
		glerr();
	}
	else
	{
		float brightness;
		uint8_t *data;
		data = malloc(size);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, tex->bufs[0].pbo); glerr();
		glGetBufferSubData(GL_PIXEL_PACK_BUFFER, 0, size, data);
		brightness = 0.0f;
		count = 0u;
		/* NOTE(review): the bound `i < size / 4` with step 4 only visits the
		 * first quarter of the mip buffer; confirm whether `i < size`
		 * (every RGBA texel) was intended. */
		for (i = 0; i < size / 4; i += 4)
		{
			brightness += data[i + 0] + data[i + 1] + data[i + 2];
			count += 3;
		}
		brightness /= (float)count * 255.0f;
		tex->brightness = brightness;
		free(data);
	}
	/* glPixelStorei(GL_UNPACK_ALIGNMENT, 1); glerr(); */
	glBindFramebuffer(GL_READ_FRAMEBUFFER, tex->frame_buffer[mip]); glerr();
	glReadBuffer(GL_COLOR_ATTACHMENT0); glerr();
	glReadPixels(0, 0, tex->sizes[mip].x, tex->sizes[mip].y, tex->bufs[0].format,
	             GL_UNSIGNED_BYTE, NULL); glerr();
	glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
	glBindBuffer(GL_PIXEL_PACK_BUFFER, 0); glerr();
	return NULL;
}
/* Build the engine's default deferred-rendering pass graph.
 *
 * Parameters scale or toggle optional stages:
 *   ssao_power       > 0 enables screen-space ambient occlusion.
 *   ssr_power        screen-space reflection intensity (used by "final").
 *   ssssss_power     > 0 enables screen-space subsurface scattering blur.
 *   bloom_power      > 0 enables the bloom chain.
 *   sscaustics       currently unused in this function body.
 *   decals           enables the decal pass.
 *   volumetric_light enables the volumetric light pass.
 *   transparency     enables the transparent-geometry passes.
 *   auto_exposure    adds mipmaps to "final" + brightness readback pass.
 *
 * The "final" texture becomes the renderer output. */
void renderer_default_pipeline(renderer_t *self,
                               float ssao_power,
                               float ssr_power,
                               float ssssss_power,
                               float bloom_power,
                               bool_t sscaustics,
                               bool_t decals,
                               bool_t volumetric_light,
                               bool_t transparency,
                               bool_t auto_exposure)
{
	/* Render-target textures. Sizes of 0 mean "sized on first resize". */
	texture_t *query_mips = texture_new_2D(0, 0, 0, 5,
		buffer_new("depth", true, -1),
		buffer_new("tiles0", false, 4),
		buffer_new("tiles1", false, 4),
		buffer_new("tiles2", false, 4),
		buffer_new("tiles3", false, 4));
	texture_t *gbuffer = texture_new_2D(0, 0, 0, 5,
		buffer_new("depth", true, -1),
		buffer_new("albedo", true, 4),
		buffer_new("nn", true, 2),
		buffer_new("mrs", false, 3),
		buffer_new("emissive", false, 3));
	texture_t *transp_gbuffer = texture_new_2D(0, 0, 0, 5,
		buffer_new("depth", true, -1),
		buffer_new("albedo", true, 4),
		buffer_new("nn", true, 2),
		buffer_new("mrs", false, 3),
		buffer_new("emissive", false, 3));
	texture_t *ssao = texture_new_2D(0, 0, 0, 1,
		buffer_new("occlusion", true, 1)
	);
	texture_t *volum = volumetric_light ? texture_new_2D(0, 0, TEX_INTERPOLATE, 1,
		buffer_new("color", false, 4)
	) : NULL;
	texture_t *light = texture_new_2D(0, 0, TEX_INTERPOLATE, 1,
		buffer_new("color", true, 4)
	);
	texture_t *refr = texture_new_2D(0, 0, TEX_MIPMAP | TEX_INTERPOLATE, 1,
		buffer_new("color", true, 4)
	);
	texture_t *tmp = texture_new_2D(0, 0, TEX_MIPMAP | TEX_INTERPOLATE, 1,
		buffer_new("color", true, 4)
	);
	texture_t *final = texture_new_2D(0, 0, TEX_INTERPOLATE
			| (auto_exposure ? TEX_MIPMAP : 0), 1,
		buffer_new("color", true, 4)
	);
	texture_t *bloom = bloom_power > 0.0f ? texture_new_2D(0, 0, TEX_INTERPOLATE | TEX_MIPMAP, 1,
		buffer_new("color", false, 4)) : NULL;
	/* Register the targets so passes and the editor can look them up. */
	renderer_add_tex(self, "query_mips",     0.1f, query_mips);
	renderer_add_tex(self, "gbuffer",        1.0f, gbuffer);
	renderer_add_tex(self, "transp_gbuffer", 1.0f, transp_gbuffer);
	renderer_add_tex(self, "ssao",           1.0f / 2.0f, ssao);
	renderer_add_tex(self, "light",          1.0f, light);
	if (volumetric_light) renderer_add_tex(self, "volum", 1.0f / 2.0f, volum);
	renderer_add_tex(self, "refr",           1.0f, refr);
	renderer_add_tex(self, "tmp",            1.0f, tmp);
	renderer_add_tex(self, "final",          1.0f, final);
	if (bloom_power > 0.0f) renderer_add_tex(self, "bloom", 1.0f, bloom);
	/* Virtual-texture tile queries (run every 16th frame via opt_skip). */
	renderer_add_pass(self, "query_visible", "candle:query_mips", ref("visible"), 0,
	                  query_mips, query_mips, 0, ~0, 3,
		opt_clear_depth(1.0f, NULL),
		opt_clear_color(Z4, NULL),
		opt_skip(16)
	);
	renderer_add_pass(self, "query_decals", "candle:query_mips", ref("decals"), 0,
	                  query_mips, NULL, 0, ~0, 1,
		opt_skip(16)
	);
	renderer_add_pass(self, "query_transp", "candle:query_mips", ref("transparent"), 0,
	                  query_mips, query_mips, 0, ~0, 1,
		opt_skip(16)
	);
	renderer_add_pass(self, "svt", NULL, -1, 0,
	                  query_mips, query_mips, 0, ~0, 2,
		opt_callback((getter_cb)pass_process_query_mips),
		opt_skip(16)
	);
	/* Opaque geometry into the G-buffer. */
	renderer_add_pass(self, "gbuffer", "candle:gbuffer", ref("visible"), 0, gbuffer,
	                  gbuffer, 0, ~0, 2,
		opt_clear_depth(1.0f, NULL),
		opt_clear_color(Z4, NULL)
	);
	renderer_add_pass(self, "framebuffer_pass", "candle:framebuffer_draw",
	                  ref("framebuffer"), 0, gbuffer, gbuffer, 0, ~0, 0
	);
	/* DECAL PASS */
	if (decals)
	{
		renderer_add_pass(self, "decals_pass", "candle:gbuffer", ref("decals"), BLEND,
		                  gbuffer, NULL, 0, ~0, 1,
			opt_tex("gbuffer", gbuffer, NULL)
		);
	}
	/* Lighting: ambient first (clears), then additive per-light. */
	renderer_add_pass(self, "ambient_light_pass", "candle:pbr", ref("ambient"),
	                  ADD, light, NULL, 0, ~0, 3,
		opt_clear_color(Z4, NULL),
		opt_int("opaque_pass", true, NULL),
		opt_tex("gbuffer", gbuffer, NULL)
	);
	renderer_add_pass(self, "render_pass", "candle:pbr", ref("light"),
	                  ADD, light, NULL, 0, ~0, 2,
		opt_int("opaque_pass", true, NULL),
		opt_tex("gbuffer", gbuffer, NULL)
	);
	/* Screen-space subsurface scattering: horizontal then vertical blur. */
	if (ssssss_power > 0.f)
	{
		renderer_add_pass(self, "sss_pass0", "candle:sss", ref("quad"),
		                  0, tmp, NULL, 0, ~0, 5,
			opt_vec2("pass_dir", vec2(1.0f, 0.0f), NULL),
			opt_tex("buf", light, NULL),
			opt_num("power", ssssss_power, NULL),
			opt_tex("gbuffer", gbuffer, NULL),
			opt_clear_color(Z4, NULL)
		);
		renderer_add_pass(self, "sss_pass1", "candle:sss", ref("quad"),
		                  0, light, NULL, 0, ~0, 4,
			opt_vec2("pass_dir", vec2(0.0f, 1.0f), NULL),
			opt_num("power", ssssss_power, NULL),
			opt_tex("buf", tmp, NULL),
			opt_tex("gbuffer", gbuffer, NULL)
		);
	}
	if (volumetric_light)
	{
		renderer_add_pass(self, "volum_pass", "candle:volum", ref("light"),
		                  ADD | CULL_DISABLE, volum, NULL, 0, ~0, 2,
			opt_tex("gbuffer", gbuffer, NULL),
			opt_clear_color(Z4, NULL)
		);
	}
	/* renderer_add_pass(self, "sea", "sea", ref("quad"), 0, light, */
	/* 		NULL, 0, ~0, */
	/* 	(bind_t[]){ */
	/* 		{TEX, "gbuffer", .buffer = gbuffer}, */
	/* 		{NONE} */
	/* 	} */
	/* ); */
	/* Transparency: blurred copy of the lit scene used for refraction,
	 * then a second G-buffer + lighting round for transparent geometry. */
	if (transparency)
	{
		renderer_add_pass(self, "refraction", "candle:copy", ref("quad"), 0,
		                  refr, NULL, 0, ~0, 4,
			opt_tex("buf", light, NULL),
			opt_clear_color(Z4, NULL),
			opt_ivec2("pos", ivec2(0, 0), NULL),
			opt_int("level", 0, NULL)
		);
		renderer_add_kawase(self, refr, tmp, 0, 1);
		renderer_add_kawase(self, refr, tmp, 1, 2);
		renderer_add_kawase(self, refr, tmp, 2, 3);
		renderer_add_pass(self, "transp_2", "candle:gbuffer", ref("transparent"),
		                  0, transp_gbuffer, transp_gbuffer, 0, ~0, 4,
			opt_tex("refr", refr, NULL),
			opt_tex("gbuffer", gbuffer, NULL),
			opt_clear_depth(1.0f, NULL),
			opt_clear_color(Z4, NULL)
		);
		renderer_add_pass(self, "render_pass_2", "candle:pbr", ref("light"),
		                  ADD, light, NULL, 0, ~0, 3,
			opt_int("opaque_pass", false, NULL),
			opt_clear_color(Z4, NULL),
			opt_tex("gbuffer", transp_gbuffer, NULL)
		);
		renderer_add_pass(self, "copy_gbuffer", "candle:copy_gbuffer", ref("quad"),
		                  0, gbuffer, gbuffer, 0, ~0, 1,
			opt_tex("gbuffer", transp_gbuffer, NULL)
		);
	}
	if (ssao_power > 0.0f)
	{
		renderer_add_pass(self, "ssao_pass", "candle:ssao", ref("quad"), 0,
		                  ssao, NULL, 0, ~0, 2,
			opt_tex( "gbuffer", gbuffer, NULL),
			opt_clear_color(Z4, NULL)
		);
	}
	/* Composite lighting, SSR, SSAO and volumetrics into "final". */
	renderer_add_pass(self, "final", "candle:final", ref("quad"), 0, final,
	                  NULL, 0, ~0, 7,
		opt_tex("gbuffer", gbuffer, NULL),
		opt_tex("light", light, NULL),
		opt_tex("refr", refr, NULL),
		opt_num("ssr_power", ssr_power, NULL),
		opt_tex("ssao", ssao, NULL),
		opt_num("ssao_power", ssao_power, NULL),
		opt_tex("volum", volum, NULL)
	);
	/* Bloom: bright-pass extract, blur chain, additive upsample. */
	if (bloom_power > 0.0f)
	{
		renderer_add_pass(self, "bloom_0", "candle:bright", ref("quad"), 0,
		                  bloom, NULL, 0, ~0, 1,
			opt_tex("buf", final, NULL)
		);
		renderer_add_kawase(self, bloom, tmp, 0, 1);
		renderer_add_kawase(self, bloom, tmp, 1, 2);
		renderer_add_kawase(self, bloom, tmp, 2, 3);
		renderer_add_pass(self, "bloom_1", "candle:upsample", ref("quad"), ADD,
		                  final, NULL, 0, ~0, 3,
			opt_tex("buf", bloom, NULL),
			opt_int("level", 3, NULL),
			opt_num("alpha", bloom_power, NULL)
		);
	}
	if (auto_exposure)
	{
		renderer_add_pass(self, "luminance_calc", NULL, -1, 0,
		                  final, NULL, 0, ~0, 2,
			opt_callback((getter_cb)pass_process_brightness),
			opt_skip(16)
		);
	}
	/* renderer_tex(self, ref(light))->mipmaped = 1; */
	/* self->output = ssao; */
	self->output = final;
}
/* Rebuild camera 0's perspective projection from the current size/fov and
 * mirror it (plus the camera position) onto the remaining five camera
 * slots, marking every UBO dirty. */
void renderer_update_projection(renderer_t *self)
{
	uint32_t face;
	const float aspect = ((float)self->width) / self->height;

	self->glvars[0].projection = mat4_perspective(self->proj_fov, aspect,
			self->proj_near, self->proj_far);
	self->glvars[0].inv_projection = mat4_invert(self->glvars[0].projection);
	self->ubo_changed[0] = true;

	for (face = 1; face < 6; face++)
	{
		self->glvars[face].projection     = self->glvars[0].projection;
		self->glvars[face].inv_projection = self->glvars[0].inv_projection;
		self->glvars[face].pos            = self->glvars[0].pos;
		self->ubo_changed[face] = true;
	}
}
/* Unproject a screen coordinate plus depth-buffer value back into world
 * space using camera 0's inverse projection and model matrices.
 * `coord` is in [0,1] screen space; `depth` is the raw [0,1] depth. */
vec3_t renderer_real_pos(renderer_t *self, float depth, vec2_t coord)
{
	float z;
	vec4_t clip_space, view_space, world_space;
	/* NOTE(review): rescales very small depth values before unprojecting —
	 * presumably a near-plane precision workaround; confirm intent. */
	if(depth < 0.01f) depth *= 100.0f;
	z = depth * 2.0 - 1.0;	/* [0,1] depth -> NDC [-1,1] */
	coord = vec2_sub_number(vec2_scale(coord, 2.0f), 1.0);
	clip_space = vec4(_vec2(coord), z, 1.0);
	view_space = mat4_mul_vec4(self->glvars[0].inv_projection, clip_space);
	/* Perspective division */
	view_space = vec4_div_number(view_space, view_space.w);
	world_space = mat4_mul_vec4(self->glvars[0].model, view_space);
	return vec4_xyz(world_space);
}
/* Project a world-space position through camera 0's view-projection and
 * map the NDC result from [-1,1] into [0,1] screen space (z included). */
vec3_t renderer_screen_pos(renderer_t *self, vec3_t pos)
{
	mat4_t VP = mat4_mul(self->glvars[0].projection,
	                     self->glvars[0].inv_model);
	vec4_t viewSpacePosition = mat4_mul_vec4(VP, vec4(_vec3(pos), 1.0f));
	/* Perspective division to NDC. */
	viewSpacePosition = vec4_div_number(viewSpacePosition, viewSpacePosition.w);
	return vec3_scale(vec3_add_number(vec4_xyz(viewSpacePosition), 1.0f), 0.5);
}
/* Resize every resolution-scaled 2D output texture to match the current
 * renderer size, refresh the projection, and mark the renderer ready. */
static int renderer_update_screen_texture(renderer_t *self)
{
	int idx;
	const int base_w = self->width * self->resolution;
	const int base_h = self->height * self->resolution;

	if (self->output)
	{
		renderer_update_projection(self);
	}
	for (idx = 0; idx < self->outputs_num; idx++)
	{
		pass_output_t *out = &self->outputs[idx];
		/* Only auto-resize scaled 2D targets; cubemaps etc. keep their size. */
		if (out->resolution && out->buffer->target == GL_TEXTURE_2D)
		{
			const int scaled_w = base_w * out->resolution;
			const int scaled_h = base_h * out->resolution;
			texture_2D_resize(out->buffer, scaled_w, scaled_h);
		}
	}
	self->ready = 1;
	return 1;
}
/* Record the new output size and flag the renderer so its screen textures
 * are rebuilt on the next draw. Always returns CONTINUE. */
int renderer_resize(renderer_t *self, int width, int height)
{
	self->ready = 0;
	self->width = width;
	self->height = height;
	return CONTINUE;
}
/* Execute a single render pass: lazily compile its shader, configure blend,
 * cull and depth state from the pass flags, then draw its draw group once
 * per active camera into the pass output. When `profile` is non-NULL it
 * receives the GPU time in milliseconds (via glFinish bracketing).
 * Returns the pass output texture, or NULL when the pass was skipped. */
static texture_t *renderer_draw_pass(renderer_t *self, pass_t *pass,
                                     bool_t *profile)
{
	uint32_t f;
	c_render_device_t *rd;
	if(!pass->active) return NULL;
	/* opt_skip support: only draw every Nth frame. */
	if (self->frame % pass->draw_every != 0) return NULL;
	/* Lazy shader compilation on first use. */
	if (pass->shader_name[0] && !pass->shader)
	{
		pass->shader = fs_new(pass->shader_name);
		if (!pass->shader) return NULL;
	}
	pass->bound_textures = 0;
	rd = c_render_device(&SYS);
	if (!rd) return NULL;
	self->render_device_frame = rd->update_frame;
	c_render_device_rebind(rd, (rd_bind_cb)bind_pass, pass);
	/* A leading OPT_CALLBACK bind marks a CPU-only pass: run it and stop. */
	if (pass->binds && pass->binds[0].type == OPT_CALLBACK)
	{
		bind_pass(pass, NULL);
		return NULL;
	}
	if(pass->shader)
	{
		fs_bind(pass->shader);
	}
	if (profile)
	{
		*profile = glfwGetTime() * 1000;
		glFinish(); glerr();
	}
	/* Blend mode from the pass flags (ADD / ADD_NO_ALPHA / MUL / BLEND). */
	if(pass->additive)
	{
		glEnable(GL_BLEND);
		glBlendFunc(GL_SRC_ALPHA, GL_ONE); glerr();
	}
	if(pass->additive_no_alpha)
	{
		glEnable(GL_BLEND);
		/* glBlendFunc(GL_SRC_COLOR, GL_ONE); glerr(); */
		glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE,
			GL_ONE,
			GL_ZERO);
	}
	if(pass->multiply)
	{
		glEnable(GL_BLEND);
		glBlendFunc(GL_DST_COLOR, GL_ZERO); glerr();
	}
	if(pass->blend)
	{
		glEnable(GL_BLEND);
		glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); glerr();
	}
	if(pass->cull)
	{
		glEnable(GL_CULL_FACE); glerr();
	}
	else
	{
		glDisable(GL_CULL_FACE); glerr();
	}
	if(pass->depth)
	{
		glEnable(GL_DEPTH_TEST);
		glDepthFunc(pass->depth_func); glerr();
	}
	else
	{
		glDisable(GL_DEPTH_TEST); glerr();
	}
	rd->cull_invert = pass->cull_invert;
	glDepthMask(pass->depth_update); glerr();
	if(pass->clear)
	{
		glClearColor(_vec4(pass->clear_color));
		glClearDepth(pass->clear_depth);
	}
	/* Draw once per camera (or only pass->camid when it is pinned). */
	for (f = 0; f < self->camera_count; f++)
	{
		uvec2_t pos;
		uvec2_t size;
		if (pass->camid != ~0 && f != pass->camid) continue;
		c_render_device_bind_ubo(rd, 19, self->ubos[f]);
		/* Viewport: camera sub-rect when available, else full output,
		 * further scaled/offset by the pass's custom viewport. */
		if (!pass->ignore_cam_viewport && self->size[f].x > 0)
		{
			size = self->size[f];
			pos = self->pos[f];
		}
		else
		{
			size = uvec2(pass->output->width, pass->output->height);
			pos = uvec2(0, 0);
		}
		pos.x += pass->custom_viewport_pos.x * size.x;
		pos.y += pass->custom_viewport_pos.y * size.y;
		size.x *= pass->custom_viewport_size.x;
		size.y *= pass->custom_viewport_size.y;
		texture_target_sub(pass->output, pass->depth, pass->framebuffer_id,
		                   pos.x, pos.y, size.x, size.y);
		/* Scissored clear so only this camera's viewport is cleared. */
		if (pass->clear)
		{
			glEnable(GL_SCISSOR_TEST);
			glScissor(pos.x, pos.y, size.x, size.y);
			glClear(pass->clear);
			glDisable(GL_SCISSOR_TEST);
		}
		pass->rendered_id = draw_group(pass->draw_signal);
	}
	pass_unbind_textures(pass);
	glerr();
	glDisable(GL_DEPTH_TEST);
	glDisable(GL_BLEND);
	if(pass->auto_mip && pass->output->mipmaped)
	{
		texture_bind(pass->output, 0);
		glGenerateMipmap(pass->output->target); glerr();
	}
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0); glerr();
	rd->cull_invert = false;
	if (profile)
	{
		glFinish(); glerr();
		*profile = ((long)(glfwGetTime() * 1000)) - (*profile);
	}
	return pass->output;
}
/* Change the global render-scale factor and force the screen textures to
 * be rebuilt on the next draw. */
void renderer_set_resolution(renderer_t *self, float resolution)
{
	self->ready = 0;
	self->resolution = resolution;
}
/* int init_perlin(renderer_t *self) */
/* { */
/* int texes = 8; */
/* int m = self->perlin_size * texes; */
/* self->perlin = texture_new_3D(m, m, m, 4); */
/* loader_wait(g_candle->loader); */
/* int x, y, z; */
/* for(z = 0; z < m; z++) */
/* { */
/* for(y = 0; y < m; y++) for(x = 0; x < m; x++) */
/* { */
/* float n = (cnoise(vec3(((float)x) / 13, ((float)y) / 13, ((float)z) */
/* / 13)) + 1) / 2; */
/* n += (cnoise(vec3((float)x / 2, (float)y / 2, (float)z / 2))) / 8; */
/* n = n * 1.75; */
/* float_clamp(&n, 0.0, 1.0); */
/* texture_set_xyz(self->perlin, x, y, z, (int)round(n * 255), 0, 0, */
/* 255); */
/* } */
/* } */
/* texture_update_gl(self->perlin); */
/* printf("perlin end\n"); */
/* return 1; */
/* } */
renderer_t *renderer_new(float resolution)
{
uint32_t f;
renderer_t *self = calloc(1, sizeof(*self));
for(f = 0; f < 6; f++)
{
self->relative_transform[f] =
self->glvars[f].projection =
self->glvars[f].inv_projection =
self->glvars[f].previous_view =
self->glvars[f].model = mat4();
}
self->proj_near = 0.1f;
self->proj_far = 1000.0f;
self->proj_fov = M_PI / 2.0f;
self->resolution = resolution;
return self;
}
extern texture_t *g_cache;
extern texture_t *g_indir;
extern texture_t *g_probe_cache;
extern texture_t *g_histogram_buffer;
extern texture_t *g_histogram_accum;
/* Draw the renderer's debug UI (nuklear): FPS readout, a fullscreen
 * toggle, and buttons that open every output texture plus the global
 * caches in the editor's texture viewer. Returns CONTINUE. */
int renderer_component_menu(renderer_t *self, void *ctx)
{
	int i;
	char fps[12];
	nk_layout_row_dynamic(ctx, 0, 1);
	if(nk_button_label(ctx, "Fullscreen"))
	{
		c_window_toggle_fullscreen(c_window(&SYS));
	}
	sprintf(fps, "%d", g_candle->fps);
	nk_layout_row_begin(ctx, NK_DYNAMIC, 30, 2);
	nk_layout_row_push(ctx, 0.35);
	nk_label(ctx, "FPS: ", NK_TEXT_LEFT);
	nk_layout_row_push(ctx, 0.65);
	nk_label(ctx, fps, NK_TEXT_RIGHT);
	nk_layout_row_end(ctx);
	nk_layout_row_dynamic(ctx, 0, 1);
	/* One button per registered output texture. */
	for(i = 0; i < self->outputs_num; i++)
	{
		pass_output_t *output = &self->outputs[i];
		if(output->buffer)
		{
			if (nk_button_label(ctx, output->buffer->name))
			{
				c_editmode_open_texture(c_editmode(&SYS), output->buffer);
			}
		}
	}
	/* NOTE(review): "histograma" (accumulator) vs "histogram" (buffer) —
	 * the labels differ by one letter; confirm the spelling is deliberate. */
	if (nk_button_label(ctx, "histograma"))
	{
		c_editmode_open_texture(c_editmode(&SYS), g_histogram_accum);
	}
	if (nk_button_label(ctx, "histogram"))
	{
		c_editmode_open_texture(c_editmode(&SYS), g_histogram_buffer);
	}
	if (nk_button_label(ctx, "cache"))
	{
		c_editmode_open_texture(c_editmode(&SYS), g_cache);
	}
	if (nk_button_label(ctx, "indir"))
	{
		c_editmode_open_texture(c_editmode(&SYS), g_indir);
	}
	if (nk_button_label(ctx, "probes"))
	{
		c_editmode_open_texture(c_editmode(&SYS), g_probe_cache);
	}
	return CONTINUE;
}
/* Find a pass by its name hash; NULL for a zero hash or no match. */
pass_t *renderer_pass(renderer_t *self, unsigned int hash)
{
	int idx;
	if (hash == 0)
		return NULL;
	for (idx = 0; idx < self->passes_size; idx++)
	{
		if (self->passes[idx].hash == hash)
			return &self->passes[idx];
	}
	return NULL;
}
/* Enable or disable the pass identified by `hash`; silently ignores an
 * unknown hash. */
void renderer_toggle_pass(renderer_t *self, uint32_t hash, int active)
{
	pass_t *pass = renderer_pass(self, hash);
	if (!pass)
		return;
	pass->active = active;
}
/* Build an empty bind. Zero-initialize the whole struct — matching every
 * other opt_* constructor — so no field is returned indeterminate. */
bind_t opt_none()
{
	bind_t bind = {0};
	bind.type = OPT_NONE;
	return bind;
}
/* Texture bind: uploads one sampler per layer of `tex`; `getter`, when
 * non-NULL, re-resolves the texture every frame. */
bind_t opt_tex(const char *name, texture_t *tex, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_TEX;
	bind.buffer = tex;
	bind.getter = getter;
	return bind;
}
/* Float uniform bind. */
bind_t opt_num(const char *name, float value, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_NUM;
	bind.number = value;
	bind.getter = getter;
	return bind;
}
/* Signed integer uniform bind. */
bind_t opt_int(const char *name, int32_t value, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_INT;
	bind.integer = value;
	bind.getter = getter;
	return bind;
}
/* Unsigned integer uniform bind. */
bind_t opt_uint(const char *name, uint32_t value, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_UINT;
	bind.uinteger = value;
	bind.getter = getter;
	return bind;
}
/* uvec2 uniform bind. */
bind_t opt_uvec2(const char *name, uvec2_t value, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_UVEC2;
	bind.uvec2 = value;
	bind.getter = getter;
	return bind;
}
/* ivec2 uniform bind. */
bind_t opt_ivec2(const char *name, ivec2_t value, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_IVEC2;
	bind.ivec2 = value;
	bind.getter = getter;
	return bind;
}
/* vec2 uniform bind. */
bind_t opt_vec2(const char *name, vec2_t value, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_VEC2;
	bind.vec2 = value;
	bind.getter = getter;
	return bind;
}
/* vec3 uniform bind. */
bind_t opt_vec3(const char *name, vec3_t value, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_VEC3;
	bind.vec3 = value;
	bind.getter = getter;
	return bind;
}
/* vec4 uniform bind. */
bind_t opt_vec4(const char *name, vec4_t value, getter_cb getter)
{
	bind_t bind = {0};
	strncpy(bind.name, name, sizeof(bind.name) - 1);
	bind.type = OPT_VEC4;
	bind.vec4 = value;
	bind.getter = getter;
	return bind;
}
/* Pass-level option: pin the pass to a single camera id. */
bind_t opt_cam(uint32_t camera, getter_cb getter)
{
	bind_t bind = {0};
	bind.type = OPT_CAM;
	bind.uinteger = camera;
	bind.getter = getter;
	return bind;
}
/* Pass-level option: clear the color buffer to `color` before drawing. */
bind_t opt_clear_color(vec4_t color, getter_cb getter)
{
	bind_t bind = {0};
	bind.type = OPT_CLEAR_COLOR;
	bind.vec4 = color;
	bind.getter = getter;
	return bind;
}
/* Pass-level option: clear the depth buffer to `depth` before drawing. */
bind_t opt_clear_depth(float depth, getter_cb getter)
{
	bind_t bind = {0};
	bind.type = OPT_CLEAR_DEPTH;
	bind.number = depth;
	bind.getter = getter;
	return bind;
}
/* CPU callback bind; a pass whose first bind is a callback runs on the
 * CPU only (see renderer_draw_pass). */
bind_t opt_callback(getter_cb callback)
{
	bind_t bind = {0};
	bind.type = OPT_CALLBACK;
	bind.getter = callback;
	return bind;
}
/* Pass-level option: opaque user pointer handed to every getter. */
bind_t opt_usrptr(void *ptr)
{
	bind_t bind = {0};
	bind.type = OPT_USRPTR;
	bind.ptr = ptr;
	return bind;
}
/* Pass-level option: only draw the pass every `ticks` frames. */
bind_t opt_skip(uint32_t ticks)
{
	bind_t bind = {0};
	bind.type = OPT_SKIP;
	bind.integer = ticks;
	return bind;
}
/* Pass-level option: normalized viewport rectangle (min, size). */
bind_t opt_viewport(vec2_t min, vec2_t size)
{
	bind_t bind = {0};
	bind.type = OPT_VIEWPORT;
	bind.vec4 = vec4(_vec2(min), _vec2(size));
	return bind;
}
/* Append (or insert) a render pass into the renderer's pass list.
 *
 * name         printf-style format for the pass name; may contain one %d
 *              which receives the current pass count (used by kawase).
 * shader_name  fragment shader compiled lazily on first draw; NULL for
 *              CPU/callback-only passes.
 * draw_signal  draw group emitted by the pass (~0 for none).
 * flags        pass_options bitmask controlling blend, depth and culling.
 * output       render target (required — exits on NULL).
 * depth        depth texture, or NULL to disable depth testing.
 * framebuffer  framebuffer/mip index inside `output`.
 * after_pass   name hash of an existing pass to insert after, or ~0 to
 *              append at the end.
 * num_opts     number of variadic bind_t options (opt_* constructors);
 *              pass-level options (OPT_CAM, OPT_SKIP, OPT_CLEAR_*,
 *              OPT_USRPTR, OPT_VIEWPORT) configure the pass, the rest
 *              become shader binds.
 */
void renderer_add_pass(
		renderer_t *self,
		const char *name,
		const char *shader_name,
		uint32_t draw_signal,
		enum pass_options flags,
		texture_t *output,
		texture_t *depth,
		uint32_t framebuffer,
		uint32_t after_pass,
		uint32_t num_opts,
		...)
{
	char buffer[32];
	va_list argptr;
	int i = -1;
	uint32_t o;
	unsigned int hash;
	pass_t *pass;
	if(!output)
	{
		printf("Pass %s has no output\n", name);
		exit(1);
	}
	/* snprintf: `name` is deliberately a format string (see doc above),
	 * but must not overflow the fixed name buffer. */
	snprintf(buffer, sizeof(buffer), name, self->passes_size);
	hash = ref(buffer);
	/* TODO add pass replacement */
	if (after_pass != ~0)
	{
		/* Shift everything after `after_pass` one slot up and insert. */
		uint32_t pass_id, j;
		pass_t *after = renderer_pass(self, after_pass);
		assert(after);
		pass_id = after - self->passes;
		for (j = self->passes_size; j > pass_id; --j)
		{
			self->passes[j] = self->passes[j - 1];
		}
		self->passes_size++;
		i = pass_id;
	}
	else if(i == -1)
	{
		i = self->passes_size++;
	}
	else
	{
		printf("Replacing %s\n", name);
	}
	assert(self->passes_size < 64);
	pass = &self->passes[i];
	pass->renderer = self;
	pass->hash = hash;
	pass->framebuffer_id = framebuffer;
	pass->auto_mip = !!(flags & GEN_MIP);
	pass->track_brightness = !!(flags & TRACK_BRIGHT);
	pass->camid = 0;
	pass->custom_viewport_pos = vec2(0.0f, 0.0f);
	pass->custom_viewport_size = vec2(1.0f, 1.0f);
	if(shader_name)
	{
		strncpy(pass->shader_name, shader_name, sizeof(pass->shader_name) - 1);
	}
	pass->clear = 0;
	/* Depth writes are on only when there is a depth target and the pass
	 * does not lock it. */
	pass->depth_update = !(flags & DEPTH_LOCK) && depth;
	pass->output = output;
	pass->depth = depth;
	/* Translate the DEPTH_* flag combination into a GL depth function. */
	if(flags & DEPTH_DISABLE)
	{
		pass->depth_func = GL_ALWAYS;
	}
	else if(!(flags & DEPTH_EQUAL))
	{
		if(flags & DEPTH_GREATER)
		{
			pass->depth_func = GL_GREATER;
		}
		else
		{
			pass->depth_func = GL_LESS;
		}
	}
	else
	{
		if(flags & DEPTH_GREATER)
		{
			pass->depth_func = GL_GEQUAL;
		}
		else if (flags & DEPTH_LESSER)
		{
			pass->depth_func = GL_LEQUAL;
		}
		else
		{
			pass->depth_func = GL_EQUAL;
		}
	}
	if(flags & IGNORE_CAM_VIEWPORT)
	{
		pass->ignore_cam_viewport = true;
	}
	pass->draw_signal = draw_signal;
	pass->additive = !!(flags & ADD);
	pass->additive_no_alpha = !!(flags & ADD_NO_ALPHA);
	pass->multiply = !!(flags & MUL);
	pass->blend = !!(flags & BLEND);
	pass->cull = !(flags & CULL_DISABLE);
	pass->cull_invert = !!(flags & CULL_INVERT);
	pass->clear_depth = 1.0f;
	strncpy(pass->name, buffer, sizeof(pass->name) - 1);
	pass->draw_every = 1;
	pass->binds = malloc(sizeof(bind_t) * num_opts);
	pass->binds_size = 0;
	/* Consume the variadic options: pass-level ones configure the pass
	 * directly; the rest are stored as shader binds. */
	va_start(argptr, num_opts);
	for (o = 0; o < num_opts; o++)
	{
		int t;
		bind_t *bind;
		bind_t opt = va_arg(argptr, bind_t);
		hash_bind_t *sb;
		if(opt.type == OPT_USRPTR)
		{
			pass->usrptr = opt.ptr;
			continue;
		}
		if(opt.type == OPT_CAM)
		{
			pass->camid = opt.uinteger;
			continue;
		}
		if(opt.type == OPT_CLEAR_COLOR)
		{
			pass->clear |= GL_COLOR_BUFFER_BIT;
			pass->clear_color = opt.vec4;
			continue;
		}
		if(opt.type == OPT_CLEAR_DEPTH)
		{
			pass->clear |= GL_DEPTH_BUFFER_BIT;
			pass->clear_depth = opt.number;
			continue;
		}
		if(opt.type == OPT_SKIP)
		{
			pass->draw_every = opt.integer;
			continue;
		}
		if(opt.type == OPT_VIEWPORT)
		{
			pass->custom_viewport_pos = XY(opt.vec4);
			pass->custom_viewport_size = ZW(opt.vec4);
			continue;
		}
		bind = &pass->binds[pass->binds_size++];
		*bind = opt;
		sb = &bind->vs_uniforms;
		sb->cached = false;
		/* -1 marks "no sampler resolved yet" for every texture layer. */
		for(t = 0; t < 16; t++)
		{
			sb->u.buffer.u_tex[t] = -1;
		}
		bind->hash = ref(bind->name);
	}
	va_end(argptr);
	/* Shrink to the binds actually kept; realloc(p, 0) is
	 * implementation-defined, so free explicitly when none remain. */
	if (pass->binds_size > 0)
	{
		pass->binds = realloc(pass->binds, sizeof(bind_t) * pass->binds_size);
	}
	else
	{
		free(pass->binds);
		pass->binds = NULL;
	}
	self->ready = 0;
	pass->active = 1;
}
/* Releases all heap and GL resources owned by the renderer:
 * per-pass bind arrays, uniform buffer objects and output textures. */
void renderer_destroy(renderer_t *self)
{
	uint32_t p;

	/* Free the bind array allocated for each pass. */
	for (p = 0; p < self->passes_size; p++)
	{
		bind_t *binds = self->passes[p].binds;
		if (binds)
		{
			free(binds);
		}
	}
	/* Delete every uniform buffer object that was actually created. */
	for (p = 0; p < 6; p++)
	{
		if (self->ubos[p])
		{
			glDeleteBuffers(1, &self->ubos[p]);
		}
	}
	/* Destroy the textures backing each pass output. */
	for (p = 0; p < self->outputs_num; p++)
	{
		texture_destroy(self->outputs[p].buffer);
	}
}
/* Builds the default render pipeline with its standard parameters.
 * Used as a fallback when no pipeline has been configured explicitly
 * (see the self->output check in renderer_draw). */
void renderer_default_pipeline_config(renderer_t *self)
{
	renderer_default_pipeline(self, 1.0f, 1.0f, 1.0f, 0.5f,
	                          true, true, true, true, true);
}
/* Returns true when nothing needs to be redrawn: the render device has
 * not been updated since our last draw and every active, scheduled pass
 * still matches its draw group's state hash. */
bool_t renderer_updated(const renderer_t *self)
{
	uint32_t p;
	c_render_device_t *rd = c_render_device(&SYS);

	/* A missing or newly-updated render device always forces a redraw. */
	if (!rd || self->render_device_frame != rd->update_frame)
		return false;
	for (p = 0; p < self->passes_size; p++)
	{
		const pass_t *pass = &self->passes[p];
		/* Only passes that are active and scheduled this frame matter. */
		if (!pass->active || self->frame % pass->draw_every != 0)
			continue;
		/* A pass bound to a draw signal is stale once its group's state
		 * hash no longer matches what was last rendered. */
		if (pass->draw_signal != ~0
		    && draw_group_state_hash(pass->draw_signal) != pass->rendered_id)
			return false;
	}
	return true;
}
/* Advances the frame counter and draws every pass of the renderer.
 * Returns false when nothing was drawn (already up to date, or the
 * renderer has no valid size yet) and true after a full draw.
 * NOTE(review): declared as returning int but returns bool constants;
 * callers presumably treat the result as a boolean — confirm. */
int renderer_draw(renderer_t *self)
{
	uint32_t i;
	self->frame++;
	/* Skip the whole frame if no pass would produce new output. */
	if (renderer_updated(self))
		return false;
	glerr();
	if(!self->width || !self->height) return false;
	/* Lazily build the default pipeline on first use. */
	if(!self->output) renderer_default_pipeline_config(self);
	if(!self->ready) renderer_update_screen_texture(self);
	/* Upload per-camera uniform buffers before any pass runs. */
	for(i = 0; i < self->camera_count; i++)
	{
		update_ubo(self, i);
	}
	for(i = 0; i < self->passes_size; i++)
	{
		uint32_t timer;
		const bool_t profile = false;
		/* const bool_t profile = self->frame % 64 == 0; */
		const texture_t *output = renderer_draw_pass(self, &self->passes[i],
		                                             profile ? &timer : NULL);
		/* With profiling enabled (switch the constant above), print the
		 * per-pass timer value for every pass that produced output. */
		if (profile && output)
		{
			printf("%s: %u\n", self->passes[i].name, timer);
		}
	}
	/* Unbind any lingering state before handing control back. */
	c_render_device_rebind(c_render_device(&SYS), NULL, NULL);
	glerr();
	return true;
}
| 18,270 |
4,200 | /*
* Copyright 2009-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.executor.loader.cglib;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.executor.loader.AbstractSerialStateHolder;
import org.apache.ibatis.executor.loader.ResultLoaderMap;
import org.apache.ibatis.reflection.factory.ObjectFactory;
/**
 * Serial state holder for beans proxied with CGLIB: on deserialization it
 * recreates a CGLIB proxy so still-unloaded lazy properties keep working.
 *
 * @author <NAME>
 */
class CglibSerialStateHolder extends AbstractSerialStateHolder {

  private static final long serialVersionUID = 8940388717901644661L;

  /** No-arg constructor required by the serialization machinery. */
  public CglibSerialStateHolder() {
  }

  public CglibSerialStateHolder(final Object userBean,
      final Map<String, ResultLoaderMap.LoadPair> unloadedProperties,
      final ObjectFactory objectFactory,
      final List<Class<?>> constructorArgTypes,
      final List<Object> constructorArgs) {
    super(userBean, unloadedProperties, objectFactory, constructorArgTypes, constructorArgs);
  }

  @Override
  protected Object createDeserializationProxy(Object target,
      Map<String, ResultLoaderMap.LoadPair> unloadedProperties, ObjectFactory objectFactory,
      List<Class<?>> constructorArgTypes, List<Object> constructorArgs) {
    // Delegate proxy creation to the CGLIB-specific factory.
    return new CglibProxyFactory().createDeserializationProxy(target, unloadedProperties,
        objectFactory, constructorArgTypes, constructorArgs);
  }
}
| 580 |
1,313 | <gh_stars>1000+
// Copyright (c) 2009-2010 <NAME>
// Copyright (c) 2017-2019 The WaykiChain Developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "logdb.h"
#include "config/chainparams.h"
// Record a transaction execution failure at the given block height.
// Returns true immediately when failure logging is disabled (no-op success);
// otherwise returns whether the cache write succeeded.
// Fix: the key used unqualified make_pair while the value used
// std::make_pair in the same expression — normalized both to std::make_pair.
bool CLogDBCache::SetExecuteFail(const int32_t blockHeight, const uint256 txid, const uint8_t errorCode,
                                 const string &errorMessage) {
    // Logging failures is optional; skip the write entirely when disabled.
    if (!SysCfg().IsLogFailures())
        return true;

    // Key: (height, txid); value: (error code, human-readable message).
    return executeFailCache.SetData(std::make_pair(CFixedUInt32(blockHeight), txid),
                                    std::make_pair(errorCode, errorMessage));
}
void CLogDBCache::Flush() { executeFailCache.Flush(); }
| 296 |
2,338 | // RUN: clang-tidy -dump-config %S/Inputs/config-files/- -- | FileCheck %s -check-prefix=CHECK-BASE
// CHECK-BASE: Checks: {{.*}}from-parent
// CHECK-BASE: HeaderFilterRegex: parent
// RUN: clang-tidy -dump-config %S/Inputs/config-files/1/- -- | FileCheck %s -check-prefix=CHECK-CHILD1
// CHECK-CHILD1: Checks: {{.*}}from-child1
// CHECK-CHILD1: HeaderFilterRegex: child1
// RUN: clang-tidy -dump-config %S/Inputs/config-files/2/- -- | FileCheck %s -check-prefix=CHECK-CHILD2
// CHECK-CHILD2: Checks: {{.*}}from-parent
// CHECK-CHILD2: HeaderFilterRegex: parent
// RUN: clang-tidy -dump-config %S/Inputs/config-files/3/- -- | FileCheck %s -check-prefix=CHECK-CHILD3
// CHECK-CHILD3: Checks: {{.*}}from-parent,from-child3
// CHECK-CHILD3: HeaderFilterRegex: child3
// RUN: clang-tidy -dump-config -checks='from-command-line' -header-filter='from command line' %S/Inputs/config-files/- -- | FileCheck %s -check-prefix=CHECK-COMMAND-LINE
// CHECK-COMMAND-LINE: Checks: {{.*}}from-parent,from-command-line
// CHECK-COMMAND-LINE: HeaderFilterRegex: from command line
// For this test we have to use names of the real checks because otherwise values are ignored.
// RUN: clang-tidy -dump-config %S/Inputs/config-files/4/44/- -- | FileCheck %s -check-prefix=CHECK-CHILD4
// CHECK-CHILD4: Checks: {{.*}}modernize-loop-convert,modernize-use-using,llvm-qualified-auto
// CHECK-CHILD4-DAG: - key: llvm-qualified-auto.AddConstToQualified{{ *[[:space:]] *}}value: 'true'
// CHECK-CHILD4-DAG: - key: modernize-loop-convert.MaxCopySize{{ *[[:space:]] *}}value: '20'
// CHECK-CHILD4-DAG: - key: modernize-loop-convert.MinConfidence{{ *[[:space:]] *}}value: reasonable
// CHECK-CHILD4-DAG: - key: modernize-use-using.IgnoreMacros{{ *[[:space:]] *}}value: 'false'
// RUN: clang-tidy --explain-config %S/Inputs/config-files/4/44/- -- | FileCheck %s -check-prefix=CHECK-EXPLAIN
// CHECK-EXPLAIN: 'llvm-qualified-auto' is enabled in the {{.*}}{{[/\\]}}Inputs{{[/\\]}}config-files{{[/\\]}}4{{[/\\]}}44{{[/\\]}}.clang-tidy.
// CHECK-EXPLAIN: 'modernize-loop-convert' is enabled in the {{.*}}{{[/\\]}}Inputs{{[/\\]}}config-files{{[/\\]}}4{{[/\\]}}.clang-tidy.
// CHECK-EXPLAIN: 'modernize-use-using' is enabled in the {{.*}}{{[/\\]}}Inputs{{[/\\]}}config-files{{[/\\]}}4{{[/\\]}}.clang-tidy.
// RUN: clang-tidy -dump-config \
// RUN: --config='{InheritParentConfig: true, \
// RUN: Checks: -llvm-qualified-auto, \
// RUN: CheckOptions: [{key: modernize-loop-convert.MaxCopySize, value: 21}]}' \
// RUN: %S/Inputs/config-files/4/44/- -- | FileCheck %s -check-prefix=CHECK-CHILD5
// CHECK-CHILD5: Checks: {{.*}}modernize-loop-convert,modernize-use-using,llvm-qualified-auto,-llvm-qualified-auto
// CHECK-CHILD5-DAG: - key: modernize-loop-convert.MaxCopySize{{ *[[:space:]] *}}value: '21'
// CHECK-CHILD5-DAG: - key: modernize-loop-convert.MinConfidence{{ *[[:space:]] *}}value: reasonable
// CHECK-CHILD5-DAG: - key: modernize-use-using.IgnoreMacros{{ *[[:space:]] *}}value: 'false'
// RUN: clang-tidy -dump-config \
// RUN: --config='{InheritParentConfig: false, \
// RUN: Checks: -llvm-qualified-auto}' \
// RUN: %S/Inputs/config-files/4/44/- -- | FileCheck %s -check-prefix=CHECK-CHILD6
// CHECK-CHILD6: Checks: {{.*-llvm-qualified-auto'? *$}}
// CHECK-CHILD6-NOT: - key: modernize-use-using.IgnoreMacros
| 1,281 |
5,079 | # Copyright (c) 2018, The Jaeger Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from jaeger_client.metrics import MetricsFactory
from collections import defaultdict
from prometheus_client import Counter, Gauge
class PrometheusMetricsFactory(MetricsFactory):
    """
    Provides metrics backed by Prometheus.

    Metric objects are cached by metric name plus label names, because
    prometheus_client rejects registering the same metric twice.
    """

    def __init__(self, namespace=''):
        # Plain dict cache; the previous defaultdict(object) was a dead
        # abstraction because lookups went through .get(), which never
        # invokes the default factory.
        self._cache = {}
        self._namespace = namespace

    def _get_tag_name_list(self, tags):
        # Label names in the tag-dict's iteration order; [] when no tags.
        if tags is None:
            return []
        return list(tags)

    def _get_metric(self, metric, name, label_name_list):
        # Create the metric on first use; reuse the cached object after.
        cache_key = name + ''.join(label_name_list)
        if cache_key not in self._cache:
            self._cache[cache_key] = metric(name=name, documentation=name,
                                            labelnames=label_name_list,
                                            namespace=self._namespace)
        return self._cache[cache_key]

    def create_counter(self, name, tags=None):
        """Return a callable increment(value) bound to a Prometheus Counter.

        :param name: metric name
        :param tags: optional dict of label names to label values
        """
        label_name_list = self._get_tag_name_list(tags)
        counter = self._get_metric(Counter, name, label_name_list)
        if tags:
            counter = counter.labels(**tags)

        def increment(value):
            counter.inc(value)
        return increment

    def create_gauge(self, name, tags=None):
        """Return a callable update(value) bound to a Prometheus Gauge.

        :param name: metric name
        :param tags: optional dict of label names to label values
        """
        label_name_list = self._get_tag_name_list(tags)
        gauge = self._get_metric(Gauge, name, label_name_list)
        if tags:
            gauge = gauge.labels(**tags)

        def update(value):
            gauge.set(value)
        return update
| 859 |
10,225 | package io.quarkus.vertx.http.deployment.devmode.tests;
/**
 * Value object holding counters for the most recent test run
 * (testsRun/testsPassed/testsFailed/testsSkipped) and cumulative totals
 * (totalTests*). Counters default to -1 until populated.
 *
 * Fix: toString() previously omitted the three totalTests* fields even
 * though the constructor and accessors cover them; it now reports all.
 */
public class TestStatus {

    private long lastRun;
    private long running;
    private long testsRun = -1;
    private long testsPassed = -1;
    private long testsFailed = -1;
    private long testsSkipped = -1;
    private long totalTestsPassed = -1;
    private long totalTestsFailed = -1;
    private long totalTestsSkipped = -1;

    public TestStatus() {
    }

    public TestStatus(long lastRun, long running, long testsRun, long testsPassed, long testsFailed, long testsSkipped,
            long totalTestsPassed, long totalTestsFailed, long totalTestsSkipped) {
        this.lastRun = lastRun;
        this.running = running;
        this.testsRun = testsRun;
        this.testsPassed = testsPassed;
        this.testsFailed = testsFailed;
        this.testsSkipped = testsSkipped;
        this.totalTestsPassed = totalTestsPassed;
        this.totalTestsFailed = totalTestsFailed;
        this.totalTestsSkipped = totalTestsSkipped;
    }

    public long getLastRun() {
        return lastRun;
    }

    /** Fluent setter; returns this for chaining. */
    public TestStatus setLastRun(long lastRun) {
        this.lastRun = lastRun;
        return this;
    }

    public long getRunning() {
        return running;
    }

    public TestStatus setRunning(long running) {
        this.running = running;
        return this;
    }

    public long getTestsRun() {
        return testsRun;
    }

    public TestStatus setTestsRun(long testsRun) {
        this.testsRun = testsRun;
        return this;
    }

    public long getTestsPassed() {
        return testsPassed;
    }

    public TestStatus setTestsPassed(long testsPassed) {
        this.testsPassed = testsPassed;
        return this;
    }

    public long getTestsFailed() {
        return testsFailed;
    }

    public TestStatus setTestsFailed(long testsFailed) {
        this.testsFailed = testsFailed;
        return this;
    }

    public long getTestsSkipped() {
        return testsSkipped;
    }

    public TestStatus setTestsSkipped(long testsSkipped) {
        this.testsSkipped = testsSkipped;
        return this;
    }

    public long getTotalTestsPassed() {
        return totalTestsPassed;
    }

    public TestStatus setTotalTestsPassed(long totalTestsPassed) {
        this.totalTestsPassed = totalTestsPassed;
        return this;
    }

    public long getTotalTestsFailed() {
        return totalTestsFailed;
    }

    public TestStatus setTotalTestsFailed(long totalTestsFailed) {
        this.totalTestsFailed = totalTestsFailed;
        return this;
    }

    public long getTotalTestsSkipped() {
        return totalTestsSkipped;
    }

    public TestStatus setTotalTestsSkipped(long totalTestsSkipped) {
        this.totalTestsSkipped = totalTestsSkipped;
        return this;
    }

    @Override
    public String toString() {
        return "TestStatus{" +
                "lastRun=" + lastRun +
                ", running=" + running +
                ", testsRun=" + testsRun +
                ", testsPassed=" + testsPassed +
                ", testsFailed=" + testsFailed +
                ", testsSkipped=" + testsSkipped +
                ", totalTestsPassed=" + totalTestsPassed +
                ", totalTestsFailed=" + totalTestsFailed +
                ", totalTestsSkipped=" + totalTestsSkipped +
                '}';
    }
}
| 1,357 |
806 | <gh_stars>100-1000
#
# tests/middleware/test_auth.py
#
import growler
import pytest
from unittest import mock
from mock_classes import ( # noqa
mock_protocol,
request_uri,
)
@pytest.fixture
def auth():
    """Provide a fresh Auth middleware instance for each test."""
    middleware = growler.middleware.auth.Auth()
    return middleware
def test_constructor(auth):
    """The fixture yields a genuine Auth instance."""
    assert isinstance(auth, growler.middleware.auth.Auth)
def test_docstring(auth):
    """The Auth class ships with a string docstring."""
    assert isinstance(auth.__doc__, str)
def test_call(auth):
    """Calling the middleware yields a handler that is not implemented yet."""
    handler = auth()
    assert callable(handler)
    with pytest.raises(NotImplementedError):
        handler(mock.Mock(), mock.Mock())
def test_do_authentication(auth):
    """The base Auth class leaves do_authentication abstract."""
    req, res = mock.Mock(), mock.Mock()
    with pytest.raises(NotImplementedError):
        auth.do_authentication(req, res)
| 333 |
2,690 | <reponame>rasa-silva/Odin
/* Index type used for both bucket slots and entry chain links. */
typedef u32 PtrSetIndex;

/* Result of a probe: the bucket, the matching entry (or sentinel when
 * absent), and the chain entry linking to it (for unlink/append). */
struct PtrSetFindResult {
	PtrSetIndex hash_index;
	PtrSetIndex entry_prev;
	PtrSetIndex entry_index;
};

/* Marker for "no entry" / "end of chain". */
enum : PtrSetIndex { PTR_SET_SENTINEL = ~(PtrSetIndex)0 };


/* One stored pointer plus the link to the next entry in its bucket chain. */
template <typename T>
struct PtrSetEntry {
	T ptr;
	PtrSetIndex next;
};

/* Hash set of pointers: 'hashes' maps each bucket to the head of a
 * singly-linked collision chain stored in the flat 'entries' array. */
template <typename T>
struct PtrSet {
	Array<PtrSetIndex>    hashes;
	Array<PtrSetEntry<T>> entries;
};

template <typename T> void ptr_set_init   (PtrSet<T> *s, gbAllocator a, isize capacity = 16);
template <typename T> void ptr_set_destroy(PtrSet<T> *s);
template <typename T> T    ptr_set_add    (PtrSet<T> *s, T ptr);
template <typename T> bool ptr_set_update (PtrSet<T> *s, T ptr); // returns true if it previously existed
template <typename T> bool ptr_set_exists (PtrSet<T> *s, T ptr);
template <typename T> void ptr_set_remove (PtrSet<T> *s, T ptr);
template <typename T> void ptr_set_clear  (PtrSet<T> *s);
template <typename T> void ptr_set_grow   (PtrSet<T> *s);
template <typename T> void ptr_set_rehash (PtrSet<T> *s, isize new_count);
/* Initializes the set: the bucket count is rounded up to a power of two
 * (the lookup masks with count-1) and every bucket starts empty. */
template <typename T>
void ptr_set_init(PtrSet<T> *s, gbAllocator a, isize capacity) {
	capacity = next_pow2_isize(gb_max(16, capacity));
	array_init(&s->hashes, a, capacity);
	array_init(&s->entries, a, 0, capacity);
	for (isize bucket = 0; bucket < capacity; bucket++) {
		s->hashes.data[bucket] = PTR_SET_SENTINEL;
	}
}
/* Frees both backing arrays; entries own no resources themselves. */
template <typename T>
void ptr_set_destroy(PtrSet<T> *s) {
	array_free(&s->entries);
	array_free(&s->hashes);
}
/* Appends a new, not-yet-linked entry and returns its index. */
template <typename T>
gb_internal PtrSetIndex ptr_set__add_entry(PtrSet<T> *s, T ptr) {
	PtrSetEntry<T> entry = {};
	entry.ptr  = ptr;
	entry.next = PTR_SET_SENTINEL;
	array_add(&s->entries, entry);
	return cast(PtrSetIndex)(s->entries.count-1);
}
/* Probes for 'ptr'.  Returns the bucket index, the index of the matching
 * entry (PTR_SET_SENTINEL when absent), and the previous chain entry
 * (needed later for unlinking or appending). */
template <typename T>
gb_internal PtrSetFindResult ptr_set__find(PtrSet<T> *s, T ptr) {
	PtrSetFindResult fr = {PTR_SET_SENTINEL, PTR_SET_SENTINEL, PTR_SET_SENTINEL};
	if (s->hashes.count != 0) {
		/* Scramble the pointer bits with the 64-bit FNV-1a offset basis
		 * constant; masking with n-1 relies on the bucket count being a
		 * power of two (guaranteed by ptr_set_init/ptr_set_grow). */
		u64 hash = 0xcbf29ce484222325ull ^ cast(u64)cast(uintptr)ptr;
		u64 n = cast(u64)s->hashes.count;
		fr.hash_index = cast(PtrSetIndex)(hash & (n-1));
		fr.entry_index = s->hashes.data[fr.hash_index];
		/* Walk the collision chain for this bucket. */
		while (fr.entry_index != PTR_SET_SENTINEL) {
			if (s->entries.data[fr.entry_index].ptr == ptr) {
				return fr;
			}
			fr.entry_prev = fr.entry_index;
			fr.entry_index = s->entries.data[fr.entry_index].next;
		}
	}
	return fr;
}
/* True once the load factor reaches 75%, i.e. time to grow. */
template <typename T>
gb_internal bool ptr_set__full(PtrSet<T> *s) {
	return s->entries.count >= 0.75f*s->hashes.count;
}
/* Doubles the bucket count (stays a power of two) and rehashes. */
template <typename T>
gb_inline void ptr_set_grow(PtrSet<T> *s) {
	ptr_set_rehash(s, 2*s->hashes.count);
}
/* Rebuilds the set with 'new_count' buckets by constructing a fresh set
 * 'ns', reinserting every existing entry, and swapping it into *s. */
template <typename T>
void ptr_set_rehash(PtrSet<T> *s, isize new_count) {
	isize i, j;
	PtrSet<T> ns = {};
	ptr_set_init(&ns, s->hashes.allocator);
	/* ptr_set_init only prepared the default capacity; resize the bucket
	 * array to the requested count and clear every bucket. */
	array_resize(&ns.hashes, new_count);
	array_reserve(&ns.entries, s->entries.count);
	for (i = 0; i < new_count; i++) {
		ns.hashes.data[i] = PTR_SET_SENTINEL;
	}
	for (i = 0; i < s->entries.count; i++) {
		PtrSetEntry<T> *e = &s->entries.data[i];
		PtrSetFindResult fr;
		if (ns.hashes.count == 0) {
			ptr_set_grow(&ns);
		}
		/* Probe in the new set, append the entry, and link it either as
		 * the bucket head or after the last chain entry found. */
		fr = ptr_set__find(&ns, e->ptr);
		j = ptr_set__add_entry(&ns, e->ptr);
		if (fr.entry_prev == PTR_SET_SENTINEL) {
			ns.hashes.data[fr.hash_index] = cast(PtrSetIndex)j;
		} else {
			ns.entries.data[fr.entry_prev].next = cast(PtrSetIndex)j;
		}
		ns.entries.data[j].next = fr.entry_index;
		if (ptr_set__full(&ns)) {
			ptr_set_grow(&ns);
		}
	}
	/* Free the old storage and adopt the rebuilt set. */
	ptr_set_destroy(s);
	*s = ns;
}
/* Membership test: present iff the probe ends at a real entry. */
template <typename T>
gb_inline bool ptr_set_exists(PtrSet<T> *s, T ptr) {
	return ptr_set__find(s, ptr).entry_index != PTR_SET_SENTINEL;
}
// Inserts 'ptr' if it is not already present; always returns 'ptr'.
template <typename T>
T ptr_set_add(PtrSet<T> *s, T ptr) {
	PtrSetIndex index;
	PtrSetFindResult fr;
	if (s->hashes.count == 0) {
		ptr_set_grow(s); /* lazy first allocation */
	}
	fr = ptr_set__find(s, ptr);
	if (fr.entry_index == PTR_SET_SENTINEL) {
		/* Absent: append an entry and link it where the probe ended,
		 * either after the chain tail or as the bucket head. */
		index = ptr_set__add_entry(s, ptr);
		if (fr.entry_prev != PTR_SET_SENTINEL) {
			s->entries.data[fr.entry_prev].next = index;
		} else {
			s->hashes.data[fr.hash_index] = index;
		}
	}
	if (ptr_set__full(s)) {
		ptr_set_grow(s);
	}
	return ptr;
}
/* Like ptr_set_add, but reports whether 'ptr' was already present. */
template <typename T>
bool ptr_set_update(PtrSet<T> *s, T ptr) { // returns true if it previously existed
	bool exists = false;
	PtrSetIndex index;
	PtrSetFindResult fr;
	if (s->hashes.count == 0) {
		ptr_set_grow(s); /* lazy first allocation */
	}
	fr = ptr_set__find(s, ptr);
	if (fr.entry_index != PTR_SET_SENTINEL) {
		exists = true;
	} else {
		/* Absent: append an entry and link it where the probe ended. */
		index = ptr_set__add_entry(s, ptr);
		if (fr.entry_prev != PTR_SET_SENTINEL) {
			s->entries.data[fr.entry_prev].next = index;
		} else {
			s->hashes.data[fr.hash_index] = index;
		}
	}
	if (ptr_set__full(s)) {
		ptr_set_grow(s);
	}
	return exists;
}
/* Unlinks the entry described by 'fr' from its bucket chain, then keeps
 * the entry array dense by moving the tail entry into the freed slot
 * (swap-remove) and redirecting the link that pointed at the tail.
 * Fix: the moved-from tail slot is now popped; previously entries.count
 * never shrank on this path, leaking one stale slot per removal and
 * inflating the ptr_set__full load factor. */
template <typename T>
void ptr_set__erase(PtrSet<T> *s, PtrSetFindResult fr) {
	PtrSetFindResult last;
	/* Unlink from the bucket chain. */
	if (fr.entry_prev == PTR_SET_SENTINEL) {
		s->hashes.data[fr.hash_index] = s->entries.data[fr.entry_index].next;
	} else {
		s->entries.data[fr.entry_prev].next = s->entries.data[fr.entry_index].next;
	}
	if (cast(isize)fr.entry_index == s->entries.count-1) {
		/* Erasing the tail leaves no hole. */
		array_pop(&s->entries);
		return;
	}
	/* Move the tail entry into the hole... */
	s->entries.data[fr.entry_index] = s->entries.data[s->entries.count-1];
	/* ...redirect whatever linked to its old (tail) position... */
	last = ptr_set__find(s, s->entries.data[fr.entry_index].ptr);
	if (last.entry_prev != PTR_SET_SENTINEL) {
		s->entries.data[last.entry_prev].next = fr.entry_index;
	} else {
		s->hashes.data[last.hash_index] = fr.entry_index;
	}
	/* ...and drop the now-duplicated tail slot. */
	array_pop(&s->entries);
}
/* Removes 'ptr' from the set; a no-op when it is not present. */
template <typename T>
void ptr_set_remove(PtrSet<T> *s, T ptr) {
	PtrSetFindResult found = ptr_set__find(s, ptr);
	if (found.entry_index == PTR_SET_SENTINEL) {
		return;
	}
	ptr_set__erase(s, found);
}
/* Empties the set while keeping the allocated capacity for reuse. */
template <typename T>
gb_inline void ptr_set_clear(PtrSet<T> *s) {
	array_clear(&s->entries);
	array_clear(&s->hashes);
}
| 2,592 |
562 | <filename>recipes/icu/all/test_package/test_package.cpp
// This is taken from ICU samples: source/samples/ustring
/*
*******************************************************************************
*
* © 2016 and later: Unicode, Inc. and others.
* License & terms of use: http://www.unicode.org/copyright.html#License
*
*******************************************************************************
*******************************************************************************
*
* Copyright (C) 2000-2014, International Business Machines
* Corporation and others. All Rights Reserved.
*
*******************************************************************************
* file name: ustring.c
* encoding: UTF-8
* tab size: 8 (not used)
* indentation:4
*
* created on: 2000aug15
* created by: <NAME>
*
* This file contains sample code that illustrates the use of Unicode strings
* with ICU.
*/
#include <stdio.h>
#define U_DEFINE_FALSE_AND_TRUE 1
#include "unicode/utypes.h"
#include "unicode/uchar.h"
#include "unicode/locid.h"
#include "unicode/ustring.h"
#include "unicode/ucnv.h"
#include "unicode/unistr.h"
#ifndef UPRV_LENGTHOF
/* Number of elements in a C array, as int32_t. */
#define UPRV_LENGTHOF(array) (int32_t)(sizeof(array)/sizeof((array)[0]))
#endif

// helper functions -------------------------------------------------------- ***

// default converter for the platform encoding
// (opened elsewhere in this sample; used by printUString below)
static UConverter *cnv=NULL;

using namespace icu;
/*
 * Print a UTF-16 string: first converted to the platform encoding via the
 * shared converter 'cnv', then as the list of its code points in hex.
 * 'length' may be -1 for NUL-terminated strings.
 */
static void
printUString(const char *announce, const UChar *s, int32_t length) {
    static char out[200];
    UChar32 c;
    int32_t i;
    UErrorCode errorCode=U_ZERO_ERROR;

    /*
     * Convert to the "platform encoding". See notes in printUnicodeString().
     * ucnv_fromUChars(), like most ICU APIs understands length==-1
     * to mean that the string is NUL-terminated.
     */
    ucnv_fromUChars(cnv, out, sizeof(out), s, length, &errorCode);
    if(U_FAILURE(errorCode) || errorCode==U_STRING_NOT_TERMINATED_WARNING) {
        printf("%sproblem converting string from Unicode: %s\n", announce, u_errorName(errorCode));
        return;
    }

    printf("%s%s {", announce, out);

    /* output the code points (not code units) */
    if(length>=0) {
        /* s is not NUL-terminated */
        for(i=0; i<length; /* U16_NEXT post-increments */) {
            U16_NEXT(s, i, length, c);
            printf(" %04x", c);
        }
    } else {
        /* s is NUL-terminated */
        for(i=0; /* condition in loop body */; /* U16_NEXT post-increments */) {
            U16_NEXT(s, i, length, c);
            if(c==0) {
                break;
            }
            printf(" %04x", c);
        }
    }
    printf(" }\n");
}
static void
printUnicodeString(const char *announce, const UnicodeString &s) {
static char out[200];
int32_t i, length;
// output the string, converted to the platform encoding
// Note for Windows: The "platform encoding" defaults to the "ANSI codepage",
// which is different from the "OEM codepage" in the console window.
// However, if you pipe the output into a file and look at it with Notepad
// or similar, then "ANSI" characters will show correctly.
// Production code should be aware of what encoding is required,
// and use a UConverter or at least a charset name explicitly.
out[s.extract(0, 99, out)]=0;
printf("%s%s {", announce, out);
// output the code units (not code points)
length=s.length();
for(i=0; i<length; ++i) {
printf(" %04x", s.charAt(i));
}
printf(" }\n");
}
// sample code for utf.h macros -------------------------------------------- ***
/*
 * Demonstrate the utf.h U16_* macros: forward iteration with U16_NEXT,
 * in-place writing with U16_APPEND, and backward iteration with U16_PREV
 * over a buffer mixing BMP code points and surrogate pairs.
 */
static void
demo_utf_h_macros() {
    static UChar input[]={ 0x0061, 0xd800, 0xdc00, 0xdbff, 0xdfff, 0x0062 };
    UChar32 c;
    int32_t i;
    UBool isError;

    printf("\n* demo_utf_h_macros() -------------- ***\n\n");

    printUString("iterate forward through: ", input, UPRV_LENGTHOF(input));
    for(i=0; i<UPRV_LENGTHOF(input); /* U16_NEXT post-increments */) {
        /* Iterating forwards
           Codepoint at offset 0: U+0061
           Codepoint at offset 1: U+10000
           Codepoint at offset 3: U+10ffff
           Codepoint at offset 5: U+0062
        */
        printf("Codepoint at offset %d: U+", i);
        U16_NEXT(input, i, UPRV_LENGTHOF(input), c);
        printf("%04x\n", c);
    }

    puts("");

    /* Overwrite the lead surrogate at offset 1 with U+0062, orphaning the
     * trail surrogate that follows (visible in the backward output below). */
    isError=FALSE;
    i=1; /* write position, gets post-incremented so needs to be in an l-value */
    U16_APPEND(input, i, UPRV_LENGTHOF(input), 0x0062, isError);

    printUString("iterate backward through: ", input, UPRV_LENGTHOF(input));
    for(i=UPRV_LENGTHOF(input); i>0; /* U16_PREV pre-decrements */) {
        U16_PREV(input, 0, i, c);
        /* Iterating backwards
           Codepoint at offset 5: U+0062
           Codepoint at offset 3: U+10ffff
           Codepoint at offset 2: U+dc00 -- unpaired surrogate because lead surr. overwritten
           Codepoint at offset 1: U+0062 -- by this BMP code point
           Codepoint at offset 0: U+0061
        */
        printf("Codepoint at offset %d: U+%04x\n", i, c);
    }
}
// sample code for Unicode strings in C ------------------------------------ ***
/*
 * Demonstrate the ANSI-C-style u_str* functions: concatenation, length,
 * bitwise comparison, and case-insensitive comparison with full folding.
 */
static void demo_C_Unicode_strings() {
    printf("\n* demo_C_Unicode_strings() --------- ***\n\n");

    static const UChar text[]={ 0x41, 0x42, 0x43, 0 };         /* "ABC" */
    static const UChar appendText[]={ 0x61, 0x62, 0x63, 0 };   /* "abc" */
    static const UChar cmpText[]={ 0x61, 0x53, 0x73, 0x43, 0 };/* "aSsC" */
    UChar buffer[32];
    int32_t compare;
    int32_t length=u_strlen(text); /* length=3 */

    /* simple ANSI C-style functions */
    buffer[0]=0;                  /* empty, NUL-terminated string */
    u_strncat(buffer, text, 1);   /* append just n=1 character ('A') */
    u_strcat(buffer, appendText); /* buffer=="Aabc" */
    length=u_strlen(buffer);      /* length=4 */
    printUString("should be \"Aabc\": ", buffer, -1);

    /* bitwise comparing buffer with text; 'A' < 'a' so buffer sorts higher */
    compare=u_strcmp(buffer, text);
    if(compare<=0) {
        printf("String comparison error, expected \"Aabc\" > \"ABC\"\n");
    }

    /* Build "A<sharp s>C" in the buffer... */
    u_strcpy(buffer, text);
    buffer[1]=0xdf; /* sharp s, case-compares equal to "ss" */
    printUString("should be \"A<sharp s>C\": ", buffer, -1);

    /* Compare two strings case-insensitively using full case folding */
    compare=u_strcasecmp(buffer, cmpText, U_FOLD_CASE_DEFAULT);
    if(compare!=0) {
        printf("String case insensitive comparison error, expected \"AbC\" to be equal to \"ABC\"\n");
    }
}
// sample code for case mappings with C APIs -------------------------------- ***
static void demoCaseMapInC() {
/*
* input=
* "aB<capital sigma>"
* "iI<small dotless i><capital dotted I> "
* "<sharp s> <small lig. ffi>"
* "<small final sigma><small sigma><capital sigma>"
*/
static const UChar input[]={
0x61, 0x42, 0x3a3,
0x69, 0x49, 0x131, 0x130, 0x20,
0xdf, 0x20, 0xfb03,
0x3c2, 0x3c3, 0x3a3, 0
};
UChar buffer[32];
UErrorCode errorCode;
UChar32 c;
int32_t i, j, length;
UBool isError;
printf("\n* demoCaseMapInC() ----------------- ***\n\n");
/*
* First, use simple case mapping functions which provide
* 1:1 code point mappings without context/locale ID.
*
* Note that some mappings will not be "right" because some "real"
* case mappings require context, depend on the locale ID,
* and/or result in a change in the number of code points.
*/
printUString("input string: ", input, -1);
/* uppercase */
isError=FALSE;
for(i=j=0; j<UPRV_LENGTHOF(buffer) && !isError; /* U16_NEXT post-increments */) {
U16_NEXT(input, i, INT32_MAX, c); /* without length because NUL-terminated */
if(c==0) {
break; /* stop at terminating NUL, no need to terminate buffer */
}
c=u_toupper(c);
U16_APPEND(buffer, j, UPRV_LENGTHOF(buffer), c, isError);
}
printUString("simple-uppercased: ", buffer, j);
/* lowercase */
isError=FALSE;
for(i=j=0; j<UPRV_LENGTHOF(buffer) && !isError; /* U16_NEXT post-increments */) {
U16_NEXT(input, i, INT32_MAX, c); /* without length because NUL-terminated */
if(c==0) {
break; /* stop at terminating NUL, no need to terminate buffer */
}
c=u_tolower(c);
U16_APPEND(buffer, j, UPRV_LENGTHOF(buffer), c, isError);
}
printUString("simple-lowercased: ", buffer, j);
/* titlecase */
isError=FALSE;
for(i=j=0; j<UPRV_LENGTHOF(buffer) && !isError; /* U16_NEXT post-increments */) {
U16_NEXT(input, i, INT32_MAX, c); /* without length because NUL-terminated */
if(c==0) {
break; /* stop at terminating NUL, no need to terminate buffer */
}
c=u_totitle(c);
U16_APPEND(buffer, j, UPRV_LENGTHOF(buffer), c, isError);
}
printUString("simple-titlecased: ", buffer, j);
/* case-fold/default */
isError=FALSE;
for(i=j=0; j<UPRV_LENGTHOF(buffer) && !isError; /* U16_NEXT post-increments */) {
U16_NEXT(input, i, INT32_MAX, c); /* without length because NUL-terminated */
if(c==0) {
break; /* stop at terminating NUL, no need to terminate buffer */
}
c=u_foldCase(c, U_FOLD_CASE_DEFAULT);
U16_APPEND(buffer, j, UPRV_LENGTHOF(buffer), c, isError);
}
printUString("simple-case-folded/default: ", buffer, j);
/* case-fold/Turkic */
isError=FALSE;
for(i=j=0; j<UPRV_LENGTHOF(buffer) && !isError; /* U16_NEXT post-increments */) {
U16_NEXT(input, i, INT32_MAX, c); /* without length because NUL-terminated */
if(c==0) {
break; /* stop at terminating NUL, no need to terminate buffer */
}
c=u_foldCase(c, U_FOLD_CASE_EXCLUDE_SPECIAL_I);
U16_APPEND(buffer, j, UPRV_LENGTHOF(buffer), c, isError);
}
printUString("simple-case-folded/Turkic: ", buffer, j);
/*
* Second, use full case mapping functions which provide
* 1:n code point mappings (n can be 0!) and are sensitive to context and locale ID.
*
* Note that lower/upper/titlecasing take a locale ID while case-folding
* has bit flag options instead, by design of the Unicode SpecialCasing.txt UCD file.
*
* Also, string titlecasing requires a BreakIterator to find starts of words.
* The sample code here passes in a NULL pointer; u_strToTitle() will open and close a default
* titlecasing BreakIterator automatically.
* For production code where many strings are titlecased it would be more efficient
* to open a BreakIterator externally and pass it in.
*/
printUString("\ninput string: ", input, -1);
/* lowercase/English */
errorCode=U_ZERO_ERROR;
length=u_strToLower(buffer, UPRV_LENGTHOF(buffer), input, -1, "en", &errorCode);
if(U_SUCCESS(errorCode)) {
printUString("full-lowercased/en: ", buffer, length);
} else {
printf("error in u_strToLower(en)=%ld error=%s\n", length, u_errorName(errorCode));
}
/* lowercase/Turkish */
errorCode=U_ZERO_ERROR;
length=u_strToLower(buffer, UPRV_LENGTHOF(buffer), input, -1, "tr", &errorCode);
if(U_SUCCESS(errorCode)) {
printUString("full-lowercased/tr: ", buffer, length);
} else {
printf("error in u_strToLower(tr)=%ld error=%s\n", length, u_errorName(errorCode));
}
/* uppercase/English */
errorCode=U_ZERO_ERROR;
length=u_strToUpper(buffer, UPRV_LENGTHOF(buffer), input, -1, "en", &errorCode);
if(U_SUCCESS(errorCode)) {
printUString("full-uppercased/en: ", buffer, length);
} else {
printf("error in u_strToUpper(en)=%ld error=%s\n", length, u_errorName(errorCode));
}
/* uppercase/Turkish */
errorCode=U_ZERO_ERROR;
length=u_strToUpper(buffer, UPRV_LENGTHOF(buffer), input, -1, "tr", &errorCode);
if(U_SUCCESS(errorCode)) {
printUString("full-uppercased/tr: ", buffer, length);
} else {
printf("error in u_strToUpper(tr)=%ld error=%s\n", length, u_errorName(errorCode));
}
/* titlecase/English */
errorCode=U_ZERO_ERROR;
length=u_strToTitle(buffer, UPRV_LENGTHOF(buffer), input, -1, NULL, "en", &errorCode);
if(U_SUCCESS(errorCode)) {
printUString("full-titlecased/en: ", buffer, length);
} else {
printf("error in u_strToTitle(en)=%ld error=%s\n", length, u_errorName(errorCode));
}
/* titlecase/Turkish */
errorCode=U_ZERO_ERROR;
length=u_strToTitle(buffer, UPRV_LENGTHOF(buffer), input, -1, NULL, "tr", &errorCode);
if(U_SUCCESS(errorCode)) {
printUString("full-titlecased/tr: ", buffer, length);
} else {
printf("error in u_strToTitle(tr)=%ld error=%s\n", length, u_errorName(errorCode));
}
/* case-fold/default */
errorCode=U_ZERO_ERROR;
length=u_strFoldCase(buffer, UPRV_LENGTHOF(buffer), input, -1, U_FOLD_CASE_DEFAULT, &errorCode);
if(U_SUCCESS(errorCode)) {
printUString("full-case-folded/default: ", buffer, length);
} else {
printf("error in u_strFoldCase(default)=%ld error=%s\n", length, u_errorName(errorCode));
}
/* case-fold/Turkic */
errorCode=U_ZERO_ERROR;
length=u_strFoldCase(buffer, UPRV_LENGTHOF(buffer), input, -1, U_FOLD_CASE_EXCLUDE_SPECIAL_I, &errorCode);
if(U_SUCCESS(errorCode)) {
printUString("full-case-folded/Turkic: ", buffer, length);
} else {
printf("error in u_strFoldCase(Turkic)=%ld error=%s\n", length, u_errorName(errorCode));
}
}
// sample code for case mappings with C++ APIs ------------------------------ ***
static void demoCaseMapInCPlusPlus() {
/*
* input=
* "aB<capital sigma>"
* "iI<small dotless i><capital dotted I> "
* "<sharp s> <small lig. ffi>"
* "<small final sigma><small sigma><capital sigma>"
*/
static const UChar input[]={
0x61, 0x42, 0x3a3,
0x69, 0x49, 0x131, 0x130, 0x20,
0xdf, 0x20, 0xfb03,
0x3c2, 0x3c3, 0x3a3, 0
};
printf("\n* demoCaseMapInCPlusPlus() --------- ***\n\n");
UnicodeString s(input), t;
const Locale &en=Locale::getEnglish();
Locale tr("tr");
/*
* Full case mappings as in demoCaseMapInC(), using UnicodeString functions.
* These functions modify the string object itself.
* Since we want to keep the input string around, we copy it each time
* and case-map the copy.
*/
printUnicodeString("input string: ", s);
/* lowercase/English */
printUnicodeString("full-lowercased/en: ", (t=s).toLower(en));
/* lowercase/Turkish */
printUnicodeString("full-lowercased/tr: ", (t=s).toLower(tr));
/* uppercase/English */
printUnicodeString("full-uppercased/en: ", (t=s).toUpper(en));
/* uppercase/Turkish */
printUnicodeString("full-uppercased/tr: ", (t=s).toUpper(tr));
/* titlecase/English */
printUnicodeString("full-titlecased/en: ", (t=s).toTitle(NULL, en));
/* titlecase/Turkish */
printUnicodeString("full-titlecased/tr: ", (t=s).toTitle(NULL, tr));
/* case-folde/default */
printUnicodeString("full-case-folded/default: ", (t=s).foldCase(U_FOLD_CASE_DEFAULT));
/* case-folde/Turkic */
printUnicodeString("full-case-folded/Turkic: ", (t=s).foldCase(U_FOLD_CASE_EXCLUDE_SPECIAL_I));
}
// sample code for UnicodeString storage models ----------------------------- ***
// Backing buffers shared by the UnicodeString storage-model demos below.
// Immutable sample characters: "a1<euro sign>".
static const UChar readonly[]={
    0x61, 0x31, 0x20ac
};
// Mutable sample characters: "b2" plus one supplementary code point.
static UChar writeable[]={
    0x62, 0x32, 0xdbc0, 0xdc01 // includes a surrogate pair for a supplementary code point
};
// Scratch char buffer — presumably for converted output elsewhere in this
// sample file; not referenced by the demos visible here. TODO confirm usage.
static char out[100];
// Demonstrates UnicodeString's storage models: the small internal (stack)
// buffer, heap-allocated reference-counted storage with copy-on-write,
// readonly aliases to const UChar arrays, and writeable aliases to
// caller-owned arrays. All observations are printed to stdout.
static void
demoUnicodeStringStorage() {
    // These sample code lines illustrate how to use UnicodeString, and the
    // comments tell what happens internally. There are no APIs to observe
    // most of this programmatically, except for stepping into the code
    // with a debugger.
    // This is by design to hide such details from the user.
    int32_t i;
    printf("\n* demoUnicodeStringStorage() ------- ***\n\n");
    // * UnicodeString with internally stored contents
    // instantiate a UnicodeString from a single code point
    // the few (2) UChars will be stored in the object itself
    UnicodeString one((UChar32)0x24001);
    // this copies the few UChars into the "two" object
    UnicodeString two=one;
    printf("length of short string copy: %d\n", two.length());
    // set "one" to contain the 3 UChars from readonly
    // this setTo() variant copies the characters
    one.setTo(readonly, UPRV_LENGTHOF(readonly));
    // * UnicodeString with allocated contents
    // build a longer string that will not fit into the object's buffer
    one+=UnicodeString(writeable, UPRV_LENGTHOF(writeable));
    one+=one;
    one+=one;
    printf("length of longer string: %d\n", one.length());
    // copying will use the same allocated buffer and increment the reference
    // counter
    two=one;
    printf("length of longer string copy: %d\n", two.length());
    // * UnicodeString using readonly-alias to a const UChar array
    // construct a string that aliases a readonly buffer
    UnicodeString three(FALSE, readonly, UPRV_LENGTHOF(readonly));
    printUnicodeString("readonly-alias string: ", three);
    // copy-on-write: any modification to the string results in
    // a copy to either the internal buffer or to a newly allocated one
    three.setCharAt(1, 0x39);
    printUnicodeString("readonly-aliasing string after modification: ", three);
    // the aliased array is not modified
    for(i=0; i<three.length(); ++i) {
        printf("readonly buffer[%d] after modifying its string: 0x%lx\n",
               i, readonly[i]);
    }
    // setTo() readonly alias
    one.setTo(FALSE, writeable, UPRV_LENGTHOF(writeable));
    // copying the readonly-alias object with fastCopyFrom() (new in ICU 2.4)
    // will readonly-alias the same buffer
    two.fastCopyFrom(one);
    printUnicodeString("fastCopyFrom(readonly alias of \"writeable\" array): ", two);
    printf("verify that a fastCopyFrom(readonly alias) uses the same buffer pointer: %d (should be 1)\n",
        one.getBuffer()==two.getBuffer());
    // a normal assignment will clone the contents (new in ICU 2.4)
    two=one;
    printf("verify that a regular copy of a readonly alias uses a different buffer pointer: %d (should be 0)\n",
        one.getBuffer()==two.getBuffer());
    // * UnicodeString using writeable-alias to a non-const UChar array
    UnicodeString four(writeable, UPRV_LENGTHOF(writeable), UPRV_LENGTHOF(writeable));
    printUnicodeString("writeable-alias string: ", four);
    // a modification writes through to the buffer
    four.setCharAt(1, 0x39);
    for(i=0; i<four.length(); ++i) {
        printf("writeable-alias backing buffer[%d]=0x%lx "
               "after modification\n", i, writeable[i]);
    }
    // a copy will not alias any more;
    // instead, it will get a copy of the contents into allocated memory
    two=four;
    two.setCharAt(1, 0x21);
    for(i=0; i<two.length(); ++i) {
        printf("writeable-alias backing buffer[%d]=0x%lx after "
               "modification of string copy\n", i, writeable[i]);
    }
    // setTo() writeable alias, capacity==length
    one.setTo(writeable, UPRV_LENGTHOF(writeable), UPRV_LENGTHOF(writeable));
    // grow the string - it will not fit into the backing buffer any more
    // and will get copied before modification
    one.append((UChar)0x40);
    // shrink it back so it would fit
    one.truncate(one.length()-1);
    // we still operate on the copy
    one.setCharAt(1, 0x25);
    printf("string after growing too much and then shrinking[1]=0x%lx\n"
           "    backing store for this[1]=0x%lx\n",
           one.charAt(1), writeable[1]);
    // if we need it in the original buffer, then extract() to it
    // extract() does not do anything if the string aliases that same buffer
    // i=min(one.length(), length of array)
    if(one.length()<UPRV_LENGTHOF(writeable)) {
        i=one.length();
    } else {
        i=UPRV_LENGTHOF(writeable);
    }
    one.extract(0, i, writeable);
    for(i=0; i<UPRV_LENGTHOF(writeable); ++i) {
        printf("writeable-alias backing buffer[%d]=0x%lx after re-extract\n",
               i, writeable[i]);
    }
}
// sample code for UnicodeString instantiations ----------------------------- ***
// Demonstrates ways to initialize Unicode strings from C string literals:
// the UNICODE_STRING macro (C++), the U_STRING_DECL/U_STRING_INIT macro pair
// (C), invariant-character conversion, and unescaping of \uXXXX sequences.
static void
demoUnicodeStringInit() {
    // *** Make sure to read about invariant characters in utypes.h! ***
    // Initialization of Unicode strings from C literals works _only_ for
    // invariant characters!
    printf("\n* demoUnicodeStringInit() ---------- ***\n\n");
    // the string literal is 32 chars long - this must be counted for the macro
    UnicodeString invariantOnly=UNICODE_STRING("such characters are safe 123 %-.", 32);
    /*
     * In C, we need two macros: one to declare the UChar[] array, and
     * one to populate it; the second one is a noop on platforms where
     * wchar_t is compatible with UChar and ASCII-based.
     * The length of the string literal must be counted for both macros.
     */
    /* declare the invString array for the string */
    U_STRING_DECL(invString, "such characters are safe 123 %-.", 32);
    /* populate it with the characters */
    U_STRING_INIT(invString, "such characters are safe 123 %-.", 32);
    // compare the C and C++ strings
    printf("C and C++ Unicode strings are equal: %d\n", invariantOnly==UnicodeString(TRUE, invString, 32));
    /*
     * convert between char * and UChar * strings that
     * contain only invariant characters
     */
    static const char *cs1="such characters are safe 123 %-.";
    static UChar us1[40];
    static char cs2[40];
    u_charsToUChars(cs1, us1, 33); /* include the terminating NUL */
    u_UCharsToChars(us1, cs2, 33);
    printf("char * -> UChar * -> char * with only "
           "invariant characters: \"%s\"\n",
           cs2);
    // initialize a UnicodeString from a string literal that contains
    // escape sequences written with invariant characters
    // do not forget to duplicate the backslashes for ICU to see them
    // then, count each double backslash only once!
    UnicodeString german=UNICODE_STRING(
        "Sch\\u00f6nes Auto: \\u20ac 11240.\\fPrivates Zeichen: \\U00102345\\n", 64).
        unescape();
    printUnicodeString("german UnicodeString from unescaping:\n    ", german);
    /*
     * C: convert and unescape a char * string with only invariant
     * characters to fill a UChar * string
     */
    UChar buffer[200];
    int32_t length;
    length=u_unescape(
        "Sch\\u00f6nes Auto: \\u20ac 11240.\\fPrivates Zeichen: \\U00102345\\n",
        buffer, UPRV_LENGTHOF(buffer));
    printf("german C Unicode string from char * unescaping: (length %d)\n    ", length);
    printUnicodeString("", UnicodeString(buffer));
}
// Entry point: opens the default converter (used by the printing helpers),
// installs an escape callback so unmappable characters are printed as C-style
// escapes, then runs every demo in sequence. Returns 0 on success, or the
// nonzero UErrorCode value as the process exit code if converter setup fails.
extern int
main(int argc, const char *argv[]) {
    UErrorCode errorCode=U_ZERO_ERROR;
    // Note: Using a global variable for any object is not exactly thread-safe...
    // You can change this call to e.g. ucnv_open("UTF-8", &errorCode) if you pipe
    // the output to a file and look at it with a Unicode-capable editor.
    // This will currently affect only the printUString() function, see the code above.
    // printUnicodeString() could use this, too, by changing to an extract() overload
    // that takes a UConverter argument.
    cnv=ucnv_open(NULL, &errorCode);
    if(U_FAILURE(errorCode)) {
        fprintf(stderr, "error %s opening the default converter\n", u_errorName(errorCode));
        return errorCode;
    }
    // Substitute C-style escapes (\uXXXX) for characters the converter cannot map.
    ucnv_setFromUCallBack(cnv, UCNV_FROM_U_CALLBACK_ESCAPE, UCNV_ESCAPE_C, NULL, NULL, &errorCode);
    if(U_FAILURE(errorCode)) {
        fprintf(stderr, "error %s setting the escape callback in the default converter\n", u_errorName(errorCode));
        ucnv_close(cnv);
        return errorCode;
    }
    // Run all demos; each one prints its own section header.
    demo_utf_h_macros();
    demo_C_Unicode_strings();
    demoCaseMapInC();
    demoCaseMapInCPlusPlus();
    demoUnicodeStringStorage();
    demoUnicodeStringInit();
    ucnv_close(cnv);
    return 0;
}
| 9,673 |
872 | <gh_stars>100-1000
"""
Follow up for H-Index: What if the citations array is sorted in ascending order? Could you optimize your algorithm?
"""
__author__ = 'Daniel'
class Solution(object):
    def hIndex(self, A):
        """
        Compute the h-index of a citation list sorted in ascending order.

        Binary-search for the smallest index s such that A[s] >= n - s;
        the suffix A[s:] then contains n - s papers each cited at least
        n - s times, so the h-index is n - s. Runs in O(log n).

        Handles the empty list (returns 0).

        :type A: List[int]
        :rtype: int
        """
        n = len(A)
        s = 0
        e = n
        while s < e:
            # Floor division: plain '/' yields a float under Python 3,
            # which would make A[m] raise TypeError.
            m = (s + e) // 2
            if A[m] >= n - m:
                # A[m] is cited enough: answer index is at m or to the left.
                e = m
            else:
                s = m + 1
        return n - s
if __name__ == "__main__":
    # Smoke test: the sorted citation list [0, 1, 3, 5, 6] has h-index 3.
    result = Solution().hIndex([0, 1, 3, 5, 6])
    assert result == 3
1,013 | /*!
@authors <NAME> (<EMAIL>)
@date 2014-2020
@copyright BSD-3-Clause
*/
#include <pyclustering/cluster/clique_block.hpp>
namespace pyclustering {

namespace clst {


clique_spatial_block::clique_spatial_block(const point & p_max_corner, const point & p_min_corner) :
    m_max_corner(p_max_corner),
    m_min_corner(p_min_corner)
{ }


/* A point lies inside the block iff, in every dimension, its coordinate
   falls within the closed interval [min_corner, max_corner]. */
bool clique_spatial_block::contains(const point & p_point) const {
    const std::size_t dimensions = p_point.size();
    for (std::size_t dim = 0; dim < dimensions; ++dim) {
        const auto & coordinate = p_point[dim];
        if (coordinate < m_min_corner[dim] || coordinate > m_max_corner[dim]) {
            return false;
        }
    }
    return true;
}


const point & clique_spatial_block::get_max_corner() const { return m_max_corner; }


void clique_spatial_block::move_max_corner(point && p_corner) { m_max_corner = std::move(p_corner); }


const point & clique_spatial_block::get_min_corner() const { return m_min_corner; }


void clique_spatial_block::move_min_corner(point && p_corner) { m_min_corner = std::move(p_corner); }


clique_block::clique_block(const clique_block_location & p_location, const clique_spatial_block & p_block) :
    m_logical_location(p_location),
    m_spatial_location(p_block),
    m_points(),
    m_visited(false)
{ }


clique_block::clique_block(clique_block_location && p_location, clique_spatial_block && p_block) :
    m_logical_location(std::move(p_location)),
    m_spatial_location(std::move(p_block)),
    m_points(),
    m_visited(false)
{ }


const clique_block_location & clique_block::get_logical_location() const { return m_logical_location; }


const clique_spatial_block & clique_block::get_spatial_block() const { return m_spatial_location; }


const clique_block::content & clique_block::get_points() const { return m_points; }


bool clique_block::is_visited() const { return m_visited; }


/* Marks the block as already processed by the clustering pass. */
void clique_block::touch() { m_visited = true; }


/* Claims every still-available point that falls inside this block's spatial
   bounds: the point index is recorded and the point is marked unavailable so
   no other block can capture it. */
void clique_block::capture_points(const dataset & p_data, std::vector<bool> & p_availability) {
    for (std::size_t index = 0; index < p_data.size(); ++index) {
        if (!p_availability[index]) { continue; }
        if (m_spatial_location.contains(p_data[index])) {
            m_points.push_back(index);
            p_availability[index] = false;
        }
    }
}


/* Collects the logical coordinates of all axis-aligned neighbors: for each
   dimension the +1 neighbor is appended first (if still inside the grid of
   size p_edge), then the -1 neighbor (if not at the lower border). */
void clique_block::get_location_neighbors(const std::size_t p_edge, std::vector<clique_block_location> & p_neighbors) const {
    for (std::size_t dim = 0; dim < m_logical_location.size(); ++dim) {
        if (m_logical_location[dim] + 1 < p_edge) {
            clique_block_location forward = m_logical_location;
            ++forward[dim];
            p_neighbors.push_back(forward);
        }

        if (m_logical_location[dim] != 0) {
            clique_block_location backward = m_logical_location;
            --backward[dim];
            p_neighbors.push_back(backward);
        }
    }
}


}

}
2,542 | <gh_stars>1000+
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
using namespace Transport;
// Default constructor: the "will send connection auth status" flag starts false.
ServerAuthHeader::ServerAuthHeader() : willSendConnectionAuthStatus_(false)
{
}
// Constructs the header with an explicit value for the auth-status flag.
ServerAuthHeader::ServerAuthHeader(bool willSendConnectionAuthStatus) : willSendConnectionAuthStatus_(willSendConnectionAuthStatus)
{
}
// Returns whether the server indicated it will send the connection auth status.
bool ServerAuthHeader::WillSendConnectionAuthStatus() const
{
    return willSendConnectionAuthStatus_;
}
// Writes a human-readable rendering of the header: the auth-status flag
// followed by the metadata (or "none" when no metadata is present).
void ServerAuthHeader::WriteTo(Common::TextWriter & w, Common::FormatOptions const &) const
{
    w << "willSendConnectionAuthStatus=" << willSendConnectionAuthStatus_;

    if (!metadata_)
    {
        w << ",metadata=none";
        return;
    }

    w << ",metadata=" << *metadata_;
}
| 278 |
1,444 |
package mage.server.game;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInput;
import java.util.UUID;
import java.util.zip.GZIPInputStream;
import mage.game.Game;
import mage.game.GameState;
import mage.game.GameStates;
import mage.server.Main;
import mage.util.CopierObjectInputStream;
import mage.utils.StreamUtils;
import org.apache.log4j.Logger;
/**
*
* @author <EMAIL>
*/
public class GameReplay {
    private static final Logger logger = Logger.getLogger(GameReplay.class);
    // The recorded sequence of states captured while the game was played.
    private final GameStates savedGame;
    // The deserialized game the recorded states belong to.
    private final Game game;
    // Replay cursor: next() reads at this index and advances; previous() steps back.
    private int stateIndex;
    public GameReplay(UUID gameId) {
        // NOTE(review): loadGame() returns null when the saved file is missing
        // or unreadable, in which case game.getGameStates() below throws a
        // NullPointerException — confirm callers only construct GameReplay for
        // games known to be saved, or consider failing with a clearer error.
        this.game = loadGame(gameId);
        this.savedGame = game.getGameStates();
    }
    // Resets the replay cursor to the first recorded state.
    public void start() {
        this.stateIndex = 0;
    }
    // Returns the next recorded state and advances the cursor,
    // or null when the replay is exhausted.
    public GameState next() {
        if (this.stateIndex < savedGame.getSize()) {
            return savedGame.get(stateIndex++);
        }
        return null;
    }
    // Steps the cursor back and returns that state,
    // or null when already at the beginning.
    public GameState previous() {
        if (this.stateIndex > 0) {
            return savedGame.get(--stateIndex);
        }
        return null;
    }
    public Game getGame() {
        return this.game;
    }
    // Deserializes the game from "saved/<gameId>.game": a GZIP-compressed Java
    // serialization stream containing the Game followed by its GameStates.
    // Returns null (after logging) if the file cannot be read or deserialized.
    private Game loadGame(UUID gameId) {
        InputStream file = null;
        InputStream buffer = null;
        InputStream gzip = null;
        ObjectInput input = null;
        try{
            file = new FileInputStream("saved/" + gameId.toString() + ".game");
            buffer = new BufferedInputStream(file);
            gzip = new GZIPInputStream(buffer);
            // CopierObjectInputStream resolves classes through Main.classLoader.
            input = new CopierObjectInputStream(Main.classLoader, gzip);
            Game loadGame = (Game) input.readObject();
            GameStates states = (GameStates) input.readObject();
            loadGame.loadGameStates(states);
            return loadGame;
        }
        catch(ClassNotFoundException ex) {
            logger.fatal("Cannot load game. Class not found.", ex);
        }
        catch(IOException ex) {
            logger.fatal("Cannot load game:" + gameId, ex);
        } finally {
            StreamUtils.closeQuietly(file);
            StreamUtils.closeQuietly(buffer);
            StreamUtils.closeQuietly(input);
            StreamUtils.closeQuietly(gzip);
        }
        return null;
    }
}
| 1,010 |
787 | // OJ: https://leetcode.com/problems/string-matching-in-an-array/
// Author: github.com/lzl124631x
// Time: O(N^2 * D)
// Space: O(1)
class Solution {
public:
    // Returns every word that occurs as a substring of some *other* word
    // in the input, preserving the input order (duplicates included).
    vector<string> stringMatching(vector<string>& words) {
        const int total = words.size();

        // True when words[idx] is contained in any word at a different index.
        auto containedInAnother = [&](int idx) {
            for (int other = 0; other < total; ++other) {
                if (other == idx) continue;
                if (words[other].find(words[idx]) != string::npos) return true;
            }
            return false;
        };

        vector<string> matches;
        for (int idx = 0; idx < total; ++idx) {
            if (containedInAnother(idx)) matches.push_back(words[idx]);
        }
        return matches;
    }
};
2,594 | /*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.tools.build.bundletool.device;
import static com.android.ddmlib.Log.LogLevel.ERROR;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import com.android.ddmlib.AdbInitOptions;
import com.android.ddmlib.AndroidDebugBridge;
import com.android.ddmlib.Log;
import com.android.ddmlib.Log.ILogOutput;
import com.android.ddmlib.Log.LogLevel;
import com.android.tools.build.bundletool.model.exceptions.CommandExecutionException;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import java.nio.file.Path;
import java.util.Arrays;
import javax.annotation.Nullable;
/**
* Ddmlib-backed implementation of the {@link AdbServer}.
*
* <p>This implementation doesn't support swapping the underlying ADB server. Instead it assumes the
* instance always uses the ADB under the path that was used for initialization.
*/
public class DdmlibAdbServer extends AdbServer {
  // Process-wide singleton; all callers share one ddmlib bridge.
  private static final DdmlibAdbServer instance = new DdmlibAdbServer();
  // The underlying ddmlib bridge; null until init() succeeds.
  @Nullable private AndroidDebugBridge adb;
  // Lifecycle: UNINITIALIZED -> INITIALIZED -> CLOSED (no re-open after close).
  @GuardedBy("this")
  private State state = State.UNINITIALIZED;
  // ADB binary path recorded on first successful init(); later init() calls
  // must pass the same path.
  private Path pathToAdb;
  private enum State {
    UNINITIALIZED,
    INITIALIZED,
    CLOSED
  };
  private DdmlibAdbServer() {}
  public static DdmlibAdbServer getInstance() {
    return instance;
  }
  /**
   * Initializes ADB server, optionally restarting it if it points at a different location.
   *
   * <p>Can be called multiple times.
   *
   * @param pathToAdb location of the ADB server to start.
   */
  @Override
  public synchronized void init(Path pathToAdb) {
    checkState(state != State.CLOSED, "Android Debug Bridge has been closed.");
    // Idempotent for the same path; a different path is a programming error.
    if (state.equals(State.INITIALIZED)) {
      checkState(
          pathToAdb.equals(this.pathToAdb),
          "Re-initializing DdmlibAdbServer with a different ADB path. Expected: '%s', got '%s'.",
          this.pathToAdb,
          pathToAdb);
      return;
    }
    AdbInitOptions.Builder builder =
        AdbInitOptions.builder().setClientSupportEnabled(false).useJdwpProxyService(false);
    // Honor an externally managed ADB server port if the environment sets one.
    String env = System.getenv("ANDROID_ADB_SERVER_PORT");
    if (env != null) {
      try {
        builder.enableUserManagedAdbMode(Integer.decode(env));
      } catch (NumberFormatException e) {
        throw CommandExecutionException.builder()
            .withCause(e)
            .withInternalMessage(
                "Failed to parse ANDROID_ADB_SERVER_PORT environment variable (%s)", env)
            .build();
      }
    }
    // Install the log filter before starting the bridge so noisy known-benign
    // ddmlib errors are suppressed from the very first connection.
    Log.addLogger(new FilteredLogOutput());
    AndroidDebugBridge.init(builder.build());
    this.adb = AndroidDebugBridge.createBridge(pathToAdb.toString(), /* forceNewBridge= */ false);
    if (adb == null) {
      throw CommandExecutionException.builder()
          .withInternalMessage("Failed to start ADB server.")
          .build();
    }
    this.pathToAdb = pathToAdb;
    this.state = State.INITIALIZED;
  }
  // Snapshot of devices currently known to the bridge; requires prior init().
  @Override
  public synchronized ImmutableList<Device> getDevicesInternal() {
    checkState(state.equals(State.INITIALIZED), "Android Debug Bridge is not initialized.");
    return Arrays.stream(adb.getDevices()).map(DdmlibDevice::new).collect(toImmutableList());
  }
  @Override
  public synchronized boolean hasInitialDeviceList() {
    checkState(state.equals(State.INITIALIZED), "Android Debug Bridge is not initialized.");
    return adb.hasInitialDeviceList();
  }
  // Terminates the bridge (if running) and permanently closes this server;
  // close() is safe to call in any state, and init() is rejected afterwards.
  @Override
  public synchronized void close() {
    if (state.equals(State.INITIALIZED)) {
      AndroidDebugBridge.terminate();
    }
    state = State.CLOSED;
  }
  // Log pass-through that drops a known-benign ddmlib error (see shouldIgnore).
  private static class FilteredLogOutput implements ILogOutput {
    @Override
    public void printLog(LogLevel logLevel, String tag, String message) {
      if (!shouldIgnore(logLevel, message)) {
        Log.printLog(logLevel, tag, message);
      }
    }
    @Override
    public void printAndPromptLog(LogLevel logLevel, String tag, String message) {
      if (!shouldIgnore(logLevel, message)) {
        Log.printLog(logLevel, tag, message);
      }
    }
    private boolean shouldIgnore(LogLevel logLevel, String message) {
      // Ignore ddmlib 'com.android.ddmlib.CommandFailedException: bad sub-command' log which
      // is shown every time on emulators with version < 30.0.18 but doesn't affect the
      // functionality.
      // This exception is thrown because 'adb emu avd path' command was added only in version
      // 30.0.18.
      return ERROR.equals(logLevel)
          && message.contains("bad sub-command")
          && message.contains("EmulatorConsole.getAvdPath");
    }
  }
}
| 1,853 |
Subsets and Splits