max_stars_count (int64, 301–224k) | text (string, lengths 6–1.05M) | token_count (int64, 3–727k)
---|---|---|
1,118 | <filename>src/traildb.h
#ifndef __TRAILDB_H__
#define __TRAILDB_H__
#include <stdlib.h>
#include <stdint.h>
#include "tdb_limits.h"
#include "tdb_types.h"
#include "tdb_error.h"
#define TDB_VERSION_V0 0LLU
#define TDB_VERSION_V0_1 1LLU
#define TDB_VERSION_LATEST TDB_VERSION_V0_1
/*
-----------------------
Construct a new TrailDB
-----------------------
*/
/* Init a new constructor handle */
tdb_cons *tdb_cons_init(void);
/* Open a new constructor with a schema */
tdb_error tdb_cons_open(tdb_cons *cons,
const char *root,
const char **ofield_names,
uint64_t num_ofields);
/* Close a constructor handle */
void tdb_cons_close(tdb_cons *cons);
/* Set constructor options */
tdb_error tdb_cons_set_opt(tdb_cons *cons,
tdb_opt_key key,
tdb_opt_value value);
/* Get constructor options */
tdb_error tdb_cons_get_opt(tdb_cons *cons,
tdb_opt_key key,
tdb_opt_value *value);
/* Add an event in the constructor */
tdb_error tdb_cons_add(tdb_cons *cons,
const uint8_t uuid[16],
const uint64_t timestamp,
const char **values,
const uint64_t *value_lengths);
/* Merge an existing TrailDB to this constructor */
tdb_error tdb_cons_append(tdb_cons *cons, const tdb *db);
/* Finalize a constructor */
tdb_error tdb_cons_finalize(tdb_cons *cons);
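/*
A minimal constructor lifecycle sketch (illustrative only; the output path,
field names, values and timestamp below are hypothetical and error checking
is omitted):

    tdb_cons *c = tdb_cons_init();
    const char *fields[] = {"username", "action"};
    tdb_cons_open(c, "example", fields, 2);

    uint8_t uuid[16] = {0};
    const char *values[] = {"alice", "login"};
    uint64_t lengths[] = {5, 5};
    tdb_cons_add(c, uuid, 1468374071, values, lengths);

    tdb_cons_finalize(c);
    tdb_cons_close(c);
*/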
/*
---------------------------------
Open TrailDBs and access metadata
---------------------------------
*/
/* Init a new TrailDB handle */
tdb *tdb_init(void);
/* Open a TrailDB */
tdb_error tdb_open(tdb *db, const char *root);
/* Close a TrailDB */
void tdb_close(tdb *db);
/* Inform the operating system that memory can be paged for this TrailDB */
void tdb_dontneed(const tdb *db);
/* Inform the operating system that this TrailDB will be needed soon */
void tdb_willneed(const tdb *db);
/* Get the number of trails */
uint64_t tdb_num_trails(const tdb *db);
/* Get the number of events */
uint64_t tdb_num_events(const tdb *db);
/* Get the number of fields */
uint64_t tdb_num_fields(const tdb *db);
/* Get the oldest timestamp */
uint64_t tdb_min_timestamp(const tdb *db);
/* Get the newest timestamp */
uint64_t tdb_max_timestamp(const tdb *db);
/* Get the version of this TrailDB */
uint64_t tdb_version(const tdb *db);
/* Translate an error code to a string */
const char *tdb_error_str(tdb_error errcode);
/* Set a top-level option */
tdb_error tdb_set_opt(tdb *db, tdb_opt_key key, tdb_opt_value value);
/* Get a top-level option */
tdb_error tdb_get_opt(tdb *db, tdb_opt_key key, tdb_opt_value *value);
/* Set a trail-level option */
tdb_error tdb_set_trail_opt(tdb *db,
uint64_t trail_id,
tdb_opt_key key,
tdb_opt_value value);
/* Get a trail-level option */
tdb_error tdb_get_trail_opt(tdb *db,
uint64_t trail_id,
tdb_opt_key key,
tdb_opt_value *value);
/*
----------------------------------
Translate items to values and back
----------------------------------
*/
/* Get the number of distinct values in the given field */
uint64_t tdb_lexicon_size(const tdb *db, tdb_field field);
/* Get the field ID given a field name */
tdb_error tdb_get_field(const tdb *db,
const char *field_name,
tdb_field *field);
/* Get the field name given a field ID */
const char *tdb_get_field_name(const tdb *db, tdb_field field);
/* Get item corresponding to a value */
tdb_item tdb_get_item(const tdb *db,
tdb_field field,
const char *value,
uint64_t value_length);
/* Get value corresponding to a field, value ID pair */
const char *tdb_get_value(const tdb *db,
tdb_field field,
tdb_val val,
uint64_t *value_length);
/* Get value given an item */
const char *tdb_get_item_value(const tdb *db,
tdb_item item,
uint64_t *value_length);
/*
------------
Handle UUIDs
------------
*/
/* Get UUID given a Trail ID */
const uint8_t *tdb_get_uuid(const tdb *db, uint64_t trail_id);
/* Get Trail ID given a UUID */
tdb_error tdb_get_trail_id(const tdb *db,
const uint8_t uuid[16],
uint64_t *trail_id);
/* Translate a hex-encoded UUID to a raw 16-byte UUID */
tdb_error tdb_uuid_raw(const uint8_t hexuuid[32], uint8_t uuid[16]);
/* Translate a raw 16-byte UUID to a hex-encoded UUID */
void tdb_uuid_hex(const uint8_t uuid[16], uint8_t hexuuid[32]);
/*
------------
Event filter
------------
*/
/* Create a new event filter */
struct tdb_event_filter *tdb_event_filter_new(void);
/* Create a new event filter that matches all events */
struct tdb_event_filter *tdb_event_filter_new_match_all(void);
/* Create a new event filter that matches nothing */
struct tdb_event_filter *tdb_event_filter_new_match_none(void);
/* Add a new term (item) in an OR-clause */
tdb_error tdb_event_filter_add_term(struct tdb_event_filter *filter,
tdb_item term,
int is_negative);
/* Add a timestamp range query (start_time <= timestamp < end_time) in an OR-clause */
tdb_error tdb_event_filter_add_time_range(struct tdb_event_filter *filter,
uint64_t start_time,
uint64_t end_time);
/* Add a new clause, connected by AND to the previous clauses */
tdb_error tdb_event_filter_new_clause(struct tdb_event_filter *filter);
/* Free an event filter */
void tdb_event_filter_free(struct tdb_event_filter *filter);
/* Get term type for a term in a clause */
tdb_error tdb_event_filter_get_term_type(const struct tdb_event_filter *filter,
uint64_t clause_index,
uint64_t term_index,
tdb_event_filter_term_type *term_type);
/* Get an item in a clause */
tdb_error tdb_event_filter_get_item(const struct tdb_event_filter *filter,
uint64_t clause_index,
uint64_t item_index,
tdb_item *item,
int *is_negative);
/* Get time-range term in a clause */
tdb_error tdb_event_filter_get_time_range(const struct tdb_event_filter *filter,
uint64_t clause_index,
uint64_t term_index,
uint64_t *start_time,
uint64_t *end_time);
/* Get the number of clauses in this filter */
uint64_t tdb_event_filter_num_clauses(const struct tdb_event_filter *filter);
/* Get the number of terms in a clause */
tdb_error tdb_event_filter_num_terms(const struct tdb_event_filter *filter,
uint64_t clause_index,
uint64_t *num_terms);
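/*
As the comments above describe, a filter is a conjunction of OR-clauses:
tdb_event_filter_add_term() adds a term to the current clause, and
tdb_event_filter_new_clause() starts a new clause ANDed with the previous
ones. A hedged sketch (the "action" field and "login" value are hypothetical,
error checking omitted):

    struct tdb_event_filter *f = tdb_event_filter_new();
    tdb_field action;
    tdb_get_field(db, "action", &action);
    tdb_event_filter_add_term(f, tdb_get_item(db, action, "login", 5), 0);
    tdb_event_filter_new_clause(f);
    tdb_event_filter_add_time_range(f, 1468300000, 1468400000);

which matches events where action == "login" AND
1468300000 <= timestamp < 1468400000. The filter can then be attached to a
cursor with tdb_cursor_set_event_filter().
*/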
/*
------------
Trail cursor
------------
*/
/* Create a new cursor */
tdb_cursor *tdb_cursor_new(const tdb *db);
/* Free a cursor */
void tdb_cursor_free(tdb_cursor *cursor);
/* Reset the cursor to the given Trail ID */
tdb_error tdb_get_trail(tdb_cursor *cursor, uint64_t trail_id);
/* Get the number of events remaining in this cursor */
uint64_t tdb_get_trail_length(tdb_cursor *cursor);
/* Set an event filter for this cursor */
tdb_error tdb_cursor_set_event_filter(tdb_cursor *cursor,
const struct tdb_event_filter *filter);
/* Unset an event filter */
void tdb_cursor_unset_event_filter(tdb_cursor *cursor);
/* Internal function used by tdb_cursor_next() */
int _tdb_cursor_next_batch(tdb_cursor *cursor);
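/*
Typical iteration sketch (illustrative; assumes db is an open tdb handle and
trail_id is valid, error handling omitted):

    tdb_cursor *cursor = tdb_cursor_new(db);
    tdb_get_trail(cursor, trail_id);
    const tdb_event *event;
    while ((event = tdb_cursor_next(cursor)))
        printf("timestamp %llu with %llu items\n",
               (unsigned long long)event->timestamp,
               (unsigned long long)event->num_items);
    tdb_cursor_free(cursor);
*/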
/*
------------
Multi cursor
------------
*/
/* Create a new multicursor */
tdb_multi_cursor *tdb_multi_cursor_new(tdb_cursor **cursors,
uint64_t num_cursors);
/*
Reset the multicursor to reflect the underlying status of individual
cursors. Call after tdb_get_trail() or tdb_cursor_next()
*/
void tdb_multi_cursor_reset(tdb_multi_cursor *mc);
/* Return next event in the timestamp order from the underlying cursors */
const tdb_multi_event *tdb_multi_cursor_next(tdb_multi_cursor *mcursor);
/*
Return a batch of maximum max_events in the timestamp order from the
underlying cursors
*/
uint64_t tdb_multi_cursor_next_batch(tdb_multi_cursor *mcursor,
tdb_multi_event *events,
uint64_t max_events);
/* Peek the next event in the cursor */
const tdb_multi_event *tdb_multi_cursor_peek(tdb_multi_cursor *mcursor);
/* Free multicursors */
void tdb_multi_cursor_free(tdb_multi_cursor *mcursor);
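/*
A hedged sketch of merging two already-positioned cursors in timestamp order
(cursor_a and cursor_b are hypothetical cursors on which tdb_get_trail() has
already been called):

    tdb_cursor *cursors[2] = {cursor_a, cursor_b};
    tdb_multi_cursor *mc = tdb_multi_cursor_new(cursors, 2);
    tdb_multi_cursor_reset(mc);
    const tdb_multi_event *mev;
    while ((mev = tdb_multi_cursor_next(mc))) {
        ...                (events arrive merged across both underlying cursors)
    }
    tdb_multi_cursor_free(mc);
*/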
/*
Return the next event from the cursor
tdb_cursor_next() is defined here so it can be inlined
the pragma is a workaround for older GCCs that have this issue:
https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54113
*/
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmissing-prototypes"
__attribute__((visibility("default"))) inline const tdb_event *tdb_cursor_next(tdb_cursor *cursor)
{
if (cursor->num_events_left > 0 || _tdb_cursor_next_batch(cursor)){
const tdb_event *e = (const tdb_event*)cursor->next_event;
cursor->next_event += sizeof(tdb_event) +
e->num_items * sizeof(tdb_item);
--cursor->num_events_left;
return e;
}else
return NULL;
}
/*
Peek the next event in the cursor
*/
__attribute__((visibility("default"))) inline const tdb_event *tdb_cursor_peek(tdb_cursor *cursor)
{
if (cursor->num_events_left > 0 || _tdb_cursor_next_batch(cursor)){
return (const tdb_event*)cursor->next_event;
}else
return NULL;
}
#pragma GCC diagnostic pop
#endif /* __TRAILDB_H__ */
| 4,726 |
2,498 | /*
* phase3.h
*
* Created on: May 30, 2021
* Author: mad
*/
#ifndef INCLUDE_CHIA_PHASE3_H_
#define INCLUDE_CHIA_PHASE3_H_
#include <chia/phase2.h>
namespace phase3 {
struct entry_kpp {
uintkx_t pos[2]; // 2x 32-bit position / 2x 35-bit
uintkx_t key; // 32-bit (sort_key) / 35 bit
};
struct entry_lp {
uintlp_t point; // 63-bit (line_point) / 67 bit
uintkx_t key; // 32-bit (sort_key) / 35 bit
#ifdef CHIA_K34
static constexpr size_t disk_size = 13;
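// Packed little-endian layout (an interpretation of the code below): bytes 0-8
// carry the line point, with byte 8's low nibble holding its top bits, while
// the sort key occupies byte 8's high nibble plus bytes 9-12.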
size_t read(const uint8_t* buf) {
memcpy(&point, buf, 9);
point &= (uint128_t(1) << 68) - 1; // 68 bit
key = 0;
memcpy(&key, buf + 8, 5);
key >>= 4; // 36 bit
return disk_size;
}
size_t write(uint8_t* buf) const {
memcpy(buf, &point, 9);
const auto tmp = (key << 4) | buf[8];
memcpy(buf + 8, &tmp, 5);
return disk_size;
}
#else
static constexpr size_t disk_size = 12;
size_t read(const uint8_t* buf) {
memcpy(&point, buf, 8);
memcpy(&key, buf + 8, 4);
return disk_size;
}
size_t write(uint8_t* buf) const {
memcpy(buf, &point, 8);
memcpy(buf + 8, &key, 4);
return disk_size;
}
#endif
};
struct entry_np {
uintkx_t key; // 32-bit (sort_key) / 35 bit
uintkx_t pos; // 32-bit (new_pos) / 35 bit
#ifdef CHIA_K34
static constexpr size_t disk_size = 9;
size_t read(const uint8_t* buf) {
memcpy(&key, buf, 5);
key &= 0xFFFFFFFFF; // 36 bit
pos = 0;
memcpy(&pos, buf + 4, 5);
pos >>= 4; // 36 bit
return disk_size;
}
size_t write(uint8_t* buf) const {
memcpy(buf, &key, 5);
const auto tmp = (pos << 4) | buf[4];
memcpy(buf + 4, &tmp, 5);
return disk_size;
}
#else
static constexpr size_t disk_size = 8;
size_t read(const uint8_t* buf) {
memcpy(&key, buf, 4);
memcpy(&pos, buf + 4, 4);
return disk_size;
}
size_t write(uint8_t* buf) const {
memcpy(buf, &key, 4);
memcpy(buf + 4, &pos, 4);
return disk_size;
}
#endif
};
template<typename T>
struct get_new_pos {
uint64_t operator()(const T& entry) {
return entry.pos;
}
};
template<>
struct get_new_pos<phase2::entry_1> {
uint64_t operator()(const phase2::entry_1& entry) {
return entry.x;
}
};
template<typename T>
struct get_sort_key {
uint64_t operator()(const T& entry) {
return entry.key;
}
};
template<>
struct get_sort_key<phase2::entry_7> {
uint64_t operator()(const phase2::entry_7& entry) {
return entry.y;
}
};
template<typename T>
struct get_line_point {
uint128_t operator()(const T& entry) {
return entry.point;
}
};
typedef DiskSort<entry_lp, get_line_point<entry_lp>> DiskSortLP;
typedef DiskSort<entry_np, get_sort_key<entry_np>> DiskSortNP;
struct output_t {
int header_size = 0;
uint64_t num_written_7 = 0;
uint64_t final_pointer_7 = 0;
phase1::input_t params;
std::string plot_file_name;
std::shared_ptr<DiskSortNP> sort_7;
};
} // phase3
#endif /* INCLUDE_CHIA_PHASE3_H_ */
| 1,333 |
926 | import unittest
import paddle
class StaticCase(unittest.TestCase):
def setUp(self):
# switch mode
paddle.enable_static()
| 56 |
1,781 | package com.marshalchen.common.demoofui.sampleModules;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.widget.ImageView;
import butterknife.ButterKnife;
import butterknife.InjectView;
import com.marshalchen.common.commonUtils.basicUtils.HandlerUtils;
import com.marshalchen.common.ui.HomeasUpActionbarActivity;
import com.marshalchen.common.uimodule.photoview.PhotoViewAttacher;
import com.marshalchen.common.demoofui.R;
/**
* Created by cym on 14-6-26.
*/
public class PhotoViewActivity extends HomeasUpActionbarActivity {
// @InjectView(R.id.photoView)
// PhotoView photoView;
@InjectView(R.id.photoViewImageView)
ImageView photoViewImageView;
PhotoViewAttacher photoViewAttacher;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.photo_view_activity);
ButterKnife.inject(this);
// Set the Drawable displayed
Drawable bitmap = getResources().getDrawable(R.drawable.test_back);
photoViewImageView.setImageDrawable(bitmap);
// Attach a PhotoViewAttacher, which takes care of all of the zooming functionality.
photoViewAttacher = new PhotoViewAttacher(photoViewImageView);
HandlerUtils.sendMessageHandlerDelay(changeImageHandler, 0, 2000);
}
Handler changeImageHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
photoViewImageView.setImageResource(R.drawable.test);
// If you later call mImageView.setImageDrawable/setImageBitmap/setImageResource/etc then you just need to call
photoViewAttacher.update();
}
};
}
| 665 |
777 | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/memory/memory_kills_monitor.h"
#include <errno.h>
#include <fcntl.h>
#include <inttypes.h>
#include <stdio.h>
#include <string>
#include <vector>
#include "base/bind.h"
#include "base/command_line.h"
#include "base/debug/leak_annotations.h"
#include "base/files/file_util.h"
#include "base/files/scoped_file.h"
#include "base/lazy_instance.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/posix/safe_strerror.h"
#include "base/sequenced_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_split.h"
#include "base/synchronization/atomic_flag.h"
#include "base/time/time.h"
#include "chrome/browser/memory/memory_kills_histogram.h"
#include "third_party/re2/src/re2/re2.h"
namespace memory {
using base::SequencedWorkerPool;
using base::TimeDelta;
namespace {
int64_t GetTimestamp(const std::string& line) {
std::vector<std::string> fields = base::SplitString(
line, ",", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
int64_t timestamp = -1;
// Timestamp is the third field in a line of /dev/kmsg.
if (fields.size() < 3 || !base::StringToInt64(fields[2], &timestamp))
return -1;
return timestamp;
}
void LogEvent(const base::Time& time_stamp, const std::string& event) {
VLOG(1) << time_stamp.ToJavaTime() << ", " << event;
}
void LogOOMKill(int64_t time_stamp, int oom_badness) {
static int64_t last_kill_time = -1;
static int oom_kills = 0;
// Ideally the timestamp should be parsed from /dev/kmsg, but the timestamp
// there is the elapsed time since system boot. So the timestamp |now| used
// here is a bit delayed.
base::Time now = base::Time::Now();
LogEvent(now, "OOM_KILL");
++oom_kills;
// Report the cumulative count of killed process in one login session.
// For example if there are 3 processes killed, it would report 1 for the
// first kill, 2 for the second kill, then 3 for the final kill.
// It doesn't report a final count at the end of a user session because
// the code runs in a dedicated thread and never ends until browser shutdown
// (or logout on Chrome OS). And on browser shutdown the thread may be
// terminated brutally so there's no chance to execute a "final" block.
// More specifically, code outside the main loop of MemoryKillsMonitor::Run()
// are not guaranteed to be executed.
UMA_HISTOGRAM_CUSTOM_COUNTS("Arc.OOMKills.Count", oom_kills, 1, 1000, 1001);
// In practice most process has oom_badness < 1000, but
// strictly speaking the number could be [1, 2000]. What it really
// means is the baseline, proportion of memory used (normalized to
// [0, 1000]), plus an adjustment score oom_score_adj [-1000, 1000],
// truncated to 1 if negative (0 means never kill).
// Ref: https://lwn.net/Articles/396552/
UMA_HISTOGRAM_CUSTOM_COUNTS("Arc.OOMKills.Score", oom_badness, 1, 2000, 2001);
if (time_stamp > 0) {
// Sets to |kMaxMemoryKillTimeDelta| for the first kill event.
const TimeDelta time_delta =
last_kill_time < 0 ? kMaxMemoryKillTimeDelta:
TimeDelta::FromMicroseconds(time_stamp - last_kill_time);
last_kill_time = time_stamp;
UMA_HISTOGRAM_MEMORY_KILL_TIME_INTERVAL(
"Arc.OOMKills.TimeDelta", time_delta);
}
}
} // namespace
MemoryKillsMonitor::Handle::Handle(MemoryKillsMonitor* outer) : outer_(outer) {
DCHECK(outer_);
}
MemoryKillsMonitor::Handle::Handle(MemoryKillsMonitor::Handle&& other)
: outer_(nullptr) {
outer_ = other.outer_;
other.outer_ = nullptr;
}
MemoryKillsMonitor::Handle::~Handle() {
if (outer_) {
VLOG(2) << "Chrome is shutting down" << outer_;
outer_->is_shutting_down_.Set();
}
}
MemoryKillsMonitor::MemoryKillsMonitor() {
base::SimpleThread::Options non_joinable_options;
non_joinable_options.joinable = false;
non_joinable_worker_thread_ = base::MakeUnique<base::DelegateSimpleThread>(
this, "memory_kills_monitor", non_joinable_options);
non_joinable_worker_thread_->Start();
}
MemoryKillsMonitor::~MemoryKillsMonitor() {
// The instance has to be leaked on shutdown as it is referred to by a
// non-joinable thread but ~MemoryKillsMonitor() can't be explicitly deleted
// as it overrides ~SimpleThread(), it should nevertheless never be invoked.
NOTREACHED();
}
// static
MemoryKillsMonitor::Handle MemoryKillsMonitor::StartMonitoring() {
#if DCHECK_IS_ON()
static volatile bool monitoring_active = false;
DCHECK(!monitoring_active);
monitoring_active = true;
#endif
// Instantiate the MemoryKillsMonitor and its underlying thread. The
// MemoryKillsMonitor itself has to be leaked on shutdown per having a
// non-joinable thread associated to its state. The MemoryKillsMonitor::Handle
// will notify the MemoryKillsMonitor when it is destroyed so that the
// underlying thread can at a minimum not do extra work during shutdown.
MemoryKillsMonitor* instance = new MemoryKillsMonitor();
ANNOTATE_LEAKING_OBJECT_PTR(instance);
return Handle(instance);
}
// static
void MemoryKillsMonitor::LogLowMemoryKill(
const std::string& type, int estimated_freed_kb) {
static base::Time last_kill_time;
static int low_memory_kills = 0;
base::Time now = base::Time::Now();
LogEvent(now, "LOW_MEMORY_KILL_" + type);
const TimeDelta time_delta =
last_kill_time.is_null() ?
kMaxMemoryKillTimeDelta :
(now - last_kill_time);
UMA_HISTOGRAM_MEMORY_KILL_TIME_INTERVAL(
"Arc.LowMemoryKiller.TimeDelta", time_delta);
last_kill_time = now;
++low_memory_kills;
UMA_HISTOGRAM_CUSTOM_COUNTS(
"Arc.LowMemoryKiller.Count", low_memory_kills, 1, 1000, 1001);
UMA_HISTOGRAM_MEMORY_KB("Arc.LowMemoryKiller.FreedSize",
estimated_freed_kb);
}
// static
void MemoryKillsMonitor::TryMatchOomKillLine(const std::string& line) {
// Sample OOM log line:
// 3,1362,97646497541,-;Out of memory: Kill process 29582 (android.vending)
// score 961 or sacrifice child.
int oom_badness;
TimeDelta time_delta;
if (RE2::PartialMatch(line,
"Out of memory: Kill process .* score (\\d+)",
&oom_badness)) {
int64_t time_stamp = GetTimestamp(line);
LogOOMKill(time_stamp, oom_badness);
}
}
void MemoryKillsMonitor::Run() {
VLOG(1) << "MemoryKillsMonitor started";
base::ScopedFILE kmsg_handle(
base::OpenFile(base::FilePath("/dev/kmsg"), "r"));
if (!kmsg_handle) {
LOG(WARNING) << "Open /dev/kmsg failed: " << base::safe_strerror(errno);
return;
}
// Skip kernel messages prior to the instantiation of this object to avoid
// double reporting.
fseek(kmsg_handle.get(), 0, SEEK_END);
static constexpr int kMaxBufSize = 512;
char buf[kMaxBufSize];
while (fgets(buf, kMaxBufSize, kmsg_handle.get())) {
if (is_shutting_down_.IsSet()) {
// Not guaranteed to execute when the process is shutting down,
// because the thread might be blocked in fgets().
VLOG(1) << "Chrome is shutting down, MemoryKillsMonitor exits.";
break;
}
TryMatchOomKillLine(buf);
}
}
} // namespace memory
| 2,630 |
372 | <filename>clients/google-api-services-cloudbilling/v1/1.30.1/com/google/api/services/cloudbilling/model/Category.java
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.cloudbilling.model;
/**
* Represents the category hierarchy of a SKU.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Billing API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Category extends com.google.api.client.json.GenericJson {
/**
* The type of product the SKU refers to. Example: "Compute", "Storage", "Network",
* "ApplicationServices" etc.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String resourceFamily;
/**
* A group classification for related SKUs. Example: "RAM", "GPU", "Prediction", "Ops",
* "GoogleEgress" etc.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String resourceGroup;
/**
* The display name of the service this SKU belongs to.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String serviceDisplayName;
/**
* Represents how the SKU is consumed. Example: "OnDemand", "Preemptible", "Commit1Mo",
* "Commit1Yr" etc.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String usageType;
/**
* The type of product the SKU refers to. Example: "Compute", "Storage", "Network",
* "ApplicationServices" etc.
* @return value or {@code null} for none
*/
public java.lang.String getResourceFamily() {
return resourceFamily;
}
/**
* The type of product the SKU refers to. Example: "Compute", "Storage", "Network",
* "ApplicationServices" etc.
* @param resourceFamily resourceFamily or {@code null} for none
*/
public Category setResourceFamily(java.lang.String resourceFamily) {
this.resourceFamily = resourceFamily;
return this;
}
/**
* A group classification for related SKUs. Example: "RAM", "GPU", "Prediction", "Ops",
* "GoogleEgress" etc.
* @return value or {@code null} for none
*/
public java.lang.String getResourceGroup() {
return resourceGroup;
}
/**
* A group classification for related SKUs. Example: "RAM", "GPU", "Prediction", "Ops",
* "GoogleEgress" etc.
* @param resourceGroup resourceGroup or {@code null} for none
*/
public Category setResourceGroup(java.lang.String resourceGroup) {
this.resourceGroup = resourceGroup;
return this;
}
/**
* The display name of the service this SKU belongs to.
* @return value or {@code null} for none
*/
public java.lang.String getServiceDisplayName() {
return serviceDisplayName;
}
/**
* The display name of the service this SKU belongs to.
* @param serviceDisplayName serviceDisplayName or {@code null} for none
*/
public Category setServiceDisplayName(java.lang.String serviceDisplayName) {
this.serviceDisplayName = serviceDisplayName;
return this;
}
/**
* Represents how the SKU is consumed. Example: "OnDemand", "Preemptible", "Commit1Mo",
* "Commit1Yr" etc.
* @return value or {@code null} for none
*/
public java.lang.String getUsageType() {
return usageType;
}
/**
* Represents how the SKU is consumed. Example: "OnDemand", "Preemptible", "Commit1Mo",
* "Commit1Yr" etc.
* @param usageType usageType or {@code null} for none
*/
public Category setUsageType(java.lang.String usageType) {
this.usageType = usageType;
return this;
}
@Override
public Category set(String fieldName, Object value) {
return (Category) super.set(fieldName, value);
}
@Override
public Category clone() {
return (Category) super.clone();
}
}
| 1,505 |
1,045 | /***************************************************************************************************
Tencent is pleased to support the open source community by making RapidView available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MITLicense (the "License"); you may not use this file except in compliance
withthe License. You mayobtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is
distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing permissions and limitations under the
License.
***************************************************************************************************/
package com.tencent.rapidview;
/**
* @Class RapidVersion
* @Desc Rapid (Photon) skin engine version number. Very important: whenever the engine is modified, the version number must be incremented!
* Due to CMS limitations, GRAY_ID must not contain spaces.
*
* @author arlozhang
* @date 2016.04.21
*/
public class RapidVersion {
/**RAPID_ENGINE_VERSION
* The Rapid (Photon) skin engine version number. Whenever a feature that is visible in XML is added or changed (e.g. a new XML
* parameter or a new ACTION), the engine version must be incremented. This ensures that when the CMS delivers skin files, new XML
* configurations containing unsupported features are not pushed to older versions, which would fail to parse. When configuring an
* XML file in the CMS, specify the lowest engine version that supports it, i.e. the version number reached after the feature was
* developed and the engine version was bumped. **/
public final static int RAPID_ENGINE_VERSION = 1;
/**RAPID_GRAY_ID
* The gray-release (canary) identifier. When several teams develop the skin engine in parallel, the same engine version can end
* up corresponding to different features. For example, the trunk engine version is 7 and two teams branch from trunk in SVN:
* team A develops feature XXX, bumps the version to 8 and ships a gray release; team B develops feature YYY, also bumps it to 8
* and ships a gray release. The two "8" builds now mean different things.
*
* The gray identifier solves this. A gray release does not bump the engine version; it only configures a unique gray identifier,
* e.g. team A uses a_gray_xxx and team B uses b_gray_yyy. When a view is configured in the CMS together with that identifier,
* the client prefers, among views with the same name, the one configured with the gray identifier for the current engine
* version. Once the feature is merged into trunk, the identifier is removed and the engine version is incremented; mainline
* packages are then released along a single track, so the conflict no longer occurs. **/
public final static String RAPID_GRAY_ID = "";
}
| 1,448 |
802 | package io.github.biezhi.wechat.api.model;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
import java.io.Serializable;
/**
* KeyItem
*
* @author biezhi
* @date 2018/1/19
*/
@Data
public class KeyItem implements Serializable {
@SerializedName("Key")
private Integer key;
@SerializedName("Val")
private Integer val;
}
| 132 |
317 | <filename>include/edyn/serialization/shape/cylinder_shape_s11n.hpp
#ifndef EDYN_SERIALIZATION_SHAPE_CYLINDER_SHAPE_S11N_HPP
#define EDYN_SERIALIZATION_SHAPE_CYLINDER_SHAPE_S11N_HPP
#include "edyn/shapes/cylinder_shape.hpp"
namespace edyn {
template<typename Archive>
void serialize(Archive &archive, cylinder_shape &s) {
archive(s.half_length);
archive(s.radius);
}
}
#endif // EDYN_SERIALIZATION_SHAPE_CYLINDER_SHAPE_S11N_HPP | 191 |
831 | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.tests.gui.uibuilder;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import com.android.tools.idea.tests.gui.framework.GuiTestRule;
import com.android.tools.idea.tests.gui.framework.fixture.CreateResourceFileDialogFixture;
import com.android.tools.idea.tests.gui.framework.fixture.EditorFixture.Tab;
import com.android.tools.idea.tests.gui.framework.fixture.IdeFrameFixture;
import com.intellij.testGuiFramework.framework.GuiTestRemoteRunner;
import org.intellij.lang.annotations.Language;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(GuiTestRemoteRunner.class)
public final class GridLayoutTest {
@Rule
public final GuiTestRule myGuiTest = new GuiTestRule();
@Rule
public final RenderTaskLeakCheckRule renderTaskLeakCheckRule = new RenderTaskLeakCheckRule();
@Test
public void dragViewIntoEmptyGridLayout() throws Exception {
IdeFrameFixture frame = myGuiTest.importSimpleApplication();
frame.getProjectView().selectAndroidPane().clickPath("app");
frame.openFromMenu(CreateResourceFileDialogFixture::find, "File", "New", "Android Resource File")
.setFilename("gridlayout")
.setType("layout")
.setRootElement("GridLayout")
.clickOk()
.getEditor()
.getLayoutEditor()
.waitForRenderToFinish()
.showOnlyDesignView()
.dragComponentToSurface("Text", "TextView");
@Language("XML")
String expected = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
"<GridLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n" +
" android:layout_width=\"match_parent\" android:layout_height=\"match_parent\">\n" +
"\n" +
" <TextView\n" +
" android:id=\"@+id/textView\"\n" +
" android:layout_width=\"wrap_content\"\n" +
" android:layout_height=\"wrap_content\"\n" +
" android:layout_row=\"0\"\n" +
" android:layout_column=\"0\"\n" +
" android:text=\"TextView\" />\n" +
"</GridLayout>";
String contents = frame.getEditor().open("app/src/main/res/layout/gridlayout.xml", Tab.EDITOR).getCurrentFileContents();
assertThat(contents).isEqualTo(expected);
}
}
| 1,202 |
332 | from django.db import models
class Play(models.Model):
genre = models.CharField(max_length=100)
title = models.CharField(max_length=200)
year = models.IntegerField()
author = models.ForeignKey('tests.Author', related_name='plays', on_delete=models.CASCADE)
class Poem(models.Model):
title = models.CharField(max_length=200)
style = models.CharField(max_length=100)
author = models.ForeignKey('tests.Author', related_name='poems', on_delete=models.CASCADE)
class Author(models.Model):
name = models.CharField(max_length=100)
| 195 |
678 | <filename>WeChat-Headers/WCPayOrderProductInfo.h<gh_stars>100-1000
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "NSObject.h"
@class NSArray, NSString;
@interface WCPayOrderProductInfo : NSObject
{
NSString *m_nsProductId;
NSString *m_nsProductName;
NSString *m_nsProductImage;
unsigned int m_uiProductCount;
NSString *m_nsProductMoney;
NSArray *m_arrSKUInfo;
NSString *m_nsJumpUrl;
unsigned int m_uiJumpType;
}
@property(retain, nonatomic) NSString *m_nsJumpUrl; // @synthesize m_nsJumpUrl;
@property(nonatomic) unsigned int m_uiJumpType; // @synthesize m_uiJumpType;
@property(nonatomic) unsigned int m_uiProductCount; // @synthesize m_uiProductCount;
@property(retain, nonatomic) NSString *m_nsProductName; // @synthesize m_nsProductName;
@property(retain, nonatomic) NSString *m_nsProductImage; // @synthesize m_nsProductImage;
@property(retain, nonatomic) NSString *m_nsProductId; // @synthesize m_nsProductId;
@property(retain, nonatomic) NSString *m_nsProductMoney; // @synthesize m_nsProductMoney;
@property(retain, nonatomic) NSArray *m_arrSKUInfo; // @synthesize m_arrSKUInfo;
- (void).cxx_destruct;
- (void)dealloc;
@end
| 485 |
2,448 | <gh_stars>1000+
package com.yiqiniu.easytrans.log.vo.tcc;
import com.yiqiniu.easytrans.log.vo.AfterCommit;
import com.yiqiniu.easytrans.log.vo.DemiRightContent;
@AfterCommit
public class TccCallConfirmedContent extends DemiRightContent {
private static final long serialVersionUID = 1L;
@Override
public int getLogType() {
return ContentType.TccCallConfirmed.getContentTypeId();
}
}
| 156 |
665 | <filename>Deco_Keypad/code.py<gh_stars>100-1000
# SPDX-FileCopyrightText: Copyright (c) 2021 <NAME> for Adafruit
#
# SPDX-License-Identifier: MIT
# Deco Keypad
import time
import board
from digitalio import DigitalInOut, Pull
from adafruit_debouncer import Debouncer
import usb_hid
from adafruit_hid.keyboard import Keyboard
from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS
from adafruit_hid.keycode import Keycode
import neopixel
print("- Deco Keypad -")
time.sleep(1) # Sleep for a bit to avoid a race condition on some systems
# ----- Keymap ----- #
# change as needed, e.g. capital A (Keycode.SHIFT, Keycode.A)
switch_a_output = Keycode.Z
switch_b_output = Keycode.X
# ----- Keyboard setup ----- #
keyboard = Keyboard(usb_hid.devices)
keyboard_layout = KeyboardLayoutUS(keyboard) # We're in the US :)
# ----- Key setup ----- #
switch_a_in = DigitalInOut(board.D5)
switch_b_in = DigitalInOut(board.D6)
switch_a_in.pull = Pull.UP
switch_b_in.pull = Pull.UP
switch_a = Debouncer(switch_a_in)
switch_b = Debouncer(switch_b_in)
# ----- NeoPixel setup ----- #
MAGENTA = 0xFF00FF
CYAN = 0x0088DD
WHITE = 0xCCCCCC
BLACK = 0x000000
pixel_pin = board.D9
pixels = neopixel.NeoPixel(pixel_pin, 2, brightness=1.0)
pixels.fill(BLACK)
time.sleep(0.3)
pixels.fill(WHITE)
time.sleep(0.3)
pixels.fill(BLACK)
time.sleep(0.3)
pixels[0] = MAGENTA
pixels[1] = CYAN
while True:
switch_a.update() # Debouncer checks for changes in switch state
switch_b.update()
if switch_a.fell:
keyboard.press(switch_a_output)
pixels[0] = WHITE
if switch_a.rose:
keyboard.release(switch_a_output)
pixels[0] = MAGENTA
if switch_b.fell:
keyboard.press(switch_b_output)
pixels[1] = WHITE
if switch_b.rose:
keyboard.release(switch_b_output)
pixels[1] = CYAN
| 732 |
403 | package io.craft.atom.rpc;
/**
* @author mindwind
* @version 1.0, Sep 5, 2014
*/
public interface DemoService {
String echo(String in);
String attachment();
String oneway();
void noreturn(String in);
void timeout(String in) throws InterruptedException;
void overload() throws InterruptedException;
void bizException() throws IllegalAccessException;
void undeclaredException() throws IllegalStateException;
void error();
}
| 128 |
14,425 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.webapp.util;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource.Builder;
import javax.ws.rs.core.MediaType;
import com.sun.jersey.api.json.JSONJAXBContext;
import com.sun.jersey.api.json.JSONMarshaller;
import org.apache.hadoop.conf.Configuration;
import org.codehaus.jettison.json.JSONObject;
import java.io.StringWriter;
/**
* This class contains several utility function which could be used to generate
* Restful calls to RM/NM/AHS.
*
*/
public final class YarnWebServiceUtils {
private YarnWebServiceUtils() {}
/**
* Utility function to get NodeInfo by calling RM WebService.
* @param conf the configuration
* @param nodeId the nodeId
* @return a JSONObject which contains the NodeInfo
* @throws ClientHandlerException if there is an error
* processing the response.
* @throws UniformInterfaceException if the response status
* is 204 (No Content).
*/
public static JSONObject getNodeInfoFromRMWebService(Configuration conf,
String nodeId) throws ClientHandlerException,
UniformInterfaceException {
try {
return WebAppUtils.execOnActiveRM(conf,
YarnWebServiceUtils::getNodeInfoFromRM, nodeId);
} catch (Exception e) {
if (e instanceof ClientHandlerException) {
throw ((ClientHandlerException) e);
} else if (e instanceof UniformInterfaceException) {
throw ((UniformInterfaceException) e);
} else {
throw new RuntimeException(e);
}
}
}
private static JSONObject getNodeInfoFromRM(String webAppAddress,
String nodeId) throws ClientHandlerException, UniformInterfaceException {
Client webServiceClient = Client.create();
ClientResponse response = null;
try {
Builder builder = webServiceClient.resource(webAppAddress)
.path("ws").path("v1").path("cluster")
.path("nodes").path(nodeId).accept(MediaType.APPLICATION_JSON);
response = builder.get(ClientResponse.class);
return response.getEntity(JSONObject.class);
} finally {
if (response != null) {
response.close();
}
webServiceClient.destroy();
}
}
@SuppressWarnings("rawtypes")
public static String toJson(Object nsli, Class klass) throws Exception {
StringWriter sw = new StringWriter();
JSONJAXBContext ctx = new JSONJAXBContext(klass);
JSONMarshaller jm = ctx.createJSONMarshaller();
jm.marshallToJSON(nsli, sw);
return sw.toString();
}
}
| 1,133 |
1,470 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-22 01:16
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("challenges", "0012_added_code_name_field")]
operations = [
migrations.AlterUniqueTogether(
name="challengephase",
unique_together=set([("code_name", "challenge")]),
)
]
| 174 |
892 | <filename>advisories/unreviewed/2022/05/GHSA-mp3w-62hp-vh8j/GHSA-mp3w-62hp-vh8j.json
{
"schema_version": "1.2.0",
"id": "GHSA-mp3w-62hp-vh8j",
"modified": "2022-05-13T01:45:52Z",
"published": "2022-05-13T01:45:52Z",
"aliases": [
"CVE-2017-3756"
],
"details": "A privilege escalation vulnerability was identified in Lenovo Active Protection System for ThinkPad systems versions earlier than 172.16.31.10. An attacker with local privileges could execute code with administrative privileges via an unquoted service path.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.0/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3756"
},
{
"type": "WEB",
"url": "https://support.lenovo.com/us/en/product_security/LEN-15765"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/100305"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 512 |
343 | <filename>rootfs/usr/lib/python3/dist-packages/numpy/distutils/compat.py
"""Small modules to cope with python 2 vs 3 incompatibilities inside
numpy.distutils
"""
import sys
def get_exception():
return sys.exc_info()[1]
| 76 |
14,668 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_MODULES_PEERCONNECTION_ADAPTERS_ICE_TRANSPORT_ADAPTER_CROSS_THREAD_FACTORY_H_
#define THIRD_PARTY_BLINK_RENDERER_MODULES_PEERCONNECTION_ADAPTERS_ICE_TRANSPORT_ADAPTER_CROSS_THREAD_FACTORY_H_
#include "third_party/blink/renderer/modules/peerconnection/adapters/ice_transport_adapter.h"
namespace blink {
class LocalFrame;
// This class creates a single concrete instance of an IceTransportAdapter with
// a hook to allow creating dependencies on the main thread (the
// IceTransportAdapter is created on the worker thread).
//
// Callers must call InitializeOnMainThread() before ConstructOnWorkerThread().
class IceTransportAdapterCrossThreadFactory {
public:
virtual ~IceTransportAdapterCrossThreadFactory() = default;
// Construct any dependencies on the main thread. Can only be called once.
virtual void InitializeOnMainThread(LocalFrame&) = 0;
// Construct the IceTransportAdapter instance with the given delegate. Can
// only be called once.
virtual std::unique_ptr<IceTransportAdapter> ConstructOnWorkerThread(
IceTransportAdapter::Delegate* delegate) = 0;
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_MODULES_PEERCONNECTION_ADAPTERS_ICE_TRANSPORT_ADAPTER_CROSS_THREAD_FACTORY_H_
| 453 |
1,346 | package com.ctrip.platform.dal.exceptions;
/**
* Created by taochen on 2019/9/17.
*/
public class TransactionSystemException extends RuntimeException {
public TransactionSystemException(String msg) {
super(msg);
}
public TransactionSystemException(String msg, Throwable cause) {
super(msg, cause);
}
}
| 111 |
3,428 | {"id":"01356","group":"spam-2","checksum":{"type":"MD5","value":"8d996c0bc08a47a90611de2e8a829048"},"text":"From <EMAIL> Thu Aug 8 14:37:17 2002\nReturn-Path: <<EMAIL>>\nDelivered-To: yyyy@<EMAIL>.netnoteinc.com\nReceived: from localhost (localhost [127.0.0.1])\n\tby phobos.labs.netnoteinc.com (Postfix) with ESMTP id 8685944207\n\tfor <jm@localhost>; Thu, 8 Aug 2002 08:40:35 -0400 (EDT)\nReceived: from phobos [127.0.0.1]\n\tby localhost with IMAP (fetchmail-5.9.0)\n\tfor jm@localhost (single-drop); Thu, 08 Aug 2002 13:40:35 +0100 (IST)\nReceived: from webnote.net (mail.webnote.net [193.120.211.219]) by\n dogma.slashnull.org (8.11.6/8.11.6) with ESMTP id g78CKY202619 for\n <<EMAIL>>; Thu, 8 Aug 2002 13:20:34 +0100\nReceived: from xent.com ([172.16.17.32]) by webnote.net (8.9.3/8.9.3)\n with ESMTP id IAA28455 for <<EMAIL>>; Thu, 8 Aug 2002 08:31:10 +0100\nReceived: from lair.xent.com (localhost [1172.16.31.10]) by xent.com (Postfix)\n with ESMTP id 50F8529409F; Thu, 8 Aug 2002 00:16:05 -0700 (PDT)\nDelivered-To: <EMAIL>\nReceived: from rly116.threeloot.com (rly116.threeloot.com [64.39.19.116])\n by xent.com (Postfix) with ESMTP id 4A04329409E for <<EMAIL>>;\n Thu, 8 Aug 2002 00:15:54 -0700 (PDT)\nReceived: from gcd.c0.threeloot.com (64.39.19.116) by rly116.threeloot.com\n (8.11.1/8.12.9) with ESMTP id g787K2p16608 for <<EMAIL>>;\n Thu, 8 Aug 2002 02:20:02 -0500 (CDT) (envelope-from\n <EMAIL>)\nMessage-Id: <<EMAIL>>\nFrom: \"GiftCD Alert\" <<EMAIL>>\nTo: \" \" <<EMAIL>>\nSubject: 911 Anniv Bush Memorial Bill for You\nSender: <EMAIL>-<EMAIL>\nErrors-To: [email protected]\nX-Beenthere: <EMAIL>\nX-Mailman-Version: 2.0.11\nPrecedence: bulk\nList-Help: <mailto:<EMAIL>?subject=help>\nList-Post: <mailto:<EMAIL>>\nList-Subscribe: <http://xent.com/mailman/listinfo/fork>, <mailto:<EMAIL>?subject=subscribe>\nList-Id: Friends of <NAME> <fork.xent.com>\nList-Unsubscribe: <http://xent.com/mailman/listinfo/fork>,\n <mailto:<EMAIL>?subject=unsubscribe>\nList-Archive: <http://xent.com/pipermail/fork/>\nDate: Thu, 8 Aug 2002 02:20:02 -0500 (CDT)\n\n----------------------------------------------------------\n>> GiftCD Offer Newsletter Confirmation August 8th, 2002\n----------------------------------------------------------\n\nThank you for your subscription.\nAs a valued GiftCD subscriber, check out this great \ngift!\n\nThank you for your subscription.\nThis message was sent to you as a Valued GiftCD\nsubscriber. Check out this great gift!\n\n\n\n+--------------------------------------------------------+\n911 Anniversary Gift.\n+--------------------------------------------------------+\n\nFree with NO SHIPPING! Grab one of this hot gift,\navailable to all Americans for free. (valid until Sep 11)\nhttp://giftcd.com/offers/track_colonialmint.shtml\n\n\n\n\n+--------------------------------------------------------+\nFREE ColonialMint $2001 Bush Memorial Dollar Bill\n+--------------------------------------------------------+\n\nPerfect gift for mom, dad, family members, friends and \nco-workers. Every American should have one.\n(Free gift valid for U.S. Residents Only)\nhttp://www.giftcd.com/offers/track_colonialmint.shtml\n\n\n\n\n+--------------------------------------------------------+\n~~~ Free 911 Anniv $2001 Dollar gift.\n+--------------------------------------------------------+\n\nLimited collector item. 
Grab one of the hottest \nAmerican collection for FREE before Sep 11 2002.\nhttp://giftcd.com/offers/track_colonialmint.shtml\n\n\n\n\nRegards,\n<NAME>\nGift CD Editorial Team\nhttp://www.giftcd.com\n\n\n----------------------------------------------------------\n>> Subscription, Disclaimer & Copyright\n----------------------------------------------------------\n\nThis email is part of your GiftCD Newsletter\nsubscription. If you are no longer interested, \nplease forward this email to:\n<EMAIL>.<EMAIL>\n\nIMPORTANT: To protect our subscribers privacy, \nwe do not allow third party subscriptions. If you \nreceive this email by mistake or your friend subscribe \nfor you without your consent, please forward this email \nto us for further investigation. \n\nWe welcome suggestions, comments and feedback. \nContact us at: <EMAIL>\n\n==========================================================\nCopyright 2002 GiftCD.com. All rights reserved.\nhttp://xent.com/mailman/listinfo/fork\n\n\n"} | 1,500 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-47xc-8r2q-5m83",
"modified": "2022-04-26T00:00:54Z",
"published": "2022-04-16T00:00:25Z",
"aliases": [
"CVE-2022-29281"
],
"details": "Notable before 1.9.0-beta.8 doesn't effectively prevent the opening of executable files when clicking on a link. There is improper validation of the file URI scheme. A hyperlink to an SMB share could lead to execution of an arbitrary program (or theft of NTLM credentials via an SMB relay attack, because the application resolves UNC paths).",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2022-29281"
},
{
"type": "WEB",
"url": "https://github.com/hmsec/Advisories/blob/master/CVE-2022-29281.md"
},
{
"type": "WEB",
"url": "https://github.com/notable/notable-insiders/releases/tag/v1.9.0-beta.8"
}
],
"database_specific": {
"cwe_ids": [
"CWE-20"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 531 |
879 | <filename>header/src/main/java/org/zstack/header/storage/snapshot/ShrinkVolumeSnapshotOnPrimaryStorageReply.java
package org.zstack.header.storage.snapshot;
import org.zstack.header.message.APIEvent;
import org.zstack.header.message.MessageReply;
import org.zstack.header.rest.RestResponse;
import org.zstack.utils.data.SizeUnit;
/**
* @ Author : yh.w
* @ Date : Created in 13:25 2020/7/28
*/
public class ShrinkVolumeSnapshotOnPrimaryStorageReply extends MessageReply {
private ShrinkResult shrinkResult;
public ShrinkResult getShrinkResult() {
return shrinkResult;
}
public void setShrinkResult(ShrinkResult shrinkResult) {
this.shrinkResult = shrinkResult;
}
}
| 244 |
2,441 | <filename>core/src/main/java/tech/tablesaw/aggregate/DateTimeAggregateFunction.java
package tech.tablesaw.aggregate;
import java.time.LocalDateTime;
import tech.tablesaw.api.ColumnType;
import tech.tablesaw.api.DateTimeColumn;
/** A partial implementation of aggregate functions to summarize over a dateTime column */
public abstract class DateTimeAggregateFunction
extends AggregateFunction<DateTimeColumn, LocalDateTime> {
/**
* Constructs an DateTimeAggregateFunction with the given name. The name is used as a column name
* in the output
*/
public DateTimeAggregateFunction(String name) {
super(name);
}
/** Returns an LocalDateTime that is the result of applying this function to the given column */
public abstract LocalDateTime summarize(DateTimeColumn column);
/** {@inheritDoc} */
@Override
public boolean isCompatibleColumn(ColumnType type) {
return type.equals(ColumnType.LOCAL_DATE_TIME);
}
/** {@inheritDoc} */
@Override
public ColumnType returnType() {
return ColumnType.LOCAL_DATE_TIME;
}
}
| 317 |
312 | /*
* Copyright (C) the libgit2 contributors. All rights reserved.
*
* This file is part of libgit2, distributed under the GNU GPL v2 with
* a Linking Exception. For full terms see the included COPYING file.
*/
#include "common.h"
#include "git2/credential.h"
#include "git2/sys/credential.h"
#include "git2/credential_helpers.h"
static int git_credential_ssh_key_type_new(
git_credential **cred,
const char *username,
const char *publickey,
const char *privatekey,
const char *passphrase,
git_credential_t credtype);
int git_credential_has_username(git_credential *cred)
{
if (cred->credtype == GIT_CREDENTIAL_DEFAULT)
return 0;
return 1;
}
const char *git_credential_get_username(git_credential *cred)
{
switch (cred->credtype) {
case GIT_CREDENTIAL_USERNAME:
{
git_credential_username *c = (git_credential_username *) cred;
return c->username;
}
case GIT_CREDENTIAL_USERPASS_PLAINTEXT:
{
git_credential_userpass_plaintext *c = (git_credential_userpass_plaintext *) cred;
return c->username;
}
case GIT_CREDENTIAL_SSH_KEY:
case GIT_CREDENTIAL_SSH_MEMORY:
{
git_credential_ssh_key *c = (git_credential_ssh_key *) cred;
return c->username;
}
case GIT_CREDENTIAL_SSH_CUSTOM:
{
git_credential_ssh_custom *c = (git_credential_ssh_custom *) cred;
return c->username;
}
case GIT_CREDENTIAL_SSH_INTERACTIVE:
{
git_credential_ssh_interactive *c = (git_credential_ssh_interactive *) cred;
return c->username;
}
default:
return NULL;
}
}
static void plaintext_free(struct git_credential *cred)
{
git_credential_userpass_plaintext *c = (git_credential_userpass_plaintext *)cred;
git__free(c->username);
/* Zero the memory which previously held the password */
if (c->password) {
size_t pass_len = strlen(c->password);
git__memzero(c->password, pass_len);
git__free(c->password);
}
git__free(c);
}
int git_credential_userpass_plaintext_new(
git_credential **cred,
const char *username,
const char *password)
{
git_credential_userpass_plaintext *c;
assert(cred && username && password);
c = git__malloc(sizeof(git_credential_userpass_plaintext));
GIT_ERROR_CHECK_ALLOC(c);
c->parent.credtype = GIT_CREDENTIAL_USERPASS_PLAINTEXT;
c->parent.free = plaintext_free;
c->username = git__strdup(username);
if (!c->username) {
git__free(c);
return -1;
}
c->password = git__strdup(password);
if (!c->password) {
git__free(c->username);
git__free(c);
return -1;
}
*cred = &c->parent;
return 0;
}
static void ssh_key_free(struct git_credential *cred)
{
git_credential_ssh_key *c =
(git_credential_ssh_key *)cred;
git__free(c->username);
if (c->privatekey) {
/* Zero the memory which previously held the private key */
size_t key_len = strlen(c->privatekey);
git__memzero(c->privatekey, key_len);
git__free(c->privatekey);
}
if (c->passphrase) {
/* Zero the memory which previously held the passphrase */
size_t pass_len = strlen(c->passphrase);
git__memzero(c->passphrase, pass_len);
git__free(c->passphrase);
}
if (c->publickey) {
/* Zero the memory which previously held the public key */
size_t key_len = strlen(c->publickey);
git__memzero(c->publickey, key_len);
git__free(c->publickey);
}
git__free(c);
}
static void ssh_interactive_free(struct git_credential *cred)
{
git_credential_ssh_interactive *c = (git_credential_ssh_interactive *)cred;
git__free(c->username);
git__free(c);
}
static void ssh_custom_free(struct git_credential *cred)
{
git_credential_ssh_custom *c = (git_credential_ssh_custom *)cred;
git__free(c->username);
if (c->publickey) {
/* Zero the memory which previously held the publickey */
size_t key_len = strlen(c->publickey);
git__memzero(c->publickey, key_len);
git__free(c->publickey);
}
git__free(c);
}
static void default_free(struct git_credential *cred)
{
git_credential_default *c = (git_credential_default *)cred;
git__free(c);
}
static void username_free(struct git_credential *cred)
{
git__free(cred);
}
int git_credential_ssh_key_new(
git_credential **cred,
const char *username,
const char *publickey,
const char *privatekey,
const char *passphrase)
{
return git_credential_ssh_key_type_new(
cred,
username,
publickey,
privatekey,
passphrase,
GIT_CREDENTIAL_SSH_KEY);
}
int git_credential_ssh_key_memory_new(
git_credential **cred,
const char *username,
const char *publickey,
const char *privatekey,
const char *passphrase)
{
#ifdef GIT_SSH_MEMORY_CREDENTIALS
return git_credential_ssh_key_type_new(
cred,
username,
publickey,
privatekey,
passphrase,
GIT_CREDENTIAL_SSH_MEMORY);
#else
GIT_UNUSED(cred);
GIT_UNUSED(username);
GIT_UNUSED(publickey);
GIT_UNUSED(privatekey);
GIT_UNUSED(passphrase);
git_error_set(GIT_ERROR_INVALID,
"this version of libgit2 was not built with ssh memory credentials.");
return -1;
#endif
}
static int git_credential_ssh_key_type_new(
git_credential **cred,
const char *username,
const char *publickey,
const char *privatekey,
const char *passphrase,
git_credential_t credtype)
{
git_credential_ssh_key *c;
assert(username && cred && privatekey);
c = git__calloc(1, sizeof(git_credential_ssh_key));
GIT_ERROR_CHECK_ALLOC(c);
c->parent.credtype = credtype;
c->parent.free = ssh_key_free;
c->username = git__strdup(username);
GIT_ERROR_CHECK_ALLOC(c->username);
c->privatekey = git__strdup(privatekey);
GIT_ERROR_CHECK_ALLOC(c->privatekey);
if (publickey) {
c->publickey = git__strdup(publickey);
GIT_ERROR_CHECK_ALLOC(c->publickey);
}
if (passphrase) {
c->passphrase = git__strdup(passphrase);
GIT_ERROR_CHECK_ALLOC(c->passphrase);
}
*cred = &c->parent;
return 0;
}
int git_credential_ssh_interactive_new(
git_credential **out,
const char *username,
git_credential_ssh_interactive_cb prompt_callback,
void *payload)
{
git_credential_ssh_interactive *c;
assert(out && username && prompt_callback);
c = git__calloc(1, sizeof(git_credential_ssh_interactive));
GIT_ERROR_CHECK_ALLOC(c);
c->parent.credtype = GIT_CREDENTIAL_SSH_INTERACTIVE;
c->parent.free = ssh_interactive_free;
c->username = git__strdup(username);
GIT_ERROR_CHECK_ALLOC(c->username);
c->prompt_callback = prompt_callback;
c->payload = payload;
*out = &c->parent;
return 0;
}
int git_credential_ssh_key_from_agent(git_credential **cred, const char *username) {
git_credential_ssh_key *c;
assert(username && cred);
c = git__calloc(1, sizeof(git_credential_ssh_key));
GIT_ERROR_CHECK_ALLOC(c);
c->parent.credtype = GIT_CREDENTIAL_SSH_KEY;
c->parent.free = ssh_key_free;
c->username = git__strdup(username);
GIT_ERROR_CHECK_ALLOC(c->username);
c->privatekey = NULL;
*cred = &c->parent;
return 0;
}
int git_credential_ssh_custom_new(
git_credential **cred,
const char *username,
const char *publickey,
size_t publickey_len,
git_credential_sign_cb sign_callback,
void *payload)
{
git_credential_ssh_custom *c;
assert(username && cred);
c = git__calloc(1, sizeof(git_credential_ssh_custom));
GIT_ERROR_CHECK_ALLOC(c);
c->parent.credtype = GIT_CREDENTIAL_SSH_CUSTOM;
c->parent.free = ssh_custom_free;
c->username = git__strdup(username);
GIT_ERROR_CHECK_ALLOC(c->username);
if (publickey_len > 0) {
c->publickey = git__malloc(publickey_len);
GIT_ERROR_CHECK_ALLOC(c->publickey);
memcpy(c->publickey, publickey, publickey_len);
}
c->publickey_len = publickey_len;
c->sign_callback = sign_callback;
c->payload = payload;
*cred = &c->parent;
return 0;
}
int git_credential_default_new(git_credential **cred)
{
git_credential_default *c;
assert(cred);
c = git__calloc(1, sizeof(git_credential_default));
GIT_ERROR_CHECK_ALLOC(c);
c->credtype = GIT_CREDENTIAL_DEFAULT;
c->free = default_free;
*cred = c;
return 0;
}
int git_credential_username_new(git_credential **cred, const char *username)
{
git_credential_username *c;
size_t len, allocsize;
assert(cred);
len = strlen(username);
GIT_ERROR_CHECK_ALLOC_ADD(&allocsize, sizeof(git_credential_username), len);
GIT_ERROR_CHECK_ALLOC_ADD(&allocsize, allocsize, 1);
c = git__malloc(allocsize);
GIT_ERROR_CHECK_ALLOC(c);
c->parent.credtype = GIT_CREDENTIAL_USERNAME;
c->parent.free = username_free;
memcpy(c->username, username, len + 1);
*cred = (git_credential *) c;
return 0;
}
void git_credential_free(git_credential *cred)
{
if (!cred)
return;
cred->free(cred);
}
/* Deprecated credential functions */
int git_cred_has_username(git_credential *cred)
{
return git_credential_has_username(cred);
}
const char *git_cred_get_username(git_credential *cred)
{
return git_credential_get_username(cred);
}
int git_cred_userpass_plaintext_new(
git_credential **out,
const char *username,
const char *password)
{
return git_credential_userpass_plaintext_new(out,username, password);
}
int git_cred_default_new(git_credential **out)
{
return git_credential_default_new(out);
}
int git_cred_username_new(git_credential **out, const char *username)
{
return git_credential_username_new(out, username);
}
int git_cred_ssh_key_new(
git_credential **out,
const char *username,
const char *publickey,
const char *privatekey,
const char *passphrase)
{
return git_credential_ssh_key_new(out, username,
publickey, privatekey, passphrase);
}
int git_cred_ssh_key_memory_new(
git_credential **out,
const char *username,
const char *publickey,
const char *privatekey,
const char *passphrase)
{
return git_credential_ssh_key_memory_new(out, username,
publickey, privatekey, passphrase);
}
int git_cred_ssh_interactive_new(
git_credential **out,
const char *username,
git_credential_ssh_interactive_cb prompt_callback,
void *payload)
{
return git_credential_ssh_interactive_new(out, username,
prompt_callback, payload);
}
int git_cred_ssh_key_from_agent(
git_credential **out,
const char *username)
{
return git_credential_ssh_key_from_agent(out, username);
}
int git_cred_ssh_custom_new(
git_credential **out,
const char *username,
const char *publickey,
size_t publickey_len,
git_credential_sign_cb sign_callback,
void *payload)
{
return git_credential_ssh_custom_new(out, username,
publickey, publickey_len, sign_callback, payload);
}
void git_cred_free(git_credential *cred)
{
git_credential_free(cred);
}
| 4,173 |
1,652 | package com.ctrip.xpipe.monitor;
import com.ctrip.xpipe.api.monitor.DelayMonitor;
import com.ctrip.xpipe.lifecycle.AbstractStartStoppable;
import com.ctrip.xpipe.utils.DateTimeUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
* @author wenchao.meng
*
* May 21, 2016 10:14:33 PM
*/
public class DefaultDelayMonitor extends AbstractStartStoppable implements DelayMonitor, Runnable{
protected Logger logger = LoggerFactory.getLogger(getClass());
private ScheduledExecutorService scheduled;
private AtomicLong totalDelay = new AtomicLong();
private AtomicLong totalNum = new AtomicLong();
private long previousDelay = 0, previousNum = 0;
private String delayType, delayInfo;
private long infoDelta = 1000;
private boolean consolePrint = false;
private long max, maxTime;
public DefaultDelayMonitor(String delayType) {
this(delayType, 1000);
}
public DefaultDelayMonitor(String delayType, long infoDelta) {
this.delayType = delayType;
this.infoDelta = infoDelta;
}
@Override
public void setConsolePrint(boolean consolePrint) {
this.consolePrint = consolePrint;
}
@Override
protected void doStart() throws Exception {
scheduled = Executors.newScheduledThreadPool(4);
ScheduledFuture<?> future = scheduled.scheduleAtFixedRate(this, 0, 5, TimeUnit.SECONDS);
new Thread(new Runnable() {
@Override
public void run() {
try {
future.get();
} catch (Exception e) {
logger.error("[doStart]", e);
}
}
}).start();
}
@Override
protected void doStop() throws Exception {
scheduled.shutdown();
}
@Override
public void addData(long lastTime) {
if(lastTime < 0 ){
return;
}
long current = System.currentTimeMillis();
long delta = current - lastTime;
if(delta > infoDelta){
logger.info("{} - {} = {}", current, lastTime, delta);
}
if(delta >= 0){
totalDelay.addAndGet(delta);
totalNum.incrementAndGet();
}
if(delta > max){
max = delta;
maxTime = System.currentTimeMillis();
}
}
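    /**
     * Scheduled reporting task: logs the average delay accumulated since the
     * previous report, i.e. (totalDelay - previousDelay) / (totalNum - previousNum),
     * plus the maximum single delay observed in the window, then resets the window.
     */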
@Override
public void run() {
try{
long currentDelay = totalDelay.get();
long currentNum = totalNum.get();
long deltaNum = currentNum - previousNum;
if(deltaNum > 0 ){
double avgDelay = (double)(currentDelay - previousDelay)/deltaNum;
logger.info(String.format("%d - %d = %d, %d - %d = %d", currentDelay,previousDelay, currentDelay - previousDelay, currentNum, previousNum, currentNum - previousNum));
String info = String.format("[delay]%s, %s, %s", getDelayType(), delayInfo == null ? "" :delayInfo, String.format("%.2f", avgDelay));
if(consolePrint){
System.out.println(info);
}
logger.info(info);
}
String maxInfo = String.format("[max]%d, %s", max, DateTimeUtils.timeAsString(maxTime));
logger.info(maxInfo);
if(consolePrint){
System.out.println(maxInfo);
}
previousDelay = currentDelay;
previousNum = currentNum;
max = 0;
}catch(Throwable th){
logger.error("[run]", th);
}
}
@Override
public String getDelayType() {
return delayType;
}
@Override
public void setDelayInfo(String delayInfo) {
this.delayInfo = delayInfo;
}
}
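// Illustrative usage (not part of the original source; assumes AbstractStartStoppable
// exposes the usual start()/stop() lifecycle methods):
//   DefaultDelayMonitor monitor = new DefaultDelayMonitor("replication", 1000);
//   monitor.setConsolePrint(true);
//   monitor.start();                        // begins the 5-second reporting task
//   monitor.addData(messageSendTimeMillis); // record one sample per message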
| 1,269 |
805 | <reponame>SHITIANYU-hue/flow<gh_stars>100-1000
"""Empty init file to handle deprecations."""
import warnings
from flow.core.kernel.network import * # noqa: F401,F403
warnings.simplefilter('always', PendingDeprecationWarning)
warnings.warn(
"flow.core.kernel.scenario will be deprecated in a future release. Please "
"use flow.core.kernel.network instead.",
PendingDeprecationWarning
)
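# Importing the deprecated module path (e.g. ``import flow.core.kernel.scenario``)
# still provides the network kernel API through the wildcard import above, while
# emitting the PendingDeprecationWarning.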
| 134 |
646 | /*
Copyright (c) 2017 TOSHIBA Digital Solutions Corporation
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "sql_processor_ddl.h"
#include "sql_execution.h"
#include "sql_execution_manager.h"
#include "sql_compiler.h"
#include "nosql_db.h"
#include "nosql_container.h"
#include "nosql_request.h"
#include "resource_set.h"
#include "partition_table.h"
#include "transaction_service.h"
UTIL_TRACER_DECLARE(SQL_SERVICE);
const DDLProcessor::Registrar DDLProcessor::registrar_(SQLType::EXEC_DDL);
DDLProcessor::DDLProcessor(
Context &cxt, const TypeInfo &typeInfo) :
SQLProcessor(cxt, typeInfo),
resourceSet_(cxt.getResourceSet()),
globalVarAlloc_(resourceSet_->getSQLExecutionManager()
->getVarAllocator()),
clientId_(*cxt.getClientId()),
commandType_(SyntaxTree::CMD_NONE),
createUserInfo_(NULL),
dropUserInfo_(NULL),
setPasswordInfo_(NULL),
grantInfo_(NULL),
revokeInfo_(NULL),
createDatabaseInfo_(NULL),
dropDatabaseInfo_(NULL),
createTableInfo_(NULL),
dropTableInfo_(NULL),
createIndexInfo_(NULL),
dropIndexInfo_(NULL),
dropPartitionInfo_(NULL),
addColumnInfo_(NULL),
createViewInfo_(NULL),
dropViewInfo_(NULL),
targetAckCount_(1),
currentAckCount_(0),
phase_(0),
ackList_(globalVarAlloc_),
ackContainerInfoList_(globalVarAlloc_),
isRetry_(false),
execution_(NULL),
targetCheckAckCount_(0),
currentCheckAckCount_(0),
baseInfo_(NULL),
isCleanup_(false) {
}
DDLProcessor::~DDLProcessor() {
cleanupNoSQL();
if (createUserInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, createUserInfo_);
}
if (dropUserInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, dropUserInfo_);
}
if (setPasswordInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, setPasswordInfo_);
}
if (grantInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, grantInfo_);
}
if (revokeInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, revokeInfo_);
}
if (createDatabaseInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, createDatabaseInfo_);
}
if (dropDatabaseInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, dropDatabaseInfo_);
}
if (createTableInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, createTableInfo_);
}
if (dropTableInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, dropTableInfo_);
}
if (createIndexInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, createIndexInfo_);
}
if (dropIndexInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, dropIndexInfo_);
}
if (dropPartitionInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, dropPartitionInfo_);
}
if (addColumnInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, addColumnInfo_);
}
if (createViewInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, createViewInfo_);
}
if (dropViewInfo_) {
ALLOC_VAR_SIZE_DELETE(
globalVarAlloc_, dropViewInfo_);
}
}
struct AllocatorScope {
AllocatorScope(SQLExecutionManager *manager) :
manager_(manager),
alloc_(manager->getStackAllocator()) {}
~AllocatorScope() {
if (alloc_) {
manager_->releaseStackAllocator(alloc_);
}
}
SQLExecutionManager *manager_;
util::StackAllocator *alloc_;
};
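/*
 * Aborts NoSQL operations left behind by this DDL statement: the container
 * currently being processed (when the execution is still running a sync
 * request) and every container whose ACK has not been received yet. Called
 * from the destructor as well, so any exception is traced and swallowed.
 */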
void DDLProcessor::cleanupNoSQL() {
try {
if (isCleanup_) {
return;
}
isCleanup_ = true;
if (baseInfo_ && baseInfo_->isSync()) {
ExecutionLatch latch(
clientId_,
resourceSet_->getSQLExecutionManager()
->getResourceManager(),
NULL);
SQLExecution *execution = latch.get();
if (execution) {
if (execution->getContext().getSyncContext().isRunning()
&& baseInfo_->isSync()
&& baseInfo_->currentContainerName_ != NULL) {
AllocatorScope allocatorScope(
resourceSet_->getSQLExecutionManager());
util::StackAllocator &alloc = *allocatorScope.alloc_;
util::StackAllocator::Scope scope(alloc);
EventEngine::Stats stats;
EventEngine::VariableSizeAllocator varSizeAlloc(
util::AllocatorInfo(
ALLOCATOR_GROUP_STORE, "getContainer"));
EventEngine::EventContext::Source eventSource(
varSizeAlloc, alloc, stats);
EventEngine::EventContext ec(eventSource);
NoSQLContainer targetContainer(
ec,
baseInfo_->currentContainerName_,
execution->getContext().getSyncContext(),
execution);
NoSQLStoreOption option;
option.isSync_ = false;
targetContainer.setNoSQLAbort(
execution->getContext().getCurrentSessionId());
targetContainer.abort(option);
}
}
}
if (targetAckCount_ != currentAckCount_) {
ExecutionLatch latch(
clientId_,
resourceSet_->getSQLExecutionManager()
->getResourceManager(),
NULL);
SQLExecution *execution = latch.get();
if (execution) {
if (targetAckCount_ != currentAckCount_) {
AllocatorScope allocatorScope(
resourceSet_->getSQLExecutionManager());
util::StackAllocator &alloc = *allocatorScope.alloc_;
util::StackAllocator::Scope scope(alloc);
EventEngine::Stats stats;
EventEngine::VariableSizeAllocator varSizeAlloc(
util::AllocatorInfo(
ALLOCATOR_GROUP_STORE, "getContainer"));
EventEngine::EventContext::Source eventSource(
varSizeAlloc, alloc, stats);
EventEngine::EventContext ec(eventSource);
for (size_t pos = 0; pos < ackList_.size(); pos++) {
if (ackList_[pos] == ACK_STATUS_ON) {
NoSQLContainer targetContainer(
ec,
ackContainerInfoList_[pos].containerId_,
0,
ackContainerInfoList_[pos].pId_,
execution->getContext().getSyncContext(),
execution);
NoSQLStoreOption option;
option.isSync_ = false;
targetContainer.setNoSQLAbort(
ackContainerInfoList_[pos].sessionId_);
targetContainer.setNoSQLClientId(
ackContainerInfoList_[pos].clientId_);
targetContainer.abort(option);
}
}
}
}
}
isCleanup_ = true;
}
catch (std::exception &e) {
UTIL_TRACE_EXCEPTION(
SQL_SERVICE, e, "Cleanup nosql operation failed");
}
}
bool DDLProcessor::applyInfo(
Context &cxt,
const Option &option,
const TupleInfoList &inputInfo,
TupleInfo &outputInfo) {
UNUSED_VARIABLE(inputInfo);
try {
if (option.plan_ != NULL) {
if (option.planNodeId_ >= option.plan_->nodeList_.size()) {
GS_THROW_USER_ERROR(GS_ERROR_SQL_INTERNAL, "");
}
const SQLPreparedPlan::Node &node
= option.plan_->nodeList_[option.planNodeId_];
commandType_ = node.commandType_;
switch (commandType_) {
case SyntaxTree::CMD_CREATE_USER:
createUserInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
CreateUserInfo(globalVarAlloc_);
setOptionString(
node,
0,
createUserInfo_->userName_,
&createUserInfo_->userNameCaseSensitive_);
if (node.cmdOptionList_->size() > 1) {
setOptionString(
node, 1, createUserInfo_->password_);
}
break;
case SyntaxTree::CMD_DROP_USER:
dropUserInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
DropUserInfo(globalVarAlloc_);
setOptionString(
node,
0,
dropUserInfo_->userName_,
&dropUserInfo_->userNameCaseSensitive_);
break;
case SyntaxTree::CMD_SET_PASSWORD:
setPasswordInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
SetPasswordInfo(globalVarAlloc_);
setOptionString(
node,
0,
setPasswordInfo_->userName_,
&setPasswordInfo_->userNameCaseSensitive_);
if (node.cmdOptionList_->size() > 1) {
setOptionString(
node,
1,
setPasswordInfo_->password_);
}
break;
case SyntaxTree::CMD_GRANT:
grantInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
GrantInfo(globalVarAlloc_);
setOptionString(
node,
0,
grantInfo_->dbName_,
&grantInfo_->dbNameCaseSensitive_);
if (node.cmdOptionList_->size() > 1) {
setOptionString(
node,
1,
grantInfo_->userName_,
&grantInfo_->userNameCaseSensitive_);
}
if (node.cmdOptionList_->size() > 2) {
setOptionValue(
node,
2,
grantInfo_->controlType_);
}
break;
case SyntaxTree::CMD_REVOKE:
revokeInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
RevokeInfo(globalVarAlloc_);
setOptionString(
node,
0,
revokeInfo_->dbName_,
&revokeInfo_->dbNameCaseSensitive_);
if (node.cmdOptionList_->size() > 1) {
setOptionString(
node,
1,
revokeInfo_->userName_,
&revokeInfo_->userNameCaseSensitive_);
}
if (node.cmdOptionList_->size() > 2) {
setOptionValue(
node,
2,
revokeInfo_->controlType_);
}
break;
case SyntaxTree::CMD_CREATE_DATABASE:
createDatabaseInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
CreateDatabaseInfo(globalVarAlloc_);
setOptionString(
node,
0,
createDatabaseInfo_->dbName_,
&createDatabaseInfo_->dbNameCaseSensitive_);
break;
case SyntaxTree::CMD_DROP_DATABASE:
dropDatabaseInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
DropDatabaseInfo(globalVarAlloc_);
setOptionString(
node,
0,
dropDatabaseInfo_->dbName_,
&dropDatabaseInfo_->dbNameCaseSensitive_);
break;
case SyntaxTree::CMD_CREATE_TABLE:
createTableInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
CreateTableInfo(globalVarAlloc_);
if (node.qName_->db_) {
createTableInfo_->dbName_
= node.qName_->db_->c_str();
createTableInfo_->dbNameCaseSensitive_
= node.qName_->dbCaseSensitive_;
}
baseInfo_ = static_cast<DDLBaseInfo*>(
createTableInfo_);
if (node.qName_->table_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
createTableInfo_->tableName_
= node.qName_->table_->c_str();
createTableInfo_->tableNameCaseSensitive_
= node.qName_->tableCaseSensitive_;
if (node.createTableOpt_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
createTableInfo_->createTableOpt_
= node.createTableOpt_;
createTableInfo_->execId_ = cxt.getExecId();
createTableInfo_->jobVersionId_ = cxt.getVersionId();
createTableInfo_->processor_ = this;
break;
case SyntaxTree::CMD_DROP_TABLE:
dropTableInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
DropTableInfo(globalVarAlloc_);
baseInfo_ = static_cast<DDLBaseInfo*>(dropTableInfo_);
if (node.qName_->db_) {
dropTableInfo_->dbName_
= node.qName_->db_->c_str();
dropTableInfo_->dbNameCaseSensitive_
= node.qName_->dbCaseSensitive_;
}
if (node.qName_->table_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
dropTableInfo_->tableName_
= node.qName_->table_->c_str();
dropTableInfo_->tableNameCaseSensitive_
= node.qName_->tableCaseSensitive_;
if (node.cmdOptionFlag_ == 1) {
dropTableInfo_->ifExists_ = true;
}
dropTableInfo_->execId_ = cxt.getExecId();
dropTableInfo_->jobVersionId_ = cxt.getVersionId();
dropTableInfo_->processor_ = this;
break;
case SyntaxTree::CMD_CREATE_INDEX: {
createIndexInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
CreateIndexInfo(globalVarAlloc_);
baseInfo_ = static_cast<DDLBaseInfo*>(createIndexInfo_);
if (node.qName_->db_) {
createIndexInfo_->dbName_
= node.qName_->db_->c_str();
createIndexInfo_->dbNameCaseSensitive_
= node.qName_->dbCaseSensitive_;
}
if (node.qName_->table_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
createIndexInfo_->tableName_
= node.qName_->table_->c_str();
createIndexInfo_->tableNameCaseSensitive_
= node.qName_->tableCaseSensitive_;
if (node.createIndexOpt_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
if (node.cmdOptionList_->size() > 0) {
setOptionString(
node,
0,
createIndexInfo_->indexName_,
&createIndexInfo_->indexNameCaseSensitive_);
}
createIndexInfo_->createIndexOpt_
= node.createIndexOpt_;
createIndexInfo_->execId_ = cxt.getExecId();
createIndexInfo_->jobVersionId_ = cxt.getVersionId();
createIndexInfo_->processor_ = this;
}
break;
case SyntaxTree::CMD_DROP_INDEX:
dropIndexInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
DropIndexInfo(globalVarAlloc_);
baseInfo_ = static_cast<DDLBaseInfo*>(dropIndexInfo_);
if (node.qName_->db_) {
dropIndexInfo_->dbName_
= node.qName_->db_->c_str();
dropIndexInfo_->dbNameCaseSensitive_
= node.qName_->dbCaseSensitive_;
}
if (node.qName_->table_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
if (node.qName_->name_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Index name is invalid");
}
dropIndexInfo_->tableName_
= node.qName_->table_->c_str();
dropIndexInfo_->indexName_
= node.qName_->name_->c_str();
dropIndexInfo_->tableNameCaseSensitive_
= node.qName_->tableCaseSensitive_;
dropIndexInfo_->indexNameCaseSensitive_
= node.qName_->nameCaseSensitive_;
if (node.cmdOptionFlag_ == 1) {
dropIndexInfo_->ifExists_ = true;
}
dropIndexInfo_->execId_ = cxt.getExecId();
dropIndexInfo_->jobVersionId_ = cxt.getVersionId();
dropIndexInfo_->processor_ = this;
break;
case SyntaxTree::CMD_ALTER_TABLE_DROP_PARTITION:
dropPartitionInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
DropTablePartitionInfo(globalVarAlloc_);
baseInfo_ = static_cast<DDLBaseInfo*>(dropPartitionInfo_);
if (node.qName_->db_) {
dropPartitionInfo_->dbName_
= node.qName_->db_->c_str();
dropPartitionInfo_->dbNameCaseSensitive_
= node.qName_->dbCaseSensitive_;
}
if (node.qName_->table_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
dropPartitionInfo_->tableName_
= node.qName_->table_->c_str();
dropPartitionInfo_->tableNameCaseSensitive_
= node.qName_->tableCaseSensitive_;
if (node.cmdOptionFlag_ == 1) {
dropPartitionInfo_->ifExists_ = true;
}
dropPartitionInfo_->cmdOptionList_
= node.cmdOptionList_;
dropPartitionInfo_->execId_ = cxt.getExecId();
dropPartitionInfo_->jobVersionId_ = cxt.getVersionId();
dropPartitionInfo_->processor_ = this;
break;
case SyntaxTree::CMD_ALTER_TABLE_ADD_COLUMN:
addColumnInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
AddColumnInfo(globalVarAlloc_);
baseInfo_ = static_cast<DDLBaseInfo*>(addColumnInfo_);
if (node.qName_->db_) {
addColumnInfo_->dbName_
= node.qName_->db_->c_str();
addColumnInfo_->dbNameCaseSensitive_
= node.qName_->dbCaseSensitive_;
}
if (node.qName_->table_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
addColumnInfo_->tableName_
= node.qName_->table_->c_str();
addColumnInfo_->tableNameCaseSensitive_
= node.qName_->tableCaseSensitive_;
if (node.createTableOpt_->columnInfoList_) {
SyntaxTree::ColumnInfoList::iterator
itr = node.createTableOpt_->columnInfoList_->begin();
for (; itr != node.createTableOpt_->columnInfoList_->end();
++itr) {
if ((*itr)->isPrimaryKey()) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Column constraint PRIMARY KEY is not allowed here");
}
if ((*itr)->isVirtual()) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Column constraint VIRTUAL is not allowed here");
}
}
addColumnInfo_->createTableOpt_
= node.createTableOpt_;
addColumnInfo_->execId_ = cxt.getExecId();
addColumnInfo_->jobVersionId_ = cxt.getVersionId();
addColumnInfo_->processor_ = this;
}
else {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"No column list");
}
break;
case SyntaxTree::CMD_CREATE_VIEW:
createViewInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
CreateViewInfo(globalVarAlloc_);
if (node.qName_->db_) {
createViewInfo_->dbName_
= node.qName_->db_->c_str();
createViewInfo_->dbNameCaseSensitive_
= node.qName_->dbCaseSensitive_;
}
baseInfo_ = static_cast<DDLBaseInfo*>(createViewInfo_);
if (node.qName_->table_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"View name is invalid");
}
createViewInfo_->tableName_
= node.qName_->table_->c_str();
createViewInfo_->tableNameCaseSensitive_
= node.qName_->tableCaseSensitive_;
if (node.createTableOpt_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"View parametor is invalid");
}
createViewInfo_->createTableOpt_
= node.createTableOpt_;
createViewInfo_->execId_ = cxt.getExecId();
createViewInfo_->jobVersionId_ = cxt.getVersionId();
createViewInfo_->processor_ = this;
break;
case SyntaxTree::CMD_DROP_VIEW:
dropViewInfo_ = ALLOC_VAR_SIZE_NEW(globalVarAlloc_)
DropViewInfo(globalVarAlloc_);
baseInfo_ = static_cast<DDLBaseInfo*>(dropViewInfo_);
if (node.qName_->db_) {
dropViewInfo_->dbName_
= node.qName_->db_->c_str();
dropViewInfo_->dbNameCaseSensitive_
= node.qName_->dbCaseSensitive_;
}
if (node.qName_->table_ == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_DDL_INVALID_PARAMETER,
"Table name is invalid");
}
dropViewInfo_->tableName_
= node.qName_->table_->c_str();
dropViewInfo_->tableNameCaseSensitive_
= node.qName_->tableCaseSensitive_;
if (node.cmdOptionFlag_ == 1) {
dropViewInfo_->ifExists_ = true;
}
dropViewInfo_->execId_ = cxt.getExecId();
dropViewInfo_->jobVersionId_ = cxt.getVersionId();
dropViewInfo_->processor_ = this;
break;
default:
break;
}
}
else if (option.byteInStream_ != NULL) {
}
else if (option.inStream_ != NULL) {
}
else if (option.jsonValue_ != NULL) {
}
else {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_PROC_INTERNAL_INVALID_OPTION, "");
}
outputInfo.push_back(TupleList::TYPE_LONG);
ExecutionLatch latch(
clientId_,
resourceSet_->getSQLExecutionManager()
->getResourceManager(),
NULL);
execution_ = latch.get();
return true;
}
catch (std::exception &e) {
GS_RETHROW_USER_OR_SYSTEM(e, "");
}
}
bool DDLProcessor::pipe(
Context &cxt, InputId inputId,
const Block &block) {
UNUSED_VARIABLE(inputId);
UNUSED_VARIABLE(block);
try {
EventContext &ec = *cxt.getEventContext();
ExecutionLatch latch(
clientId_,
cxt.getExecutionManager()->getResourceManager(),
NULL);
SQLExecution *execution = latch.get();
if (execution == NULL) {
GS_THROW_USER_ERROR(
GS_ERROR_SQL_CANCELLED,
"Cancel SQL, clientId=" << clientId_ << ", location=ddl");
}
NoSQLDB *db = resourceSet_->getSQLExecutionManager()->getDB();
bool isFinish = true;
switch (commandType_) {
case SyntaxTree::CMD_CREATE_USER:
db->createUser(
ec,
execution,
NameWithCaseSensitivity(
createUserInfo_->userName_.c_str(),
createUserInfo_->userNameCaseSensitive_),
NameWithCaseSensitivity(
createUserInfo_->password_.c_str()));
break;
case SyntaxTree::CMD_DROP_USER:
db->dropUser(
ec,
execution,
NameWithCaseSensitivity(
dropUserInfo_->userName_.c_str(),
dropUserInfo_->userNameCaseSensitive_));
break;
case SyntaxTree::CMD_SET_PASSWORD:
db->setPassword(
ec,
execution,
NameWithCaseSensitivity(
setPasswordInfo_->userName_.c_str(),
setPasswordInfo_->userNameCaseSensitive_),
NameWithCaseSensitivity(
setPasswordInfo_->password_.c_str()));
break;
case SyntaxTree::CMD_GRANT:
db->grant(
ec,
execution,
NameWithCaseSensitivity(
grantInfo_->userName_.c_str(),
grantInfo_->userNameCaseSensitive_),
NameWithCaseSensitivity(
grantInfo_->dbName_.c_str(),
grantInfo_->dbNameCaseSensitive_),
grantInfo_->controlType_);
break;
case SyntaxTree::CMD_REVOKE:
db->revoke(
ec,
execution,
NameWithCaseSensitivity(
revokeInfo_->userName_.c_str(),
revokeInfo_->userNameCaseSensitive_),
NameWithCaseSensitivity(
revokeInfo_->dbName_.c_str(),
revokeInfo_->dbNameCaseSensitive_),
revokeInfo_->controlType_);
break;
case SyntaxTree::CMD_CREATE_DATABASE:
db->createDatabase(
ec,
execution,
NameWithCaseSensitivity(
createDatabaseInfo_->dbName_.c_str(),
createDatabaseInfo_->dbNameCaseSensitive_));
break;
case SyntaxTree::CMD_DROP_DATABASE:
db->dropDatabase(
ec,
execution,
NameWithCaseSensitivity(
dropDatabaseInfo_->dbName_.c_str(),
dropDatabaseInfo_->dbNameCaseSensitive_));
break;
case SyntaxTree::CMD_CREATE_TABLE:
db->createTable(
ec,
execution,
NameWithCaseSensitivity(
createTableInfo_->dbName_.c_str(),
createTableInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
createTableInfo_->tableName_.c_str(),
createTableInfo_->tableNameCaseSensitive_),
*createTableInfo_->createTableOpt_,
createTableInfo_);
break;
case SyntaxTree::CMD_DROP_TABLE:
if (phase_ == DDL_STATUS_INIT) {
db->dropTable(
ec,
execution,
NameWithCaseSensitivity(
dropTableInfo_->dbName_.c_str(),
dropTableInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
dropTableInfo_->tableName_.c_str(),
dropTableInfo_->tableNameCaseSensitive_),
dropTableInfo_->ifExists_,
dropTableInfo_);
isFinish = dropTableInfo_->isFinished_;
phase_ = DDL_STATUS_POST;
}
else {
currentAckCount_++;
if (currentAckCount_ != targetAckCount_) {
isFinish = false;
}
phase_ = DDL_STATUS_END;
}
break;
case SyntaxTree::CMD_CREATE_INDEX:
if (phase_ == DDL_STATUS_INIT) {
createIndexInfo_->ec_ = cxt.getEventContext();
db->createIndex(
ec,
execution,
NameWithCaseSensitivity(
createIndexInfo_->dbName_.c_str(),
createIndexInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
createIndexInfo_->indexName_.c_str(),
createIndexInfo_->indexNameCaseSensitive_),
NameWithCaseSensitivity(
createIndexInfo_->tableName_.c_str(),
createIndexInfo_->tableNameCaseSensitive_),
*createIndexInfo_->createIndexOpt_,
createIndexInfo_);
isFinish = createIndexInfo_->isFinished_;
phase_ = DDL_STATUS_POST;
}
else {
currentAckCount_++;
if (currentAckCount_ != targetAckCount_) {
isFinish = false;
}
else {
db->createIndexPost(
ec,
execution,
NameWithCaseSensitivity(
createIndexInfo_->dbName_.c_str(),
createIndexInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
createIndexInfo_->indexName_.c_str(),
createIndexInfo_->indexNameCaseSensitive_),
NameWithCaseSensitivity(
createIndexInfo_->tableName_.c_str(),
createIndexInfo_->tableNameCaseSensitive_),
createIndexInfo_);
phase_ = DDL_STATUS_END;
}
}
break;
case SyntaxTree::CMD_DROP_INDEX:
if (phase_ == DDL_STATUS_INIT) {
dropIndexInfo_->ec_ = cxt.getEventContext();
db->dropIndex(
ec,
execution,
NameWithCaseSensitivity(
dropIndexInfo_->dbName_.c_str(),
dropIndexInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
dropIndexInfo_->indexName_.c_str(),
dropIndexInfo_->indexNameCaseSensitive_),
NameWithCaseSensitivity(
dropIndexInfo_->tableName_.c_str(),
dropIndexInfo_->tableNameCaseSensitive_),
dropIndexInfo_->ifExists_,
dropIndexInfo_);
isFinish = dropIndexInfo_->isFinished_;
phase_ = DDL_STATUS_POST;
}
else {
currentAckCount_++;
if (currentAckCount_ != targetAckCount_) {
isFinish = false;
}
else {
db->dropIndexPost(
ec,
execution,
NameWithCaseSensitivity(
dropIndexInfo_->dbName_.c_str(),
dropIndexInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
dropIndexInfo_->indexName_.c_str(),
dropIndexInfo_->indexNameCaseSensitive_),
NameWithCaseSensitivity(
dropIndexInfo_->tableName_.c_str(),
dropIndexInfo_->tableNameCaseSensitive_),
dropIndexInfo_);
phase_ = DDL_STATUS_END;
}
}
break;
case SyntaxTree::CMD_ALTER_TABLE_DROP_PARTITION:
if (phase_ == DDL_STATUS_INIT) {
db->dropTablePartition(
ec,
execution,
NameWithCaseSensitivity(
dropPartitionInfo_->dbName_.c_str(),
dropPartitionInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
dropPartitionInfo_->tableName_.c_str(),
dropPartitionInfo_->tableNameCaseSensitive_),
dropPartitionInfo_);
isFinish = dropPartitionInfo_->isFinished_;
phase_ = DDL_STATUS_POST;
}
else {
currentAckCount_++;
if (currentAckCount_ != targetAckCount_) {
isFinish = false;
}
else {
db->dropTablePartitionPost(
ec,
execution,
NameWithCaseSensitivity(
dropPartitionInfo_->dbName_.c_str(),
dropPartitionInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
dropPartitionInfo_->tableName_.c_str(),
dropPartitionInfo_->tableNameCaseSensitive_));
}
phase_ = DDL_STATUS_END;
}
break;
case SyntaxTree::CMD_ALTER_TABLE_ADD_COLUMN:
db->addColumn(
ec,
execution,
NameWithCaseSensitivity(
addColumnInfo_->dbName_.c_str(),
addColumnInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
addColumnInfo_->tableName_.c_str(),
addColumnInfo_->tableNameCaseSensitive_),
*addColumnInfo_->createTableOpt_,
addColumnInfo_);
break;
case SyntaxTree::CMD_CREATE_VIEW:
db->createView(
ec,
execution,
NameWithCaseSensitivity(
createViewInfo_->dbName_.c_str(),
createViewInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
createViewInfo_->tableName_.c_str(),
createViewInfo_->tableNameCaseSensitive_),
*createViewInfo_->createTableOpt_,
createViewInfo_);
break;
case SyntaxTree::CMD_DROP_VIEW:
db->dropView(
ec,
execution,
NameWithCaseSensitivity(
dropViewInfo_->dbName_.c_str(),
dropViewInfo_->dbNameCaseSensitive_),
NameWithCaseSensitivity(
dropViewInfo_->tableName_.c_str(),
dropViewInfo_->tableNameCaseSensitive_),
dropViewInfo_->ifExists_);
break;
default:
break;
}
if (isFinish) {
int64_t resultCount = 0;
TupleList::Block block;
getResultCountBlock(cxt, resultCount, block);
cxt.transfer(block);
cxt.finish();
}
}
catch (std::exception &e) {
GS_RETHROW_USER_OR_SYSTEM(e, "");
}
return false;
}
bool DDLProcessor::finish(
Context &cxt, InputId inputId) {
UNUSED_VARIABLE(cxt);
UNUSED_VARIABLE(inputId);
try {
GS_THROW_USER_ERROR(GS_ERROR_SQL_DDL_INTERNAL, "");
}
catch (std::exception &e) {
GS_RETHROW_USER_OR_SYSTEM(e, "");
}
return false;
}
void DDLProcessor::exportTo(
Context &cxt, const OutOption &option) const {
UNUSED_VARIABLE(cxt);
UNUSED_VARIABLE(option);
}
void DDLProcessor::setOptionString(
const SQLPreparedPlan::Node &node,
int32_t pos,
SQLString &str,
bool *isCaseSensitive) {
if ((*node.cmdOptionList_)[pos] != NULL) {
const SyntaxTree::Expr&
expr = *(*node.cmdOptionList_)[pos];
if (expr.op_ == SQLType::EXPR_CONSTANT) {
if (expr.value_.getType() == TupleList::TYPE_STRING) {
str.append(
static_cast<const char*>(expr.value_.varData()),
expr.value_.varSize());
}
else if (expr.value_.getType() == TupleList::TYPE_NULL) {
}
else {
assert(0);
}
if (isCaseSensitive != NULL) {
*isCaseSensitive = expr.qName_->nameCaseSensitive_;
}
}
else if (expr.op_ == SQLType::EXPR_COLUMN ||
expr.op_ == SQLType::EXPR_ID) {
assert(expr.qName_ && expr.qName_->name_);
SQLString tmpStr(globalVarAlloc_);
tmpStr = expr.qName_->name_->c_str();
str.append(tmpStr);
if (isCaseSensitive != NULL) {
*isCaseSensitive = expr.qName_->nameCaseSensitive_;
}
}
else {
assert(false);
}
}
}
template <typename T>
void DDLProcessor::setOptionValue(
const SQLPreparedPlan::Node &node,
int32_t pos, T &t) {
int32_t value;
memcpy(&value,
(*node.cmdOptionList_)[pos]->value_.fixedData(),
sizeof(int32_t));
t = static_cast<T>(value);
}
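/*
 * Records the ACK status of the container at the given position and, when the
 * container is supplied, captures enough routing information (container id,
 * partition, owner node, revision, session) to abort or re-validate it later.
 */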
void DDLProcessor::setAckStatus(
NoSQLContainer *containerInfo,
int32_t pos,
uint8_t status) {
if (pos < 0) {
return;
}
if (pos < static_cast<int32_t>(ackList_.size())) {
}
else {
return;
}
ackList_[pos] = status;
if (containerInfo) {
AckContainerInfo ackInfo;
ackInfo.containerId_ = containerInfo->getContainerId();
ackInfo.pId_ = containerInfo->getPartitionId();
ackInfo.stmtId_ = containerInfo->getStatementId();
ackInfo.clientId_ = containerInfo->getNoSQLClientId();
if (execution_) {
ackInfo.sessionId_
= execution_->getContext().getCurrentSessionId();
}
PartitionTable *pt
= execution_->getResourceSet()->getPartitionTable();
if (ackInfo.pId_ != UNDEF_PARTITIONID) {
ackInfo.ptRev_
= pt->getNewSQLPartitionRevision(ackInfo.pId_);
ackInfo.nodeId_
= pt->getNewSQLOwner(ackInfo.pId_);
ackInfo.masterNodeId_ = pt->getMaster();
ackInfo.pos_ = pos;
}
ackContainerInfoList_.push_back(ackInfo);
}
}
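/*
 * Verifies that every partition referenced by an un-acked container is still
 * served by the same owner node at the same partition revision (and under the
 * same master) as when the request was issued; a mismatch raises a DenyException.
 */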
void DDLProcessor::checkPartitionStatus() {
if (phase_ == DDL_STATUS_INIT) return;
PartitionTable *pt = resourceSet_->getPartitionTable();
assert(ackList_.size() == ackContainerInfoList_.size());
for (size_t pos = 0;
pos < ackContainerInfoList_.size(); pos++) {
size_t realPos = ackContainerInfoList_[pos].pos_;
if (realPos >= 0
&& realPos < ackList_.size()
&& ackList_[realPos] == ACK_STATUS_OFF) continue;
PartitionId pId = ackContainerInfoList_[pos].pId_;
if (pId == UNDEF_PARTITIONID) continue;
if (ackContainerInfoList_[pos].ptRev_
!= pt->getNewSQLPartitionRevision(pId)
|| ackContainerInfoList_[pos].masterNodeId_
!= pt->getMaster()) {
GS_THROW_CUSTOM_ERROR(DenyException,
GS_ERROR_TXN_PARTITION_ROLE_UNMATCH,
"Check partition status, unmatch partition revison "
"(expected revision=" << ackContainerInfoList_[pos].ptRev_
<< ", actual revision=" <<pt->getNewSQLPartitionRevision(pId)
<< ", expected master=" << pt->dumpNodeAddress(
ackContainerInfoList_[pos].masterNodeId_)
<< ", actual master=" << pt->dumpNodeAddress(pt->getMaster()) << ")");
}
if (ackContainerInfoList_[pos].nodeId_
!= pt->getNewSQLOwner(pId)) {
GS_THROW_CUSTOM_ERROR(
DenyException,
GS_ERROR_TXN_PARTITION_ROLE_UNMATCH,
"Check partition status, "
"unmatch partition role (expected owner="
<< pt->dumpNodeAddress(
ackContainerInfoList_[pos].nodeId_)
<< ", actual owner="
<< pt->dumpNodeAddress(
					pt->getNewSQLOwner(pId)) << ")");
}
}
}
| 15,331 |
852 | import FWCore.ParameterSet.Config as cms
TrackerMapTest = cms.EDFilter("TrackerGeometryTest",
TkmapParameters = cms.PSet(
loadFedCabling = cms.untracked.bool(False),
trackerdatPath = cms.untracked.string('CommonTools/TrackerMap/data/'),
trackermaptxtPath = cms.untracked.string('CommonTools/TrackerMap/data/')
)
)
| 138 |
372 | /*
* DISTRHO Plugin Framework (DPF)
* Copyright (C) 2012-2021 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and/or distribute this software for any purpose with
* or without fee is hereby granted, provided that the above copyright notice and this
* permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
* TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN
* NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
* DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
* IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
// needed for IDE
#include "DistrhoPluginInfo.h"
#include "DistrhoUI.hpp"
#define MPV_TEST
// #define KDE_FIFO_TEST
#ifdef KDE_FIFO_TEST
// Extra includes for current path and fifo stuff
#include <dlfcn.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#endif
START_NAMESPACE_DISTRHO
#ifdef KDE_FIFO_TEST
// TODO: generate a random, not-yet-existing, filename
const char* const kFifoFilename = "/tmp/dpf-fifo-test";
// Helper to get current path of this plugin
static const char* getCurrentPluginFilename()
{
Dl_info exeInfo;
void* localSymbol = (void*)kFifoFilename;
dladdr(localSymbol, &exeInfo);
return exeInfo.dli_fname;
}
// Helper to check if a file exists
static bool fileExists(const char* const filename)
{
return access(filename, F_OK) != -1;
}
// Helper function to keep trying to write until it succeeds or really errors out
static ssize_t
writeRetry(int fd, const void* src, size_t size)
{
ssize_t error;
int attempts = 0;
do {
error = write(fd, src, size);
} while (error == -1 && (errno == EINTR || errno == EPIPE) && ++attempts < 5);
return error;
}
#endif
// -----------------------------------------------------------------------------------------------------------
class ExternalExampleUI : public UI
{
public:
ExternalExampleUI()
: UI(405, 256),
#ifdef KDE_FIFO_TEST
fFifo(-1),
fExternalScript(getNextBundlePath()),
#endif
fValue(0.0f)
{
#ifdef KDE_FIFO_TEST
if (fExternalScript.isEmpty())
{
fExternalScript = getCurrentPluginFilename();
fExternalScript.truncate(fExternalScript.rfind('/'));
}
fExternalScript += "/ExternalLauncher.sh";
d_stdout("External script = %s", fExternalScript.buffer());
#endif
if (isVisible() || isEmbed())
visibilityChanged(true);
}
~ExternalExampleUI()
{
if (isEmbed())
terminateAndWaitForExternalProcess();
}
protected:
/* --------------------------------------------------------------------------------------------------------
* DSP/Plugin Callbacks */
/**
A parameter has changed on the plugin side.
This is called by the host to inform the UI about parameter changes.
*/
void parameterChanged(uint32_t index, float value) override
{
if (index != 0)
return;
fValue = value;
#ifdef KDE_FIFO_TEST
if (fFifo == -1)
return;
// NOTE: This is a terrible way to pass values, also locale might get in the way...
char valueStr[24];
std::memset(valueStr, 0, sizeof(valueStr));
std::snprintf(valueStr, 23, "%i\n", static_cast<int>(value + 0.5f));
DISTRHO_SAFE_ASSERT(writeRetry(fFifo, valueStr, 24) == sizeof(valueStr));
#endif
}
/* --------------------------------------------------------------------------------------------------------
* External Window overrides */
/**
Keep-alive.
*/
void uiIdle() override
{
#ifdef KDE_FIFO_TEST
if (fFifo == -1)
return;
writeRetry(fFifo, "idle\n", 5);
#endif
}
/**
Manage external process and IPC when UI is requested to be visible.
*/
void visibilityChanged(const bool visible) override
{
#ifdef KDE_FIFO_TEST
if (visible)
{
DISTRHO_SAFE_ASSERT_RETURN(fileExists(fExternalScript),);
mkfifo(kFifoFilename, 0666);
sync();
char winIdStr[24];
std::memset(winIdStr, 0, sizeof(winIdStr));
std::snprintf(winIdStr, 23, "%lu", getTransientWindowId());
const char* args[] = {
fExternalScript.buffer(),
kFifoFilename,
"--progressbar", "External UI example",
"--title", getTitle(),
nullptr,
};
DISTRHO_SAFE_ASSERT_RETURN(startExternalProcess(args),);
// NOTE: this can lockup the current thread if the other side does not read the file!
fFifo = open(kFifoFilename, O_WRONLY);
DISTRHO_SAFE_ASSERT_RETURN(fFifo != -1,);
parameterChanged(0, fValue);
}
else
{
if (fFifo != -1)
{
if (isRunning())
{
DISTRHO_SAFE_ASSERT(writeRetry(fFifo, "quit\n", 5) == 5);
fsync(fFifo);
}
::close(fFifo);
fFifo = -1;
}
unlink(kFifoFilename);
terminateAndWaitForExternalProcess();
}
#endif
#ifdef MPV_TEST
if (visible)
{
const char* const file = "/home/falktx/Videos/HD/"; // TODO make this a state file?
if (isEmbed())
{
char winIdStr[64];
snprintf(winIdStr, sizeof(winIdStr), "--wid=%lu", getParentWindowHandle());
const char* args[] = {
"mpv",
"--ao=jack",
winIdStr,
file,
nullptr
};
unsetenv("LD_LIBRARY_PATH");
startExternalProcess(args);
}
else
{
const char* args[] = {
"mpv",
"--ao=jack",
file,
nullptr
};
startExternalProcess(args);
}
}
else
{
terminateAndWaitForExternalProcess();
}
#endif
}
// -------------------------------------------------------------------------------------------------------
private:
#ifdef KDE_FIFO_TEST
// IPC Stuff
int fFifo;
// Path to external ui script
String fExternalScript;
#endif
// Current value, cached for when UI becomes visible
float fValue;
/**
Set our UI class as non-copyable and add a leak detector just in case.
*/
DISTRHO_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(ExternalExampleUI)
};
/* ------------------------------------------------------------------------------------------------------------
* UI entry point, called by DPF to create a new UI instance. */
UI* createUI()
{
return new ExternalExampleUI();
}
// -----------------------------------------------------------------------------------------------------------
END_NAMESPACE_DISTRHO
| 3,165 |
583 | from typing import List, Tuple
import random
from interfaces.TaggingOperation import TaggingOperation
from tasks.TaskTypes import TaskType
"""
A tagging implementation of NER systems.
I am travelling to London. --> I am travelling to South/Central London.
"""
def pick_random_word(seed):
"""
pick random "pre" or "post" perturb type with respective word
"""
pre_word_list = ["East", "West", "South", "North", "Eastern", "Western", "Central", "Southern",
"Northern", "Northeast", "Southwest", "Southeast", "Northwest", "NE", "SW", "SE", "NW",
"ESE", "SSE", "SSW", "WSW", "WNW", "NNW", "NNE", "North East", "ENE", "South West",
"South East", "North West", "Eastern South East", "Southern South East",
"Southern South West", "Western South West", "Western North West", "Northern North West",
"Northern North East", "Eastern North East"]
post_word_list = ["City", "Republic", "University", "Airport", "Palace"]
random.seed(seed)
if bool(random.getrandbits(1)):
perturb_type = "pre"
selected_word = random.choice(pre_word_list)
else:
perturb_type = "post"
selected_word = random.choice(post_word_list)
return selected_word, perturb_type
def create_token_and_tag_seq(token_seq, tag_seq, b_tag_index, i_tag_index, b_tag, i_tag, seed):
"""
Select perturb_type and respected phrase associated with it randomly.
"""
phrase, perturb_type = pick_random_word(seed)
if perturb_type == "pre":
token_seq, tag_seq = add_location_prefix(token_seq, tag_seq, phrase, b_tag_index, b_tag, i_tag)
if perturb_type == "post":
token_seq, tag_seq = add_location_postfix(token_seq, tag_seq, phrase, i_tag_index, i_tag)
return token_seq, tag_seq
def add_location_prefix(token_seq, tag_seq, phrase, b_tag_index, b_tag, i_tag):
"""
Create token sequence and tag sequence for prefix perturbation
"""
word_list = phrase.strip().split(" ")
if len(word_list) == 1: # if selected phrase contains single word
token_seq.insert(b_tag_index, phrase) # put phrase in token_seq at index b_tag_index
tag_seq[b_tag_index] = i_tag # replace B-LOC with I-LOC
tag_seq.insert(b_tag_index, b_tag) # put B-LOC in tag_seq at index b_tag_index
return token_seq, tag_seq
else: # if selected phrase is multi-word
for i, word in enumerate(word_list):
if i == 0:
tag_seq[b_tag_index] = i_tag # change B-LOC to I-LOC
token_seq.insert(b_tag_index, word)
tag_seq.insert(b_tag_index, b_tag)
else:
token_seq.insert(b_tag_index + i, word)
tag_seq.insert(b_tag_index + i, i_tag)
return token_seq, tag_seq
def add_location_postfix(token_seq, tag_seq, phrase, i_tag_index, i_tag):
"""
Create token sequence and tag sequence for postfix perturbation
"""
word_list = phrase.strip().split(" ")
if len(word_list) == 1:
token_seq.insert(i_tag_index + 1, phrase)
tag_seq.insert(i_tag_index + 1, i_tag)
return token_seq, tag_seq
else:
for i, word in enumerate(word_list):
i += 1
token_seq.insert(i_tag_index + i, word)
tag_seq.insert(i_tag_index + i, i_tag)
return token_seq, tag_seq
def extract_tag_indexes(tag_seq, b_tag, i_tag):
"""
Returns index of B-LOC and I-LOC, I-LOC = -1 if no I-LOC exist
"""
i_tag_index = -1 # default value
b_tag_index = tag_seq.index(b_tag)
if b_tag_index == len(tag_seq) - 1:
i_tag_index = b_tag_index
return b_tag_index, i_tag_index # case when B-LOC at the end of tag_sequence", no I-LOC
if tag_seq[b_tag_index + 1] == "O":
i_tag_index = b_tag_index # case when B-LOC in the middle but no "I-LOC"
return b_tag_index, i_tag_index
else:
for i in range(b_tag_index + 1, len(tag_seq)):
if tag_seq[i] == i_tag:
i_tag_index = i # return index of last I-LOC
return b_tag_index, i_tag_index
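# Example (illustrative, with b_tag="B-LOC" and i_tag="I-LOC"): for
# tag_seq = ["O", "B-LOC", "I-LOC", "O"] this returns (1, 2); for
# ["O", "B-LOC", "O"] it returns (1, 1), i.e. i_tag_index falls back to the
# B-LOC position when no I-LOC follows.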
class LongerLocationNer(TaggingOperation):
tasks = [TaskType.TEXT_TAGGING]
languages = "en"
def __init__(self, max_outputs=1, seed=0):
super().__init__(seed, max_outputs=max_outputs)
def generate(
self, token_sequence: List[str], tag_sequence: List[str]
) -> List[Tuple[List[str], List[str]]]:
token_seq = token_sequence.copy()
tag_seq = tag_sequence.copy()
perturbed_sentences = []
tag = "LOCATION" if "B-LOCATION" in tag_seq else "LOC"
b_tag = "B-" + tag
i_tag = "I-" + tag
assert len(token_seq) == len(tag_seq), \
"Lengths of `token_sequence` and `tag_sequence` should be the same"
if b_tag in tag_seq:
b_tag_index, i_tag_index = extract_tag_indexes(tag_seq, b_tag, i_tag)
for _ in range(self.max_outputs):
token_seq, tag_seq = create_token_and_tag_seq(token_seq, tag_seq, b_tag_index, i_tag_index,
b_tag, i_tag, self.seed)
#assert len(token_seq) == len(tag_seq)
perturbed_sentences.append((token_seq, tag_seq))
token_seq = token_sequence.copy()
tag_seq = tag_sequence.copy()
return perturbed_sentences
# if __name__ == '__main__':
# import json
# from TestRunner import convert_to_snake_case
#
# tf = LongerLocationNer(max_outputs=1)
# test_cases = []
# src = ["I am going to New Zealand via Costa Rica .",
# "I am travelling to London .",
# "Edison was born in Ohio .",
# "<NAME> is a professor at Berkeley .",
# "Google head office is located in California ."
# ]
# tgt = ["O O O O B-LOC I-LOC O B-LOC I-LOC O",
# "O O O O B-LOC O",
# "B-PER O O O B-LOC O",
# "B-PER I-PER O O O O B-LOC O",
# "B-ORG O O O O O B-LOC O"
# ]
#
# for i, (token_sequence, tag_sequence) in enumerate(zip(src, tgt)):
# sentences = tf.generate(token_sequence.split(" "), tag_sequence.split(" "))
# test_cases.append({
# "class": tf.name(),
# "inputs": {"token_sequence": token_sequence, "tag_sequence":tag_sequence},
# "outputs": []}
# )
# for sentence, target in sentences:
# test_cases[i]["outputs"].append({"token_sequence": " ".join(sentence)
# , "tag_sequence":" ".join(target)})
# json_file = {"type": convert_to_snake_case(tf.name()), "test_cases":test_cases}
# print(json.dumps(json_file))
# for i, (token_seq, tag_seq) in enumerate(zip(src, tgt)):
# tf = LongerLocationNer(max_outputs=1)
# print(token_seq, tag_seq)
# res = tf.generate(token_seq.split(" "), tag_seq.split(" "))
# print(res)
| 3,279 |
892 | <filename>advisories/unreviewed/2022/05/GHSA-wxqx-h8v6-v72p/GHSA-wxqx-h8v6-v72p.json
{
"schema_version": "1.2.0",
"id": "GHSA-wxqx-h8v6-v72p",
"modified": "2022-05-01T07:42:37Z",
"published": "2022-05-01T07:42:37Z",
"aliases": [
"CVE-2006-6887"
],
"details": "Unrestricted file upload vulnerability in logahead UNU 1.0 allows remote attackers to upload and execute arbitrary PHP code via unspecified vectors related to plugins/widged/_widged.php (aka the WidgEd plugin), a different vulnerability than CVE-2006-6783. NOTE: The provenance of this information is unknown; the details are obtained solely from third party information.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2006-6887"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/23470"
}
],
"database_specific": {
"cwe_ids": [
"CWE-94"
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 430 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.form;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.event.*;
import java.awt.*;
import java.awt.event.*;
import java.beans.BeanInfo;
import java.util.*;
import org.openide.nodes.*;
import org.openide.util.HelpCtx;
import org.openide.util.actions.SystemAction;
import org.netbeans.modules.form.palette.*;
import org.netbeans.modules.form.actions.TestAction;
import org.openide.util.ImageUtilities;
/**
* ToolBar in the FormDesigner - by default it holds buttons for selection and
* connection mode and for testing the form. May contain other buttons for
* some form editor actions.
*
* @author <NAME>
*/
final class FormToolBar {
private FormDesigner formDesigner;
private JToolBar toolbar;
private JToggleButton selectionButton;
private JToggleButton connectionButton;
private JToggleButton paletteButton;
private JLabel addLabel;
private PaletteMenuView paletteMenuView;
private Listener listener;
public FormToolBar(FormDesigner designer, JToolBar toolbar) {
this.formDesigner = designer;
if (toolbar == null) {
toolbar = new ToolBar();
} else {
Object tb = toolbar.getClientProperty(FormToolBar.class);
if (tb instanceof FormToolBar) { // clean everything added by the previous FormToolBar
FormToolBar prevFormToolBar = (FormToolBar) tb;
toolbar.removeMouseListener(prevFormToolBar.listener);
// remove all relevant components - the first one is a horizontal strut before the selection button
int i = toolbar.getComponentIndex(prevFormToolBar.selectionButton) - 1;
if (i >= 0) {
while (i < toolbar.getComponentCount()) {
toolbar.remove(i);
}
}
}
}
this.toolbar = toolbar;
toolbar.putClientProperty(FormToolBar.class, this);
toolbar.putClientProperty("isPrimary", Boolean.TRUE); // for JDev // NOI18N
listener = new Listener();
// selection button
selectionButton = new JToggleButton(new ImageIcon(ImageUtilities.loadImage("org/netbeans/modules/form/resources/selection_mode.png", true)), // NOI18N
false);
selectionButton.addActionListener(listener);
selectionButton.addMouseListener(listener);
selectionButton.setToolTipText(
FormUtils.getBundleString("CTL_SelectionButtonHint")); // NOI18N
HelpCtx.setHelpIDString(selectionButton, "gui.about"); // NOI18N
selectionButton.setSelected(true);
initButton(selectionButton);
// connection button
connectionButton = new JToggleButton(new ImageIcon(ImageUtilities.loadImage("org/netbeans/modules/form/resources/connection_mode.png", true)), // NOI18N
false);
connectionButton.addActionListener(listener);
connectionButton.addMouseListener(listener);
connectionButton.setToolTipText(
FormUtils.getBundleString("CTL_ConnectionButtonHint")); // NOI18N
HelpCtx.setHelpIDString(connectionButton, "gui.connecting.intro"); // NOI18N
initButton(connectionButton);
// palette button
paletteButton = new JToggleButton(
new ImageIcon(getClass().getResource(
"/org/netbeans/modules/form/resources/beansButton.gif")), // NOI18N
false);
paletteButton.addActionListener(listener);
paletteButton.addMouseListener(listener);
paletteButton.setToolTipText(
FormUtils.getBundleString("CTL_BeansButtonHint")); // NOI18N
HelpCtx.setHelpIDString(paletteButton, "gui.components.adding"); // NOI18N
initButton(paletteButton);
// status label
addLabel = new JLabel();
addLabel.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 6));
// popup menu
toolbar.addMouseListener(listener);
// a11y
connectionButton.getAccessibleContext().setAccessibleName(connectionButton.getToolTipText());
selectionButton.getAccessibleContext().setAccessibleName(selectionButton.getToolTipText());
paletteButton.getAccessibleContext().setAccessibleName(paletteButton.getToolTipText());
connectionButton.getAccessibleContext().setAccessibleDescription(FormUtils.getBundleString("ACSD_ConnectionMode")); // NOI18N
selectionButton.getAccessibleContext().setAccessibleDescription(FormUtils.getBundleString("ACSD_SelectionMode")); // NOI18N
paletteButton.getAccessibleContext().setAccessibleDescription(FormUtils.getBundleString("ACSD_AddMode")); // NOI18N
// adding the components to the toolbar
JToolBar.Separator separator2 = new JToolBar.Separator();
separator2.setOrientation(JSeparator.VERTICAL);
JToolBar.Separator separator3 = new JToolBar.Separator();
separator3.setOrientation(JSeparator.VERTICAL);
TestAction testAction = SystemAction.get(TestAction.class);
JButton testButton = (JButton) testAction.getToolbarPresenter();
testButton.addMouseListener(listener);
initButton(testButton);
toolbar.add(Box.createHorizontalStrut(6));
toolbar.add(selectionButton);
toolbar.add(connectionButton);
toolbar.add(paletteButton);
toolbar.add(Box.createHorizontalStrut(6));
toolbar.add(testButton);
toolbar.add(Box.createHorizontalStrut(4));
toolbar.add(separator2);
toolbar.add(Box.createHorizontalStrut(4));
installDesignerActions();
toolbar.add(Box.createHorizontalStrut(4));
toolbar.add(separator3);
toolbar.add(Box.createHorizontalStrut(4));
installResizabilityActions();
// Add "addLabel" at the end of the toolbar
toolbar.add(Box.createHorizontalGlue());
toolbar.add(addLabel);
if (!FormLoaderSettings.getInstance().isPaletteInToolBar()) {
showPaletteButton(false);
}
}
JToolBar getToolBar() {
return toolbar;
}
void installDesignerActions() {
Collection actions = formDesigner.getDesignerActions(true);
Iterator iter = actions.iterator();
while (iter.hasNext()) {
Action action = (Action)iter.next();
JButton button = toolbar.add(action);
initButton(button);
}
}
void installResizabilityActions() {
Action[] actions = formDesigner.getResizabilityActions();
JToggleButton[] resButtons = new JToggleButton[2];
for (int i=0; i < actions.length; i++) {
Action action = actions[i];
JToggleButton button = new JToggleButton();
button.setAction(action);
initButton(button);
resButtons[i] = button;
toolbar.add(button);
toolbar.add(Box.createHorizontalStrut(2));
}
}
// --------
private void initButton(AbstractButton button) {
if (!("Windows".equals(UIManager.getLookAndFeel().getID()) // NOI18N
&& (button instanceof JToggleButton))) {
button.setBorderPainted(false);
}
button.setOpaque(false);
button.setFocusPainted(false);
button.setMargin(new Insets(0, 0, 0, 0));
}
void updateDesignerMode(int mode) {
selectionButton.setSelected(mode == FormDesigner.MODE_SELECT);
connectionButton.setSelected(mode == FormDesigner.MODE_CONNECT);
paletteButton.setSelected(mode == FormDesigner.MODE_ADD);
if (addLabel.isVisible()) {
PaletteItem item = PaletteUtils.getSelectedItem();
if (item != null && mode == FormDesigner.MODE_ADD) {
addLabel.setIcon(
new ImageIcon(item.getNode().getIcon(BeanInfo.ICON_COLOR_16x16)));
addLabel.setText(item.getNode().getDisplayName());
}
else {
addLabel.setText(""); // NOI18N
addLabel.setIcon(null);
}
}
}
void showPaletteButton(boolean visible) {
addLabel.setVisible(visible);
paletteButton.setVisible(visible);
// Hack that solves issue 147578
if ("Nimbus".equals(UIManager.getLookAndFeel().getID())) { // NOI18N
if (visible) {
addLabel.setPreferredSize(null);
paletteButton.setPreferredSize(null);
} else {
addLabel.setPreferredSize(new Dimension());
paletteButton.setPreferredSize(new Dimension());
}
}
}
private void showPaletteViewMenu() {
if (paletteMenuView == null) {
paletteMenuView = new PaletteMenuView(listener);
paletteMenuView.getPopupMenu().addPopupMenuListener(listener);
}
Point p = paletteButton.getLocation();
p.y += paletteButton.getHeight() + 2;
paletteMenuView.getPopupMenu().show(toolbar, p.x, p.y);
}
private void showVisibilityPopupMenu(Point p) {
JPopupMenu menu = new JPopupMenu();
final JMenuItem item = new JCheckBoxMenuItem(
FormUtils.getBundleString("CTL_PaletteButton_MenuItem")); // NOI18N
item.setSelected(FormLoaderSettings.getInstance().isPaletteInToolBar());
item.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
FormLoaderSettings.getInstance().setPaletteInToolBar(
item.isSelected());
}
});
menu.add(item);
menu.show(toolbar, p.x, p.y);
}
// -------
private class Listener extends MouseAdapter
implements ActionListener, NodeAcceptor,
PopupMenuListener
{
// Determines whether palette popup menu should be shown (see issue 46673)
private boolean showMenu;
/** Action to switch to selection, connection or add mode. */
@Override
public void actionPerformed(ActionEvent ev) {
if (ev.getSource() == selectionButton)
formDesigner.toggleSelectionMode();
else if (ev.getSource() == connectionButton)
formDesigner.toggleConnectionMode();
else if (ev.getSource() == paletteButton) {
if (showMenu) {
formDesigner.toggleAddMode();
showPaletteViewMenu();
} else {
MenuSelectionManager.defaultManager().clearSelectedPath();
formDesigner.toggleSelectionMode();
}
}
}
/** Acceptor for nodes in PaletteMenuView */
@Override
public boolean acceptNodes(Node[] nodes) {
if (nodes.length == 0)
return false;
PaletteItem item = nodes[0].getCookie(PaletteItem.class);
PaletteUtils.selectItem( item );
return true;
}
/** Handles closing of PaletteMenuView popup */
@Override
public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
if( PaletteUtils.getSelectedItem() == null )
formDesigner.toggleSelectionMode();
}
@Override
public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
}
@Override
public void popupMenuCanceled(PopupMenuEvent e) {
}
@Override
public void mousePressed(MouseEvent e) {
if (e.getSource() == paletteButton) {
showMenu = !paletteButton.isSelected();
}
}
/** Reacts on right mouse button up - showing toolbar's popup menu. */
@Override
public void mouseReleased(MouseEvent e) {
if (SwingUtilities.isRightMouseButton(e)
&& formDesigner.getDesignerMode() == FormDesigner.MODE_SELECT)
showVisibilityPopupMenu(e.getPoint());
}
}
private static class ToolBar extends JToolBar {
ToolBar() {
// Proper initialization of aqua toolbar ui, see commit dbd66075827a
super("editorToolbar"); // NOI18N
// the toolbar should have roll-over buttons and no handle for dragging
setFloatable(false);
setRollover(true);
setBorder(new EmptyBorder(0, 0, 0, 0));
add(Box.createHorizontalStrut(4));
addSeparator();
}
@Override
public String getUIClassID() {
// For GTK and Aqua look and feels, we provide a custom toolbar UI
if (UIManager.get("Nb.Toolbar.ui") != null) { // NOI18N
return "Nb.Toolbar.ui"; // NOI18N
} else {
return super.getUIClassID();
}
}
}
}
| 5,954 |
946 | <gh_stars>100-1000
#include "utils/NoiseUtils.hpp"
#include <boost/test/unit_test.hpp>
using namespace utymap::utils;
namespace {
const double Tolerance = 1e-3;
}
BOOST_AUTO_TEST_SUITE(Utils_NoiseUtils)
BOOST_AUTO_TEST_CASE(GivenTestValues_WhenPerlin3d_ThenReturnExpectedValues) {
// Compare with original C# implementation
BOOST_CHECK_CLOSE(NoiseUtils::perlin3D(1, 1, 1, 0.1), 0.1862334, Tolerance);
BOOST_CHECK_CLOSE(NoiseUtils::perlin3D(10, 42, 10, 0.15), 0.4103059, Tolerance);
BOOST_CHECK_CLOSE(NoiseUtils::perlin3D(52, 120, 13, 0.12), -0.1014592, Tolerance);
}
BOOST_AUTO_TEST_SUITE_END()
| 268 |
542 | <gh_stars>100-1000
package com.intsmaze.flink.base.bean;
/**
 * GitHub: https://github.com/intsmaze
 * Blog: https://www.cnblogs.com/intsmaze/
 * Author of the book "深入理解Flink核心设计与实践原理" (Understanding Flink's Core Design and Practice in Depth)
 *
 * @author: intsmaze (Liu Yang)
* @date: 2020/10/15 18:33
*/
public class ResultBean {
private String flowId;
private int cycleNumber;
private long total;
private int stepNumber;
private String stepType;
public String getFlowId() {
return flowId;
}
public void setFlowId(String flowId) {
this.flowId = flowId;
}
public int getCycleNumber() {
return cycleNumber;
}
public void setCycleNumber(int cycleNumber) {
this.cycleNumber = cycleNumber;
}
public long getTotal() {
return total;
}
public void setTotal(long total) {
this.total = total;
}
public int getStepNumber() {
return stepNumber;
}
public void setStepNumber(int stepNumber) {
this.stepNumber = stepNumber;
}
public String getStepType() {
return stepType;
}
public void setStepType(String stepType) {
this.stepType = stepType;
}
@Override
public String toString() {
return "ResultBean{" +
"flowId='" + flowId + '\'' +
", cycleNumber=" + cycleNumber +
", total=" + total +
", stepNumber=" + stepNumber +
", stepType='" + stepType + '\'' +
'}';
}
}
| 706 |
539 | /*************************************************************************/
/* */
/* Language Technologies Institute */
/* Carnegie Mellon University */
/* Copyright (c) 2001 */
/* All Rights Reserved. */
/* */
/* Permission is hereby granted, free of charge, to use and distribute */
/* this software and its documentation without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of this work, and to */
/* permit persons to whom this work is furnished to do so, subject to */
/* the following conditions: */
/* 1. The code must retain the above copyright notice, this list of */
/* conditions and the following disclaimer. */
/* 2. Any modifications must be clearly marked as such. */
/* 3. Original authors' names are not deleted. */
/* 4. The authors' names are not used to endorse or promote products */
/* derived from this software without specific prior written */
/* permission. */
/* */
/* CARNEGIE MELLON UNIVERSITY AND THE CONTRIBUTORS TO THIS WORK */
/* DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING */
/* ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT */
/* SHALL CARNEGIE MELLON UNIVERSITY NOR THE CONTRIBUTORS BE LIABLE */
/* FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES */
/* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN */
/* AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, */
/* ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF */
/* THIS SOFTWARE. */
/* */
/*************************************************************************/
/* Author: <NAME> (<EMAIL>) */
/* Date: January 2001 */
/*************************************************************************/
/* */
/* Simple top level program */
/* */
/*************************************************************************/
#include <stdio.h>
#include <string.h>
#include <sys/time.h>
#include <unistd.h>
#include "flite.h"
#include "flite_version.h"
cst_val *flite_set_voice_list(const char *voxdir);
#ifdef WASM32_WASI
void flite_set_lang_list(void);
#else
void *flite_set_lang_list(void);
#endif
void cst_alloc_debug_summary();
/* It's not very appropriate that these are declared here */
void usenglish_init(cst_voice *v);
cst_lexicon *cmu_lex_init(void);
static void flite_version()
{
printf(" Carnegie Mellon University, Copyright (c) 1999-2016, all rights reserved\n");
printf(" version: %s-%s-%s %s (http://cmuflite.org)\n",
FLITE_PROJECT_PREFIX,
FLITE_PROJECT_VERSION,
FLITE_PROJECT_STATE,
FLITE_PROJECT_DATE);
}
static void flite_usage()
{
printf("flite: a small simple speech synthesizer\n");
flite_version();
printf("usage: flite TEXT/FILE [WAVEFILE]\n"
" Converts text in TEXTFILE to a waveform in WAVEFILE\n"
" If text contains a space the it is treated as a literal\n"
" textstring and spoken, and not as a file name\n"
" if WAVEFILE is unspecified or \"play\" the result is\n"
" played on the current systems audio device. If WAVEFILE\n"
" is \"none\" the waveform is discarded (good for benchmarking)\n"
" Other options must appear before these options\n"
" --version Output flite version number\n"
" --help Output usage string\n"
" -o WAVEFILE Explicitly set output filename\n"
" -f TEXTFILE Explicitly set input filename\n"
" -t TEXT Explicitly set input textstring\n"
" -p PHONES Explicitly set input textstring and synthesize as phones\n"
" --set F=V Set feature (guesses type)\n"
" -s F=V Set feature (guesses type)\n"
" --seti F=V Set int feature\n"
" --setf F=V Set float feature\n"
" --sets F=V Set string feature\n"
" -ssml Read input text/file in ssml mode\n"
" -b Benchmark mode\n"
" -l Loop endlessly\n"
" -voice NAME Use voice NAME (NAME can be pathname/url to flitevox file)\n"
" -voicedir NAME Directory containing (clunit) voice data\n"
" -lv List voices available\n"
" -add_lex FILENAME add lex addenda from FILENAME\n"
" -pw Print words\n"
" -ps Print segments\n"
" -psdur Print segments and their durations (end-time)\n"
" -pr RelName Print relation RelName\n"
" -voicedump FILENAME Dump selected (cg) voice to FILENAME\n"
" -v Verbose mode\n");
exit(0);
}
static void flite_voice_list_print(void)
{
cst_voice *voice;
const cst_val *v;
printf("Voices available: ");
for (v=flite_voice_list; v; v=val_cdr(v))
{
voice = val_voice(val_car(v));
printf("%s ",voice->name);
}
printf("\n");
return;
}
static cst_utterance *print_info(cst_utterance *u)
{
cst_item *item;
const char *relname;
int printEndTime = 0;
int printStress = 0;
relname = utt_feat_string(u,"print_info_relation");
if (cst_streq(relname, "SegmentEndTime"))
{
relname = "Segment";
printEndTime = 1;
}
if (cst_streq(relname, "SegmentStress"))
{
relname = "Segment";
printStress = 1;
}
for (item=relation_head(utt_relation(u,relname));
item;
item=item_next(item))
{
if (!printEndTime)
printf("%s",item_feat_string(item,"name"));
else
printf("%s:%1.3f",item_feat_string(item,"name"), item_feat_float(item,"end"));
if (printStress == 1)
{
if (cst_streq("+",ffeature_string(item,"ph_vc")))
printf("%s",ffeature_string(item,"R:SylStructure.parent.stress"));
}
printf(" ");
}
printf("\n");
return u;
}
static void ef_set(cst_features *f,const char *fv,const char *type)
{
/* set feature from fv (F=V), guesses type if not explicit type given */
const char *val;
char *feat;
const char *fname;
if ((val = strchr(fv,'=')) == 0)
{
fprintf(stderr,
"flite: can't find '=' in featval \"%s\", ignoring it\n",
fv);
}
else
{
feat = cst_strdup(fv);
feat[cst_strlen(fv)-cst_strlen(val)] = '\0';
fname=feat_own_string(f,feat);
val = val+1;
if ((type && cst_streq("int",type)) ||
((type == 0) && (cst_regex_match(cst_rx_int,val))))
feat_set_int(f,fname,atoi(val));
else if ((type && cst_streq("float",type)) ||
((type == 0) && (cst_regex_match(cst_rx_double,val))))
feat_set_float(f,fname,atof(val));
else
feat_set_string(f,fname,val);
cst_free(feat);
}
}
int main(int argc, char **argv)
{
struct timeval tv;
cst_voice *v;
const char *filename;
const char *outtype;
cst_voice *desired_voice = 0;
const char *voicedir = NULL;
int i;
float durs;
double time_start, time_end;
int flite_verbose, flite_loop, flite_bench;
int explicit_filename, explicit_text, explicit_phones, ssml_mode;
#define ITER_MAX 3
int bench_iter = 0;
cst_features *extra_feats;
const char *lex_addenda_file = NULL;
const char *voicedumpfile = NULL;
cst_audio_streaming_info *asi;
filename = 0;
outtype = "play"; /* default is to play */
flite_verbose = FALSE;
flite_loop = FALSE;
flite_bench = FALSE;
explicit_text = explicit_filename = explicit_phones = FALSE;
ssml_mode = FALSE;
extra_feats = new_features();
flite_init();
flite_set_lang_list(); /* defined at compilation time */
for (i=1; i<argc; i++)
{
if (cst_streq(argv[i],"--version"))
{
flite_version();
return 1;
}
else if (cst_streq(argv[i],"-h") ||
cst_streq(argv[i],"--help") ||
cst_streq(argv[i],"-?"))
flite_usage();
else if (cst_streq(argv[i],"-v"))
flite_verbose = TRUE;
else if (cst_streq(argv[i],"-lv"))
{
if (flite_voice_list == NULL)
flite_set_voice_list(voicedir);
flite_voice_list_print();
exit(0);
}
else if (cst_streq(argv[i],"-l"))
flite_loop = TRUE;
else if (cst_streq(argv[i],"-b"))
{
flite_bench = TRUE;
break; /* ignore other arguments */
}
else if ((cst_streq(argv[i],"-o")) && (i+1 < argc))
{
outtype = argv[i+1];
i++;
}
else if ((cst_streq(argv[i],"-voice")) && (i+1 < argc))
{
if (flite_voice_list == NULL)
flite_set_voice_list(voicedir);
desired_voice = flite_voice_select(argv[i+1]);
i++;
}
else if ((cst_streq(argv[i],"-voicedir")) && (i+1 < argc))
{
voicedir = argv[i+1];
if (flite_voice_list == NULL)
flite_set_voice_list(voicedir);
i++;
}
else if ((cst_streq(argv[i],"-add_lex")) && (i+1 < argc))
{
lex_addenda_file = argv[i+1];
i++;
}
else if (cst_streq(argv[i],"-f") && (i+1 < argc))
{
filename = argv[i+1];
explicit_filename = TRUE;
i++;
}
else if (cst_streq(argv[i],"-pw"))
{
feat_set_string(extra_feats,"print_info_relation","Word");
feat_set(extra_feats,"post_synth_hook_func",
uttfunc_val(&print_info));
}
else if (cst_streq(argv[i],"-ps"))
{
feat_set_string(extra_feats,"print_info_relation","Segment");
feat_set(extra_feats,"post_synth_hook_func",
uttfunc_val(&print_info));
}
else if (cst_streq(argv[i],"-psdur"))
{
// Added by AUP Mar 2013 for extracting durations (end-time) of segments
// (useful in talking heads, etc.)
feat_set_string(extra_feats,"print_info_relation","SegmentEndTime");
feat_set(extra_feats,"post_synth_hook_func",
uttfunc_val(&print_info));
}
else if (cst_streq(argv[i],"-psstress"))
{
feat_set_string(extra_feats,"print_info_relation","SegmentStress");
feat_set(extra_feats,"post_synth_hook_func",
uttfunc_val(&print_info));
}
else if (cst_streq(argv[i],"-ssml"))
{
ssml_mode = TRUE;
}
else if (cst_streq(argv[i],"-pr") && (i+1 < argc))
{
feat_set_string(extra_feats,"print_info_relation",argv[i+1]);
feat_set(extra_feats,"post_synth_hook_func",
uttfunc_val(&print_info));
i++;
}
else if (cst_streq(argv[i],"-voicedump") && (i+1 < argc))
{
voicedumpfile = argv[i+1];
i++;
}
else if ((cst_streq(argv[i],"-set") || cst_streq(argv[i],"-s"))
&& (i+1 < argc))
{
ef_set(extra_feats,argv[i+1],0);
i++;
}
else if (cst_streq(argv[i],"--seti") && (i+1 < argc))
{
ef_set(extra_feats,argv[i+1],"int");
i++;
}
else if (cst_streq(argv[i],"--setf") && (i+1 < argc))
{
ef_set(extra_feats,argv[i+1],"float");
i++;
}
else if (cst_streq(argv[i],"--sets") && (i+1 < argc))
{
ef_set(extra_feats,argv[i+1],"string");
i++;
}
else if (cst_streq(argv[i],"-p") && (i+1 < argc))
{
filename = argv[i+1];
explicit_phones = TRUE;
i++;
}
else if (cst_streq(argv[i],"-t") && (i+1 < argc))
{
filename = argv[i+1];
explicit_text = TRUE;
i++;
}
else if (filename)
outtype = argv[i];
else
filename = argv[i];
}
if (filename == NULL) filename = "-"; /* stdin */
if (flite_voice_list == NULL)
flite_set_voice_list(voicedir);
if (desired_voice == 0)
desired_voice = flite_voice_select(NULL);
v = desired_voice;
feat_copy_into(extra_feats,v->features);
durs = 0.0;
if (voicedumpfile != NULL)
{
flite_voice_dump(v,voicedumpfile);
exit(0);
}
if (lex_addenda_file)
flite_voice_add_lex_addenda(v,lex_addenda_file);
if (cst_streq("stream",outtype))
{
asi = new_audio_streaming_info();
asi->asc = audio_stream_chunk;
feat_set(v->features,"streaming_info",audio_streaming_info_val(asi));
}
if (flite_bench)
{
outtype = "none";
filename = "A whole joy was reaping, but they've gone south, you should fetch azure mike.";
explicit_text = TRUE;
}
loop:
gettimeofday(&tv,NULL);
time_start = (double)(tv.tv_sec)+(((double)tv.tv_usec)/1000000.0);
if (explicit_phones)
durs = flite_phones_to_speech(filename,v,outtype);
else if ((strchr(filename,' ') && !explicit_filename) || explicit_text)
{
if (ssml_mode)
durs = flite_ssml_text_to_speech(filename,v,outtype);
else
durs = flite_text_to_speech(filename,v,outtype);
}
else
{
if (ssml_mode)
durs = flite_ssml_file_to_speech(filename,v,outtype);
else
durs = flite_file_to_speech(filename,v,outtype);
}
gettimeofday(&tv,NULL);
time_end = ((double)(tv.tv_sec))+((double)tv.tv_usec/1000000.0);
if (flite_verbose || (flite_bench && bench_iter == ITER_MAX))
printf("times faster than real-time: %f\n(%f seconds of speech synthesized in %f)\n",
durs/(float)(time_end-time_start),
durs,
(float)(time_end-time_start));
if (flite_loop || (flite_bench && bench_iter++ < ITER_MAX))
goto loop;
delete_features(extra_feats);
delete_val(flite_voice_list); flite_voice_list=0;
/* cst_alloc_debug_summary(); */
return 0;
}
| 6,932 |
2,074 | <reponame>WxByte/core-plot<gh_stars>1000+
/// @file
typedef UIColor CPTNativeColor; ///< Platform-native color.
typedef UIImage CPTNativeImage; ///< Platform-native image format.
typedef UIEvent CPTNativeEvent; ///< Platform-native OS event.
typedef UIFont CPTNativeFont; ///< Platform-native font.
| 103 |
335 | {
"word": "Ped",
"definitions": [
"A performance-enhancing drug."
],
"parts-of-speech": "Noun"
} | 58 |
32,544 | package com.baeldung.attribute.override.entity;
import javax.persistence.Embeddable;
@Embeddable
public class Address {
private String name;
private String city;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
}
| 175 |
513 | <reponame>kevinw/amulet
void am_open_glob_module(lua_State *L);
| 29 |
2,144 | <reponame>aashitk/pinot
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.segment.local.segment.index;
import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.io.FileUtils;
import org.apache.pinot.segment.local.indexsegment.immutable.ImmutableSegmentLoader;
import org.apache.pinot.segment.local.segment.creator.SegmentTestUtils;
import org.apache.pinot.segment.local.segment.creator.impl.SegmentCreationDriverFactory;
import org.apache.pinot.segment.spi.ColumnMetadata;
import org.apache.pinot.segment.spi.IndexSegment;
import org.apache.pinot.segment.spi.SegmentMetadata;
import org.apache.pinot.segment.spi.creator.SegmentGeneratorConfig;
import org.apache.pinot.segment.spi.creator.SegmentIndexCreationDriver;
import org.apache.pinot.segment.spi.partition.BoundedColumnValuePartitionFunction;
import org.apache.pinot.spi.config.table.ColumnPartitionConfig;
import org.apache.pinot.spi.config.table.SegmentPartitionConfig;
import org.apache.pinot.spi.data.DimensionFieldSpec;
import org.apache.pinot.spi.data.FieldSpec.DataType;
import org.apache.pinot.spi.utils.ReadMode;
import org.apache.pinot.util.TestUtils;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
public class ColumnMetadataTest {
private static final String AVRO_DATA = "data/test_data-mv.avro";
private static final File INDEX_DIR = new File(FileUtils.getTempDirectory(), "ColumnMetadataTest");
private static final String CREATOR_VERSION = "TestHadoopJar.1.1.1";
@BeforeMethod
public void setUp()
throws Exception {
FileUtils.deleteQuietly(INDEX_DIR);
}
@AfterMethod
public void tearDown() {
FileUtils.deleteQuietly(INDEX_DIR);
}
public SegmentGeneratorConfig createSegmentConfigWithoutCreator()
throws Exception {
final String filePath =
TestUtils.getFileFromResourceUrl(ColumnMetadataTest.class.getClassLoader().getResource(AVRO_DATA));
// Intentionally changed this to TimeUnit.Hours to make it non-default for testing.
SegmentGeneratorConfig config = SegmentTestUtils
.getSegmentGenSpecWithSchemAndProjectedColumns(new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.HOURS,
"testTable");
config.setSegmentNamePostfix("1");
// The segment generation code in SegmentColumnarIndexCreator will throw
// exception if start and end time in time column are not in acceptable
// range. For this test, we first need to fix the input avro data
// to have the time column values in allowed range. Until then, the check
// is explicitly disabled
config.setSkipTimeValueCheck(true);
return config;
}
public SegmentGeneratorConfig createSegmentConfigWithCreator()
throws Exception {
SegmentGeneratorConfig config = createSegmentConfigWithoutCreator();
config.setCreatorVersion(CREATOR_VERSION);
return config;
}
public void verifySegmentAfterLoading(SegmentMetadata segmentMetadata) {
// Multi-value numeric dimension column.
ColumnMetadata col7Meta = segmentMetadata.getColumnMetadataFor("column7");
Assert.assertEquals(col7Meta.getFieldSpec(), new DimensionFieldSpec("column7", DataType.INT, false));
Assert.assertEquals(col7Meta.getCardinality(), 359);
Assert.assertEquals(col7Meta.getTotalDocs(), 100000);
Assert.assertEquals(col7Meta.getBitsPerElement(), 9);
Assert.assertEquals(col7Meta.getColumnMaxLength(), 0);
Assert.assertFalse(col7Meta.isSorted());
Assert.assertTrue(col7Meta.hasDictionary());
Assert.assertEquals(col7Meta.getMaxNumberOfMultiValues(), 24);
Assert.assertEquals(col7Meta.getTotalNumberOfEntries(), 134090);
Assert.assertFalse(col7Meta.isAutoGenerated());
// Single-value string dimension column.
ColumnMetadata col3Meta = segmentMetadata.getColumnMetadataFor("column3");
Assert.assertEquals(col3Meta.getFieldSpec(), new DimensionFieldSpec("column3", DataType.STRING, true));
Assert.assertEquals(col3Meta.getCardinality(), 5);
Assert.assertEquals(col3Meta.getTotalDocs(), 100000);
Assert.assertEquals(col3Meta.getBitsPerElement(), 3);
Assert.assertEquals(col3Meta.getColumnMaxLength(), 4);
Assert.assertFalse(col3Meta.isSorted());
Assert.assertTrue(col3Meta.hasDictionary());
Assert.assertEquals(col3Meta.getMaxNumberOfMultiValues(), 0);
Assert.assertEquals(col3Meta.getTotalNumberOfEntries(), 100000);
Assert.assertFalse(col3Meta.isAutoGenerated());
// Time column.
// FIXME: Currently it is modeled as dimension in the auto-generated schema
ColumnMetadata timeColumn = segmentMetadata.getColumnMetadataFor("daysSinceEpoch");
Assert.assertEquals(timeColumn.getFieldSpec(), new DimensionFieldSpec("daysSinceEpoch", DataType.INT, true));
Assert.assertEquals(timeColumn.getColumnName(), "daysSinceEpoch");
Assert.assertEquals(timeColumn.getCardinality(), 1);
Assert.assertEquals(timeColumn.getTotalDocs(), 100000);
Assert.assertEquals(timeColumn.getBitsPerElement(), 1);
Assert.assertEquals(timeColumn.getColumnMaxLength(), 0);
Assert.assertTrue(timeColumn.isSorted());
Assert.assertTrue(timeColumn.hasDictionary());
Assert.assertEquals(timeColumn.getMaxNumberOfMultiValues(), 0);
Assert.assertEquals(timeColumn.getTotalNumberOfEntries(), 100000);
Assert.assertFalse(timeColumn.isAutoGenerated());
}
@Test
public void testAllFieldsInitialized()
throws Exception {
// Build the Segment metadata.
SegmentGeneratorConfig config = createSegmentConfigWithCreator();
SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
driver.init(config);
driver.build();
// Load segment metadata.
IndexSegment segment = ImmutableSegmentLoader.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
SegmentMetadata segmentMetadata = segment.getSegmentMetadata();
verifySegmentAfterLoading(segmentMetadata);
// Make sure we got the creator name as well.
String creatorName = segmentMetadata.getCreatorName();
Assert.assertEquals(creatorName, CREATOR_VERSION);
}
@Test
public void testAllFieldsExceptCreatorName()
throws Exception {
// Build the Segment metadata.
SegmentGeneratorConfig config = createSegmentConfigWithoutCreator();
SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
driver.init(config);
driver.build();
// Load segment metadata.
IndexSegment segment = ImmutableSegmentLoader.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
SegmentMetadata segmentMetadata = segment.getSegmentMetadata();
verifySegmentAfterLoading(segmentMetadata);
// Make sure we get null for creator name.
Assert.assertNull(segmentMetadata.getCreatorName());
}
@Test
public void testPaddingCharacter()
throws Exception {
// Build the Segment metadata.
SegmentGeneratorConfig config = createSegmentConfigWithoutCreator();
SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
driver.init(config);
driver.build();
// Load segment metadata.
IndexSegment segment = ImmutableSegmentLoader.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
SegmentMetadata metadata = segment.getSegmentMetadata();
verifySegmentAfterLoading(metadata);
}
@Test
public void testSegmentPartitionedWithBoundedColumnValue()
throws Exception {
// Build the Segment metadata.
SegmentGeneratorConfig config = createSegmentConfigWithoutCreator();
Map<String, String> functionConfig = new HashMap<>();
functionConfig.put("columnValues", "P,w,L");
functionConfig.put("columnValuesDelimiter", ",");
SegmentPartitionConfig segmentPartitionConfig = new SegmentPartitionConfig(
Collections.singletonMap("column3", new ColumnPartitionConfig("BoundedColumnValue", 4, functionConfig)));
config.setSegmentPartitionConfig(segmentPartitionConfig);
SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
driver.init(config);
driver.build();
// Load segment metadata.
IndexSegment segment = ImmutableSegmentLoader.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
SegmentMetadata segmentMetadata = segment.getSegmentMetadata();
verifySegmentAfterLoading(segmentMetadata);
// Make sure we get null for creator name.
Assert.assertNull(segmentMetadata.getCreatorName());
// Verify segment partitioning metadata.
ColumnMetadata col3Meta = segmentMetadata.getColumnMetadataFor("column3");
Assert.assertNotNull(col3Meta.getPartitionFunction());
Assert.assertTrue(col3Meta.getPartitionFunction() instanceof BoundedColumnValuePartitionFunction);
Assert.assertEquals(col3Meta.getPartitionFunction().getNumPartitions(), 4);
Assert.assertEquals(col3Meta.getPartitionFunction().getFunctionConfig(), functionConfig);
Assert.assertEquals(col3Meta.getPartitions(), Stream.of(0, 1, 2, 3).collect(Collectors.toSet()));
}
}
| 3,195 |
605 | <reponame>AochongZhang/mqcloud
package com.sohu.tv.mq.cloud.task.monitor;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.protocol.body.Connection;
import org.apache.rocketmq.common.protocol.body.ConsumerConnection;
import org.apache.rocketmq.common.protocol.body.ConsumerRunningInfo;
import org.apache.rocketmq.common.protocol.body.ProcessQueueInfo;
import org.apache.rocketmq.common.protocol.heartbeat.ConsumeType;
import org.apache.rocketmq.common.protocol.heartbeat.SubscriptionData;
import org.apache.rocketmq.common.protocol.topic.OffsetMovedEvent;
import org.apache.rocketmq.tools.monitor.DeleteMsgsEvent;
import org.apache.rocketmq.tools.monitor.FailedMsgs;
import org.apache.rocketmq.tools.monitor.MonitorListener;
import org.apache.rocketmq.tools.monitor.UndoneMsgs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.sohu.tv.mq.cloud.bo.ConsumerBlock;
import com.sohu.tv.mq.cloud.bo.ConsumerClientStat;
import com.sohu.tv.mq.cloud.bo.ConsumerStat;
import com.sohu.tv.mq.cloud.bo.Topic;
import com.sohu.tv.mq.cloud.bo.TypedUndoneMsgs;
import com.sohu.tv.mq.cloud.bo.User;
import com.sohu.tv.mq.cloud.bo.UserConsumer;
import com.sohu.tv.mq.cloud.dao.ConsumerStatDao;
import com.sohu.tv.mq.cloud.service.AlarmConfigBridingService;
import com.sohu.tv.mq.cloud.service.AlertService;
import com.sohu.tv.mq.cloud.service.ConsumerClientStatService;
import com.sohu.tv.mq.cloud.service.TopicService;
import com.sohu.tv.mq.cloud.service.UserConsumerService;
import com.sohu.tv.mq.cloud.service.UserService;
import com.sohu.tv.mq.cloud.util.DateUtil;
import com.sohu.tv.mq.cloud.util.MQCloudConfigHelper;
import com.sohu.tv.mq.cloud.util.Result;
import com.sohu.tv.mq.util.CommonUtil;
/**
 * Sohu implementation of the RocketMQ MonitorListener.
* @author yongfeigao
*
*/
@Component
public class SohuMonitorListener implements MonitorListener {
private final Logger log = LoggerFactory.getLogger(this.getClass());
@Autowired
private ConsumerStatDao consumerStatDao;
@Autowired
private UserConsumerService userConsumerService;
@Autowired
private TopicService topicService;
@Autowired
private AlertService alertService;
@Autowired
private UserService userService;
@Autowired
private AlarmConfigBridingService alarmConfigBridingService;
private long time;
@Autowired
private MQCloudConfigHelper mqCloudConfigHelper;
@Autowired
private ConsumerClientStatService consumerClientStatService;
@Override
public void beginRound() {
time = System.currentTimeMillis();
log.info("monitor begin");
}
@Override
public void reportUndoneMsgs(UndoneMsgs undoneMsgs) {
String topic = undoneMsgs.getTopic();
// skip ignored topics
if(mqCloudConfigHelper.isIgnoreTopic(topic)) {
return;
}
try {
// save the stat of the consumer with accumulated (undone) messages
consumerStatDao.saveConsumerStat(undoneMsgs.getConsumerGroup(), topic,
(int)undoneMsgs.getUndoneMsgsTotal(),
(int)undoneMsgs.getUndoneMsgsSingleMQ(),
undoneMsgs.getUndoneMsgsDelayTimeMills());
} catch (Exception e) {
log.error("save {}",undoneMsgs ,e);
}
verifyAccumulateAlarm(undoneMsgs);
}
/**
 * Check whether an accumulation alarm email needs to be sent.
 * @param undoneMsgs accumulated (undone) message info
*/
private void verifyAccumulateAlarm(UndoneMsgs undoneMsgs) {
long accumulateTime = alarmConfigBridingService.getAccumulateTime(undoneMsgs.getConsumerGroup());
long accumulateCount = alarmConfigBridingService.getAccumulateCount(undoneMsgs.getConsumerGroup());
if (accumulateTime < 0 && accumulateCount < 0) {
return;
}
if (undoneMsgs instanceof TypedUndoneMsgs) {
// broadcasting consumers: the accumulation delay time cannot be measured
if (!((TypedUndoneMsgs) undoneMsgs).isClustering()) {
if (undoneMsgs.getUndoneMsgsTotal() > accumulateCount) {
accumulateWarn(undoneMsgs);
}
} else {
// send the alarm
if (undoneMsgs.getUndoneMsgsDelayTimeMills() > accumulateTime
&& undoneMsgs.getUndoneMsgsTotal() > accumulateCount) {
accumulateWarn(undoneMsgs);
}
}
}
}
/**
 * Accumulation alarm.
* @param undoneMsgs
*/
public void accumulateWarn(UndoneMsgs undoneMsgs) {
// check the alarm frequency limit
if (!alarmConfigBridingService.needWarn("accumulate", undoneMsgs.getTopic(), undoneMsgs.getConsumerGroup())) {
return;
}
TopicExt topicExt = getUserEmail(undoneMsgs.getTopic(), undoneMsgs.getConsumerGroup());
if(topicExt == null) {
return;
}
String content = getAccumulateWarnContent(topicExt.getTopic(), undoneMsgs);
alertService.sendWarnMail(topicExt.getReceiver(), "堆积", content);
}
/**
 * Get the email addresses of the users bound to the topic and consumer group.
 * @param topic
 * @param consumerGroup
* @return
*/
private TopicExt getUserEmail(String topic, String consumerGroup) {
// query the topic
Result<Topic> topicResult = topicService.queryTopic(topic);
if(topicResult.isNotOK()) {
return null;
}
TopicExt topicExt = new TopicExt();
topicExt.setTopic(topicResult.getResult());
// resolve the users
Set<Long> userID = getUserID(topicResult.getResult().getId(), consumerGroup);
String receiver = null;
// collect the user ids
if(!userID.isEmpty()) {
// fetch the user details
Result<List<User>> userListResult = userService.query(userID);
StringBuilder sb = new StringBuilder();
if(userListResult.isNotEmpty()) {
for(User u : userListResult.getResult()) {
sb.append(u.getEmail());
sb.append(",");
}
}
if(sb.length() > 0) {
sb.deleteCharAt(sb.length() - 1);
receiver = sb.toString();
}
}
topicExt.setReceiver(receiver);
return topicExt;
}
/**
 * Get the IDs of the users bound to the topic and consumer group.
 * @param tid topic id
 * @param consumerGroup
* @return
*/
private Set<Long> getUserID(long tid, String consumerGroup) {
// collect the user ids
Set<Long> uidList = new HashSet<Long>();
Result<List<UserConsumer>> udListResult = userConsumerService.queryByNameAndTid(tid, consumerGroup);
if(udListResult.isNotEmpty()) {
for(UserConsumer uc : udListResult.getResult()) {
uidList.add(uc.getUid());
}
}
return uidList;
}
/**
 * Build the accumulation warning mail content.
* @param topic
* @param undoneMsgs
* @return
*/
private String getAccumulateWarnContent(Topic topic, UndoneMsgs undoneMsgs) {
StringBuilder content = new StringBuilder("详细如下:<br><br>");
content.append("topic:<b>");
content.append(topic.getName());
content.append("</b> 的消费者:<b>");
content.append(mqCloudConfigHelper.getTopicConsumeLink(topic.getId(), undoneMsgs.getConsumerGroup()));
content.append("</b> 检测到堆积,总堆积消息量:");
content.append(undoneMsgs.getUndoneMsgsTotal());
content.append(",单个队列最大堆积消息量:");
content.append(undoneMsgs.getUndoneMsgsSingleMQ());
if (undoneMsgs.getUndoneMsgsDelayTimeMills() > 0) {
content.append(",消费滞后时间(相对于broker最新消息时间):");
content.append(undoneMsgs.getUndoneMsgsDelayTimeMills() / 1000f);
content.append("秒");
}
return content.toString();
}
@Override
public void reportFailedMsgs(FailedMsgs failedMsgs) {
}
@Override
public void reportDeleteMsgsEvent(DeleteMsgsEvent deleteMsgsEvent) {
try {
log.warn("receive offset event:{}", deleteMsgsEvent);
OffsetMovedEvent event = deleteMsgsEvent.getOffsetMovedEvent();
String consumerGroup = event.getConsumerGroup();
if(MixAll.TOOLS_CONSUMER_GROUP.equals(consumerGroup)) {
return;
}
// save the consumer stat
ConsumerStat consumerStat = new ConsumerStat();
consumerStat.setConsumerGroup(consumerGroup);
consumerStat.setTopic(event.getMessageQueue().getTopic());
consumerStatDao.saveSimpleConsumerStat(consumerStat);
int id = consumerStat.getId();
// save the block info
long time = deleteMsgsEvent.getEventTimestamp();
String broker = event.getMessageQueue().getBrokerName();
int qid = event.getMessageQueue().getQueueId();
consumerStatDao.saveSomeConsumerBlock(id, broker, qid, time);
// send the warning
offsetMoveWarn(deleteMsgsEvent);
} catch (Exception e) {
log.error("receive offset event:{}", deleteMsgsEvent, e);
}
}
/**
 * Offset-moved warning.
*/
public void offsetMoveWarn(DeleteMsgsEvent deleteMsgsEvent) {
OffsetMovedEvent event = deleteMsgsEvent.getOffsetMovedEvent();
TopicExt topicExt = getUserEmail(event.getMessageQueue().getTopic(), event.getConsumerGroup());
if(topicExt == null) {
return;
}
// check the alarm frequency limit
if (!alarmConfigBridingService.needWarn("offsetMove", event.getMessageQueue().getTopic(),
event.getConsumerGroup())) {
return;
}
StringBuilder content = new StringBuilder("详细如下:<br><br>");
content.append("消费者:<b>");
content.append(mqCloudConfigHelper.getTopicConsumeLink(topicExt.getTopic().getId(), event.getConsumerGroup()));
content.append("</b> 偏移量错误,broker时间:<b>");
content.append(DateUtil.getFormat(DateUtil.YMD_DASH_BLANK_HMS_COLON).format(
new Date(deleteMsgsEvent.getEventTimestamp())));
content.append("</b> ,请求偏移量:<b>");
content.append(event.getOffsetRequest());
content.append("</b>,broker偏移量:<b>");
content.append(event.getOffsetNew());
content.append("</b>。队列信息如下:<br>");
content.append("broker:");
content.append(event.getMessageQueue().getBrokerName());
content.append(" topic:");
content.append(event.getMessageQueue().getTopic());
content.append(" 队列:");
content.append(event.getMessageQueue().getQueueId());
alertService.sendWarnMail(topicExt.getReceiver(), "偏移量错误", content.toString());
}
@Override
public void reportConsumerRunningInfo(
TreeMap<String, ConsumerRunningInfo> criTable) {
if(criTable == null || criTable.size() == 0) {
return;
}
String consumerGroup = criTable.firstEntry().getValue().getProperties().getProperty("consumerGroup");
try {
// analyze subscription consistency
boolean result = ConsumerRunningInfo.analyzeSubscription(criTable);
if (!result) {
log.warn("ConsumerGroup: {}, Subscription different", consumerGroup);
// the same ConsumerGroup subscribes to different topics; record it
Set<SubscriptionData> set = new HashSet<SubscriptionData>();
for(ConsumerRunningInfo info : criTable.values()) {
set.addAll(info.getSubscriptionSet());
}
StringBuilder sb = new StringBuilder();
Set<String> uniqSet = new HashSet<String>();
for(SubscriptionData s : set) {
if(CommonUtil.isRetryTopic(s.getTopic())) {
continue;
}
String tmp = s.getTopic()+":"+s.getSubString();
if(uniqSet.add(tmp)) {
sb.append(tmp);
sb.append(";");
}
}
String sbscription = sb.toString();
ConsumerStat consumerStat = new ConsumerStat();
consumerStat.setConsumerGroup(consumerGroup);
consumerStat.setSbscription(sbscription);
consumerStatDao.saveSimpleConsumerStat(consumerStat);
subscriptionWarn(consumerGroup, sbscription);
}
} catch (NumberFormatException e) {
log.warn("num parse err");
} catch (Exception e) {
log.error("save subscription:{}", criTable, e);
}
// analyze clients that appear to be stuck
Map<TopicConsumer, List<ConsumerBlock>> map = new HashMap<TopicConsumer, List<ConsumerBlock>>();
for(String clientId : criTable.keySet()) {
ConsumerRunningInfo info = criTable.get(clientId);
String property = info.getProperties().getProperty(ConsumerRunningInfo.PROP_CONSUME_TYPE);
if (property == null) {
property = ((ConsumeType) info.getProperties().get(ConsumerRunningInfo.PROP_CONSUME_TYPE)).name();
}
// only push (passively consuming) clients can be analyzed
if(ConsumeType.valueOf(property) != ConsumeType.CONSUME_PASSIVELY) {
return;
}
String orderProperty = info.getProperties().getProperty(ConsumerRunningInfo.PROP_CONSUME_ORDERLY);
boolean orderMsg = Boolean.parseBoolean(orderProperty);
// only analyze non-orderly consumption
if(orderMsg) {
return;
}
Iterator<Entry<MessageQueue, ProcessQueueInfo>> it = info.getMqTable().entrySet().iterator();
while (it.hasNext()) {
Entry<MessageQueue, ProcessQueueInfo> next = it.next();
MessageQueue mq = next.getKey();
ProcessQueueInfo pq = next.getValue();
long diff = System.currentTimeMillis() - pq.getLastConsumeTimestamp();
if (diff < (1000 * 60) || pq.getCachedMsgCount() < 100) {
continue;
}
// assemble the info
TopicConsumer tc = new TopicConsumer();
tc.setTopic(mq.getTopic());
tc.setConsumer(consumerGroup);
List<ConsumerBlock> consumerBlockList = map.get(tc);
if(consumerBlockList == null) {
consumerBlockList = new ArrayList<ConsumerBlock>();
map.put(tc, consumerBlockList);
}
ConsumerBlock cb = new ConsumerBlock();
cb.setBlockTime(diff);
cb.setInstance(clientId);
cb.setBroker(mq.getBrokerName());
cb.setQid(mq.getQueueId());
consumerBlockList.add(cb);
}
}
if(map.size() <= 0) {
return;
}
for(TopicConsumer tc : map.keySet()) {
ConsumerStat consumerStat = new ConsumerStat();
consumerStat.setConsumerGroup(tc.getConsumer());
consumerStat.setTopic(tc.getTopic());
consumerStatDao.saveSimpleConsumerStat(consumerStat);
int id = consumerStat.getId();
List<ConsumerBlock> list = map.get(tc);
for(ConsumerBlock cb : list) {
consumerStatDao.saveConsumerBlock(id, cb.getInstance(), cb.getBroker(), cb.getQid(), cb.getBlockTime());
}
}
// send alarms
blockWarn(map);
}
/**
 * Subscription mismatch alarm.
*/
public void subscriptionWarn(String consumerGroup, String topics) {
// check the alarm frequency limit
if (!alarmConfigBridingService.needWarn("subscribe", topics, consumerGroup)) {
return;
}
StringBuilder content = new StringBuilder("详细如下:<br><br>");
content.append("消费者:<b>");
content.append(consumerGroup);
content.append("</b> 同时订阅了:<b>");
content.append(topics);
content.append("</b>。");
alertService.sendWarnMail(null, "订阅错误", content.toString());
}
/**
 * Client block warning.
*/
public void blockWarn(Map<TopicConsumer, List<ConsumerBlock>> map) {
for (TopicConsumer tc : map.keySet()) {
Result<Topic> topicResult = topicService.queryTopic(tc.getTopic());
if (topicResult.isNotOK()) {
log.error("get topic err. topic:{}", tc.getTopic());
continue;
}
List<ConsumerBlock> list = map.get(tc);
// get the alarm config
long blockTime = alarmConfigBridingService.getBlockTime(tc.getConsumer());
if (blockTime < 0) {
continue;
}
// check the alarm frequency limit
if (!alarmConfigBridingService.needWarn("clientBlock", tc.getTopic(), tc.getConsumer())) {
continue;
}
// decide whether to alarm
Iterator<ConsumerBlock> iterator = list.iterator();
while(iterator.hasNext()) {
ConsumerBlock consumerBlock = iterator.next();
if(consumerBlock.getBlockTime() < blockTime) {
iterator.remove();
}
}
if(list.size() <= 0) {
continue;
}
StringBuilder content = new StringBuilder("详细如下:<br><br>");
content.append("topic: <b>");
content.append(tc.getTopic());
content.append("</b> 的消费者:");
content.append(mqCloudConfigHelper.getTopicConsumeLink(topicResult.getResult().getId(), tc.getConsumer()));
content.append(" 检测到阻塞: <br>");
content.append("<table border=1>");
content.append("<thead>");
content.append("<tr>");
content.append("<th>clientId</th>");
content.append("<th>broker</th>");
content.append("<th>队列</th>");
content.append("<th>阻塞时间</th>");
content.append("</tr>");
content.append("</thead>");
content.append("<tbody>");
for (ConsumerBlock cb : list) {
content.append("<tr>");
content.append("<td>");
content.append(cb.getInstance());
content.append("</td>");
content.append("<td>");
content.append(cb.getBroker());
content.append("</td>");
content.append("<td>");
content.append(cb.getQid());
content.append("</td>");
content.append("<td>");
content.append(cb.getBlockTime() / 1000f);
content.append("秒</td>");
content.append("</tr>");
}
content.append("</tbody>");
content.append("</table>");
TopicExt topicExt = getUserEmail(tc.getTopic(), tc.getConsumer());
alertService.sendWarnMail(topicExt.getReceiver(), "客户端阻塞", content.toString());
}
}
/**
 * Save consumer-client info.
* @param consumerGroup
* @param cc
*/
public void saveConsumerGroupClientInfo(String consumerGroup, ConsumerConnection cc) {
for (Connection c : cc.getConnectionSet()) {
String clientId = c.getClientId();
// extract the ip
if (clientId.contains("@")) {
clientId = clientId.split("@")[0];
if (clientId.contains("-")) {
String[] s = clientId.split("-");
if (s.length > 1) {
clientId = clientId.substring(s[0].length() + 1);
}
}
} else {
log.warn("consumer clientId is not recognized, clientId:{}", clientId);
}
ConsumerClientStat consumerClientStat = new ConsumerClientStat(consumerGroup, clientId);
consumerClientStatService.save(consumerClientStat);
}
}
@Override
public void endRound() {
long use = System.currentTimeMillis() - time;
log.info("monitor end use:{}ms", use);
}
private class TopicExt {
private Topic topic;
private String receiver;
public Topic getTopic() {
return topic;
}
public void setTopic(Topic topic) {
this.topic = topic;
}
public String getReceiver() {
return receiver;
}
public void setReceiver(String receiver) {
this.receiver = receiver;
}
}
private class TopicConsumer {
private String topic;
private String consumer;
public String getTopic() {
return topic;
}
public void setTopic(String topic) {
this.topic = topic;
}
public String getConsumer() {
return consumer;
}
public void setConsumer(String consumer) {
this.consumer = consumer;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + getOuterType().hashCode();
result = prime * result + ((consumer == null) ? 0 : consumer.hashCode());
result = prime * result + ((topic == null) ? 0 : topic.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
TopicConsumer other = (TopicConsumer) obj;
if (!getOuterType().equals(other.getOuterType()))
return false;
if (consumer == null) {
if (other.consumer != null)
return false;
} else if (!consumer.equals(other.consumer))
return false;
if (topic == null) {
if (other.topic != null)
return false;
} else if (!topic.equals(other.topic))
return false;
return true;
}
private SohuMonitorListener getOuterType() {
return SohuMonitorListener.this;
}
}
}
| 11,126 |
2,542 | // ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#pragma once
namespace Naming
{
class EnumeratePropertiesToken
: public Serialization::FabricSerializable
, public Common::ISizeEstimator
{
public:
EnumeratePropertiesToken();
EnumeratePropertiesToken(
std::wstring const & lastEnumeratedPropertyName,
_int64 propertiesVersion);
static Common::ErrorCode Create(std::wstring const & escapedToken, __out EnumeratePropertiesToken & token);
__declspec(property(get=get_PropertyName)) std::wstring const & LastEnumeratedPropertyName;
std::wstring const & get_PropertyName() const { return lastEnumeratedPropertyName_; }
__declspec(property(get=get_PropertiesVersion)) _int64 PropertiesVersion;
_int64 get_PropertiesVersion() const { return propertiesVersion_; }
__declspec(property(get=get_IsValid)) bool IsValid;
bool get_IsValid() const { return isValid_; }
Common::ErrorCode ToEscapedString(__out std::wstring & escapedToken) const;
FABRIC_FIELDS_03(lastEnumeratedPropertyName_, propertiesVersion_, isValid_);
void WriteTo(__in Common::TextWriter & w, Common::FormatOptions const &) const;
BEGIN_DYNAMIC_SIZE_ESTIMATION()
DYNAMIC_SIZE_ESTIMATION_MEMBER(lastEnumeratedPropertyName_)
END_DYNAMIC_SIZE_ESTIMATION()
private:
static std::wstring const Delimiter;
std::wstring lastEnumeratedPropertyName_;
_int64 propertiesVersion_;
bool isValid_;
};
}
| 640 |
2,996 | // Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.core.module;
import org.terasology.gestalt.module.sandbox.API;
import java.io.IOException;
import java.io.OutputStream;
/**
* This class is a wrapper for {@link OutputStream} which should be used by modules.
* <p>
* By using this class, the caller is not allowed to close the stream at all.
 * For security reasons, only {@link SandboxFileManager} has permission to close it,
* since the caller may completely forget to do so.
*/
@API
public class ModuleOutputStream extends OutputStream {
private OutputStream outputStream;
public ModuleOutputStream(OutputStream outputStream) {
this.outputStream = outputStream;
}
@Override
public void write(int b) throws IOException {
outputStream.write(b);
}
@Override
public void write(byte[] b) throws IOException {
outputStream.write(b);
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
outputStream.write(b, off, len);
}
@Override
public void flush() throws IOException {
outputStream.flush();
}
@Override
public void close() throws IOException {
throw new IOException("You must not close the stream. " +
"The SandboxFileManager will do it automatically.");
}
}
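/*
 * Illustrative usage sketch (not part of the engine API). It assumes privileged code,
 * such as the sandbox file manager mentioned above, creates the raw stream, hands the
 * wrapper to module code, and later closes the raw stream itself; the variable names
 * and the Files.newOutputStream call are only for illustration.
 *
 *   OutputStream raw = Files.newOutputStream(path);           // privileged side
 *   ModuleOutputStream wrapped = new ModuleOutputStream(raw); // handed to the module
 *   wrapped.write(data);                                      // module side writes
 *   wrapped.flush();
 *   // wrapped.close() would throw IOException by design
 *   raw.close();                                              // privileged side closes
 */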
| 465 |
314 | package com.google.mu.util.concurrent;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ExecutionException;
import org.junit.jupiter.api.Assertions;
import com.google.common.truth.Subject;
import com.google.common.truth.ThrowableSubject;
/** Some common assertions for {@link CompletableFuture}, while Truth doesn't have them yet. */
public final class FutureAssertions {
public static ThrowableSubject assertCauseOf(
Class<? extends Throwable> exceptionType, CompletionStage<?> stage) {
CompletableFuture<?> future = stage.toCompletableFuture();
Throwable thrown = Assertions.assertThrows(exceptionType, future::get);
assertThat(future.isDone()).isTrue();
assertThat(future.isCompletedExceptionally()).isTrue();
return assertThat(thrown.getCause());
}
public static CancellationException assertCancelled(CompletionStage<?> stage) {
CompletableFuture<?> future = stage.toCompletableFuture();
assertThat(future.isDone()).isTrue();
assertThat(future.isCompletedExceptionally()).isTrue();
CancellationException cancelled = assertThrows(CancellationException.class, future::get);
assertThat(future.isCancelled()).isTrue();
return cancelled;
}
public static Subject<?, Object> assertCompleted(CompletionStage<?> stage)
throws InterruptedException, ExecutionException {
assertThat(stage.toCompletableFuture().isDone()).isTrue();
Object result = stage.toCompletableFuture().get();
assertThat(stage.toCompletableFuture().isCancelled()).isFalse();
assertThat(stage.toCompletableFuture().isCompletedExceptionally()).isFalse();
return assertThat(result);
}
public static Subject<?, Object> assertAfterCompleted(CompletionStage<?> stage)
throws InterruptedException, ExecutionException {
Object result = stage.toCompletableFuture().get();
assertThat(stage.toCompletableFuture().isDone()).isTrue();
assertThat(stage.toCompletableFuture().isCancelled()).isFalse();
assertThat(stage.toCompletableFuture().isCompletedExceptionally()).isFalse();
return assertThat(result);
}
public static void assertPending(CompletionStage<?> stage) {
assertThat(stage.toCompletableFuture().isDone()).isFalse();
assertThat(stage.toCompletableFuture().isCancelled()).isFalse();
assertThat(stage.toCompletableFuture().isCompletedExceptionally()).isFalse();
}
}
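/*
 * Illustrative usage in a test (a sketch, not taken from this repository's own tests):
 *
 *   CompletableFuture<String> future = new CompletableFuture<>();
 *   FutureAssertions.assertPending(future);
 *   future.completeExceptionally(new java.io.IOException("boom"));
 *   FutureAssertions.assertCauseOf(java.util.concurrent.ExecutionException.class, future)
 *       .isInstanceOf(java.io.IOException.class);
 */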
| 801 |
2,937 | /*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2014 Tor<NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#ifndef __RotationalSpline_H__
#define __RotationalSpline_H__
#include "OgrePrerequisites.h"
#include "OgreQuaternion.h"
#include "OgreHeaderPrefix.h"
namespace Ogre {
/** \addtogroup Core
* @{
*/
/** \addtogroup Math
* @{
*/
/** This class interpolates orientations (rotations) along a spline using
derivatives of quaternions.
@remarks
Like the SimpleSpline class, this class is about interpolating values
smoothly over a spline. Whilst SimpleSpline deals with positions (the normal
sense we think about splines), this class interpolates orientations. The
theory is identical, except we're now in 4-dimensional space instead of 3.
@par
In positional splines, we use the points and tangents on those points to generate
control points for the spline. In this case, we use quaternions and derivatives
of the quaternions (i.e. the rate and direction of change at each point). This is the
same as SimpleSpline since a tangent is a derivative of a position. We effectively
generate an extra quaternion in between each actual quaternion which, when taken with
the original quaternion forms the 'tangent' of that quaternion.
*/
class _OgreExport RotationalSpline
{
public:
RotationalSpline();
~RotationalSpline();
/** Adds a control point to the end of the spline. */
void addPoint(const Quaternion& p);
/** Gets the detail of one of the control points of the spline. */
const Quaternion& getPoint(unsigned short index) const;
/** Gets the number of control points in the spline. */
unsigned short getNumPoints(void) const;
/** Clears all the points in the spline. */
void clear(void);
/** Updates a single point in the spline.
@remarks
This point must already exist in the spline.
*/
void updatePoint(unsigned short index, const Quaternion& value);
/** Returns an interpolated point based on a parametric value over the whole series.
@remarks
Given a t value between 0 and 1 representing the parametric distance along the
whole length of the spline, this method returns an interpolated point.
@param t Parametric value.
@param useShortestPath Defines if rotation should take the shortest possible path
*/
Quaternion interpolate(Real t, bool useShortestPath=true);
/** Interpolates a single segment of the spline given a parametric value.
@param fromIndex The point index to treat as t=0. fromIndex + 1 is deemed to be t=1
@param t Parametric value
@param useShortestPath Defines if rotation should take the shortest possible path
*/
Quaternion interpolate(unsigned int fromIndex, Real t, bool useShortestPath=true);
/** Tells the spline whether it should automatically calculate tangents on demand
as points are added.
@remarks
The spline calculates tangents at each point automatically based on the input points.
Normally it does this every time a point changes. However, if you have a lot of points
to add in one go, you probably don't want to incur this overhead and would prefer to
defer the calculation until you are finished setting all the points. You can do this
by calling this method with a parameter of 'false'. Just remember to manually call
the recalcTangents method when you are done.
@param autoCalc If true, tangents are calculated for you whenever a point changes. If false,
you must call recalcTangents to recalculate them when it best suits.
*/
void setAutoCalculate(bool autoCalc);
/** Recalculates the tangents associated with this spline.
@remarks
If you tell the spline not to update on demand by calling setAutoCalculate(false)
then you must call this after completing your updates to the spline points.
*/
void recalcTangents(void);
private:
bool mAutoCalc;
std::vector<Quaternion> mPoints;
std::vector<Quaternion> mTangents;
};
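/* Illustrative usage (a sketch, not part of this header; Degree and Vector3 come from
   the usual Ogre math headers):

       Ogre::RotationalSpline spline;
       spline.setAutoCalculate(false);            // defer tangent generation while batch-adding
       spline.addPoint(Ogre::Quaternion::IDENTITY);
       spline.addPoint(Ogre::Quaternion(Ogre::Degree(90),  Ogre::Vector3::UNIT_Y));
       spline.addPoint(Ogre::Quaternion(Ogre::Degree(180), Ogre::Vector3::UNIT_Y));
       spline.recalcTangents();                   // required once after the batch
       Ogre::Quaternion halfWay = spline.interpolate(0.5f); // t in [0,1] over the whole series
*/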
/** @} */
/** @} */
}
#include "OgreHeaderSuffix.h"
#endif
| 1,869 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-w7pw-g7cc-7j3p",
"modified": "2022-05-01T17:54:38Z",
"published": "2022-05-01T17:54:38Z",
"aliases": [
"CVE-2007-1535"
],
"details": "Microsoft Windows Vista establishes a Teredo address without user action upon connection to the Internet, contrary to documentation that Teredo is inactive without user action, which increases the attack surface and allows remote attackers to communicate via Teredo.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2007-1535"
},
{
"type": "WEB",
"url": "http://osvdb.org/33667"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/462793/100/0/threaded"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/464617/100/0/threaded"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/23267"
},
{
"type": "WEB",
"url": "http://www.symantec.com/avcenter/reference/Vista_Network_Attack_Surface_RTM.pdf"
},
{
"type": "WEB",
"url": "http://www.symantec.com/enterprise/security_response/weblog/2007/04/microsofts_inaccurate_teredo_d.html"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 630 |
28,056 | package com.alibaba.json.bvt.bug;
import org.junit.Assert;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.annotation.JSONField;
import com.alibaba.fastjson.annotation.JSONType;
import com.alibaba.fastjson.parser.Feature;
import junit.framework.TestCase;
public class Bug_for_issue_465 extends TestCase {
public void test_for_issue() throws Exception {
String json = "[\"abc\",\"efg\",\"sss\",[1,2]]";
TestBean testBean = JSON.parseObject(json, TestBean.class);
Assert.assertEquals("abc", testBean.name);
Assert.assertEquals("efg", testBean.country);
Assert.assertEquals("sss", testBean.password);
Assert.assertEquals(2, testBean.location.length);
Assert.assertEquals(1, testBean.location[0]);
Assert.assertEquals(2, testBean.location[1]);
}
public void f_test_for_issue_private() throws Exception {
String json = "[\"abc\",\"efg\",\"sss\",[1,2]]";
TestBean1 testBean = JSON.parseObject(json, TestBean1.class);
Assert.assertEquals("abc", testBean.name);
Assert.assertEquals("efg", testBean.country);
Assert.assertEquals("sss", testBean.password);
Assert.assertEquals(2, testBean.location.length);
Assert.assertEquals(1, testBean.location[0]);
Assert.assertEquals(2, testBean.location[1]);
}
@JSONType(parseFeatures = Feature.SupportArrayToBean)
public static class TestBean {
private String name;
private String password;
private String country;
private int[] location;
public String getName() {
return name;
}
@JSONField(ordinal = 0)
public void setName(String name) {
this.name = name;
}
public String getPassword() {
return password;
}
@JSONField(ordinal = 2)
public void setPassword(String password) {
this.password = password;
}
public String getCountry() {
return country;
}
@JSONField(ordinal = 1)
public void setCountry(String country) {
this.country = country;
}
public int[] getLocation() {
return location;
}
@JSONField(ordinal = 3)
public void setLocation(int[] location) {
this.location = location;
}
}
@JSONType(parseFeatures = Feature.SupportArrayToBean)
private static class TestBean1 {
private String name;
private String password;
private String country;
private int[] location;
public String getName() {
return name;
}
@JSONField(ordinal = 0)
public void setName(String name) {
this.name = name;
}
public String getPassword() {
return password;
}
@JSONField(ordinal = 2)
public void setPassword(String password) {
this.password = password;
}
public String getCountry() {
return country;
}
@JSONField(ordinal = 1)
public void setCountry(String country) {
this.country = country;
}
public int[] getLocation() {
return location;
}
@JSONField(ordinal = 3)
public void setLocation(int[] location) {
this.location = location;
}
}
}
| 1,402 |
1,539 | {"desktop":{"Blog.url":null,"DOS.url":null,"Doom.url":null,"Explorer.url":null},"docs":{"welcome.odt":null},"favicon.ico":null,"fonts":{"SF-Regular.woff2":null,"segmdl2.woff2":null,"segoeui.woff2":null},"games":{"doom.jsdos":null,"jazz.jsdos":null,"keen4.jsdos":null},"icons":{"files":{"js.svg":null,"unknown.svg":null},"games":{"doom.png":null,"jazz.png":null,"keen.png":null},"programs":{"blog.jpeg":null,"dos.png":null,"explorer.png":null,"webodf.png":null,"winamp.png":null}},"libs":{"js-dos":{"dos.jsdos":null,"js-dos.js":null,"wworker.js":null,"wworker.wasm":null},"vanta.waves.min.js":null,"webodf.js":null},"mp3":{"demo.mp3":null},"skins":{"SpyAMP_Pro.wsz":null},"start":{"<NAME>.url":null,"<NAME>.url":null,"WebODF.url":null,"Winamp.url":null}} | 304 |
4,071 | package com.github.ompc.greys.core.util;
import org.apache.commons.io.IOUtils;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
/**
 * A simple netcat-style wrapper (GaNetCat).<br/>
 * Please don't look at this code too closely... it was not written by me...
* Created by vlinux on 16/2/4.
*/
public class GaNetCat {
public static void main(String... args) throws IOException {
final InputStream is = System.in;
final OutputStream os = System.out;
final Socket socket = new Socket();
try {
socket.connect(new InetSocketAddress(args[0], Integer.valueOf(args[1])));
final InputStream nis = socket.getInputStream();
final OutputStream nos = socket.getOutputStream();
final byte[] dataArray = new byte[1024];
int length;
// do write
do {
length = is.read(dataArray);
if (length <= 0) {
break;
}
nos.write(dataArray, 0, length);
nos.flush();
} while (length > 0);
// do read
do {
length = nis.read(dataArray);
if (length == 1
&& dataArray[0] == 0x04) {
os.flush();
break;
}
if (length <= 0) {
break;
}
os.write(dataArray, 0, length);
} while (length > 0);
} finally {
IOUtils.closeQuietly(is);
IOUtils.closeQuietly(os);
try {
socket.close();
} catch (IOException e) {
// ignore
}
}
}
}
| 977 |
1,428 | #include <iostream>
#include <fstream>
#include <vector>
#include <string>
#include <algorithm>
#include <cstdlib> // for exit()
std::vector<std::string> readFile(const std::string& bookdir){
std::ifstream file;
file.open(bookdir);
if(!file){
std::cerr << "file does not exist" << std::endl;
exit(1);
}
std::string word;
std::vector<std::string> temp;
while(file>>word){
temp.push_back(word);
}
file.close();
return temp;
}
bool findInVector(const std::vector<std::string>& wordList, const std::string& find){
int count = std::count(wordList.begin(), wordList.end(), find);
return (count > 0);
}
int main() {
std::string filename, wordToFind;
std::cout << "Enter in a file name (with extension): ";
std::cin >> filename;
std::vector<std::string> words = readFile(filename);
std::cout << "Enter in a word/name to find: ";
std::cin >> wordToFind;
bool existsInFile = findInVector(words, wordToFind);
if(existsInFile)
std::cout << wordToFind << " is found in the file." << std::endl;
else
std::cout << wordToFind << " is not found in the file." << std::endl;
return 0;
}
| 460 |
2,360 | <reponame>kkauder/spack<gh_stars>1000+
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.util.string import plural
def test_plural():
assert plural(0, 'thing') == '0 things'
assert plural(1, 'thing') == '1 thing'
assert plural(2, 'thing') == '2 things'
assert plural(1, 'thing', 'wombats') == '1 thing'
assert plural(2, 'thing', 'wombats') == '2 wombats'
| 190 |
355 | // -*- tab-width: 4; -*-
// vi: set sw=2 ts=4 sts=4 expandtab:
// Copyright 2019-2020 The Khronos Group Inc.
// SPDX-License-Identifier: Apache-2.0
#include <zstd.h>
#include "ktxapp.h"
#include <unordered_map>
#include <algorithm>
template<typename T>
struct clampedOption
{
clampedOption(T& option, T min_v, T max_v) :
option(option),
min(min_v),
max(max_v)
{
}
void clear()
{
option = 0;
}
operator T() const
{
return option;
}
T operator= (T v)
{
option = clamp<T>(v, min, max);
return option;
}
T& option;
T min;
T max;
};
/**
* @memberof ktxTexture
* @ingroup write
* @~English
* @brief Creates valid ASTC block dimension from string.
*
* @return Valid ktx_pack_astc_block_dimension_e from string
*/
ktx_pack_astc_block_dimension_e
astcBlockDimension(const char* block_size) {
static std::unordered_map<std::string, ktx_pack_astc_block_dimension_e>
astc_blocks_mapping{{"4x4", KTX_PACK_ASTC_BLOCK_DIMENSION_4x4},
{"5x4", KTX_PACK_ASTC_BLOCK_DIMENSION_5x4},
{"5x5", KTX_PACK_ASTC_BLOCK_DIMENSION_5x5},
{"6x5", KTX_PACK_ASTC_BLOCK_DIMENSION_6x5},
{"6x6", KTX_PACK_ASTC_BLOCK_DIMENSION_6x6},
{"8x5", KTX_PACK_ASTC_BLOCK_DIMENSION_8x5},
{"8x6", KTX_PACK_ASTC_BLOCK_DIMENSION_8x6},
{"10x5", KTX_PACK_ASTC_BLOCK_DIMENSION_10x5},
{"10x6", KTX_PACK_ASTC_BLOCK_DIMENSION_10x6},
{"8x8", KTX_PACK_ASTC_BLOCK_DIMENSION_8x8},
{"10x8", KTX_PACK_ASTC_BLOCK_DIMENSION_10x8},
{"10x10", KTX_PACK_ASTC_BLOCK_DIMENSION_10x10},
{"12x10", KTX_PACK_ASTC_BLOCK_DIMENSION_12x10},
{"12x12", KTX_PACK_ASTC_BLOCK_DIMENSION_12x12},
{"3x3x3", KTX_PACK_ASTC_BLOCK_DIMENSION_3x3x3},
{"4x3x3", KTX_PACK_ASTC_BLOCK_DIMENSION_4x3x3},
{"4x4x3", KTX_PACK_ASTC_BLOCK_DIMENSION_4x4x3},
{"4x4x4", KTX_PACK_ASTC_BLOCK_DIMENSION_4x4x4},
{"5x4x4", KTX_PACK_ASTC_BLOCK_DIMENSION_5x4x4},
{"5x5x4", KTX_PACK_ASTC_BLOCK_DIMENSION_5x5x4},
{"5x5x5", KTX_PACK_ASTC_BLOCK_DIMENSION_5x5x5},
{"6x5x5", KTX_PACK_ASTC_BLOCK_DIMENSION_6x5x5},
{"6x6x5", KTX_PACK_ASTC_BLOCK_DIMENSION_6x6x5},
{"6x6x6", KTX_PACK_ASTC_BLOCK_DIMENSION_6x6x6}};
auto opt = astc_blocks_mapping.find(block_size);
if (opt != astc_blocks_mapping.end())
return opt->second;
return KTX_PACK_ASTC_BLOCK_DIMENSION_6x6;
}
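// Added usage sketch: strings present in the table map directly, anything else falls
// back to the 6x6 default, e.g.
//   astcBlockDimension("10x5") == KTX_PACK_ASTC_BLOCK_DIMENSION_10x5
//   astcBlockDimension("7x7")  == KTX_PACK_ASTC_BLOCK_DIMENSION_6x6   // unknown -> default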
/**
* @memberof ktxTexture
* @ingroup write
* @~English
* @brief Creates valid ASTC quality from string.
*
 * @return Valid ktx_pack_astc_quality_levels_e from string
*/
ktx_pack_astc_quality_levels_e
astcQualityLevel(const char *quality) {
static std::unordered_map<std::string,
ktx_pack_astc_quality_levels_e> astc_quality_mapping{
{"fastest", KTX_PACK_ASTC_QUALITY_LEVEL_FASTEST},
{"fast", KTX_PACK_ASTC_QUALITY_LEVEL_FAST},
{"medium", KTX_PACK_ASTC_QUALITY_LEVEL_MEDIUM},
{"thorough", KTX_PACK_ASTC_QUALITY_LEVEL_THOROUGH},
{"exhaustive", KTX_PACK_ASTC_QUALITY_LEVEL_EXHAUSTIVE}
};
auto opt = astc_quality_mapping.find(quality);
if (opt != astc_quality_mapping.end())
return opt->second;
return KTX_PACK_ASTC_QUALITY_LEVEL_MEDIUM;
}
/**
* @memberof ktxTexture
* @ingroup write
* @~English
* @brief Creates valid ASTC mode from string.
*
* @return Valid ktx_pack_astc_mode_e from string
*/
ktx_pack_astc_encoder_mode_e
astcEncoderMode(const char* mode) {
if (strcmp(mode, "ldr") == 0)
return KTX_PACK_ASTC_ENCODER_MODE_LDR;
else if (strcmp(mode, "hdr") == 0)
return KTX_PACK_ASTC_ENCODER_MODE_HDR;
return KTX_PACK_ASTC_ENCODER_MODE_DEFAULT;
}
/*
// Markdown doesn't work in files included by snipped{doc} or include{doc}
// so the table below has to be laboriously done in html.
//! [scApp options]
<dl>
<dt>--encode <astc|etc1s|uastc></dt>
<dd>Compress the image data to ASTC, transcodable ETC1S / BasisLZ or
high-quality transcodable UASTC format. Implies @b --t2.
With each encoding option the following encoder specific options
become valid, otherwise they are ignored.</dd>
<dl>
<dt>astc:</dt>
<dd>Create a texture in high-quality ASTC format.</dd>
<dt>--astc_blk_d <XxY|XxYxZ></dt>
<dd>Specify which block dimension to use for compressing the textures.
e.g. @b --astc_blk_d 6x5 for 2D or @b --astc_blk_d 6x6x6 for 3D.
6x6 is default for 2D.
<table>
<tr><th>Supported 2D block dimensions are:</th></tr>
<tr><td>4x4</td> <td>8.00 bpp</td></tr>
<tr><td>5x4</td> <td>6.40 bpp</td></tr>
<tr><td>5x5</td> <td>5.12 bpp</td></tr>
<tr><td>6x5</td> <td>4.27 bpp</td></tr>
<tr><td>6x6</td> <td>3.56 bpp</td></tr>
<tr><td>8x5</td> <td>3.20 bpp</td></tr>
<tr><td>8x6</td> <td>2.67 bpp</td></tr>
<tr><td>10x5</td> <td>2.56 bpp</td></tr>
<tr><td>10x6</td> <td>2.13 bpp</td></tr>
<tr><td>8x8</td> <td>2.00 bpp</td></tr>
<tr><td>10x8</td> <td>1.60 bpp</td></tr>
<tr><td>10x10</td> <td>1.28 bpp</td></tr>
<tr><td>12x10</td> <td>1.07 bpp</td></tr>
<tr><td>12x12</td> <td>0.89 bpp</td></tr>
<tr><th>Supported 3D block dimensions are:</th></tr>
<tr><td>3x3x3</td> <td>4.74 bpp</td></tr>
<tr><td>4x3x3</td> <td>3.56 bpp</td></tr>
<tr><td>4x4x3</td> <td>2.67 bpp</td></tr>
<tr><td>4x4x4</td> <td>2.00 bpp</td></tr>
<tr><td>5x4x4</td> <td>1.60 bpp</td></tr>
<tr><td>5x5x4</td> <td>1.28 bpp</td></tr>
<tr><td>5x5x5</td> <td>1.02 bpp</td></tr>
<tr><td>6x5x5</td> <td>0.85 bpp</td></tr>
<tr><td>6x6x5</td> <td>0.71 bpp</td></tr>
<tr><td>6x6x6</td> <td>0.59 bpp</td></tr>
</table></dd>
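    Each ASTC block encodes to 128 bits regardless of its dimensions, so the
    bit rates above are simply 128 / (X*Y) for 2D and 128 / (X*Y*Z) for 3D
    blocks, e.g. 6x5 gives 128/30 = 4.27 bpp and 6x6x6 gives 128/216 = 0.59 bpp.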
<dt>--astc_mode <ldr|hdr></dt>
<dd>Specify which encoding mode to use. LDR is the default unless the input
    image is 16-bit, in which case the default is HDR.</dd>
<dt>--astc_quality <level></dt>
<dd>The quality level configures the quality-performance tradeoff for
the compressor; more complete searches of the search space improve
image quality at the expense of compression time. Default is 'medium'.
The quality level can be set between fastest (0) and exhaustive (100) via the
following fixed quality presets:
<table>
<tr><th>Level </th> <th> Quality </th></tr>
<tr><td>fastest </td> <td>(equivalent to quality = 0) </td></tr>
<tr><td>fast </td> <td>(equivalent to quality = 10) </td></tr>
<tr><td>medium </td> <td>(equivalent to quality = 60) </td></tr>
<tr><td>thorough </td> <td>(equivalent to quality = 98) </td></tr>
<tr><td>exhaustive </td> <td>(equivalent to quality = 100) </td></tr>
</table>
</dd>
</dl>
<dl>
<dt>etc1s:</dt>
<dd>Supercompress the image data with ETC1S / BasisLZ.
RED images will become RGB with RED in each component. RG images
will have R in the RGB part and G in the alpha part of the
compressed texture. When set, the following BasisLZ-related
options become valid, otherwise they are ignored.</dd>
<dt>--no_multithreading</dt>
<dd>Disable multithreading. Deprecated. For backward compatibility.
Use @b --threads 1 instead.</dd>
<dt>--clevel <level></dt>
<dd>ETC1S / BasisLZ compression level, an encoding speed vs. quality
tradeoff. Range is [0,5], default is 1. Higher values are slower,
but give higher quality.</dd>
<dt>--qlevel <level></dt>
<dd>ETC1S / BasisLZ quality level. Range is [1,255]. Lower gives
better compression/lower quality/faster. Higher gives less
compression/higher quality/slower. @b --qlevel automatically
determines values for @b --max_endpoints, @b --max_selectors,
@b --endpoint_rdo_threshold and @b --selector_rdo_threshold for the
target quality level. Setting these options overrides the values
determined by @b --qlevel which defaults to 128 if neither it nor
both of @b --max_endpoints and @b --max_selectors have been set.
Note that both of @b --max_endpoints and @b --max_selectors
must be set for them to have any effect. If all three options
are set, a warning will be issued that @b --qlevel will be ignored.
Note also that @b --qlevel will only determine values for
@b --endpoint_rdo_threshold and @b --selector_rdo_threshold when
its value exceeds 128, otherwise their defaults will be used.</dd>
<dt>--max_endpoints <arg></dt>
<dd>Manually set the maximum number of color endpoint clusters. Range
is [1,16128]. Default is 0, unset.</dd>
<dt>--endpoint_rdo_threshold <arg></dt>
<dd>Set endpoint RDO quality threshold. The default is 1.25. Lower
is higher quality but less quality per output bit (try
[1.0,3.0]). This will override the value chosen by @b --qlevel.</dd>
<dt>--max_selectors <arg></dt>
<dd>Manually set the maximum number of color selector clusters from
[1,16128]. Default is 0, unset.</dd>
<dt>--selector_rdo_threshold <arg></dt>
<dd>Set selector RDO quality threshold. The default is 1.25. Lower
is higher quality but less quality per output bit (try
[1.0,3.0]). This will override the value chosen by @b --qlevel.</dd>
<dt>--separate_rg_to_color_alpha</dt>
<dd>Separates the input R and G channels to RGB and A (for tangent
space XY normal maps). Only needed with 3 or 4 component input
images.</dd>
<dt>--no_endpoint_rdo</dt>
<dd>Disable endpoint rate distortion optimizations. Slightly faster,
less noisy output, but lower quality per output bit. Default is
to do endpoint RDO.</dd>
<dt>--no_selector_rdo</dt>
<dd>Disable selector rate distortion optimizations. Slightly faster,
less noisy output, but lower quality per output bit. Default is
to do selector RDO.</dd>
</dl>
<dl>
<dt>uastc:</dt>
<dd>Create a texture in high-quality transcodable UASTC format.</dd>
<dt>--uastc_quality <level></dt>
<dd>This optional parameter selects a speed vs quality
tradeoff as shown in the following table:
<table>
<tr><th>Level</th> <th>Speed</th> <th>Quality</th></tr>
<tr><td>0 </td><td> Fastest </td><td> 43.45dB</td></tr>
<tr><td>1 </td><td> Faster </td><td> 46.49dB</td></tr>
<tr><td>2 </td><td> Default </td><td> 47.47dB</td></tr>
<tr><td>3 </td><td> Slower </td><td> 48.01dB</td></tr>
<tr><td>4 </td><td> Very slow </td><td> 48.24dB</td></tr>
</table>
You are strongly encouraged to also specify @b --zcmp to losslessly
compress the UASTC data. This and any LZ-style compression can
be made more effective by conditioning the UASTC texture data
using the Rate Distortion Optimization (RDO) post-process stage.
When uastc encoding is set the following options become available
for controlling RDO:</dd>
<dt>--uastc_rdo_l [<lambda>]</dt>
<dd>Enable UASTC RDO post-processing and optionally set UASTC RDO
quality scalar (lambda) to @e lambda. Lower values yield higher
quality/larger LZ compressed files, higher values yield lower
quality/smaller LZ compressed files. A good range to try is
[.25,10]. For normal maps a good range is [.25,.75]. The full
range is [.001,10.0]. Default is 1.0.
Note that previous versions used the @b --uastc_rdo_q option which
was removed because the RDO algorithm changed.</dd>
<dt>--uastc_rdo_d <dictsize></dt>
<dd>Set UASTC RDO dictionary size in bytes. Default is 4096. Lower
values=faster, but give less compression. Range is [64,65536].</dd>
<dt>--uastc_rdo_b <scale></dt>
<dd>Set UASTC RDO max smooth block error scale. Range is [1.0,300.0].
Default is 10.0, 1.0 is disabled. Larger values suppress more
artifacts (and allocate more bits) on smooth blocks.</dd>
<dt>--uastc_rdo_s <deviation></dt>
<dd>Set UASTC RDO max smooth block standard deviation. Range is
[.01,65536.0]. Default is 18.0. Larger values expand the range
of blocks considered smooth.</dd>
<dt>--uastc_rdo_f</dt>
<dd>Do not favor simpler UASTC modes in RDO mode.</dd>
<dt>--uastc_rdo_m</dt>
<dd>Disable RDO multithreading (slightly higher compression,
deterministic).</dd>
</dl>
<dt>--normal_mode</dt>
<dd>For ASTC encoder '@b --encode astc' assumes the input texture is
a three component linear LDR normal map storing unit length
normals as (R=X, G=Y, B=Z). The output will be a two component
X+Y normal map stored as (RGB=X, A=Y), optimized for angular
error instead of simple PSNR. The Z component can be recovered
programmatically in shader code by using the equation:
<pre>
nml.xy = texture(...).ga; // Load in [0,1]
nml.xy = nml.xy * 2.0 - 1.0; // Unpack to [-1,1]
nml.z = sqrt(1 - dot(nml.xy, nml.xy)); // Compute Z
</pre>
For ETC1S encoder '@b --encode etc1s' tunes codec parameters for
better quality on normal maps (no selector RDO, no endpoint RDO).
Only valid for linear textures.</dd>
<dt>--no_sse</dt>
<dd>Forbid use of the SSE instruction set. Ignored if CPU does not
support SSE. Only the Basis Universal compressor uses SSE.</dd>
<dt>--bcmp</dt>
<dd>Deprecated. Use '@b --encode etc1s' instead.</dd>
<dt>--uastc [<level>]</dt>
<dd>Deprecated. Use '@b --encode uastc' instead.</dd>
<dt>--zcmp [<compressionLevel>]</dt>
<dd>Supercompress the data with Zstandard. Implies @b --t2. Can be used
with data in any format except ETC1S / BasisLZ. Most
effective with RDO-conditioned UASTC or uncompressed formats. The
optional compressionLevel range is 1 - 22 and the default is 3.
Lower values=faster but give less compression. Values above 20
should be used with caution as they require more memory.</dd>
<dt>--threads <count></dt>
<dd>Explicitly set the number of threads to use during compression.
By default, ETC1S / BasisLZ and ASTC compression will use the number of
threads reported by thread::hardware_concurrency or 1 if value
returned is 0.</dd>
<dt>--verbose</dt>
<dd>Print encoder/compressor activity status to stdout. Currently
only the astc, etc1s and uastc encoders emit status.</dd>
</dl>
@snippet{doc} ktxapp.h ktxApp options
In case of ambiguity, such as when the last option is one with an optional
parameter, separate options from file names with " -- ".
Any specified ETC1S / BasisLZ and supercompression options are recorded in
the metadata item @c KTXwriterScParams in the output file.
//! [scApp options]
*/
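// Added example (a hypothetical command line; the concrete tool name, e.g. toktx,
// is an assumption since this header only defines the shared scApp base class):
//   toktx --encode uastc --uastc_quality 2 --uastc_rdo_l 1.0 --zcmp 19 out.ktx2 in.png
// encodes to UASTC, RDO-conditions the blocks, then supercompresses them with Zstandard.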
class scApp : public ktxApp {
protected:
struct commandOptions : public ktxApp::commandOptions {
struct basisOptions : public ktxBasisParams {
// The remaining numeric fields are clamped within the Basis
// library.
clampedOption<ktx_uint32_t> threadCount;
clampedOption<ktx_uint32_t> qualityLevel;
clampedOption<ktx_uint32_t> maxEndpoints;
clampedOption<ktx_uint32_t> maxSelectors;
clampedOption<ktx_uint32_t> uastcRDODictSize;
clampedOption<float> uastcRDOQualityScalar;
clampedOption<float> uastcRDOMaxSmoothBlockErrorScale;
clampedOption<float> uastcRDOMaxSmoothBlockStdDev;
basisOptions() :
threadCount(ktxBasisParams::threadCount, 1, 10000),
qualityLevel(ktxBasisParams::qualityLevel, 1, 255),
maxEndpoints(ktxBasisParams::maxEndpoints, 1, 16128),
maxSelectors(ktxBasisParams::maxSelectors, 1, 16128),
uastcRDODictSize(ktxBasisParams::uastcRDODictSize, 256, 65536),
uastcRDOQualityScalar(ktxBasisParams::uastcRDOQualityScalar,
0.001f, 50.0f),
uastcRDOMaxSmoothBlockErrorScale(
ktxBasisParams::uastcRDOMaxSmoothBlockErrorScale,
1.0f, 300.0f),
uastcRDOMaxSmoothBlockStdDev(
ktxBasisParams::uastcRDOMaxSmoothBlockStdDev,
0.01f, 65536.0f)
{
uint32_t tc = thread::hardware_concurrency();
if (tc == 0) tc = 1;
threadCount.max = tc;
threadCount = tc;
structSize = sizeof(ktxBasisParams);
// - 1 is to match what basisu_tool does (since 1.13).
compressionLevel = KTX_ETC1S_DEFAULT_COMPRESSION_LEVEL - 1;
qualityLevel.clear();
maxEndpoints.clear();
endpointRDOThreshold = 0.0f;
maxSelectors.clear();
selectorRDOThreshold = 0.0f;
normalMap = false;
separateRGToRGB_A = false;
preSwizzle = false;
noEndpointRDO = false;
noSelectorRDO = false;
uastc = false; // Default to ETC1S.
uastcRDO = false;
uastcFlags = KTX_PACK_UASTC_LEVEL_DEFAULT;
uastcRDODictSize.clear();
uastcRDOQualityScalar.clear();
uastcRDODontFavorSimplerModes = false;
uastcRDONoMultithreading = false;
noSSE = false;
verbose = false; // Default to quiet operation.
for (int i = 0; i < 4; i++) inputSwizzle[i] = 0;
}
};
struct astcOptions : public ktxAstcParams {
clampedOption<ktx_uint32_t> threadCount;
clampedOption<ktx_uint32_t> blockDimension;
clampedOption<ktx_uint32_t> mode;
clampedOption<ktx_uint32_t> qualityLevel;
astcOptions() :
threadCount(ktxAstcParams::threadCount, 1, 10000),
blockDimension(ktxAstcParams::blockDimension, 0, KTX_PACK_ASTC_BLOCK_DIMENSION_MAX),
mode(ktxAstcParams::mode, 0, KTX_PACK_ASTC_ENCODER_MODE_MAX),
qualityLevel(ktxAstcParams::qualityLevel, 0, KTX_PACK_ASTC_QUALITY_LEVEL_MAX)
{
uint32_t tc = thread::hardware_concurrency();
if (tc == 0) tc = 1;
threadCount.max = tc;
threadCount = tc;
structSize = sizeof(ktxAstcParams);
blockDimension.clear();
blockDimension = KTX_PACK_ASTC_BLOCK_DIMENSION_6x6;
mode.clear();
qualityLevel.clear();
normalMap = false;
}
};
int ktx2;
int etc1s;
int zcmp;
int astc;
ktx_bool_t normalMode;
clamped<ktx_uint32_t> zcmpLevel;
clamped<ktx_uint32_t> threadCount;
struct basisOptions bopts;
struct astcOptions astcopts;
commandOptions() :
zcmpLevel(ZSTD_CLEVEL_DEFAULT, 1U, 22U),
threadCount(std::max(1U, thread::hardware_concurrency()) , 1U, 10000U)
{
ktx2 = false;
etc1s = false;
zcmp = false;
astc = false;
normalMode = false;
}
};
commandOptions& options;
const string scparamKey = "KTXwriterScParams";
string scparams;
virtual bool processOption(argparser& parser, int opt);
enum HasArg { eNone, eOptional, eRequired };
void captureOption(const argparser& parser, HasArg hasArg);
void validateOptions();
public:
scApp(string& version, string& defaultVersion, scApp::commandOptions& options);
const string& getParamsStr() {
if (!scparams.empty() && *(scparams.end()-1) == ' ')
scparams.erase(scparams.end()-1);
return scparams;
}
void setEncoder(string encoding) {
if (encoding == "astc")
options.astc = 1;
else if (encoding == "etc1s")
options.etc1s = 1;
else if (encoding == "uastc")
options.bopts.uastc = 1;
}
void usage()
{
cerr <<
" --encode <astc|etc1s|uastc>\n"
" Compress the image data to ASTC, transcodable ETC1S / BasisLZ or\n"
" high-quality transcodable UASTC format. Implies --t2.\n"
" With each encoding option the following encoder specific options\n"
" become valid, otherwise they are ignored.\n\n"
" astc:\n"
" Create a texture in high-quality ASTC format.\n"
" --astc_blk_d <XxY|XxYxZ>\n"
" Specify which block dimension to use for compressing the textures.\n"
" e.g. --astc_blk_d 6x5 for 2D or --astc_blk_d 6x6x6 for 3D.\n"
" 6x6 is default for 2D.\n\n"
" Supported 2D block dimensions are:\n\n"
" 4x4: 8.00 bpp 10x5: 2.56 bpp\n"
" 5x4: 6.40 bpp 10x6: 2.13 bpp\n"
" 5x5: 5.12 bpp 8x8: 2.00 bpp\n"
" 6x5: 4.27 bpp 10x8: 1.60 bpp\n"
" 6x6: 3.56 bpp 10x10: 1.28 bpp\n"
" 8x5: 3.20 bpp 12x10: 1.07 bpp\n"
" 8x6: 2.67 bpp 12x12: 0.89 bpp\n\n"
" Supported 3D block dimensions are:\n\n"
" 3x3x3: 4.74 bpp 5x5x4: 1.28 bpp\n"
" 4x3x3: 3.56 bpp 5x5x5: 1.02 bpp\n"
" 4x4x3: 2.67 bpp 6x5x5: 0.85 bpp\n"
" 4x4x4: 2.00 bpp 6x6x5: 0.71 bpp\n"
" 5x4x4: 1.60 bpp 6x6x6: 0.59 bpp\n"
" --astc_mode <ldr|hdr>\n"
" Specify which encoding mode to use. LDR is the default unless the input.\n"
" image is 16-bit in which case the default is HDR.\n"
" --astc_quality <level>\n"
" The quality level configures the quality-performance tradeoff for\n"
" the compressor; more complete searches of the search space improve\n"
" image quality at the expense of compression time. Default is 'medium'\n"
" The quality level can be set to fastest (0) and thorough (100) via the \n"
" following fixed quality presets:\n\n"
" Level | Quality\n"
" ---------- | -----------------------------\n"
" fastest | (equivalent to quality = 0)\n"
" fast | (equivalent to quality = 10)\n"
" medium | (equivalent to quality = 60)\n"
" thorough | (equivalent to quality = 98)\n"
" exhaustive | (equivalent to quality = 100)\n"
" etc1s:\n"
" Supercompress the image data with ETC1S / BasisLZ.\n"
" RED images will become RGB with RED in each component. RG images\n"
" will have R in the RGB part and G in the alpha part of the\n"
" compressed texture. When set, the following BasisLZ-related\n"
" options become valid, otherwise they are ignored.\n\n"
" --no_multithreading\n"
" Disable multithreading. Deprecated. For backward compatibility.\n"
" Use --threads 1 instead.\n"
" --clevel <level>\n"
" ETC1S / BasisLZ compression level, an encoding speed vs. quality\n"
" tradeoff. Range is [0,5], default is 1. Higher values are slower,\n"
" but give higher quality.\n"
" --qlevel <level>\n"
" ETC1S / BasisLZ quality level. Range is [1,255]. Lower gives\n"
" better compression/lower quality/faster. Higher gives less\n"
" compression/higher quality/slower. --qlevel automatically\n"
" determines values for --max_endpoints, --max-selectors,\n"
" --endpoint_rdo_threshold and --selector_rdo_threshold for the\n"
" target quality level. Setting these options overrides the values\n"
" determined by -qlevel which defaults to 128 if neither it nor\n"
" both of --max_endpoints and --max_selectors have been set.\n"
"\n"
" Note that both of --max_endpoints and --max_selectors\n"
" must be set for them to have any effect. If all three options\n"
" are set, a warning will be issued that --qlevel will be ignored.\n"
"\n"
" Note also that --qlevel will only determine values for\n"
" --endpoint_rdo_threshold and --selector_rdo_threshold when\n"
" its value exceeds 128, otherwise their defaults will be used.\n"
" --max_endpoints <arg>\n"
" Manually set the maximum number of color endpoint clusters. Range\n"
" is [1,16128]. Default is 0, unset.\n"
" --endpoint_rdo_threshold <arg>\n"
" Set endpoint RDO quality threshold. The default is 1.25. Lower\n"
" is higher quality but less quality per output bit (try\n"
" [1.0,3.0]). This will override the value chosen by --qlevel.\n"
" --max_selectors <arg>\n"
" Manually set the maximum number of color selector clusters from\n"
" [1,16128]. Default is 0, unset.\n"
" --selector_rdo_threshold <arg>\n"
" Set selector RDO quality threshold. The default is 1.25. Lower\n"
" is higher quality but less quality per output bit (try\n"
" [1.0,3.0]). This will override the value chosen by --qlevel.\n"
" --separate_rg_to_color_alpha\n"
" Separates the input R and G channels to RGB and A (for tangent\n"
" space XY normal maps). Only needed with 3 or 4 component input\n"
" images.\n"
" --no_endpoint_rdo\n"
" Disable endpoint rate distortion optimizations. Slightly faster,\n"
" less noisy output, but lower quality per output bit. Default is\n"
" to do endpoint RDO.\n"
" --no_selector_rdo\n"
" Disable selector rate distortion optimizations. Slightly faster,\n"
" less noisy output, but lower quality per output bit. Default is\n"
" to do selector RDO.\n\n"
" uastc:\n"
" Create a texture in high-quality transcodable UASTC format.\n"
" --uastc_quality <level>\n"
" This optional parameter selects a speed vs quality\n"
" tradeoff as shown in the following table:\n"
"\n"
" Level | Speed | Quality\n"
" ----- | --------- | -------\n"
" 0 | Fastest | 43.45dB\n"
" 1 | Faster | 46.49dB\n"
" 2 | Default | 47.47dB\n"
" 3 | Slower | 48.01dB\n"
" 4 | Very slow | 48.24dB\n"
"\n"
" You are strongly encouraged to also specify --zcmp to losslessly\n"
" compress the UASTC data. This and any LZ-style compression can\n"
" be made more effective by conditioning the UASTC texture data\n"
" using the Rate Distortion Optimization (RDO) post-process stage.\n"
" When uastc encoding is set the following options become available\n"
" for controlling RDO:\n\n"
" --uastc_rdo_l [<lambda>]\n"
" Enable UASTC RDO post-processing and optionally set UASTC RDO\n"
" quality scalar (lambda) to @e lambda. Lower values yield higher\n"
" quality/larger LZ compressed files, higher values yield lower\n"
" quality/smaller LZ compressed files. A good range to try is\n"
" [.25,10]. For normal maps a good range is [.25,.75]. The full\n"
" range is [.001,10.0]. Default is 1.0.\n"
"\n"
" Note that previous versions used the --uastc_rdo_q option which\n"
" was removed because the RDO algorithm changed.\n"
" --uastc_rdo_d <dictsize>\n"
" Set UASTC RDO dictionary size in bytes. Default is 4096. Lower\n"
" values=faster, but give less compression. Range is [64,65536].\n"
" --uastc_rdo_b <scale>\n"
" Set UASTC RDO max smooth block error scale. Range is [1.0,300.0].\n"
" Default is 10.0, 1.0 is disabled. Larger values suppress more\n"
" artifacts (and allocate more bits) on smooth blocks.\n"
" --uastc_rdo_s <deviation>\n"
" Set UASTC RDO max smooth block standard deviation. Range is\n"
" [.01,65536.0]. Default is 18.0. Larger values expand the range\n"
" of blocks considered smooth.<dd>\n"
" --uastc_rdo_f\n"
" Do not favor simpler UASTC modes in RDO mode.\n"
" --uastc_rdo_m\n"
" Disable RDO multithreading (slightly higher compression,\n"
" deterministic).\n\n"
" --normal_mode\n"
" For ASTC encoder '--encode astc' assumes the input texture is\n"
" a three component linear LDR normal map storing unit length\n"
" normals as (R=X, G=Y, B=Z). The output will be a two component\n"
" X+Y normal map stored as (RGB=X, A=Y), optimized for angular\n"
" error instead of simple PSNR. The Z component can be recovered\n"
" programmatically in shader code by using the equation:\n\n"
" nml.xy = texture(...).ga; // Load in [0,1]\n"
" nml.xy = nml.xy * 2.0 - 1.0; // Unpack to [-1,1]\n"
" nml.z = sqrt(1 - dot(nml.xy, nml.xy)); // Compute Z\n\n"
" For ETC1S encoder '--encode etc1s' tunes codec parameters for \n"
" better quality on normal maps (no selector RDO, no endpoint RDO).\n"
" Only valid for linear textures.\n"
" --no_sse\n"
" Forbid use of the SSE instruction set. Ignored if CPU does not\n"
" support SSE. Only the Basis Universal compressor uses SSE.\n"
" --bcmp\n"
" Deprecated. Use '--encode etc1s' instead.\n"
" --uastc [<level>]\n"
" Deprecated. Use '--encode uastc' instead.\n"
" --zcmp [<compressionLevel>]\n"
" Supercompress the data with Zstandard. Implies --t2. Can be used\n"
" with data in any format except ETC1S / BasisLZ. Most\n"
" effective with RDO-conditioned UASTC or uncompressed formats. The\n"
" optional compressionLevel range is 1 - 22 and the default is 3.\n"
" Lower values=faster but give less compression. Values above 20\n"
" should be used with caution as they require more memory.\n"
" --threads <count>\n"
" Explicitly set the number of threads to use during compression.\n"
" By default, ETC1S / BasisLZ and ASTC compression will use the number of\n"
" threads reported by thread::hardware_concurrency or 1 if value\n"
" returned is 0.\n"
" --verbose\n"
" Print encoder/compressor activity status to stdout. Currently\n"
" only the astc, etc1s and uastc encoders emit status.\n"
"\n";
ktxApp::usage();
cerr << endl <<
"In case of ambiguity, such as when the last option is one with an optional\n"
"parameter, options can be separated from file names with \" -- \".\n"
"\n"
"Any specified ETC1S / BasisLZ and supercompression options are recorded in\n"
"the metadata item @c KTXwriterScParams in the output file.\n"
<< endl;
}
};
scApp::scApp(string& version, string& defaultVersion,
scApp::commandOptions& options)
: ktxApp(version, defaultVersion, options), options(options)
{
argparser::option my_option_list[] = {
{ "zcmp", argparser::option::optional_argument, NULL, 'z' },
{ "no_multithreading", argparser::option::no_argument, NULL, 'N' },
{ "threads", argparser::option::required_argument, NULL, 't' },
{ "clevel", argparser::option::required_argument, NULL, 'c' },
{ "qlevel", argparser::option::required_argument, NULL, 'q' },
{ "max_endpoints", argparser::option::required_argument, NULL, 'e' },
{ "endpoint_rdo_threshold", argparser::option::required_argument, NULL, 'E' },
{ "max_selectors", argparser::option::required_argument, NULL, 'u' },
{ "selector_rdo_threshold", argparser::option::required_argument, NULL, 'S' },
{ "normal_mode", argparser::option::no_argument, NULL, 'n' },
{ "separate_rg_to_color_alpha", argparser::option::no_argument, NULL, 1000 },
{ "no_endpoint_rdo", argparser::option::no_argument, NULL, 1001 },
{ "no_selector_rdo", argparser::option::no_argument, NULL, 1002 },
{ "no_sse", argparser::option::no_argument, NULL, 1011 },
{ "uastc_quality", argparser::option::required_argument, NULL, 1003 },
{ "uastc_rdo_l", argparser::option::optional_argument, NULL, 1004 },
{ "uastc_rdo_d", argparser::option::required_argument, NULL, 1005 },
{ "uastc_rdo_b", argparser::option::optional_argument, NULL, 1006 },
{ "uastc_rdo_s", argparser::option::optional_argument, NULL, 1007 },
{ "uastc_rdo_f", argparser::option::no_argument, NULL, 1008 },
{ "uastc_rdo_m", argparser::option::no_argument, NULL, 1009 },
{ "verbose", argparser::option::no_argument, NULL, 1010 },
{ "astc_blk_d", argparser::option::required_argument, NULL, 1012 },
{ "astc_mode", argparser::option::required_argument, NULL, 1013 },
{ "astc_quality", argparser::option::required_argument, NULL, 1014 },
{ "encode", argparser::option::required_argument, NULL, 1015 },
// Deprecated options
{ "bcmp", argparser::option::no_argument, NULL, 'b' },
{ "uastc", argparser::option::optional_argument, NULL, 1016 }
};
const int lastOptionIndex = sizeof(my_option_list)
/ sizeof(argparser::option);
option_list.insert(option_list.begin(), my_option_list,
my_option_list + lastOptionIndex);
short_opts += "z;Nt:c:q:e:E:u:S:nb";
}
void
scApp::captureOption(const argparser& parser, HasArg hasArg)
{
uint32_t indexDecrement = 1;
bool captureArg = false;
if ((hasArg == eOptional && parser.optarg.size() > 0) || hasArg == eRequired)
indexDecrement = 2;
scparams += parser.argv[parser.optind - indexDecrement] + " ";
if (captureArg)
scparams += parser.optarg + " ";
}
void
scApp::validateOptions() {
if ((options.bopts.maxEndpoints == 0) ^ (options.bopts.maxSelectors == 0)) {
cerr << name << ": Both or neither of --max_endpoints and"
<< " --max_selectors must be specified." << endl;
usage();
exit(1);
}
if (options.bopts.qualityLevel
&& (options.bopts.maxEndpoints + options.bopts.maxSelectors)) {
cerr << name << ": Warning: ignoring --qlevel as it, --max_endpoints"
<< " and --max_selectors are all set." << endl;
}
}
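// Added example of the checks above: "--qlevel 200 --max_endpoints 8000" alone fails
// (its partner --max_selectors is missing); adding "--max_selectors 8000" passes but
// prints the warning that --qlevel will be ignored.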
// Derived classes' processOption will have to explicitly call this one
// and should call it after processing their own options.
bool
scApp::processOption(argparser& parser, int opt)
{
bool hasArg = false;
bool capture = true;
switch (opt) {
case 'z':
if (options.etc1s) {
cerr << "Only one of '--encode etc1s|--bcmp' and --zcmp can be specified."
<< endl;
usage();
exit(1);
}
options.zcmp = 1;
options.ktx2 = 1;
if (parser.optarg.size() > 0) {
options.zcmpLevel = strtoi(parser.optarg.c_str());
hasArg = true;
}
break;
case 'c':
options.bopts.compressionLevel = strtoi(parser.optarg.c_str());
hasArg = true;
break;
case 'e':
options.bopts.maxEndpoints = strtoi(parser.optarg.c_str());
hasArg = true;
break;
case 'E':
options.bopts.endpointRDOThreshold = strtof(parser.optarg.c_str(), nullptr);
hasArg = true;
break;
case 'N':
options.threadCount = 1;
capture = false;
break;
case 'n':
options.normalMode = true;
break;
case 1001:
options.bopts.noEndpointRDO = 1;
break;
case 1002:
options.bopts.noSelectorRDO = 1;
break;
case 'q':
options.bopts.qualityLevel = strtoi(parser.optarg.c_str());
hasArg = true;
break;
case 1000:
options.bopts.separateRGToRGB_A = 1;
break;
case 'u':
options.bopts.maxSelectors = strtoi(parser.optarg.c_str());
hasArg = true;
break;
case 'S':
options.bopts.selectorRDOThreshold = strtof(parser.optarg.c_str(), nullptr);
hasArg = true;
break;
case 't':
options.threadCount = strtoi(parser.optarg.c_str());
capture = false;
break;
case 1003:
{
ktx_uint32_t level = strtoi(parser.optarg.c_str());
level = clamp<ktx_uint32_t>(level, 0, KTX_PACK_UASTC_MAX_LEVEL);
// Ensure the last one wins in case of multiple of these args.
options.bopts.uastcFlags = (unsigned int)~KTX_PACK_UASTC_LEVEL_MASK;
options.bopts.uastcFlags |= level;
hasArg = true;
}
break;
case 1004:
options.bopts.uastcRDO = true;
if (parser.optarg.size() > 0) {
options.bopts.uastcRDOQualityScalar =
strtof(parser.optarg.c_str(), nullptr);
hasArg = true;
}
break;
case 1005:
options.bopts.uastcRDODictSize = strtoi(parser.optarg.c_str());
hasArg = true;
break;
case 1006:
options.bopts.uastcRDOMaxSmoothBlockErrorScale =
strtof(parser.optarg.c_str(), nullptr);
hasArg = true;
break;
case 1007:
options.bopts.uastcRDOMaxSmoothBlockStdDev =
strtof(parser.optarg.c_str(), nullptr);
hasArg = true;
break;
case 1008:
options.bopts.uastcRDODontFavorSimplerModes = true;
break;
case 1009:
options.bopts.uastcRDONoMultithreading = true;
break;
case 1010:
options.bopts.verbose = true;
options.astcopts.verbose = true;
capture = false;
break;
case 1011:
options.bopts.noSSE = true;
capture = true;
break;
case 1012: // astc_blk_d
options.astcopts.blockDimension = astcBlockDimension(parser.optarg.c_str());
hasArg = true;
break;
case 1013: // astc_mode
options.astcopts.mode = astcEncoderMode(parser.optarg.c_str());
hasArg = true;
break;
case 1014: // astc_quality
options.astcopts.qualityLevel = astcQualityLevel(parser.optarg.c_str());
hasArg = true;
break;
case 'b':
if (options.zcmp) {
cerr << "Only one of --bcmp and --zcmp can be specified.\n"
<< "--bcmp is deprecated, use '--encode etc1s' instead."
<< endl;
usage();
exit(1);
}
if (options.bopts.uastc) {
cerr << "Only one of --bcmp and '--encode etc1s|--uastc' can be specified.\n"
<< "--bcmp is deprecated, use '--encode etc1s' instead."
<< endl;
usage();
exit(1);
}
options.etc1s = 1;
options.ktx2 = 1;
break;
case 1015:
setEncoder(parser.optarg);
options.ktx2 = 1;
break;
case 1016:
if (options.etc1s) {
cerr << "Only one of `--encode etc1s|--bcmp` and `--uastc [<level>]` can be specified."
<< endl;
usage();
exit(1);
}
options.bopts.uastc = 1;
options.ktx2 = 1;
if (parser.optarg.size() > 0) {
ktx_uint32_t level = strtoi(parser.optarg.c_str());
level = clamp<ktx_uint32_t>(level, 0, KTX_PACK_UASTC_MAX_LEVEL);
// Ensure the last one wins in case of multiple of these args.
options.bopts.uastcFlags = (unsigned int)~KTX_PACK_UASTC_LEVEL_MASK;
options.bopts.uastcFlags |= level;
hasArg = true;
}
break;
default:
return false;
}
if (capture) {
scparams += parser.argv[parser.optind - (hasArg ? 2 : 1)] + " ";
if (hasArg)
scparams += parser.optarg + " ";
}
return true;
}
| 23,826 |
27,066 | <filename>demo-oauth/oauth-authorization-server/src/test/java/com/xkcoding/oauth/oauth/ResourceOwnerPasswordGrantTests.java
package com.xkcoding.oauth.oauth;
import org.junit.jupiter.api.Test;
import org.springframework.security.oauth2.client.OAuth2RestTemplate;
import org.springframework.security.oauth2.client.token.grant.password.ResourceOwnerPasswordResourceDetails;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import java.util.Arrays;
import static com.xkcoding.oauth.oauth.AuthorizationServerInfo.getUrl;
import static org.junit.jupiter.api.Assertions.assertNotNull;
/**
* .
*
* @author <a href="https://echocow.cn">EchoCow</a>
* @date 2020-01-06 21:14
*/
public class ResourceOwnerPasswordGrantTests {
@Test
void testConnectDirectlyToResourceServer() {
assertNotNull(accessToken());
}
public static String accessToken() {
ResourceOwnerPasswordResourceDetails resource = new ResourceOwnerPasswordResourceDetails();
resource.setAccessTokenUri(getUrl("/oauth/token"));
resource.setClientId("oauth2");
resource.setClientSecret("oauth2");
resource.setId("oauth2");
resource.setScope(Arrays.asList("READ", "WRITE"));
resource.setUsername("admin");
resource.setPassword("<PASSWORD>");
OAuth2RestTemplate template = new OAuth2RestTemplate(resource);
OAuth2AccessToken accessToken = template.getAccessToken();
return accessToken.getValue();
}
}
| 536 |
453 | <gh_stars>100-1000
/*
* C library support files for the Blackfin processor
*
* Copyright (C) 2006 Analog Devices, Inc.
*
* The authors hereby grant permission to use, copy, modify, distribute,
* and license this software and its documentation for any purpose, provided
* that existing copyright notices are retained in all copies and that this
* notice is included verbatim in any distributions. No written agreement,
* license, or royalty fee is required for any of the authorized uses.
* Modifications to this software may be copyrighted by their authors
* and need not follow the licensing terms described here, provided that
* the new terms are clearly indicated on the first page of each file where
* they apply.
*/
#include <_ansi.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/fcntl.h>
#include <stdio.h>
#include <time.h>
#include <sys/time.h>
#include <sys/times.h>
#include "syscall.h"
#include <errno.h>
#include <reent.h>
#include <unistd.h>
register char *stack_ptr asm ("SP");
static inline int
do_syscall (int reason, void *arg)
{
int result, result2, errcode;
asm volatile ("excpt 0;"
: "=q0" (result),
"=q1" (result2),
"=q2" (errcode)
: "qA" (reason),
"q0" (arg)
: "memory", "CC");
errno = errcode;
return result;
}
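/* Added note: this is the debug-agent trap convention this libgloss port assumes.
   "excpt 0" raises an exception that the hosting agent services; the agent reads
   the syscall reason and the pointer to the argument block from the registers
   named by the asm constraints and hands back a result plus an errno value.
   The wrappers below, e.g. _read(), just pack their arguments into block[] and
   pass the matching SYS_* reason code. */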
int
_read (int file, char *ptr, int len)
{
int block[3];
block[0] = file;
block[1] = (int) ptr;
block[2] = len;
return do_syscall (SYS_read, block);
}
int
_lseek (int file, int ptr, int whence)
{
int block[3];
block[0] = file;
block[1] = ptr;
block[2] = whence;
return do_syscall (SYS_lseek, block);
}
int
_write (int file, char *ptr, int len)
{
int block[3];
block[0] = file;
block[1] = (int) ptr;
block[2] = len;
return do_syscall (SYS_write, block);
}
int
_open (const char *path, int flags)
{
int block[2];
block[0] = (int) path;
block[1] = flags;
return do_syscall (SYS_open, block);
}
int
_close (int file)
{
return do_syscall (SYS_close, &file);
}
void
_exit (int n)
{
do_syscall (SYS_exit, &n);
}
int
_kill (int n, int m)
{
int block[2];
block[0] = n;
block[1] = m;
return do_syscall (SYS_kill, block);
}
int
_getpid (int n)
{
return do_syscall (SYS_getpid, &n);
}
caddr_t
_sbrk (int incr)
{
extern char end; /* Defined by the linker. */
static char *heap_end;
char *prev_heap_end;
if (heap_end == NULL)
heap_end = &end;
prev_heap_end = heap_end;
if (heap_end + incr > stack_ptr)
{
/* Some of the libstdc++-v3 tests rely upon detecting
out of memory errors, so do not abort here. */
#if 0
extern void abort (void);
_write (1, "_sbrk: Heap and stack collision\n", 32);
abort ();
#else
errno = ENOMEM;
return (caddr_t) -1;
#endif
}
heap_end += incr;
return (caddr_t) prev_heap_end;
}
extern void memset (struct stat *, int, unsigned int);
int
_fstat (int file, struct stat *st)
{
int block[2];
block[0] = file;
block[1] = (int) st;
return do_syscall (SYS_fstat, block);
}
int _stat (const char *fname, struct stat *st)
{
int block[2];
block[0] = (int) fname;
block[1] = (int) st;
return do_syscall (SYS_stat, block);
}
int
_link (const char *existing, const char *new)
{
int block[2];
block[0] = (int) existing;
block[1] = (int) new;
return do_syscall (SYS_link, block);
}
int
_unlink (const char *path)
{
return do_syscall (SYS_unlink, path);
}
void
_raise (void)
{
return;
}
int
_gettimeofday (struct timeval *tv, void *tz)
{
tv->tv_usec = 0;
tv->tv_sec = do_syscall (SYS_time, 0);
return 0;
}
/* Return a clock that ticks at 100Hz. */
clock_t
_times (struct tms * tp)
{
return -1;
}
int
_isatty (int fd)
{
return 1;
}
int
_system (const char *s)
{
if (s == NULL)
return 0;
errno = ENOSYS;
return -1;
}
int
_rename (const char * oldpath, const char * newpath)
{
errno = ENOSYS;
return -1;
}
/* The application provides main(); declare it here so the calls below compile
   cleanly without relying on an implicit declaration. */
extern int main (int argc, char **argv);
static inline int
__setup_argv_for_main (int argc)
{
int block[2];
char **argv;
int i = argc;
argv = __builtin_alloca ((1 + argc) * sizeof (*argv));
argv[i] = NULL;
while (i--) {
block[0] = i;
argv[i] = __builtin_alloca (1 + do_syscall (SYS_argnlen, (void *)block));
block[1] = (int) argv[i];
do_syscall (SYS_argn, (void *)block);
}
return main (argc, argv);
}
int
__setup_argv_and_call_main ()
{
int argc = do_syscall (SYS_argc, 0);
if (argc <= 0)
return main (argc, NULL);
else
return __setup_argv_for_main (argc);
}
| 1,856 |
831 | /*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.memorysettings;
import static com.android.tools.idea.memorysettings.GradlePropertiesUtil.getGradleDaemonXmx;
import static com.android.tools.idea.memorysettings.GradlePropertiesUtil.getKotlinDaemonXmx;
import static com.android.tools.idea.memorysettings.GradlePropertiesUtil.setDaemonXmx;
import com.android.tools.idea.gradle.util.GradleProperties;
import com.intellij.testFramework.PlatformTestCase;
import java.io.File;
public class GradlePropertiesUtilTest extends PlatformTestCase {
public void testHasJvmArgs() throws Exception {
File propertiesFilePath = createTempFile("gradle.properties", "org.gradle.jvmargs=-Xms800M");
GradleProperties properties = new GradleProperties(propertiesFilePath);
assertTrue(GradlePropertiesUtil.hasJvmArgs(properties));
propertiesFilePath = createTempFile("gradle.properties", "");
properties = new GradleProperties(propertiesFilePath);
assertFalse(GradlePropertiesUtil.hasJvmArgs(properties));
propertiesFilePath = createTempFile("gradle.properties", "kotlin.code.style=official");
properties = new GradleProperties(propertiesFilePath);
assertFalse(GradlePropertiesUtil.hasJvmArgs(properties));
}
public void testNoDaemonXmx() throws Exception {
checkXmx("", MemorySettingsUtil.NO_XMX_IN_VM_ARGS, MemorySettingsUtil.NO_XMX_IN_VM_ARGS);
checkXmx("org.gradle.jvmargs=", MemorySettingsUtil.NO_XMX_IN_VM_ARGS, MemorySettingsUtil.NO_XMX_IN_VM_ARGS);
checkXmx("# org.gradle.jvmargs=-Xmx1024M", MemorySettingsUtil.NO_XMX_IN_VM_ARGS, MemorySettingsUtil.NO_XMX_IN_VM_ARGS);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1G\"", MemorySettingsUtil.NO_XMX_IN_VM_ARGS, 1024);
checkXmx("org.gradle.jvmargs=-Xmx2048m", 2048, MemorySettingsUtil.NO_XMX_IN_VM_ARGS);
}
private void checkXmx(String text, int expectedGradleXmx, int expectedKotlinXmx) throws Exception {
File propertiesFilePath = createTempFile("gradle.properties", text);
GradleProperties properties = new GradleProperties(propertiesFilePath);
assertEquals(expectedGradleXmx, getGradleDaemonXmx(properties));
assertEquals(expectedKotlinXmx, getKotlinDaemonXmx(properties));
}
public void testGetDaemonXmx() throws Exception {
checkXmx("org.gradle.jvmargs=-Xmx", -1, -1);
checkXmx("org.gradle.jvmargs=-XmxT", -1, -1);
checkXmx("org.gradle.jvmargs=-Xmx1a", -1, -1);
checkXmx("org.gradle.jvmargs=-Xmx1T", 1024 * 1024, -1);
checkXmx("org.gradle.jvmargs=-Xmx2t", 2 * 1024 * 1024, -1);
checkXmx("org.gradle.jvmargs=-Xmx1G", 1024, -1);
checkXmx("org.gradle.jvmargs=-Xmx4g", 4 * 1024, -1);
checkXmx("org.gradle.jvmargs=-Xmx1M", 1, -1);
checkXmx("org.gradle.jvmargs=-Xmx10m", 10, -1);
checkXmx("org.gradle.jvmargs=-Xmx1024K", 1, -1);
checkXmx("org.gradle.jvmargs=-Xmx2048k", 2, -1);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx\"", -1, -1);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-XmxT\"", -1, -1);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1a\"", -1, -1);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xms1G\"", -1, -1);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1T\"", -1, 1024 * 1024);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx2G,-Xms1G\"", -1, 2048);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1024m,-Xmx2G\"", -1, 2048);
checkXmx("org.gradle.jvmargs=-Xms1G -Xmx4G -Dkotlin.daemon.jvm.options=\"-Xms1G\"", 4096, -1);
checkXmx("org.gradle.jvmargs=-Xmx1G -Xmx4G -Dkotlin.daemon.jvm.options=\"-Xmx2G\"", 4096, 2048);
checkXmx("org.gradle.jvmargs=-Xmx4G -Dkotlin.daemon.jvm.options=\"-Xms1G,-Xmx2G\"", 4096, 2048);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xms1G,-Xmx2G\" -Xmx4G", 4096, 2048);
checkXmx("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1G,-Xmx2G\" -Xmx3G -Xmx4G", 4096, 2048);
}
public void testSetDaemonXmx() throws Exception {
checkXmxNewValue("", 10, -1);
checkXmxNewValue("#org.gradle.jvmargs=-Xms1280m", 10, -1);
checkXmxNewValue("org.gradle.jvmargs=-Xms1280m", 1024, -1);
checkXmxNewValue("org.gradle.jvmargs=-Xmx1280m", 2048, -1);
checkXmxNewValue("", -1, 1024);
checkXmxNewValue("#org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xms1G\"", -1, 20);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xms1G\"", -1, 1024);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1G\"", -1, 2048);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1G,-Xms512m\"", -1, 2048);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx512m,-Xmx1G\"", -1, 2048);
checkXmxNewValue("", 10, 20);
checkXmxNewValue("#org.gradle.jvmargs=-Xms1280m", 10, 20);
checkXmxNewValue("#org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xms1G\"", 10, 20);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xms1G\"", 512, 2048);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1G\"", 512, 2048);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx1G,-Xms512m\"", 512, 2048);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx512m,-Xmx1G\"", 512, 2048);
checkXmxNewValue("org.gradle.jvmargs=-Xmx1G -Dkotlin.daemon.jvm.options=\"-Xmx1G,-Xms512m\"", 512, 2048);
checkXmxNewValue("org.gradle.jvmargs=-Dkotlin.daemon.jvm.options=\"-Xmx512m,-Xmx1G\" -Xmx1G", 512, 2048);
}
private void checkXmxNewValue(String text, int newGradleXmx, int newKotlinXmx) throws Exception {
File propertiesFilePath = createTempFile("gradle.properties", text);
GradleProperties properties = new GradleProperties(propertiesFilePath);
setDaemonXmx(properties, newGradleXmx, newKotlinXmx);
assertEquals(newGradleXmx, getGradleDaemonXmx(properties));
assertEquals(newKotlinXmx, getKotlinDaemonXmx(properties));
}
public void testJvmArgsAfterSetDaemonXmx() throws Exception {
assertEquals("-Xmx1024M", setDaemonXmx("", 1024, -1));
assertEquals("-Xms512m -Xmx1024M", setDaemonXmx("-Xms512m", 1024, -1));
assertEquals("-Xmx1024M", setDaemonXmx("-Xmx512m", 1024, -1));
assertEquals("-Xms512m -Xmx2048M",
setDaemonXmx("-Xms512m -Xmx1G", 2048, -1));
assertEquals("-Xms512m -Xmx768m -Xmx2048M",
setDaemonXmx("-Xms512m -Xmx768m -Xmx1G", 2048, -1));
assertEquals("-Dkotlin.daemon.jvm.options=\"-Xmx1024M\"",
setDaemonXmx("", -1, 1024));
assertEquals("-Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx1024M\"",
setDaemonXmx("-Dkotlin.daemon.jvm.options=\"-Xms512m\"", -1, 1024));
assertEquals("-Dkotlin.daemon.jvm.options=\"-Xmx1024M\"",
setDaemonXmx("-Dkotlin.daemon.jvm.options=\"-Xmx512m\"", -1, 1024));
assertEquals("-Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx2048M\"",
setDaemonXmx("-Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx1G\"", -1, 2048));
assertEquals("-Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx768m,-Xmx2048M\"",
setDaemonXmx("-Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx768m,-Xmx1G\"", -1, 2048));
assertEquals("-Xmx512M -Dkotlin.daemon.jvm.options=\"-Xmx1024M\"",
setDaemonXmx("", 512, 1024));
assertEquals("-Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx1024M\" -Xmx512M",
setDaemonXmx("-Dkotlin.daemon.jvm.options=\"-Xms512m\"", 512, 1024));
assertEquals("-Dkotlin.daemon.jvm.options=\"-Xmx1024M\" -Xmx512M",
setDaemonXmx("-Dkotlin.daemon.jvm.options=\"-Xmx512m\"", 512, 1024));
assertEquals("-Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx2048M\" -Xmx1024M",
setDaemonXmx("-Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx1G\"", 1024, 2048));
assertEquals("-Xmx1024M -Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx2048M\"",
setDaemonXmx("-Xmx512M -Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx1G\"", 1024, 2048));
assertEquals("-Xms512m -Xmx1024M -Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx768m,-Xmx2048M\"",
setDaemonXmx("-Xms512m -Xmx768m -Dkotlin.daemon.jvm.options=\"-Xms512m,-Xmx768m,-Xmx1G\"", 1024, 2048));
}
}
| 3,904 |
513 | <gh_stars>100-1000
/*
* Copyright 2016-2020 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
package com.apps.adrcotfas.goodtime.Main;
import android.annotation.SuppressLint;
import android.app.Dialog;
import android.content.Context;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
import androidx.lifecycle.ViewModelProvider;
import com.apps.adrcotfas.goodtime.BL.SessionType;
import com.apps.adrcotfas.goodtime.R;
public class FinishedSessionDialog extends DialogFragment {
public interface Listener {
void onFinishedSessionDialogPositiveButtonClick(SessionType sessionType);
void onFinishedSessionDialogNeutralButtonClick(SessionType sessionType);
}
private Listener listener;
public FinishedSessionDialog() {
}
@Override
public void onAttach(@NonNull Context context) {
super.onAttach(context);
try {
listener = (Listener) getActivity();
} catch(ClassCastException e) {
throw new ClassCastException("hosting activity must implement FinishedSessionDialog::Listener");
}
}
@Override
public void onDetach() {
super.onDetach();
listener = null;
}
public static FinishedSessionDialog newInstance(Listener listener) {
FinishedSessionDialog dialog = new FinishedSessionDialog();
dialog.listener = listener;
return dialog;
}
@SuppressLint("ResourceType")
@NonNull
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
TimerActivityViewModel viewModel = new ViewModelProvider(requireActivity()).get(TimerActivityViewModel.class);
setCancelable(false);
AlertDialog.Builder builder = new AlertDialog.Builder(requireContext());
final SessionType sessionType = viewModel.dialogPendingType;
if (sessionType == SessionType.WORK) {
builder.setTitle(R.string.action_finished_session)
.setPositiveButton(R.string.action_start_break, (dialog, which)
-> listener.onFinishedSessionDialogPositiveButtonClick(sessionType))
.setNeutralButton(R.string.dialog_close, (dialog, which)
-> listener.onFinishedSessionDialogNeutralButtonClick(sessionType));
} else {
builder.setTitle(R.string.action_finished_break)
.setPositiveButton(R.string.action_start_work, (dialog, which)
-> listener.onFinishedSessionDialogPositiveButtonClick(sessionType))
.setNeutralButton(android.R.string.cancel, (dialog, which)
-> listener.onFinishedSessionDialogNeutralButtonClick(sessionType));
}
final Dialog d = builder
.setCancelable(false)
.create();
d.setCanceledOnTouchOutside(false);
return d;
}
}
| 1,311 |
436 | package com.qiyi.tm.demo.test;
import android.util.Log;
import com.qiyi.tm.demo.SumChecker;
import org.qiyi.basecore.taskmanager.ParallelTask;
import org.qiyi.basecore.taskmanager.TM;
import org.qiyi.basecore.taskmanager.Task;
import org.qiyi.basecore.taskmanager.other.TMLog;
/**
 * Verifies whether parallel sub-tasks are executed more than once, or end up not executed at all.
*/
public class ParaTest extends Test implements Test.RunCallback {
private final String TAG = "TM_ParaTest";
@Override
public void doTest() {
// testA();
// testParaM();
testPam2();
// testParam3();
}
private void testA() {
        // Run the parallel-task check 500 times on a background thread.
new Task() {
@Override
public void doTask() {
int p = 0;
while (p < 500) {
testCount();
p++;
}
}
}.postAsync();
}
private void testCount() {
final StateCheck sumChecker = new StateCheck(12);
ParallelTask parallelTask = new ParallelTask() {
}.addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(1);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 1);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(2);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(20);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 2);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(3);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(4);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 3);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(5);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 4);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(6);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 5);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(7);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 6);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(8);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(50);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 7);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(9);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 8);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(10);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 9);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(11);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 10);
}
}).addSubTask(new Runnable() {
@Override
public void run() {
sumChecker.add(0);
TMLog.e(TAG, "[[[[" + Thread.currentThread().getPriority());
try {
Thread.sleep(60);
} catch (InterruptedException e) {
e.printStackTrace();
}
TMLog.e(TAG, Thread.currentThread().getPriority() + "task run " + 11);
}
});
parallelTask.execute();
sumChecker.checkValids();
Log.d(TAG, "data is valid");
}
/**
     * Tasks that depend on an event, executed concurrently.
*/
private void testPam2() {
int p = 0;
while (p < 1) {
// getTask(time()).postAsync();
// getTask(10).postAsync();
int id = TM.genNewEventId();
getTask("A", time(), this).dependOn(id).executeSync();
getTask("B", time(), this).dependOn(id).executeSyncUI();
getTask("C", time(), this).dependOn(id).executeSync();
getTask("D", time(), this).dependOn(id).executeSyncUI();
getTask("E", time(), this).dependOn(id).executeSync();
getTask("F", time(), this).dependOn(id).executeSyncUI();
getTask("G", time(), this).dependOn(id).executeSync();
getTask("H", time(), this).dependOn(id).executeSync();
TM.triggerEvent(id);
Log.e(TAG, ">>>>> on finish >>>>" + cc);
// getTask(time()).postAsync();
// TaskManager.getInstance().dumpInfo();
p++;
}
}
private void testParam3() {
// TM.setMaxRunningThreadCount(2);
Task taskA = getTask("A", 20);
Task taskB = getTask("B", 10);
Task taskC = getTask("C", 2000);
Task taskD = getTask("D", time());
new ParallelTask()
.addSubTask(taskA)
.addSubTask(taskB)
.addSubTask(taskC)
.addSubTask(taskD)
.setTimeOut(-1)
.execute();
Log.d(TAG, "test param 3 done");
}
/**
     * Repeatedly verifies that all sub-tasks of a parallel task run to completion.
*/
private void testParaM() {
int p = 0;
while (p < 50) {
SumChecker checker = new SumChecker();
getTask(20).postAsync();
getTask(10).postAsync();
getTask(time()).postAsync();
getTask(10).postAsync();
Task taskA = getTask("A", time()).register(checker);
Task taskB = getTask("B", time()).register(checker);
Task taskC = getTask("C", time()).register(checker);
Task taskD = getTask("D", time()).register(checker);
Task taskE = getTask("E", time()).register(checker);
Task taskF = getTask("F", time()).register(checker);
Task taskG = getTask("G", time()).register(checker);
Task taskH = getTask("H", time()).register(checker);
Task taskI = getTask("E", time()).register(checker);
Task taskJ = getTask("F", time()).register(checker);
Task taskK = getTask("G", time()).register(checker);
Task taskL = getTask("H", time()).register(checker);
new ParallelTask()
.addSubTask(taskA)
.addSubTask(taskB)
.addSubTask(taskC)
.addSubTask(taskD)
.addSubTask(taskE)
.addSubTask(taskF)
.addSubTask(taskG)
.addSubTask(taskH)
.addSubTask(taskI)
.addSubTask(taskJ)
.addSubTask(taskK)
.addSubTask(taskL)
.setTimeOut(-1)
.execute();
checker.verriyfy();
Log.d(TAG, "DONE ----");
getTask(time()).postAsync();
getTask(200).postAsync();
p++;
}
}
int cc;
@Override
public synchronized void onRun() {
cc++;
}
}
| 5,812 |
1,163 | <filename>iree/compiler/Dialect/Modules/VMVX/IR/VMVXTypes.h<gh_stars>1000+
// Copyright 2021 The IREE Authors
//
// Licensed under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#ifndef IREE_COMPILER_DIALECT_MODULES_VMVX_IR_VMVXTYPES_H_
#define IREE_COMPILER_DIALECT_MODULES_VMVX_IR_VMVXTYPES_H_
#include <cstdint>
#include "iree/compiler/Dialect/Util/IR/UtilTypes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringSwitch.h"
#include "mlir/IR/BuiltinTypes.h"
#include "mlir/IR/OpDefinition.h"
#include "mlir/IR/TypeSupport.h"
#include "mlir/IR/Types.h"
#include "mlir/Support/LLVM.h"
// clang-format off: must be included after all LLVM/MLIR headers.
#include "iree/compiler/Dialect/Modules/VMVX/IR/VMVXEnums.h.inc" // IWYU pragma: export
// clang-format on
namespace mlir {
namespace iree_compiler {
namespace IREE {
namespace VMVX {
#include "iree/compiler/Dialect/Modules/VMVX/IR/VMVXOpInterface.h.inc" // IWYU pragma: export
} // namespace VMVX
} // namespace IREE
} // namespace iree_compiler
} // namespace mlir
#endif // IREE_COMPILER_DIALECT_MODULES_VMVX_IR_VMVXTYPES_H_
| 566 |
1,144 | // Generated Model - DO NOT CHANGE
package org.compiere.model;
import java.sql.ResultSet;
import java.util.Properties;
import javax.annotation.Nullable;
/** Generated Model for C_BPartner_Attribute
* @author metasfresh (generated)
*/
@SuppressWarnings("unused")
public class X_C_BPartner_Attribute extends org.compiere.model.PO implements I_C_BPartner_Attribute, org.compiere.model.I_Persistent
{
private static final long serialVersionUID = 14649709L;
/** Standard Constructor */
public X_C_BPartner_Attribute (final Properties ctx, final int C_BPartner_Attribute_ID, @Nullable final String trxName)
{
super (ctx, C_BPartner_Attribute_ID, trxName);
}
/** Load Constructor */
public X_C_BPartner_Attribute (final Properties ctx, final ResultSet rs, @Nullable final String trxName)
{
super (ctx, rs, trxName);
}
/** Load Meta Data */
@Override
protected org.compiere.model.POInfo initPO(final Properties ctx)
{
return org.compiere.model.POInfo.getPOInfo(Table_Name);
}
/**
* Attribute AD_Reference_ID=540749
* Reference name: C_BPartner_Attribute
*/
public static final int ATTRIBUTE_AD_Reference_ID=540749;
/** Verband D = K_Verband_D */
public static final String ATTRIBUTE_VerbandD = "K_Verband_D";
/** Firma = K_Firma */
public static final String ATTRIBUTE_Firma = "K_Firma";
/** Bildungsinstitut = K_Bildungsinstitut */
public static final String ATTRIBUTE_Bildungsinstitut = "K_Bildungsinstitut";
/** Schule = K_Schule */
public static final String ATTRIBUTE_Schule = "K_Schule";
/** Oberstufe = K_Oberstufe */
public static final String ATTRIBUTE_Oberstufe = "K_Oberstufe";
/** Gymnasium = K_Gymnasium */
public static final String ATTRIBUTE_Gymnasium = "K_Gymnasium";
/** BIZ_Berufsberatung = K_BIZ_Berufsberatung */
public static final String ATTRIBUTE_BIZ_Berufsberatung = "K_BIZ_Berufsberatung";
/** Partner = K_Partner */
public static final String ATTRIBUTE_Partner = "K_Partner";
/** Lieferant = K_Lieferant */
public static final String ATTRIBUTE_Lieferant = "K_Lieferant";
/** Behörden = K_Behörden */
public static final String ATTRIBUTE_Behoerden = "K_Behörden";
/** Sponsor = K_Sponsor */
public static final String ATTRIBUTE_Sponsor = "K_Sponsor";
/** Massenmedien = K_Massenmedien */
public static final String ATTRIBUTE_Massenmedien = "K_Massenmedien";
@Override
public void setAttribute (final java.lang.String Attribute)
{
set_Value (COLUMNNAME_Attribute, Attribute);
}
@Override
public java.lang.String getAttribute()
{
return get_ValueAsString(COLUMNNAME_Attribute);
}
@Override
public void setC_BPartner_Attribute_ID (final int C_BPartner_Attribute_ID)
{
if (C_BPartner_Attribute_ID < 1)
set_ValueNoCheck (COLUMNNAME_C_BPartner_Attribute_ID, null);
else
set_ValueNoCheck (COLUMNNAME_C_BPartner_Attribute_ID, C_BPartner_Attribute_ID);
}
@Override
public int getC_BPartner_Attribute_ID()
{
return get_ValueAsInt(COLUMNNAME_C_BPartner_Attribute_ID);
}
@Override
public void setC_BPartner_ID (final int C_BPartner_ID)
{
if (C_BPartner_ID < 1)
set_ValueNoCheck (COLUMNNAME_C_BPartner_ID, null);
else
set_ValueNoCheck (COLUMNNAME_C_BPartner_ID, C_BPartner_ID);
}
@Override
public int getC_BPartner_ID()
{
return get_ValueAsInt(COLUMNNAME_C_BPartner_ID);
}
} | 1,300 |
308 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#import <CommonCrypto/CommonCryptor.h>
#import <Foundation/Foundation.h>
#import "MSACEncrypter.h"
NS_ASSUME_NONNULL_BEGIN
static int const kMSACEncryptionAlgorithm = kCCAlgorithmAES;
static NSString *const kMSACEncryptionAlgorithmName = @"AES";
static NSString *const kMSACEncryptionCipherMode = @"CBC";
static NSString *const kMSACEncryptionAlgorithmAesAndEtmName = @"AES/HmacSHA256";
static const int kMSACEncryptionSubkeyLength = 32;
static const int kMSACAuthenticationSubkeyLength = 16;
// One year.
static NSTimeInterval const kMSACEncryptionKeyLifetimeInSeconds = 365 * 24 * 60 * 60;
static int const kMSACEncryptionKeySize = kCCKeySizeAES256;
static NSString *const kMSACEncryptionKeyMetadataKey = @"EncryptionKeyMetadata";
static NSString *const kMSACEncryptionKeyTagAlternate = @"kMSEncryptionKeyTagAlternate";
static NSString *const kMSACEncryptionKeyTagOriginal = @"kMSEncryptionKeyTag";
// This separator is used for key metadata, as well as between metadata that is prepended to the cipher text.
static NSString *const kMSACEncryptionMetadataInternalSeparator = @"/";
// This separator is only used between the metadata and cipher text of the encryption result.
static NSString *const kMSACEncryptionMetadataSeparator = @":";
static NSString *const kMSACEncryptionPaddingMode = @"PKCS7";
@interface MSACEncrypter ()
@end
NS_ASSUME_NONNULL_END
| 458 |
651 | package org.spongycastle.jsse.provider;
import javax.net.ssl.HandshakeCompletedEvent;
import javax.net.ssl.HandshakeCompletedListener;
class HandshakeCompletedListenerAdapter
implements HandshakeCompletedListener
{
protected final HandshakeCompletedListener listener;
HandshakeCompletedListenerAdapter(HandshakeCompletedListener listener)
{
this.listener = listener;
}
public void handshakeCompleted(final HandshakeCompletedEvent event)
{
CallbackUtil.safeCallback(new Runnable(){
public void run()
{
listener.handshakeCompleted(event);
}
});
}
@Override
public boolean equals(Object obj)
{
return (obj instanceof HandshakeCompletedListenerAdapter)
&& ((HandshakeCompletedListenerAdapter)obj).listener == listener;
}
@Override
public int hashCode()
{
return System.identityHashCode(listener);
}
}
| 359 |
319 | #!/usr/bin/env python
# we're using python 3.x style print but want it to work in python 2.x.
from __future__ import print_function
import os
import argparse
import shlex
import sys
import warnings
import copy
import imp
import ast
nodes = imp.load_source('', 'steps/ctc/nnet2/components.py')
nnet3_train_lib = imp.load_source('ntl', 'steps/nnet3/nnet3_train_lib.py')
chain_lib = imp.load_source('ncl', 'steps/nnet3/chain/nnet3_chain_lib.py')
def GetArgs():
# we add compulsory arguments as named arguments for readability
parser = argparse.ArgumentParser(description="Writes config files and variables "
"for RNN's creation and training",
epilog="See steps/ctc/nnet2_train.sh for example.")
# Only one of these arguments can be specified, and one of them has to
# be compulsorily specified
feat_group = parser.add_mutually_exclusive_group(required = True)
feat_group.add_argument("--feat-dim", type=int,
help="Raw feature dimension, e.g. 13")
feat_group.add_argument("--feat-dir", type=str,
help="Feature directory, from which we derive the feat-dim")
# only one of these arguments can be specified
ivector_group = parser.add_mutually_exclusive_group(required = False)
ivector_group.add_argument("--ivector-dim", type=int,
help="iVector dimension, e.g. 100", default=0)
ivector_group.add_argument("--ivector-dir", type=str,
help="iVector dir, which will be used to derive the ivector-dim ", default=None)
num_target_group = parser.add_mutually_exclusive_group(required = True)
num_target_group.add_argument("--num-targets", type=int,
help="number of network targets (e.g. num-pdf-ids/num-leaves)")
num_target_group.add_argument("--ali-dir", type=str,
help="alignment directory, from which we derive the num-targets")
num_target_group.add_argument("--tree-dir", type=str,
help="directory with final.mdl, from which we derive the num-targets")
# CNN options
parser.add_argument('--cnn.layer', type=str, action='append', dest = "cnn_layer",
help="CNN parameters at each CNN layer, e.g. --filt-x-dim=3 --filt-y-dim=8 "
"--filt-x-step=1 --filt-y-step=1 --num-filters=256 --pool-x-size=1 --pool-y-size=3 "
"--pool-z-size=1 --pool-x-step=1 --pool-y-step=3 --pool-z-step=1, "
"when CNN layers are used, no LDA will be added", default = None)
parser.add_argument("--cnn.bottleneck-dim", type=int, dest = "cnn_bottleneck_dim",
help="Output dimension of the linear layer at the CNN output "
"for dimension reduction, e.g. 256."
"The default zero means this layer is not needed.", default=0)
parser.add_argument("--cnn.cepstral-lifter", type=float, dest = "cepstral_lifter",
help="The factor used for determining the liftering vector in the production of MFCC. "
"User has to ensure that it matches the lifter used in MFCC generation, "
"e.g. 22.0", default=22.0)
# General neural network options
parser.add_argument("--splice-indexes", type=str, required = True,
help="Splice indexes at each layer, e.g. '-3,-2,-1,0,1,2,3' "
"If CNN layers are used the first set of splice indexes will be used as input "
"to the first CNN layer and later splice indexes will be interpreted as indexes "
"for the TDNNs.")
parser.add_argument("--add-lda", type=str, action=nnet3_train_lib.StrToBoolAction,
help="If \"true\" an LDA matrix computed from the input features "
"(spliced according to the first set of splice-indexes) will be used as "
"the first Affine layer. This affine layer's parameters are fixed during training. "
"If --cnn.layer is specified this option will be forced to \"false\".",
default=False, choices = ["false", "true"])
parser.add_argument("--lda-dim", type=int, default=0,
help="dimension of lda transform.")
parser.add_argument("--include-log-softmax", type=str, action=nnet3_train_lib.StrToBoolAction,
help="add the final softmax layer ", default=False, choices = ["false", "true"])
parser.add_argument("--xent-regularize", type=float,
help="For chain models, if nonzero, add a separate output for cross-entropy "
"regularization (with learning-rate-factor equal to the inverse of this)",
default=0.0)
parser.add_argument("--xent-separate-forward-affine", type=str, action=nnet3_train_lib.StrToBoolAction,
help="if using --xent-regularize, gives it separate last-but-one weight matrix",
default=False, choices = ["false", "true"])
parser.add_argument("--final-layer-normalize-target", type=float,
help="RMS target for final layer (set to <1 if final layer learns too fast",
default=1.0)
parser.add_argument("--subset-dim", type=int, default=0,
help="dimension of the subset of units to be sent to the central frame")
parser.add_argument("--self-repair-scale", type=float,
help="A non-zero value activates the self-repair mechanism in the sigmoid and tanh non-linearities of the LSTM", default=None)
parser.add_argument("--use-presoftmax-prior-scale", type=str, action=nnet3_train_lib.StrToBoolAction,
help="if true, a presoftmax-prior-scale is added",
choices=['true', 'false'], default = True)
parser.add_argument("config_dir",
help="Directory to write config files and variables")
parser.add_argument("--batch-normalize", type=str, action=nnet3_train_lib.StrToBoolAction,
help="If \"true\" use batch normalize before nonlinearity(only config ReLU now).",
default=False, choices = ["false", "true"])
parser.add_argument("--objective-type", type=str,
help = "the type of objective; i.e. CTC or linear",
default="CTC", choices = ["linear", "CTC"])
parser.add_argument("--affine-type", type=str,
help = "the type of active; i.e. AffineComponent or NaturalGradientAffineComponent",
default="native", choices = ["native", "natural"])
parser.add_argument("--hidden-dim", type=int, default=1024,
help="dimension of DNN layers")
parser.add_argument("--active-type", type=str,
help = "the type of active; i.e. ReLU or Sigmoid",
default="relu", choices = ["relu", "sigmoid"])
parser.add_argument("--model.type", type=str, dest = "model_type",
help="model type, google|DS2|FT",
default="goole", choices = ["google", "DS2", "FT"])
parser.add_argument("--model.bidirectional", type=str, action=nnet3_train_lib.StrToBoolAction, dest = 'rnn_bidirectional',
help="bidirectional.", default=True, choices = ["false", "true"])
parser.add_argument("--model.rnn-mode", type=int, dest = "rnn_mode",
help="CUDNN_RNN_RELU = 0, CUDNN_RNN_TANH = 1, CUDNN_LSTM = 2, CUDNN_GRU = 3", default = 2)
parser.add_argument("--model.rnn-first", type=int, dest = "rnn_first", action=nnet3_train_lib.StrToBoolAction,
help="", default = True, choices = ["false", "true"])
parser.add_argument("--model.rnn-layers", type=int, dest = "rnn_layers",
help="RNN layers number", default = 2)
parser.add_argument("--model.rnn-max-seq-length", type=int, dest = "rnn_max_seq_length",
help="RNN layer max seq length", default = 1000)
parser.add_argument("--model.cudnn-layers", type=int, dest = "cudnn_layers",
help="RNN layers In one CuDNNRecurrentComponent", default = 1)
parser.add_argument("--model.cell-dim", type=int, dest = "rnn_cell_dim",
help="RNN layers number", default = 512)
parser.add_argument("--model.param-stddev", type=float, dest = "param_stddev",
help="RNN params stddev", default = 0.02)
parser.add_argument("--model.bias-stddev", type=float, dest = "bias_stddev",
help="RNN bias stddev", default = 0.2)
parser.add_argument("--model.clipping-threshold", type=float, dest = "clipping_threshold",
help="clipping threshold value", default = 30.0)
parser.add_argument("--model.norm-based-clipping", type=str, action=nnet3_train_lib.StrToBoolAction,
dest = 'norm_based_clipping',
help="norm_based_clipping.", default=True, choices = ["false", "true"])
parser.add_argument("--dropout-proportion", type=float, dest = "dropout_proportion",
help="dropout proportion value", default = 0.0)
print(' '.join(sys.argv))
args = parser.parse_args()
args = CheckArgs(args)
return args
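# Illustrative invocation (not part of the original script; the script path and
# values below are assumptions, chosen only to show which options are required):
#
#   python steps/nnet3/make_configs.py \
#     --feat-dim 40 --num-targets 3000 \
#     --splice-indexes "-2,-1,0,1,2 0 -1,1" \
#     exp/ctc/configs
#
# Exactly one of --feat-dim/--feat-dir, exactly one of
# --num-targets/--ali-dir/--tree-dir, --splice-indexes and the positional
# config_dir must be given; everything else falls back to the defaults above.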
def CheckArgs(args):
if not os.path.exists(args.config_dir):
os.makedirs(args.config_dir)
## Check arguments.
if args.feat_dir is not None:
args.feat_dim = nnet3_train_lib.GetFeatDim(args.feat_dir)
if args.ali_dir is not None:
args.num_targets = nnet3_train_lib.GetNumberOfLeaves(args.ali_dir)
elif args.tree_dir is not None:
args.num_targets = chain_lib.GetNumberOfLeaves(args.tree_dir)
if args.ivector_dir is not None:
args.ivector_dim = nnet3_train_lib.GetIvectorDim(args.ivector_dir)
if not args.feat_dim > 0:
raise Exception("feat-dim has to be postive")
if not args.num_targets > 0:
print(args.num_targets)
raise Exception("num_targets has to be positive")
if not args.ivector_dim >= 0:
raise Exception("ivector-dim has to be non-negative")
if (args.subset_dim < 0):
raise Exception("--subset-dim has to be non-negative")
if args.hidden_dim is not None:
args.hidden_dim = args.hidden_dim
if args.xent_separate_forward_affine and getattr(args, "add_final_sigmoid", False):  # add_final_sigmoid is not defined by this parser; guard against AttributeError
raise Exception("It does not make sense to have --add-final-sigmoid=true when xent-separate-forward-affine is true")
if args.add_lda and args.cnn_layer is not None:
args.add_lda = False
warnings.warn("--add-lda is set to false as CNN layers are used.")
return args
def PrintConfig(file_name, config_lines):
f = open(file_name, 'w')
f.write("\n".join(config_lines['components'])+"\n")
f.close()
def ParseSpliceString(splice_indexes):
splice_array = []
left_context = 0
right_context = 0
split1 = splice_indexes.split()  # we already checked the string is nonempty.
if len(split1) < 1:
raise Exception("invalid splice-indexes argument, too short: "
+ splice_indexes)
try:
for string in split1:
split2 = string.split(",")
if len(split2) < 1:
raise Exception("invalid splice-indexes argument, too-short element: "
+ splice_indexes)
int_list = []
for int_str in split2:
int_list.append(int(int_str))
if not int_list == sorted(int_list):
raise Exception("elements of splice-indexes must be sorted: "
+ splice_indexes)
left_context += -int_list[0]
right_context += int_list[-1]
splice_array.append(int_list)
except ValueError as e:
raise Exception("invalid splice-indexes argument " + splice_indexes + str(e))
left_context = max(0, left_context)
right_context = max(0, right_context)
return {'left_context':left_context,
'right_context':right_context,
'splice_indexes':splice_array,
'num_hidden_layers':len(splice_array)
}
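# Worked example (illustrative, not from the original sources): for the splice
# string "-2,-1,0,1,2 0 -1,1" the loop above produces three layers with
# left_context = 2 + 0 + 1 = 3 and right_context = 2 + 0 + 1 = 3, i.e.
# {'left_context': 3, 'right_context': 3,
#  'splice_indexes': [[-2, -1, 0, 1, 2], [0], [-1, 1]], 'num_hidden_layers': 3}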
# The function signature of MakeConfigs is changed frequently as it is intended for local use in this script.
def MakeConfigs(config_dir, splice_indexes_string,
feat_dim, ivector_dim, num_targets, add_lda, lda_dim,
affine_type, active_type, hidden_dim,
cudnn_layers,
dropout_proportion,
param_stddev, bias_stddev,
self_repair_scale,
batch_normalize, objective_type,
model_type, rnn_bidirectional, rnn_mode, rnn_layers, rnn_max_seq_length, rnn_cell_dim,
clipping_threshold, norm_based_clipping, rnn_first = True):
parsed_splice_output = ParseSpliceString(splice_indexes_string.strip())
num_hidden_layers = parsed_splice_output['num_hidden_layers']
splice_indexes = parsed_splice_output['splice_indexes']
config_lines = {'components':[]}
config_files = {}
# Add the init config lines for estimating the preconditioning matrices
init_config_lines = copy.deepcopy(config_lines)
init_config_lines['components'].insert(0, '# Config file for initializing neural network prior to')
init_config_lines['components'].insert(0, '# preconditioning matrix computation')
prev_layer_output = nodes.AddInputLayer(init_config_lines, feat_dim, splice_indexes[0], ivector_dim)
if add_lda:
if lda_dim <= 0:
lda_dim = prev_layer_output['dimension']
prev_layer_output = nodes.AddLdaLayer(init_config_lines, lda_dim, config_dir + '/../lda.mat')
rnn_layer_output = None
if model_type != 'DS2':
if model_type == 'FT':
if active_type.lower() == 'relu':
prev_layer_output = nodes.AddAffRelNormLayer(init_config_lines, prev_layer_output, hidden_dim,
affine_type = affine_type,
self_repair_scale = self_repair_scale,
batch_normalize = batch_normalize)
else:
assert False
prev_layer_output = nodes.AddClipGradientLayer(init_config_lines, prev_layer_output,
clipping_threshold = clipping_threshold,
norm_based_clipping = norm_based_clipping,
self_repair_scale_clipgradient = None)
first_rnn_layer = nodes.AddRnnLayer(init_config_lines, prev_layer_output, rnn_cell_dim,
num_layers = cudnn_layers,
max_seq_length = rnn_max_seq_length,
bidirectional = rnn_bidirectional,
rnn_mode = rnn_mode,
clipping_threshold = clipping_threshold,
dropout_proportion = dropout_proportion,
norm_based_clipping = norm_based_clipping,
self_repair_scale_clipgradient = None)
rnn_layer_output = first_rnn_layer
nodes.AddAffineLayer(init_config_lines, first_rnn_layer, num_targets)
else:
assert False, "Not sppourt DS2, now."
config_files[config_dir + '/init.config'] = init_config_lines
# if cnn_layer is not None:
# prev_layer_output = AddCnnLayers(config_lines, cnn_layer, cnn_bottleneck_dim, cepstral_lifter, config_dir,
# feat_dim, splice_indexes[0], ivector_dim)
left_context = 0
right_context = 0
# we moved the first splice layer to before the LDA..
# so the input to the first affine layer is going to [0] index
splice_indexes[0] = [0]
for i in range(0, num_hidden_layers-1):
# make the intermediate config file for layerwise discriminative training
local_output_dim = hidden_dim
first_add_rnn = rnn_first and i < rnn_layers - 1
last_add_rnn = not rnn_first and i >= num_hidden_layers - rnn_layers
if model_type == 'google' or first_add_rnn or last_add_rnn:
# add RNN layer
assert rnn_layer_output
rnn_layer_output = nodes.AddRnnLayer(config_lines, rnn_layer_output, rnn_cell_dim,
num_layers = cudnn_layers,
max_seq_length = rnn_max_seq_length,
bidirectional = rnn_bidirectional,
rnn_mode = rnn_mode,
param_stddev = param_stddev, bias_stddev = bias_stddev,
clipping_threshold = clipping_threshold,
dropout_proportion = dropout_proportion,
norm_based_clipping = norm_based_clipping,
self_repair_scale_clipgradient = None)
elif model_type == "FT":
assert False, "Error here."
if active_type.lower() == 'relu':
prev_layer_output = nodes.AddAffRelNormLayer(config_lines, prev_layer_output, local_output_dim,
affine_type = affine_type,
self_repair_scale = self_repair_scale,
batch_normalize = batch_normalize)
else:
assert False
else:
assert False, "Error here."
# elif model_type == "DS2":
# prev_layer_output = nodes.AddAffRelNormLayer(config_lines, "Tdnn_{0}".format(i),
# prev_layer_output, local_nonlin_output_dim,
# self_repair_scale = self_repair_scale,
# norm_target_rms = 1.0 if i < num_hidden_layers -1 else final_layer_normalize_target,
# batch_normalize = batch_normalize)
config_files['{0}/layer{1}.config'.format(config_dir, i+1)] = config_lines
config_lines = {'components':[]}
config_lines = {'components':['SoftmaxComponent dim={0}'.format(num_targets)]}
config_files['{0}/softmax.config'.format(config_dir)] = config_lines
left_context += int(parsed_splice_output['left_context'])
right_context += int(parsed_splice_output['right_context'])
# write the files used by other scripts like steps/nnet3/get_egs.sh
f = open(config_dir + "/vars", "w")
print('left_context=' + str(left_context), file=f)
print('right_context=' + str(right_context), file=f)
print('num_hidden_layers=' + str(num_hidden_layers), file=f)
print('num_targets=' + str(num_targets), file=f)
print('add_lda=' + ('true' if add_lda else 'false'), file=f)
print('objective_type=' + objective_type, file=f)
f.close()
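# With the illustrative splice string "-2,-1,0,1,2 0 -1,1", 3000 targets and the
# default options, the vars file written above would read:
#
#   left_context=3
#   right_context=3
#   num_hidden_layers=3
#   num_targets=3000
#   add_lda=false
#   objective_type=CTC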
# printing out the configs
# init.config is used for lda-mllt training
for key in config_files.keys():
PrintConfig(key, config_files[key])
def Main():
args = GetArgs()
MakeConfigs(config_dir = args.config_dir,
splice_indexes_string = args.splice_indexes,
feat_dim = args.feat_dim, ivector_dim = args.ivector_dim,
num_targets = args.num_targets,
add_lda = args.add_lda, lda_dim = args.lda_dim,
affine_type = args.affine_type, active_type = args.active_type, hidden_dim = args.hidden_dim,
dropout_proportion = args.dropout_proportion, cudnn_layers = args.cudnn_layers,
rnn_max_seq_length = args.rnn_max_seq_length,
param_stddev = args.param_stddev, bias_stddev = args.bias_stddev,
self_repair_scale = args.self_repair_scale,
objective_type = args.objective_type,
batch_normalize = args.batch_normalize,
model_type = args.model_type, rnn_bidirectional = args.rnn_bidirectional,
rnn_mode = args.rnn_mode, rnn_layers = args.rnn_layers,
rnn_cell_dim = args.rnn_cell_dim,
clipping_threshold = args.clipping_threshold, norm_based_clipping = args.norm_based_clipping,
rnn_first = args.rnn_first)
if __name__ == "__main__":
Main()
| 10,083 |
10,225 | package io.quarkus.panache.common.exception;
public class PanacheQueryException extends RuntimeException {
public PanacheQueryException(String s) {
super(s);
}
}
| 61 |
1,040 | <filename>drivers/radio_labjack_ch2/src/lcm_i.c
// file: lcm_i.c
//
// lcm interface program.
//
#include <stdio.h>
#include <stdlib.h>
#include "globalvar.h"
#include "lcm_i.h"
#include "main.h"
lcm_t * pLCM;
lcm_t * lcm;
void lcm_hotrod_u_handler(const lcm_recv_buf_t *rbuf, const char* channel, const lcmt_hotrod_u *msg, void *user)
{
lock();
memcpy(&lcm_hotrod_u,msg,sizeof(lcmt_hotrod_u));
unlock();
}
int lcm_get_servo()
{
pLCM = lcm_create ("udpm://192.168.127.12:7667?ttl=0");
if (!pLCM)
return 1;
lcmt_hotrod_u_subscription_t * hotrod_sub = lcmt_hotrod_u_subscribe (pLCM, "hotrod_u", &lcm_hotrod_u_handler, NULL);
while(1)
lcm_handle (pLCM);
lcmt_hotrod_u_unsubscribe (pLCM, hotrod_sub);
lcm_destroy (pLCM);
return 0;
}
void lcm_publish_u_actual(double timestamp, double aileron, double elevator, double throttle, double rudder, double kp_height, double kd_height, double kp_yaw, double kd_yaw, double kp_pitch, double kd_pitch, double kp_roll, double kd_roll )
{
/////////////// THIS CODE IS NOT USED AT THE MOMENT //////////////
lcmt_hotrod_u msg;
// msg.timestamp=(int32_t) timestamp;
msg.timestamp= timestamp;
/*for(k=0;k<STATESIZE;k++)
{
printf("%f ",u[k]);
}
printf("\n");*/
printf("throttle: %f\n", throttle);
msg.elevator = elevator;
msg.throttle = throttle;
msg.aileron = aileron;
msg.rudder = rudder;
msg.kp_height = kp_height;
msg.kd_height = kd_height;
msg.kp_yaw = kp_yaw;
msg.kd_yaw = kd_yaw;
msg.kp_pitch = kp_pitch;
msg.kd_pitch = kd_pitch;
msg.kp_roll = kp_roll;
msg.kd_roll = kd_roll;
lcmt_hotrod_u_publish (lcm, "hotrod_u_actual", &msg);
}
int lcm_publish_init()
{
lcm = lcm_create ("udpm://192.168.127.12:7667?ttl=0");
if (!lcm)
return 1;
return 0;
}
| 945 |
2,151 | <reponame>zipated/src
package org.mockito.release.notes.util;
import java.io.*;
import java.util.Scanner;
/**
* IO utils. A bit of reinventing the wheel but we don't want extra dependencies at this stage and we want to be java.
*/
public class IOUtil {
/**
* Reads string from the file
*/
public static String readFully(File input) {
try {
return readNow(new FileInputStream(input));
} catch (Exception e) {
throw new RuntimeException("Problems reading file: " + input, e);
}
}
/**
* Reads string from the stream and closes it
*/
public static String readFully(InputStream stream) {
try {
return readNow(stream);
} catch (Exception e) {
throw new RuntimeException("Problems reading stream", e);
}
}
/**
* Closes the target. Does nothing when target is null. Is not silent, throws exception on IOException.
*
* @param closeable the target, may be null
*/
public static void close(Closeable closeable) {
if (closeable != null) {
try {
closeable.close();
} catch (IOException e) {
throw new RuntimeException("Problems closing stream", e);
}
}
}
private static String readNow(InputStream is) {
Scanner s = new Scanner(is).useDelimiter("\\A");
try {
return s.hasNext() ? s.next() : "";
} finally {
s.close();
}
}
public static void writeFile(File target, String content) {
PrintWriter p = null;
try {
p = new PrintWriter(new FileWriter(target));
p.write(content);
} catch (Exception e) {
throw new RuntimeException("Problems writing text to file: " + target);
} finally {
close(p);
}
}
}
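// Illustrative usage (not part of the original sources; the file name is an
// assumption): read a text file, append to it and write it back.
//
//   String notes = IOUtil.readFully(new File("notes.txt"));
//   IOUtil.writeFile(new File("notes.txt"), notes + System.lineSeparator() + "done");
//
// Both helpers wrap any IOException in a RuntimeException, so callers do not
// need a try/catch block of their own.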
| 823 |
14,668 | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/modules/peerconnection/testing/internals_rtc_peer_connection.h"
#include "third_party/blink/renderer/bindings/core/v8/script_promise_resolver.h"
namespace blink {
int InternalsRTCPeerConnection::peerConnectionCount(Internals& internals) {
return RTCPeerConnection::PeerConnectionCount();
}
int InternalsRTCPeerConnection::peerConnectionCountLimit(Internals& internals) {
return RTCPeerConnection::PeerConnectionCountLimit();
}
ScriptPromise
InternalsRTCPeerConnection::waitForPeerConnectionDispatchEventsTaskCreated(
ScriptState* script_state,
Internals& internals,
RTCPeerConnection* connection) {
auto* resolver = MakeGarbageCollected<ScriptPromiseResolver>(script_state);
ScriptPromise promise = resolver->Promise();
CHECK(!connection->dispatch_events_task_created_callback_for_testing_);
connection->dispatch_events_task_created_callback_for_testing_ =
WTF::Bind([](ScriptPromiseResolver* resolver) { resolver->Resolve(); },
WrapPersistent(resolver));
return promise;
}
} // namespace blink
| 396 |
12,278 | <gh_stars>1000+
/*=============================================================================
Copyright (c) 2017 <NAME>
rotate.cpp
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#include <boost/hof/rotate.hpp>
#include <boost/hof/placeholders.hpp>
#include <boost/hof/compose.hpp>
#include <boost/hof/repeat.hpp>
#include "test.hpp"
struct head
{
template<class T, class... Ts>
constexpr T operator()(T x, Ts&&...) const
BOOST_HOF_RETURNS_DEDUCE_NOEXCEPT(x)
{
return x;
}
};
#if BOOST_HOF_HAS_NOEXCEPT_DEDUCTION
BOOST_HOF_TEST_CASE()
{
static_assert(noexcept(boost::hof::rotate(head{})(1, 2, 3, 4)), "noexcept rotate");
static_assert(noexcept(boost::hof::repeat(std::integral_constant<int, 5>{})(boost::hof::rotate)(head{})(1, 2, 3, 4, 5, 6, 7, 8, 9)), "noexcept rotate");
}
#endif
BOOST_HOF_TEST_CASE()
{
BOOST_HOF_TEST_CHECK(2 == boost::hof::rotate(head{})(1, 2, 3, 4));
BOOST_HOF_STATIC_TEST_CHECK(2 == boost::hof::rotate(head{})(1, 2, 3, 4));
}
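// rotate moves the first argument to the end, so rotate(head{})(1, 2, 3, 4)
// evaluates head(2, 3, 4, 1) and yields 2; composing or repeating rotate shifts
// by one extra position each time, which is what the remaining cases check.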
BOOST_HOF_TEST_CASE()
{
BOOST_HOF_TEST_CHECK(3 == boost::hof::compose(boost::hof::rotate, boost::hof::rotate)(head{})(1, 2, 3, 4));
BOOST_HOF_STATIC_TEST_CHECK(3 == boost::hof::compose(boost::hof::rotate, boost::hof::rotate)(head{})(1, 2, 3, 4));
}
BOOST_HOF_TEST_CASE()
{
BOOST_HOF_TEST_CHECK(6 == boost::hof::repeat(std::integral_constant<int, 5>{})(boost::hof::rotate)(head{})(1, 2, 3, 4, 5, 6, 7, 8, 9));
BOOST_HOF_STATIC_TEST_CHECK(6 == boost::hof::repeat(std::integral_constant<int, 5>{})(boost::hof::rotate)(head{})(1, 2, 3, 4, 5, 6, 7, 8, 9));
}
BOOST_HOF_TEST_CASE()
{
BOOST_HOF_TEST_CHECK(3 == boost::hof::rotate(boost::hof::_ - boost::hof::_)(2, 5));
BOOST_HOF_STATIC_TEST_CHECK(3 == boost::hof::rotate(boost::hof::_ - boost::hof::_)(2, 5));
}
#if defined(__GNUC__) && !defined (__clang__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7
#define FINAL
#else
#define FINAL final
#endif
struct f FINAL {
int operator()(int i, void *) const {
return i;
}
};
BOOST_HOF_TEST_CASE()
{
BOOST_HOF_TEST_CHECK(boost::hof::rotate(f())(nullptr, 2) == 2);
}
| 1,010 |
1,755 | <reponame>txwhhny/vtk<gh_stars>1000+
/*=========================================================================
Program: Visualization Toolkit
Module: vtkTreeWriter.cxx
Copyright (c) <NAME>, <NAME>, <NAME>
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#include "vtkTreeWriter.h"
#include "vtkByteSwap.h"
#include "vtkCellArray.h"
#include "vtkInformation.h"
#include "vtkObjectFactory.h"
#include "vtkTree.h"
#if !defined(_WIN32) || defined(__CYGWIN__)
#include <unistd.h> /* unlink */
#else
#include <io.h> /* unlink */
#endif
vtkStandardNewMacro(vtkTreeWriter);
void vtkTreeWriter::WriteEdges(ostream& Stream, vtkTree* Tree)
{
for (vtkIdType e = 0; e < Tree->GetNumberOfEdges(); ++e)
{
vtkIdType parent = Tree->GetSourceVertex(e);
vtkIdType child = Tree->GetTargetVertex(e);
Stream << child << " " << parent << "\n";
}
}
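// Illustrative output: for a tree whose root 0 has children 1 and 2, the loop
// above writes "1 0" and "2 0" -- one "<child> <parent>" pair per line.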
void vtkTreeWriter::WriteData()
{
ostream* fp;
vtkTree* const input = this->GetInput();
vtkDebugMacro(<< "Writing vtk tree data...");
if (!(fp = this->OpenVTKFile()) || !this->WriteHeader(fp))
{
if (fp)
{
if (this->FileName)
{
vtkErrorMacro("Ran out of disk space; deleting file: " << this->FileName);
this->CloseVTKFile(fp);
unlink(this->FileName);
}
else
{
this->CloseVTKFile(fp);
vtkErrorMacro("Could not read memory header. ");
}
}
return;
}
*fp << "DATASET TREE\n";
bool error_occurred = false;
if (!this->WriteFieldData(fp, input->GetFieldData()))
{
error_occurred = true;
}
if (!error_occurred && !this->WritePoints(fp, input->GetPoints()))
{
error_occurred = true;
}
if (!error_occurred)
{
const vtkIdType edge_count = input->GetNumberOfEdges();
*fp << "EDGES " << edge_count << "\n";
this->WriteEdges(*fp, input);
}
if (!error_occurred && !this->WriteEdgeData(fp, input))
{
error_occurred = true;
}
if (!error_occurred && !this->WriteVertexData(fp, input))
{
error_occurred = true;
}
if (error_occurred)
{
if (this->FileName)
{
vtkErrorMacro("Ran out of disk space; deleting file: " << this->FileName);
this->CloseVTKFile(fp);
unlink(this->FileName);
}
else
{
vtkErrorMacro("Error writing data set to memory");
this->CloseVTKFile(fp);
}
return;
}
this->CloseVTKFile(fp);
}
int vtkTreeWriter::FillInputPortInformation(int, vtkInformation* info)
{
info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkTree");
return 1;
}
vtkTree* vtkTreeWriter::GetInput()
{
return vtkTree::SafeDownCast(this->Superclass::GetInput());
}
vtkTree* vtkTreeWriter::GetInput(int port)
{
return vtkTree::SafeDownCast(this->Superclass::GetInput(port));
}
void vtkTreeWriter::PrintSelf(ostream& os, vtkIndent indent)
{
this->Superclass::PrintSelf(os, indent);
}
| 1,237 |
650 | <gh_stars>100-1000
/*
* Copyright (c) 2021, <NAME> <<EMAIL>>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/TypeCasts.h>
#include <LibJS/Runtime/Array.h>
#include <LibJS/Runtime/GlobalObject.h>
#include <LibJS/Runtime/Temporal/AbstractOperations.h>
#include <LibJS/Runtime/Temporal/Calendar.h>
#include <LibJS/Runtime/Temporal/Instant.h>
#include <LibJS/Runtime/Temporal/PlainDateTime.h>
#include <LibJS/Runtime/Temporal/TimeZone.h>
#include <LibJS/Runtime/Temporal/TimeZonePrototype.h>
namespace JS::Temporal {
// 11.4 Properties of the Temporal.TimeZone Prototype Object, https://tc39.es/proposal-temporal/#sec-properties-of-the-temporal-timezone-prototype-object
TimeZonePrototype::TimeZonePrototype(GlobalObject& global_object)
: PrototypeObject(*global_object.object_prototype())
{
}
void TimeZonePrototype::initialize(GlobalObject& global_object)
{
Object::initialize(global_object);
auto& vm = this->vm();
u8 attr = Attribute::Writable | Attribute::Configurable;
define_native_accessor(vm.names.id, id_getter, {}, Attribute::Configurable);
define_native_function(vm.names.getOffsetNanosecondsFor, get_offset_nanoseconds_for, 1, attr);
define_native_function(vm.names.getOffsetStringFor, get_offset_string_for, 1, attr);
define_native_function(vm.names.getPlainDateTimeFor, get_plain_date_time_for, 1, attr);
define_native_function(vm.names.getInstantFor, get_instant_for, 1, attr);
define_native_function(vm.names.getPossibleInstantsFor, get_possible_instants_for, 1, attr);
define_native_function(vm.names.getNextTransition, get_next_transition, 1, attr);
define_native_function(vm.names.getPreviousTransition, get_previous_transition, 1, attr);
define_native_function(vm.names.toString, to_string, 0, attr);
define_native_function(vm.names.toJSON, to_json, 0, attr);
// 11.4.2 Temporal.TimeZone.prototype[ @@toStringTag ], https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype-@@tostringtag
define_direct_property(*vm.well_known_symbol_to_string_tag(), js_string(vm, "Temporal.TimeZone"), Attribute::Configurable);
}
// 11.4.3 get Temporal.TimeZone.prototype.id, https://tc39.es/proposal-temporal/#sec-get-temporal.timezone.prototype.id
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::id_getter)
{
// 1. Let timeZone be the this value.
auto time_zone = vm.this_value(global_object);
// 2. Return ? ToString(timeZone).
return js_string(vm, TRY(time_zone.to_string(global_object)));
}
// 11.4.4 Temporal.TimeZone.prototype.getOffsetNanosecondsFor ( instant ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.getoffsetnanosecondsfor
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::get_offset_nanoseconds_for)
{
// 1. Let timeZone be the this value.
// 2. Perform ? RequireInternalSlot(timeZone, [[InitializedTemporalTimeZone]]).
auto* time_zone = TRY(typed_this_object(global_object));
// 3. Set instant to ? ToTemporalInstant(instant).
auto* instant = TRY(to_temporal_instant(global_object, vm.argument(0)));
// 4. If timeZone.[[OffsetNanoseconds]] is not undefined, return 𝔽(timeZone.[[OffsetNanoseconds]]).
if (time_zone->offset_nanoseconds().has_value())
return Value(*time_zone->offset_nanoseconds());
// 5. Return ! GetIANATimeZoneOffsetNanoseconds(instant.[[Nanoseconds]], timeZone.[[Identifier]]).
return Value((double)get_iana_time_zone_offset_nanoseconds(instant->nanoseconds(), time_zone->identifier()));
}
// 11.4.5 Temporal.TimeZone.prototype.getOffsetStringFor ( instant ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.getoffsetstringfor
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::get_offset_string_for)
{
// 1. Let timeZone be the this value.
// 2. Perform ? RequireInternalSlot(timeZone, [[InitializedTemporalTimeZone]]).
auto* time_zone = TRY(typed_this_object(global_object));
// 3. Set instant to ? ToTemporalInstant(instant).
auto* instant = TRY(to_temporal_instant(global_object, vm.argument(0)));
// 4. Return ? BuiltinTimeZoneGetOffsetStringFor(timeZone, instant).
auto offset_string = TRY(builtin_time_zone_get_offset_string_for(global_object, time_zone, *instant));
return js_string(vm, move(offset_string));
}
// 11.4.6 Temporal.TimeZone.prototype.getPlainDateTimeFor ( instant [ , calendarLike ] ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.getplaindatetimefor
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::get_plain_date_time_for)
{
// 1. Let timeZone be the this value.
auto time_zone = vm.this_value(global_object);
// 2. Set instant to ? ToTemporalInstant(instant).
auto* instant = TRY(to_temporal_instant(global_object, vm.argument(0)));
// 3. Let calendar be ? ToTemporalCalendarWithISODefault(calendarLike).
auto* calendar = TRY(to_temporal_calendar_with_iso_default(global_object, vm.argument(1)));
// 4. Return ? BuiltinTimeZoneGetPlainDateTimeFor(timeZone, instant, calendar).
return TRY(builtin_time_zone_get_plain_date_time_for(global_object, time_zone, *instant, *calendar));
}
// 11.4.7 Temporal.TimeZone.prototype.getInstantFor ( dateTime [ , options ] ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.getinstantfor
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::get_instant_for)
{
// 1. Let timeZone be the this value.
// 2. Perform ? RequireInternalSlot(timeZone, [[InitializedTemporalTimeZone]]).
auto* time_zone = TRY(typed_this_object(global_object));
// 3. Set dateTime to ? ToTemporalDateTime(dateTime).
auto* date_time = TRY(to_temporal_date_time(global_object, vm.argument(0)));
// 4. Set options to ? GetOptionsObject(options).
auto* options = TRY(get_options_object(global_object, vm.argument(1)));
// 5. Let disambiguation be ? ToTemporalDisambiguation(options).
auto disambiguation = TRY(to_temporal_disambiguation(global_object, *options));
// 6. Return ? BuiltinTimeZoneGetInstantFor(timeZone, dateTime, disambiguation).
return TRY(builtin_time_zone_get_instant_for(global_object, time_zone, *date_time, disambiguation));
}
// 11.4.8 Temporal.TimeZone.prototype.getPossibleInstantsFor ( dateTime ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.getpossibleinstantsfor
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::get_possible_instants_for)
{
// 1. Let timeZone be the this value.
// 2. Perform ? RequireInternalSlot(timeZone, [[InitializedTemporalTimezone]]).
auto* time_zone = TRY(typed_this_object(global_object));
// 3. Set dateTime to ? ToTemporalDateTime(dateTime).
auto* date_time = TRY(to_temporal_date_time(global_object, vm.argument(0)));
// 4. If timeZone.[[OffsetNanoseconds]] is not undefined, then
if (time_zone->offset_nanoseconds().has_value()) {
// a. Let epochNanoseconds be ! GetEpochFromISOParts(dateTime.[[ISOYear]], dateTime.[[ISOMonth]], dateTime.[[ISODay]], dateTime.[[ISOHour]], dateTime.[[ISOMinute]], dateTime.[[ISOSecond]], dateTime.[[ISOMillisecond]], dateTime.[[ISOMicrosecond]], dateTime.[[ISONanosecond]]).
auto* epoch_nanoseconds = get_epoch_from_iso_parts(global_object, date_time->iso_year(), date_time->iso_month(), date_time->iso_day(), date_time->iso_hour(), date_time->iso_minute(), date_time->iso_second(), date_time->iso_millisecond(), date_time->iso_microsecond(), date_time->iso_nanosecond());
// b. Let instant be ! CreateTemporalInstant(ℤ(epochNanoseconds − timeZone.[[OffsetNanoseconds]])).
auto* instant = MUST(create_temporal_instant(global_object, *js_bigint(vm, epoch_nanoseconds->big_integer().minus(Crypto::SignedBigInteger::create_from(*time_zone->offset_nanoseconds())))));
// c. Return ! CreateArrayFromList(« instant »).
return Array::create_from(global_object, { instant });
}
// 5. Let possibleEpochNanoseconds be ? GetIANATimeZoneEpochValue(timeZone.[[Identifier]], dateTime.[[ISOYear]], dateTime.[[ISOMonth]], dateTime.[[ISODay]], dateTime.[[ISOHour]], dateTime.[[ISOMinute]], dateTime.[[ISOSecond]], dateTime.[[ISOMillisecond]], dateTime.[[ISOMicrosecond]], dateTime.[[ISONanosecond]]).
auto possible_epoch_nanoseconds = get_iana_time_zone_epoch_value(global_object, time_zone->identifier(), date_time->iso_year(), date_time->iso_month(), date_time->iso_day(), date_time->iso_hour(), date_time->iso_minute(), date_time->iso_second(), date_time->iso_millisecond(), date_time->iso_microsecond(), date_time->iso_nanosecond());
// 6. Let possibleInstants be a new empty List.
auto possible_instants = MarkedValueList { vm.heap() };
// 7. For each value epochNanoseconds in possibleEpochNanoseconds, do
for (auto& epoch_nanoseconds : possible_epoch_nanoseconds) {
// a. Let instant be ! CreateTemporalInstant(epochNanoseconds).
auto* instant = MUST(create_temporal_instant(global_object, epoch_nanoseconds.as_bigint()));
// b. Append instant to possibleInstants.
possible_instants.append(instant);
}
// 8. Return ! CreateArrayFromList(possibleInstants).
return Array::create_from(global_object, possible_instants);
}
// 11.4.9 Temporal.TimeZone.prototype.getNextTransition ( startingPoint ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.getnexttransition
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::get_next_transition)
{
// 1. Let timeZone be the this value.
// 2. Perform ? RequireInternalSlot(timeZone, [[InitializedTemporalTimeZone]]).
auto* time_zone = TRY(typed_this_object(global_object));
// 3. Set startingPoint to ? ToTemporalInstant(startingPoint).
auto* starting_point = TRY(to_temporal_instant(global_object, vm.argument(0)));
// 4. If timeZone.[[OffsetNanoseconds]] is not undefined, return null.
if (time_zone->offset_nanoseconds().has_value())
return js_null();
// 5. Let transition be ? GetIANATimeZoneNextTransition(startingPoint.[[Nanoseconds]], timeZone.[[Identifier]]).
auto* transition = get_iana_time_zone_next_transition(global_object, starting_point->nanoseconds(), time_zone->identifier());
// 6. If transition is null, return null.
if (!transition)
return js_null();
// 7. Return ! CreateTemporalInstant(transition).
return MUST(create_temporal_instant(global_object, *transition));
}
// 11.4.10 Temporal.TimeZone.prototype.getPreviousTransition ( startingPoint ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.getprevioustransition
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::get_previous_transition)
{
// 1. Let timeZone be the this value.
// 2. Perform ? RequireInternalSlot(timeZone, [[InitializedTemporalTimeZone]]).
auto* time_zone = TRY(typed_this_object(global_object));
// 3. Set startingPoint to ? ToTemporalInstant(startingPoint).
auto* starting_point = TRY(to_temporal_instant(global_object, vm.argument(0)));
// 4. If timeZone.[[OffsetNanoseconds]] is not undefined, return null.
if (time_zone->offset_nanoseconds().has_value())
return js_null();
// 5. Let transition be ? GetIANATimeZonePreviousTransition(startingPoint.[[Nanoseconds]], timeZone.[[Identifier]]).
auto* transition = get_iana_time_zone_previous_transition(global_object, starting_point->nanoseconds(), time_zone->identifier());
// 6. If transition is null, return null.
if (!transition)
return js_null();
// 7. Return ! CreateTemporalInstant(transition).
return MUST(create_temporal_instant(global_object, *transition));
}
// 11.4.11 Temporal.TimeZone.prototype.toString ( ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.tostring
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::to_string)
{
// 1. Let timeZone be the this value.
// 2. Perform ? RequireInternalSlot(timeZone, [[InitializedTemporalTimeZone]]).
auto* time_zone = TRY(typed_this_object(global_object));
// 3. Return timeZone.[[Identifier]].
return js_string(vm, time_zone->identifier());
}
// 11.4.12 Temporal.TimeZone.prototype.toJSON ( ), https://tc39.es/proposal-temporal/#sec-temporal.timezone.prototype.tojson
JS_DEFINE_NATIVE_FUNCTION(TimeZonePrototype::to_json)
{
// 1. Let timeZone be the this value.
auto time_zone = vm.this_value(global_object);
// 2. Return ? ToString(timeZone).
return js_string(vm, TRY(time_zone.to_string(global_object)));
}
}
| 4,494 |
977 | <filename>src/main/java/io/leangen/graphql/execution/complexity/ComplexityFunction.java
package io.leangen.graphql.execution.complexity;
@FunctionalInterface
public interface ComplexityFunction {
int getComplexity(ResolvedField node, int childScore);
}
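// As a @FunctionalInterface this can be supplied as a lambda; a minimal
// depth-weighted scorer might look like (illustrative only, not library code):
//
//   ComplexityFunction complexity = (node, childScore) -> 1 + childScore;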
| 87 |
1,104 | {
"html": "Champions Now.html",
"css": "Champions Now.css",
"authors": "<NAME>",
"roll20userid": "118980",
"preview": "preview.png",
"instructions": "Create a new character and go! STUN, KO, and END totals are automatically computed from REC.",
"legacy": true
} | 97 |
338 | <filename>resources/devices/mobile-phone/beidou.json
{
"Beidou LA-M1": {
"type": "mobile-phone",
"properties": {
"Device_Name": "<NAME> LA-M1",
"Device_Code_Name": "LA-M1",
"Device_Maker": "Beidou",
"Device_Pointing_Method": "unknown",
"Device_Brand_Name": "Beidou"
},
"standard": false
}
}
| 158 |
323 | #ifndef _ROS_SERVICE_SolvePositionIK_h
#define _ROS_SERVICE_SolvePositionIK_h
#include <stdint.h>
#include <string.h>
#include <stdlib.h>
#include "ros/msg.h"
#include "geometry_msgs/PoseStamped.h"
#include "sensor_msgs/JointState.h"
namespace baxter_core_msgs
{
static const char SOLVEPOSITIONIK[] = "baxter_core_msgs/SolvePositionIK";
class SolvePositionIKRequest : public ros::Msg
{
public:
uint32_t pose_stamp_length;
typedef geometry_msgs::PoseStamped _pose_stamp_type;
_pose_stamp_type st_pose_stamp;
_pose_stamp_type * pose_stamp;
uint32_t seed_angles_length;
typedef sensor_msgs::JointState _seed_angles_type;
_seed_angles_type st_seed_angles;
_seed_angles_type * seed_angles;
typedef uint8_t _seed_mode_type;
_seed_mode_type seed_mode;
enum { SEED_AUTO = 0 };
enum { SEED_USER = 1 };
enum { SEED_CURRENT = 2 };
enum { SEED_NS_MAP = 3 };
SolvePositionIKRequest():
pose_stamp_length(0), pose_stamp(NULL),
seed_angles_length(0), seed_angles(NULL),
seed_mode(0)
{
}
virtual int serialize(unsigned char *outbuffer) const
{
int offset = 0;
*(outbuffer + offset + 0) = (this->pose_stamp_length >> (8 * 0)) & 0xFF;
*(outbuffer + offset + 1) = (this->pose_stamp_length >> (8 * 1)) & 0xFF;
*(outbuffer + offset + 2) = (this->pose_stamp_length >> (8 * 2)) & 0xFF;
*(outbuffer + offset + 3) = (this->pose_stamp_length >> (8 * 3)) & 0xFF;
offset += sizeof(this->pose_stamp_length);
for( uint32_t i = 0; i < pose_stamp_length; i++){
offset += this->pose_stamp[i].serialize(outbuffer + offset);
}
*(outbuffer + offset + 0) = (this->seed_angles_length >> (8 * 0)) & 0xFF;
*(outbuffer + offset + 1) = (this->seed_angles_length >> (8 * 1)) & 0xFF;
*(outbuffer + offset + 2) = (this->seed_angles_length >> (8 * 2)) & 0xFF;
*(outbuffer + offset + 3) = (this->seed_angles_length >> (8 * 3)) & 0xFF;
offset += sizeof(this->seed_angles_length);
for( uint32_t i = 0; i < seed_angles_length; i++){
offset += this->seed_angles[i].serialize(outbuffer + offset);
}
*(outbuffer + offset + 0) = (this->seed_mode >> (8 * 0)) & 0xFF;
offset += sizeof(this->seed_mode);
return offset;
}
virtual int deserialize(unsigned char *inbuffer)
{
int offset = 0;
uint32_t pose_stamp_lengthT = ((uint32_t) (*(inbuffer + offset)));
pose_stamp_lengthT |= ((uint32_t) (*(inbuffer + offset + 1))) << (8 * 1);
pose_stamp_lengthT |= ((uint32_t) (*(inbuffer + offset + 2))) << (8 * 2);
pose_stamp_lengthT |= ((uint32_t) (*(inbuffer + offset + 3))) << (8 * 3);
offset += sizeof(this->pose_stamp_length);
if(pose_stamp_lengthT > pose_stamp_length)
this->pose_stamp = (geometry_msgs::PoseStamped*)realloc(this->pose_stamp, pose_stamp_lengthT * sizeof(geometry_msgs::PoseStamped));
pose_stamp_length = pose_stamp_lengthT;
for( uint32_t i = 0; i < pose_stamp_length; i++){
offset += this->st_pose_stamp.deserialize(inbuffer + offset);
memcpy( &(this->pose_stamp[i]), &(this->st_pose_stamp), sizeof(geometry_msgs::PoseStamped));
}
uint32_t seed_angles_lengthT = ((uint32_t) (*(inbuffer + offset)));
seed_angles_lengthT |= ((uint32_t) (*(inbuffer + offset + 1))) << (8 * 1);
seed_angles_lengthT |= ((uint32_t) (*(inbuffer + offset + 2))) << (8 * 2);
seed_angles_lengthT |= ((uint32_t) (*(inbuffer + offset + 3))) << (8 * 3);
offset += sizeof(this->seed_angles_length);
if(seed_angles_lengthT > seed_angles_length)
this->seed_angles = (sensor_msgs::JointState*)realloc(this->seed_angles, seed_angles_lengthT * sizeof(sensor_msgs::JointState));
seed_angles_length = seed_angles_lengthT;
for( uint32_t i = 0; i < seed_angles_length; i++){
offset += this->st_seed_angles.deserialize(inbuffer + offset);
memcpy( &(this->seed_angles[i]), &(this->st_seed_angles), sizeof(sensor_msgs::JointState));
}
this->seed_mode = ((uint8_t) (*(inbuffer + offset)));
offset += sizeof(this->seed_mode);
return offset;
}
const char * getType(){ return SOLVEPOSITIONIK; };
const char * getMD5(){ return "2587e42983d0081d0a2288230991073b"; };
};
class SolvePositionIKResponse : public ros::Msg
{
public:
uint32_t joints_length;
typedef sensor_msgs::JointState _joints_type;
_joints_type st_joints;
_joints_type * joints;
uint32_t isValid_length;
typedef bool _isValid_type;
_isValid_type st_isValid;
_isValid_type * isValid;
uint32_t result_type_length;
typedef uint8_t _result_type_type;
_result_type_type st_result_type;
_result_type_type * result_type;
enum { RESULT_INVALID = 0 };
SolvePositionIKResponse():
joints_length(0), joints(NULL),
isValid_length(0), isValid(NULL),
result_type_length(0), result_type(NULL)
{
}
virtual int serialize(unsigned char *outbuffer) const
{
int offset = 0;
*(outbuffer + offset + 0) = (this->joints_length >> (8 * 0)) & 0xFF;
*(outbuffer + offset + 1) = (this->joints_length >> (8 * 1)) & 0xFF;
*(outbuffer + offset + 2) = (this->joints_length >> (8 * 2)) & 0xFF;
*(outbuffer + offset + 3) = (this->joints_length >> (8 * 3)) & 0xFF;
offset += sizeof(this->joints_length);
for( uint32_t i = 0; i < joints_length; i++){
offset += this->joints[i].serialize(outbuffer + offset);
}
*(outbuffer + offset + 0) = (this->isValid_length >> (8 * 0)) & 0xFF;
*(outbuffer + offset + 1) = (this->isValid_length >> (8 * 1)) & 0xFF;
*(outbuffer + offset + 2) = (this->isValid_length >> (8 * 2)) & 0xFF;
*(outbuffer + offset + 3) = (this->isValid_length >> (8 * 3)) & 0xFF;
offset += sizeof(this->isValid_length);
for( uint32_t i = 0; i < isValid_length; i++){
union {
bool real;
uint8_t base;
} u_isValidi;
u_isValidi.real = this->isValid[i];
*(outbuffer + offset + 0) = (u_isValidi.base >> (8 * 0)) & 0xFF;
offset += sizeof(this->isValid[i]);
}
*(outbuffer + offset + 0) = (this->result_type_length >> (8 * 0)) & 0xFF;
*(outbuffer + offset + 1) = (this->result_type_length >> (8 * 1)) & 0xFF;
*(outbuffer + offset + 2) = (this->result_type_length >> (8 * 2)) & 0xFF;
*(outbuffer + offset + 3) = (this->result_type_length >> (8 * 3)) & 0xFF;
offset += sizeof(this->result_type_length);
for( uint32_t i = 0; i < result_type_length; i++){
*(outbuffer + offset + 0) = (this->result_type[i] >> (8 * 0)) & 0xFF;
offset += sizeof(this->result_type[i]);
}
return offset;
}
virtual int deserialize(unsigned char *inbuffer)
{
int offset = 0;
uint32_t joints_lengthT = ((uint32_t) (*(inbuffer + offset)));
joints_lengthT |= ((uint32_t) (*(inbuffer + offset + 1))) << (8 * 1);
joints_lengthT |= ((uint32_t) (*(inbuffer + offset + 2))) << (8 * 2);
joints_lengthT |= ((uint32_t) (*(inbuffer + offset + 3))) << (8 * 3);
offset += sizeof(this->joints_length);
if(joints_lengthT > joints_length)
this->joints = (sensor_msgs::JointState*)realloc(this->joints, joints_lengthT * sizeof(sensor_msgs::JointState));
joints_length = joints_lengthT;
for( uint32_t i = 0; i < joints_length; i++){
offset += this->st_joints.deserialize(inbuffer + offset);
memcpy( &(this->joints[i]), &(this->st_joints), sizeof(sensor_msgs::JointState));
}
uint32_t isValid_lengthT = ((uint32_t) (*(inbuffer + offset)));
isValid_lengthT |= ((uint32_t) (*(inbuffer + offset + 1))) << (8 * 1);
isValid_lengthT |= ((uint32_t) (*(inbuffer + offset + 2))) << (8 * 2);
isValid_lengthT |= ((uint32_t) (*(inbuffer + offset + 3))) << (8 * 3);
offset += sizeof(this->isValid_length);
if(isValid_lengthT > isValid_length)
this->isValid = (bool*)realloc(this->isValid, isValid_lengthT * sizeof(bool));
isValid_length = isValid_lengthT;
for( uint32_t i = 0; i < isValid_length; i++){
union {
bool real;
uint8_t base;
} u_st_isValid;
u_st_isValid.base = 0;
u_st_isValid.base |= ((uint8_t) (*(inbuffer + offset + 0))) << (8 * 0);
this->st_isValid = u_st_isValid.real;
offset += sizeof(this->st_isValid);
memcpy( &(this->isValid[i]), &(this->st_isValid), sizeof(bool));
}
uint32_t result_type_lengthT = ((uint32_t) (*(inbuffer + offset)));
result_type_lengthT |= ((uint32_t) (*(inbuffer + offset + 1))) << (8 * 1);
result_type_lengthT |= ((uint32_t) (*(inbuffer + offset + 2))) << (8 * 2);
result_type_lengthT |= ((uint32_t) (*(inbuffer + offset + 3))) << (8 * 3);
offset += sizeof(this->result_type_length);
if(result_type_lengthT > result_type_length)
this->result_type = (uint8_t*)realloc(this->result_type, result_type_lengthT * sizeof(uint8_t));
result_type_length = result_type_lengthT;
for( uint32_t i = 0; i < result_type_length; i++){
this->st_result_type = ((uint8_t) (*(inbuffer + offset)));
offset += sizeof(this->st_result_type);
memcpy( &(this->result_type[i]), &(this->st_result_type), sizeof(uint8_t));
}
return offset;
}
const char * getType(){ return SOLVEPOSITIONIK; };
const char * getMD5(){ return "d9b0c2b3932e08421f5094cf62743b9f"; };
};
class SolvePositionIK {
public:
typedef SolvePositionIKRequest Request;
typedef SolvePositionIKResponse Response;
};
}
#endif
| 4,305 |
554 | import argparse
import json
import os
from datetime import datetime
from pathlib import Path
from matplotlib import pyplot as plt
from zoo.evaluation.metrics.utils import map_agent_to_json_file
def evaluation_data_visualize(scenario_path, result_path, agent_list, agent_groups):
# todo: get json file in scenario_path
scenario_name = os.path.basename(scenario_path)
json_data = []
all_json_file = list(Path(scenario_path).glob("**/*json"))
json_file_dict = map_agent_to_json_file(agent_list, all_json_file)
for actor_name, agent_names in agent_groups.items():
for agent_name in agent_names:
assert (
agent_name in json_file_dict
), f"{agent_name} has no entry in {json_file_dict}"
with json_file_dict[agent_name].open() as f:
json_result = json.load(f)
json_data.append(json_result)
agent_num = len(agent_names)
fig, ax = plt.subplots(nrows=agent_num, ncols=2, figsize=(14, 14))
fig.subplots_adjust(hspace=0.8)
axes = ax.flatten()
names = {}
fig.suptitle(scenario_name, fontsize=15)
for i in range(agent_num):
names["ax_path" + str(i)], names["ax_speed" + str(i)] = (
axes[2 * i + 0],
axes[2 * i + 1],
)
plt.subplots_adjust(hspace=0.3)
names["ax_path" + str(i)].tick_params(direction="in", top=True, right=True)
agent_name = agent_names[i]
names["ax_path" + str(i)].set_title(
"Path_Curve-%s" % agent_name, fontsize=10
)
names["ax_path" + str(i)].set_xlabel("x/m")
names["ax_path" + str(i)].set_ylabel("y/m")
names["ax_speed" + str(i)].set_title(
"Speed_Curve-%s" % agent_name, fontsize=10
)
names["ax_speed" + str(i)].set_xlabel("time/s")
names["ax_speed" + str(i)].set_ylabel("speed/m/s")
agent1_run_time = json_data[i]["agent"]["time_list"]
agent1_run_speed = [
j - j % 0.1 for j in json_data[i]["agent"]["speed_list"]
]
agent1_pos_x = [j[0] for j in json_data[i]["agent"]["cartesian_pos_list"]]
agent1_pos_y = [j[1] for j in json_data[i]["agent"]["cartesian_pos_list"]]
npc1_run_time, npc1_run_speed, npc1_pos_x, npc1_pos_y = [], [], [], []
if json_data[i]["npc"]:
npc_data = json_data[i]["npc"][0]
npc1_run_time = npc_data["time_list"]
npc1_run_speed = [j - j % 0.1 for j in npc_data["speed_list"]]
npc1_pos_x = [j[0] for j in npc_data["cartesian_pos_list"]]
npc1_pos_y = [j[1] for j in npc_data["cartesian_pos_list"]]
if npc1_run_time:
valmax_pos_x = max(max(agent1_pos_x), max(npc1_pos_x))
valmax_pos_y = max(max(agent1_pos_y), max(npc1_pos_y))
valmin_pos_x = min(min(agent1_pos_x), min(npc1_pos_x))
valmin_pos_y = min(min(agent1_pos_y), min(npc1_pos_y))
valgap_pos_x = valmax_pos_x - valmin_pos_x
valgap_pos_y = valmax_pos_y - valmin_pos_y
valmax_speed = max(max(agent1_run_speed), max(npc1_run_speed))
valmax_time = max(max(agent1_run_time), max(npc1_run_time))
valmin_speed = min(min(agent1_run_speed), min(npc1_run_speed))
valmin_time = min(min(agent1_run_time), min(npc1_run_time))
valgap_speed = valmax_speed - valmin_speed
valgap_time = valmax_time - valmin_time
else:
valmax_pos_x = max(agent1_pos_x)
valmax_pos_y = max(agent1_pos_y)
valmin_pos_x = min(agent1_pos_x)
valmin_pos_y = min(agent1_pos_y)
valgap_pos_x = valmax_pos_x - valmin_pos_x
valgap_pos_y = valmax_pos_y - valmin_pos_y
valmax_speed = max(agent1_run_speed)
valmax_time = max(agent1_run_time)
valmin_speed = min(agent1_run_speed)
valmin_time = min(agent1_run_time)
valgap_speed = valmax_speed - valmin_speed
valgap_time = valmax_time - valmin_time
if valgap_pos_y:
names["ax_path" + str(i)].set_ylim(
[
(valmin_pos_y - 0.2 * valgap_pos_y),
(valmax_pos_y + 0.25 * valgap_pos_y),
]
)
if valgap_speed:
names["ax_speed" + str(i)].set_ylim(
[
(valmin_speed - 0.2 * valgap_speed),
(valmax_speed + 0.25 * valgap_speed),
]
)
names["ax_path" + str(i)].set_xlim(
[
(valmin_pos_x - 0.15 * valgap_pos_x),
(valmax_pos_x + 0.15 * valgap_pos_x),
]
)
names["ax_speed" + str(i)].set_xlim(
[(valmin_time - 0.15 * valgap_time), (valmax_time + 0.15 * valgap_time)]
)
names["ax_path" + str(i)].plot(
agent1_pos_x, agent1_pos_y, ":g", label="ego"
)
names["ax_speed" + str(i)].plot(
agent1_run_time, agent1_run_speed, ":g", label="ego"
)
names["ax_path" + str(i)].legend(loc=0, ncol=2)
names["ax_speed" + str(i)].legend(loc=0, ncol=2)
vector_x1 = (
agent1_pos_x[int(len(agent1_pos_x) / 2) + 1]
- agent1_pos_x[int(len(agent1_pos_x) / 2)]
)
vector_y1 = (
agent1_pos_y[int(len(agent1_pos_y) / 2) + 1]
- agent1_pos_y[int(len(agent1_pos_y) / 2)]
)
if valgap_pos_y:
names["ax_path" + str(i)].quiver(
agent1_pos_x[int(len(agent1_pos_x) / 2)],
agent1_pos_y[int(len(agent1_pos_y) / 2)],
vector_x1 / (valgap_pos_x / valgap_pos_y),
vector_y1,
width=0.005,
headwidth=5,
color="g",
)
else:
names["ax_path" + str(i)].quiver(
agent1_pos_x[int(len(agent1_pos_x) / 2)],
agent1_pos_y[int(len(agent1_pos_y) / 2)],
vector_x1,
vector_y1,
width=0.005,
headwidth=5,
color="g",
)
names["ax_path" + str(i)].text(
agent1_pos_x[0],
agent1_pos_y[0],
"time:" + str(agent1_run_time[0]),
fontsize=6,
color="g",
style="normal",
)
names["ax_path" + str(i)].text(
agent1_pos_x[int(len(agent1_pos_x) / 2)],
agent1_pos_y[int(len(agent1_pos_y) / 2)],
"time:" + str(agent1_run_time[int(len(agent1_run_time) / 2)]),
fontsize=6,
color="g",
style="normal",
)
names["ax_path" + str(i)].text(
agent1_pos_x[-1],
agent1_pos_y[-1],
"time:" + str(agent1_run_time[-1]),
fontsize=6,
color="g",
style="normal",
)
if npc1_run_time:
names["ax_path" + str(i)].plot(
npc1_pos_x, npc1_pos_y, ":r", label="npc"
)
names["ax_speed" + str(i)].plot(
npc1_run_time, npc1_run_speed, ":r", label="npc"
)
names["ax_path" + str(i)].legend(loc=0, ncol=2)
names["ax_speed" + str(i)].legend(loc=0, ncol=2)
names["ax_path" + str(i)].text(
npc1_pos_x[0],
npc1_pos_y[0],
"time:" + str(npc1_run_time[0]),
fontsize=6,
color="r",
style="normal",
)
names["ax_path" + str(i)].text(
npc1_pos_x[int(len(npc1_pos_x) / 2)],
npc1_pos_y[int(len(npc1_pos_y) / 2)],
"time:" + str(npc1_run_time[int(len(npc1_run_time) / 2)]),
fontsize=6,
color="r",
style="normal",
)
names["ax_path" + str(i)].text(
npc1_pos_x[-1],
npc1_pos_y[-1],
"time:" + str(npc1_run_time[-1]),
fontsize=6,
color="r",
style="normal",
)
vector_x2 = (
npc1_pos_x[int(len(npc1_pos_x) / 2) + 1]
- npc1_pos_x[int(len(npc1_pos_x) / 2)]
)
vector_y2 = (
npc1_pos_y[int(len(npc1_pos_y) / 2) + 1]
- npc1_pos_y[int(len(npc1_pos_y) / 2)]
)
if valgap_pos_y:
names["ax_path" + str(i)].quiver(
npc1_pos_x[int(len(npc1_pos_x) / 2)],
npc1_pos_y[int(len(npc1_pos_y) / 2)],
vector_x2 / (valgap_pos_x / valgap_pos_y),
vector_y2,
width=0.005,
headwidth=5,
color="r",
)
else:
names["ax_path" + str(i)].quiver(
npc1_pos_x[int(len(npc1_pos_x) / 2)],
npc1_pos_y[int(len(npc1_pos_y) / 2)],
vector_x2,
vector_y2,
width=0.005,
headwidth=5,
color="r",
)
time_suffix = datetime.now().strftime("%Y%m%d-%H%M")
        scenario_name_path = os.path.join(result_path, scenario_name)
if not os.path.exists(scenario_name_path):
os.mkdir(scenario_name_path)
        result_json_path = os.path.join(scenario_name_path, actor_name)
        os.mkdir(result_json_path)
result_file = os.path.join(
result_json_path,
"evaluation-curve_%s_%s.png" % (scenario_name, time_suffix),
)
plt.savefig(result_file)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
prog="visualization",
description="Start run visualization.",
)
parser.add_argument(
"log_path", help="The path to the run all evaluation origin data", type=str
)
parser.add_argument(
"result_path", help="The path to the run all evaluation results", type=str
)
parser.add_argument("agent_list", help="All agent name list", type=str)
parser.add_argument("agent_groups", help="Agent groups", type=str)
args = parser.parse_args()
evaluation_data_visualize(
args.log_path, args.result_path, eval(args.agent_list), eval(args.agent_groups)
)
| 6,972 |
1,144 | <filename>backend/de.metas.adempiere.adempiere/base/src/main/java/org/adempiere/util/trxConstraints/api/ITrxConstraints.java<gh_stars>1000+
package org.adempiere.util.trxConstraints.api;
/*
* #%L
* de.metas.adempiere.adempiere.base
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import java.util.Set;
/**
* Transaction constraints can be defined on a per-thread basis and are enforced by the system. A user can obtain her
* constraints instance by calling {@link org.compiere.util.DB#getConstraints()} and can then customize that instance
* for the particular thread's needs.
* <p>
* <b>Motivation:</b>
* <p>
* Within ADempiere, we should have a way to enforce trx-Constraints to guard against misbehaving code which might
* otherwise affect the overall runtime stability of the system.<br>
* <p>
* Examples:
* <ul>
* <li>When implementing a new process, we want to make sure that there is no unexpected stuff taking place outside of
* the "main" transaction (e.g. in custom model validators).
* <li>When working on complex issues (e.g. GUIs with a lot of callouts), we want to make sure that no open transactions
* are left behind to block further database access
* </ul>
* <p>
* Therefore we need to specify (e.g.) that there may only be a limited number of transactions opened from the current
* thread at the same time. Or a limited number of save points per transaction. Or, that all transactions opened by a
* given thread have to be finished (committed or rolled back) within a given timeout.
*
* @see org.compiere.util.DB#getConstraints()
*/
public interface ITrxConstraints
{
/**
 * Activates or deactivates these constraints. Deactivated constraints will be ignored.
*
*/
ITrxConstraints setActive(boolean active);
/**
*
* @see #setActive(boolean)
*/
boolean isActive();
/**
 * Sets whether the constraints will allow new transactions only if their trxName starts with certain prefixes.
*
* If this is set to <code>true</code>, the user needs to specify those prefixes using
* {@link #addAllowedTrxNamePrefix(String)}.
*
*/
ITrxConstraints setOnlyAllowedTrxNamePrefixes(boolean onlyAllowedTrxNamePrefixes);
/**
*
* @see #setOnlyAllowedTrxNamePrefixes(boolean)
*/
boolean isOnlyAllowedTrxNamePrefixes();
/**
* Adds another trxName prefix to the list of allowed prefixes
*
* Note:
* <ul>
 * <li>The given prefix doesn't have to be a real prefix, i.e. it may also be a complete trxName.
 * <li><code>null</code> is also a legal parameter. However, a <code>null</code> prefix will only match a <code>null</code> trxName.
* </ul>
*
* @see #setOnlyAllowedTrxNamePrefixes(boolean)
*/
ITrxConstraints addAllowedTrxNamePrefix(String trxNamePrefix);
/**
* Removes the given string from the list of allowed trxName prefixes.
*
*
* @param trxNamePrefix
* @return this
* @see #setOnlyAllowedTrxNamePrefixes(boolean)
*/
ITrxConstraints removeAllowedTrxNamePrefix(String trxNamePrefix);
/**
*
* @see #setOnlyAllowedTrxNamePrefixes(boolean)
*/
Set<String> getAllowedTrxNamePrefixes();
/**
* Every new transaction must commit, close or rollback within the given number of seconds. If the timeout is
* exceeded, the trx is closed by force and a diagnostic message is written to the log. The message contains both
* the stacktrace as it was when the transaction was created and the <i>current</i> stacktrace of the thread that
* created the transaction. The latter might help to find out if the thread is still working on something reasonable
* (and therefore the timeout was too short) or if the thread just forgot to close the transaction.
*
* Note a value <=0 means "no timeout"
*
* @param secs
* @param logOnly
* if <code>true</code>, then the transactions are not closed, but only the diagnostic message is logged
* and the timer is started again.
*/
ITrxConstraints setTrxTimeoutSecs(int secs, boolean logOnly);
/**
*
* @see #setTrxTimeoutSecs(int, boolean)
*/
int getTrxTimeoutSecs();
/**
*
* @see #setTrxTimeoutSecs(int, boolean)
*/
boolean isTrxTimeoutLogOnly();
/**
* Sets the maximum number of concurrent transactions that a thread may open
*
*/
public ITrxConstraints setMaxTrx(int max);
/**
 * Increases the maximum number of concurrent transactions that a thread may open by the given number.
 *
 * @param num the number by which to increase the maximum
 * @return this
*/
public ITrxConstraints incMaxTrx(int num);
/**
*
* @see #setMaxTrx(int)
*/
public int getMaxTrx();
/**
*
* @see #setMaxSavepoints(int)
*/
public int getMaxSavepoints();
/**
* Sets how many savepoints a transaction may set before it has to release one.
*
*/
public ITrxConstraints setMaxSavepoints(int maxSavePoints);
/**
*
* @see #setAllowTrxAfterThreadEnd(boolean)
*/
boolean isAllowTrxAfterThreadEnd();
/**
* Sets whether the current thread is allowed to end without having closed all transactions that it previously
* opened. If false (which is the default) and the thread ends without having closed all transactions opened, those
* transactions are closed by force and an error message is logged.
*/
ITrxConstraints setAllowTrxAfterThreadEnd(boolean allow);
/**
* Resets this instance back to its default values.
*/
void reset();
}
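/*
 * Hypothetical usage sketch, added for illustration only (not part of the original interface).
 * It assumes, per the javadoc reference above, that org.compiere.util.DB.getConstraints() hands out
 * the current thread's ITrxConstraints instance; that exact signature and return type are an
 * assumption taken from the javadoc, not something this file guarantees.
 */
final class TrxConstraintsUsageSketch
{
    private TrxConstraintsUsageSketch()
    {
    }

    static void tightenForCurrentThread()
    {
        // Fetch the constraints bound to the current thread and make them stricter
        // before the thread starts opening transactions.
        final ITrxConstraints constraints = org.compiere.util.DB.getConstraints();
        constraints
                .setActive(true) // enforce the constraints instead of ignoring them
                .setMaxTrx(2) // allow at most 2 concurrent transactions for this thread
                .setMaxSavepoints(5) // allow at most 5 savepoints per transaction
                .setTrxTimeoutSecs(30, false) // force-close transactions still open after 30 seconds
                .setAllowTrxAfterThreadEnd(false); // force-close and log transactions left open when the thread ends
    }
}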
| 1,860 |
3,073 | <gh_stars>1000+
#include "all.h"
#include "vere/vere.h"
#include "ur/ur.h"
/* _setup(): prepare for tests.
*/
static void
_setup(void)
{
u3m_init();
u3m_pave(c3y);
}
/* _ames_writ_ex(): |hi packet from fake ~zod to fake ~nec
*/
static u3_noun
_ames_writ_ex(void)
{
c3_y bod_y[63] = {
0x30, 0x90, 0x2d, 0x0, 0x0, 0x0, 0x1, 0x0, 0x9, 0xc0, 0xd0,
0x0, 0x4, 0x40, 0x30, 0xf4, 0xa, 0x3d, 0x45, 0x86, 0x66, 0x2c,
0x2, 0x38, 0xf8, 0x72, 0xa3, 0x9, 0xf6, 0x6, 0xf3, 0x0, 0xbe,
0x67, 0x61, 0x49, 0x50, 0x4, 0x3c, 0x13, 0xb2, 0x96, 0x42, 0x1b,
0x62, 0xac, 0x97, 0xff, 0x24, 0xeb, 0x69, 0x1b, 0xb2, 0x60, 0x72,
0xa, 0x53, 0xdf, 0xe8, 0x8a, 0x9c, 0x6f, 0xb3
};
u3_noun lan = u3nc(0, 1);
u3_noun cad = u3nt(c3__send, lan, u3i_bytes(sizeof(bod_y), bod_y));
u3_noun wir = u3nt(c3__newt, 0x1234, u3_nul);
u3_noun ovo = u3nc(u3nc(u3_blip, wir), cad);
u3_noun wen;
{
struct timeval tim_u;
gettimeofday(&tim_u, 0);
wen = u3_time_in_tv(&tim_u);
}
return u3nt(c3__work, 0, u3nc(wen, ovo));
}
static void
_jam_bench(void)
{
struct timeval b4, f2, d0;
c3_w mil_w, i_w, max_w = 10000;
u3_noun wit = _ames_writ_ex();
fprintf(stderr, "\r\njam microbenchmark:\r\n");
{
gettimeofday(&b4, 0);
{
u3i_slab sab_u;
for ( i_w = 0; i_w < max_w; i_w++ ) {
u3s_jam_fib(&sab_u, wit);
u3i_slab_free(&sab_u);
}
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " jam og: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
{
c3_d len_d;
c3_y* byt_y;
for ( i_w = 0; i_w < max_w; i_w++ ) {
u3s_jam_xeno(wit, &len_d, &byt_y);
c3_free(byt_y);
}
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " jam xeno: %u ms\r\n", mil_w);
}
while ( 1 ) {
ur_root_t* rot_u = ur_root_init();
c3_d len_d;
c3_y* byt_y;
ur_nref ref;
u3s_jam_xeno(wit, &len_d, &byt_y);
if ( ur_cue_good != ur_cue(rot_u, len_d, byt_y, &ref) ) {
fprintf(stderr, " jam bench: cue failed wtf\r\n");
break;
}
c3_free(byt_y);
{
gettimeofday(&b4, 0);
for ( i_w = 0; i_w < max_w; i_w++ ) {
ur_jam(rot_u, ref, &len_d, &byt_y);
c3_free(byt_y);
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " jam cons: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
{
ur_jam_t *jam_u = ur_jam_init(rot_u);
c3_d len_d;
c3_y* byt_y;
for ( i_w = 0; i_w < max_w; i_w++ ) {
ur_jam_with(jam_u, ref, &len_d, &byt_y);
c3_free(byt_y);
}
ur_jam_done(jam_u);
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " jam cons with: %u ms\r\n", mil_w);
}
ur_root_free(rot_u);
break;
}
u3z(wit);
}
static void
_cue_bench(void)
{
struct timeval b4, f2, d0;
c3_w mil_w, i_w, max_w = 20000;
u3_atom vat = u3ke_jam(_ames_writ_ex());
fprintf(stderr, "\r\ncue microbenchmark:\r\n");
{
gettimeofday(&b4, 0);
for ( i_w = 0; i_w < max_w; i_w++ ) {
u3z(u3s_cue(vat));
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue og: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
for ( i_w = 0; i_w < max_w; i_w++ ) {
u3z(u3s_cue_atom(vat));
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue atom: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
{
c3_w len_w = u3r_met(3, vat);
// XX assumes little-endian
//
c3_y* byt_y = ( c3y == u3a_is_cat(vat) )
? (c3_y*)&vat
: (c3_y*)((u3a_atom*)u3a_to_ptr(vat))->buf_w;
for ( i_w = 0; i_w < max_w; i_w++ ) {
u3z(u3s_cue_xeno(len_w, byt_y));
}
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue xeno: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
{
u3_cue_xeno* sil_u = u3s_cue_xeno_init();
c3_w len_w = u3r_met(3, vat);
// XX assumes little-endian
//
c3_y* byt_y = ( c3y == u3a_is_cat(vat) )
? (c3_y*)&vat
: (c3_y*)((u3a_atom*)u3a_to_ptr(vat))->buf_w;
for ( i_w = 0; i_w < max_w; i_w++ ) {
u3z(u3s_cue_xeno_with(sil_u, len_w, byt_y));
}
u3s_cue_xeno_done(sil_u);
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue xeno with: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
{
c3_w len_w = u3r_met(3, vat);
// XX assumes little-endian
//
c3_y* byt_y = ( c3y == u3a_is_cat(vat) )
? (c3_y*)&vat
: (c3_y*)((u3a_atom*)u3a_to_ptr(vat))->buf_w;
for ( i_w = 0; i_w < max_w; i_w++ ) {
ur_cue_test(len_w, byt_y);
}
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue test: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
{
ur_cue_test_t *t = ur_cue_test_init();
c3_w len_w = u3r_met(3, vat);
// XX assumes little-endian
//
c3_y* byt_y = ( c3y == u3a_is_cat(vat) )
? (c3_y*)&vat
: (c3_y*)((u3a_atom*)u3a_to_ptr(vat))->buf_w;
for ( i_w = 0; i_w < max_w; i_w++ ) {
ur_cue_test_with(t, len_w, byt_y);
}
ur_cue_test_done(t);
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue test with: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
{
ur_root_t* rot_u = ur_root_init();
ur_nref ref;
c3_w len_w = u3r_met(3, vat);
// XX assumes little-endian
//
c3_y* byt_y = ( c3y == u3a_is_cat(vat) )
? (c3_y*)&vat
: (c3_y*)((u3a_atom*)u3a_to_ptr(vat))->buf_w;
for ( i_w = 0; i_w < max_w; i_w++ ) {
ur_cue(rot_u, len_w, byt_y, &ref);
}
ur_root_free(rot_u);
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue cons: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
{
ur_root_t* rot_u;
ur_nref ref;
c3_w len_w = u3r_met(3, vat);
// XX assumes little-endian
//
c3_y* byt_y = ( c3y == u3a_is_cat(vat) )
? (c3_y*)&vat
: (c3_y*)((u3a_atom*)u3a_to_ptr(vat))->buf_w;
for ( i_w = 0; i_w < max_w; i_w++ ) {
rot_u = ur_root_init();
ur_cue(rot_u, len_w, byt_y, &ref);
ur_root_free(rot_u);
}
}
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue re-cons: %u ms\r\n", mil_w);
}
u3z(vat);
}
static u3_noun
_cue_loop(u3_atom a)
{
c3_w i_w, max_w = 20000;
for ( i_w = 0; i_w < max_w; i_w++ ) {
u3z(u3s_cue(a));
}
return u3_blip;
}
static u3_noun
_cue_atom_loop(u3_atom a)
{
c3_w i_w, max_w = 20000;
for ( i_w = 0; i_w < max_w; i_w++ ) {
u3z(u3s_cue_atom(a));
}
return u3_blip;
}
static void
_cue_soft_bench(void)
{
struct timeval b4, f2, d0;
u3_atom vat = u3ke_jam(_ames_writ_ex());
c3_w mil_w;
fprintf(stderr, "\r\ncue virtual microbenchmark:\r\n");
{
gettimeofday(&b4, 0);
u3z(u3m_soft(0, _cue_loop, u3k(vat)));
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue virtual og: %u ms\r\n", mil_w);
}
{
gettimeofday(&b4, 0);
u3z(u3m_soft(0, _cue_atom_loop, u3k(vat)));
gettimeofday(&f2, 0);
timersub(&f2, &b4, &d0);
mil_w = (d0.tv_sec * 1000) + (d0.tv_usec / 1000);
fprintf(stderr, " cue virtual atom: %u ms\r\n", mil_w);
}
u3z(vat);
}
/* main(): run all benchmarks
*/
int
main(int argc, char* argv[])
{
_setup();
_jam_bench();
_cue_bench();
_cue_soft_bench();
// GC
//
u3m_grab(u3_none);
return 0;
}
| 5,269 |
647 | <gh_stars>100-1000
#ifndef LOG_FORMATTER
#define LOG_FORMATTER
#include <stdio.h>
#include <stdint.h>
class LogFormatter {
public:
virtual void writeHeader(FILE* out_fp,
int version,
int endianness) = 0;
virtual void writeColumnLabel(FILE* out_fp,
const char* name,
const char* units) = 0;
virtual void writeColumnLabelSeparator(FILE* out_fp) = 0;
virtual void writeDatumSeparator( FILE* out_fp) = 0;
virtual void writeRecordSeparator(FILE* out_fp) = 0;
virtual void writeDatum(FILE* out_fp, int8_t datum) = 0;
virtual void writeDatum(FILE* out_fp, uint8_t datum) = 0;
virtual void writeDatum(FILE* out_fp, int16_t datum) = 0;
virtual void writeDatum(FILE* out_fp, uint16_t datum) = 0;
virtual void writeDatum(FILE* out_fp, int32_t datum) = 0;
virtual void writeDatum(FILE* out_fp, uint32_t datum) = 0;
virtual void writeDatum(FILE* out_fp, int64_t datum) = 0;
virtual void writeDatum(FILE* out_fp, uint64_t datum) = 0;
virtual void writeDatum(FILE* out_fp, float datum) = 0;
virtual void writeDatum(FILE* out_fp, double datum) = 0;
virtual const char* extension() = 0;
};
#endif
| 567 |
543 | <gh_stars>100-1000
package com.adobe.epubcheck.opf;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* Represents a set of linked resources (i.e. resources defined by
* <code>link</code> elements in a Package Document), with predictable iteration
* order.
*/
public final class LinkedResources
{
private final List<LinkedResource> resources;
private final Map<String, LinkedResource> resourcesById;
private final ListMultimap<String, LinkedResource> resourcesByPath;
/**
* Search the linked resource with the given ID.
*
* @param id
* the ID of the resource to search, can be <code>null</code>.
* @return An {@link Optional} containing the linked resource if found, or
* {@link Optional#absent()} if not found.
*/
public Optional<LinkedResource> getById(String id)
{
return Optional.fromNullable(resourcesById.get(id));
}
/**
 * Search the linked resources with the given path. All resources whose
* <code>href</code> URI minus fragment is equal to the given path are
* returned, in document order.
*
* @param path
* the URI (without fragment) of the resource to search, can be
* <code>null</code>.
* @return A list of linked resources referencing the resource at
* <code>path</code> or a fragment thereof ; an empty list is returned
* if no such resource is found.
*/
public List<LinkedResource> getByPath(String path)
{
return resourcesByPath.get(path);
}
/**
* Returns the list of all linked resources in this set, in document order.
*
* @return the list of all linked resources in this set.
*/
public List<LinkedResource> asList()
{
return resources;
}
/**
* Returns <code>true</code> if this set contains a linked resource
* referencing the given path (or fragment thereof).
*/
public boolean hasPath(String path)
{
return !getByPath(path).isEmpty();
}
private LinkedResources(Iterable<LinkedResource> resources)
{
ImmutableList.Builder<LinkedResource> listBuilder = ImmutableList.builder();
ImmutableListMultimap.Builder<String, LinkedResource> byPathBuilder = ImmutableListMultimap
.builder();
Map<String, LinkedResource> byIdMap = Maps.newHashMap();
for (LinkedResource resource : resources)
{
listBuilder.add(resource);
byPathBuilder.put(resource.getPath(), resource);
if (resource.getId().isPresent()) byIdMap.put(resource.getId().get(), resource);
}
this.resources = listBuilder.build();
this.resourcesByPath = byPathBuilder.build();
this.resourcesById = ImmutableMap.copyOf(byIdMap);
}
/**
* Creates a new builder. Calling this method is identical to calling the
* empty {@link Builder} constructor.
*
* @return a newly created builder.
*/
public static Builder builder()
{
return new Builder();
}
/**
* A builder for {@link LinkedResources}.
*
*/
public static final class Builder
{
private final LinkedHashSet<LinkedResource> resources = Sets.newLinkedHashSet();
/**
* Add a new linked resource to this builder.
*
* @param resource
* the resource to add.
* @return this builder.
*/
public Builder add(LinkedResource resource)
{
if (resource != null) resources.add(resource);
return this;
}
/**
* Returns a newly created set of linked resources.
*/
public LinkedResources build()
{
return new LinkedResources(resources);
}
}
}
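/*
 * Hypothetical usage sketch, added for illustration only (not part of the original class). It relies
 * solely on the builder and lookup methods declared above plus LinkedResource#getPath(), which this
 * class already reads in its constructor; the example path "EPUB/nav.xhtml" is made up.
 */
final class LinkedResourcesUsageSketch
{
  private LinkedResourcesUsageSketch()
  {
  }

  static LinkedResources collect(Iterable<LinkedResource> parsedLinks)
  {
    // Accumulate the link elements in document order; the builder silently skips null entries.
    LinkedResources.Builder builder = LinkedResources.builder();
    for (LinkedResource link : parsedLinks)
    {
      builder.add(link);
    }
    LinkedResources resources = builder.build();

    // All link elements whose href (minus fragment) points at the given path, in document order.
    List<LinkedResource> navLinks = resources.getByPath("EPUB/nav.xhtml");
    if (navLinks.isEmpty())
    {
      // ... report or handle the missing links here ...
    }
    return resources;
  }
}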
| 1,311 |
5,937 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//------------------------------------------------------------------------------
//
//
// Description:
// Contains CD3DSwapChain implementation
//
MtExtern(CD3DSwapChain);
MtExtern(D3DResource_SwapChain);
//------------------------------------------------------------------------------
//
// Class: CD3DSwapChain
//
// Description:
// Abstracts the core D3D swapchain. The main reason to wrap this
// d3d object is so that we can respond to mode changes, i.e.,
// respond to E_DEVICELOST on the Present.
//
//------------------------------------------------------------------------------
class CD3DSwapChain : public CD3DResource
{
// We present through CD3DDeviceLevel1 instead of the CD3DSwap chain so that
// we can have CD3DDeviceLevel1 internally call Begin/EndScene instead of
// exposing this on the CD3DDeviceLevel1 interface
friend HRESULT CD3DDeviceLevel1::Present(
__in_ecount(1) CD3DSwapChain const *pD3DSwapChain,
__in_ecount_opt(1) CMILSurfaceRect const *prcSource,
__in_ecount_opt(1) CMILSurfaceRect const *prcDest,
__in_ecount(1) CMILDeviceContext const *pMILDC,
__in_ecount_opt(1) RGNDATA const * pDirtyRegion,
DWORD dwD3DPresentFlags
);
protected:
inline void __cdecl operator delete(void * pv) { WPFFree(ProcessHeap, pv); }
__allocator __bcount_opt(cb + cBackBuffers*sizeof(PVOID))
void * __cdecl operator new(size_t cb, size_t cBackBuffers);
inline void __cdecl operator delete(void* pv, size_t) { WPFFree(ProcessHeap, pv); }
public:
static HRESULT Create(
__inout_ecount(1) CD3DResourceManager *pResourceManager,
__inout_ecount(1) IDirect3DSwapChain9 *pID3DSwapChain9,
UINT BackBufferCount,
__in_ecount_opt(1) CMILDeviceContext const *pPresentContext,
__deref_out_ecount(1) CD3DSwapChain **ppSwapChain
);
//
// IDirect3DSwapChain9 like helper methods
//
HRESULT GetBackBuffer(
__in_range(<, this->m_cBackBuffers) UINT iBackBuffer,
__deref_out_ecount(1) CD3DSurface **ppBackBuffer
) const;
HRESULT GetFrontBuffer(
__deref_out_ecount(1) CD3DSurface **ppFrontBuffer
);
virtual HRESULT GetDC(
__in_range(<, this->m_cBackBuffers) UINT iBackBuffer,
__in_ecount(1) const CMilRectU& rcDirty,
__deref_out HDC *phdcBackBuffer
) const;
virtual HRESULT ReleaseDC(
__in_range(<, this->m_cBackBuffers) UINT iBackBuffer,
__in HDC hdcBackBuffer
) const;
#if DBG
UINT DbgGetNumBackBuffers() const
{
return m_cBackBuffers;
}
#endif
protected:
CD3DSwapChain(
__inout_ecount(1) IDirect3DSwapChain9 *pD3DSwapChain9,
__in_range(>, 0) __out_range(==, this->m_cBackBuffers) UINT cBackBuffers,
__out_ecount_full_opt(cBackBuffers) CD3DSurface * * const prgBackBuffers
);
virtual ~CD3DSwapChain();
protected:
virtual HRESULT Init(
__inout_ecount(1) CD3DResourceManager *pResourceManager
);
private:
#if PERFMETER
virtual PERFMETERTAG GetPerfMeterTag() const
{
return Mt(D3DResource_SwapChain);
}
#endif
//
// CD3DResource methods
//
void ReleaseD3DResources();
private:
// Pointer to the actual D3D resource.
// The pointer is constant to help enforce the modification restrictions
// of CD3DResource objects.
IDirect3DSwapChain9 * const m_pD3DSwapChain;
IDirect3DSwapChain9Ex *m_pD3DSwapChainEx;
protected:
__field_range(>, 1) UINT const m_cBackBuffers;
__field_ecount(m_cBackBuffers) CD3DSurface * * const m_rgBackBuffers;
};
| 1,593 |
14,668 | <gh_stars>1000+
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/assistant/model/assistant_screen_context_model.h"
namespace ash {
AssistantStructureFuture::AssistantStructureFuture() = default;
AssistantStructureFuture::~AssistantStructureFuture() = default;
void AssistantStructureFuture::SetValue(
ax::mojom::AssistantStructurePtr structure) {
DCHECK(!HasValue());
structure_ = std::move(structure);
Notify();
}
void AssistantStructureFuture::GetValueAsync(Callback callback) {
if (HasValue()) {
RunCallback(std::move(callback));
return;
}
callbacks_.push_back(std::move(callback));
}
bool AssistantStructureFuture::HasValue() const {
return !structure_.is_null();
}
void AssistantStructureFuture::Clear() {
structure_.reset();
}
void AssistantStructureFuture::Notify() {
for (auto& callback : callbacks_)
RunCallback(std::move(callback));
callbacks_.clear();
}
void AssistantStructureFuture::RunCallback(Callback callback) {
std::move(callback).Run(*structure_);
}
AssistantScreenContextModel::AssistantScreenContextModel() = default;
AssistantScreenContextModel::~AssistantScreenContextModel() = default;
void AssistantScreenContextModel::Clear() {
assistant_structure_.Clear();
}
} // namespace ash
| 410 |
1,016 | <reponame>peter-ls/kylo
/**
*
*/
package com.thinkbiganalytics.auth.jaas.http;
/*-
* #%L
* kylo-security-auth
* %%
* Copyright (C) 2017 - 2018 ThinkBig Analytics, a Teradata Company
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.security.auth.callback.Callback;
/**
* A callback for obtaining the value of a particular header in the authenticating
* HTTP request.
*/
public class HttpHeaderCallback implements Callback, Serializable {
private static final long serialVersionUID = 1L;
private String name;
private List<String> values;
/**
* Creates a callback for requesting a particular header in the HTTP request.
* @param header the header name
*/
public HttpHeaderCallback(String header) {
this.name = header;
}
/**
* @return the header name to find in the request
*/
public String getName() {
return name;
}
/**
* @return true if the header was found and it had multiple values
*/
public boolean isMultiValued() {
return this.values != null && this.values.size() > 1;
}
/**
* Returns a list of header values found in the request if the header was multi-valued,
* otherwise a list of a single value. Returns an empty list if no matching header was found.
* @return the list of values
*/
public List<String> getValues() {
return Collections.unmodifiableList(this.values);
}
/**
* Returns an optional value of the header if a matching one was found in the request, otherwise
* an empty optional. If the header was multi-valued then the result string will contain all values
* joined by commas.
* @return the optional header value
*/
public Optional<String> getValue() {
if (this.values.size() == 0) {
return Optional.empty();
        } else if (this.values.size() > 1) {
            return Optional.of(this.values.stream().collect(Collectors.joining(",")));
        } else {
            return Optional.of(this.values.get(0));
        }
}
/**
     * @param values the values of a multi-valued header
*/
public void setValues(Collection<String> values) {
this.values = values.stream().collect(Collectors.toList());
}
/**
* Sets the value of the header. If the value is comma-separated then it
* is parsed into multiple values.
* @param value the header value(s)
*/
public void setValue(String value) {
if (value == null) {
this.values = Collections.emptyList();
} else if (value.contains(",")) {
            this.values = Arrays.stream(value.split(","))
                    .map(String::trim)
                    .filter(str -> str.length() > 0)
                    .collect(Collectors.toList());
} else {
this.values = Collections.singletonList(value);
}
}
}
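/*
 * Hypothetical usage sketch, added for illustration only (not part of the original class). It shows one
 * way a JAAS CallbackHandler backed by an HTTP request could service this callback; CallbackHandler,
 * HttpServletRequest and the handler class itself are assumptions here, only HttpHeaderCallback's own
 * API comes from the class above.
 */
class HttpHeaderCallbackHandlerSketch implements javax.security.auth.callback.CallbackHandler {

    private final javax.servlet.http.HttpServletRequest request;

    HttpHeaderCallbackHandlerSketch(javax.servlet.http.HttpServletRequest request) {
        this.request = request;
    }

    @Override
    public void handle(Callback[] callbacks) {
        for (Callback callback : callbacks) {
            if (callback instanceof HttpHeaderCallback) {
                HttpHeaderCallback headerCallback = (HttpHeaderCallback) callback;
                // Copy the named header's value out of the request; a missing header (null) results in
                // an empty value list, and a comma-separated header is split into multiple values.
                headerCallback.setValue(this.request.getHeader(headerCallback.getName()));
            }
        }
    }
}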
| 1,365 |
741 | <filename>mobicomkitui/src/main/java/com/applozic/mobicomkit/uiwidgets/conversation/activity/EditChannelDescriptionActivity.java
package com.applozic.mobicomkit.uiwidgets.conversation.activity;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.net.ConnectivityManager;
import android.os.Build;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import com.applozic.mobicomkit.broadcast.ConnectivityReceiver;
import com.applozic.mobicomkit.feed.GroupInfoUpdate;
import com.applozic.mobicomkit.uiwidgets.AlCustomizationSettings;
import com.applozic.mobicomkit.uiwidgets.R;
import com.applozic.mobicommons.file.FileUtils;
import com.applozic.mobicommons.json.GsonUtils;
import com.applozic.mobicommons.people.channel.ChannelMetadata;
import java.util.HashMap;
import java.util.Map;
public class EditChannelDescriptionActivity extends AppCompatActivity {
private static final String TAG = "EditChannelDescriptionActivity";
ActionBar mActionBar;
GroupInfoUpdate groupInfoUpdate;
private EditText editTextChannelDescription;
private AlCustomizationSettings alCustomizationSettings;
private ConnectivityReceiver connectivityReceiver;
private void loadCustomizationFile() {
String jsonString = FileUtils.loadSettingsJsonFile(getApplicationContext());
if (!TextUtils.isEmpty(jsonString)) {
alCustomizationSettings = (AlCustomizationSettings) GsonUtils.getObjectFromJson(jsonString, AlCustomizationSettings.class);
} else {
alCustomizationSettings = new AlCustomizationSettings();
}
}
private void setActionBarColorFromCustomizationFile() {
if (alCustomizationSettings == null) {
return;
}
if(!TextUtils.isEmpty(alCustomizationSettings.getThemeColorPrimary()) && !TextUtils.isEmpty(alCustomizationSettings.getThemeColorPrimaryDark())){
mActionBar.setBackgroundDrawable(new ColorDrawable(Color.parseColor(alCustomizationSettings.getThemeColorPrimary())));
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
getWindow().setStatusBarColor(Color.parseColor(alCustomizationSettings.getThemeColorPrimaryDark()));
}
}
}
private GroupInfoUpdate getExistingGroupInfoDataFromActivityIntent() {
if (getIntent().getExtras() != null) {
String groupInfoJson = getIntent().getExtras().getString(ChannelInfoActivity.GROUP_UPDTAE_INFO);
return (GroupInfoUpdate) GsonUtils.getObjectFromJson(groupInfoJson, GroupInfoUpdate.class);
}
return null;
}
private @NonNull String getExistingChannelDescriptionFrom(GroupInfoUpdate groupInfoUpdate) {
final String EMPTY_STRING = "";
if(groupInfoUpdate == null) {
return EMPTY_STRING;
}
return ChannelMetadata.getChannelDescriptionFrom(groupInfoUpdate.getMetadata());
}
private void updateGroupInfoUpdateObjectsChannelDescription(String channelDescription) {
final String EMPTY_STRING = "";
Map<String, String> metadata = groupInfoUpdate.getMetadata();
if (metadata == null) {
metadata = new HashMap<>();
}
if(channelDescription == null) {
channelDescription = EMPTY_STRING;
}
metadata.put(ChannelMetadata.AL_CHANNEL_DESCRIPTION, channelDescription);
groupInfoUpdate.setMetadata(metadata);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.update_channel_description_layout);
editTextChannelDescription = (EditText) findViewById(R.id.editTextNewChannelDescription);
groupInfoUpdate = getExistingGroupInfoDataFromActivityIntent();
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
mActionBar = getSupportActionBar();
mActionBar.setTitle(getString(R.string.update_channel_title_description));
loadCustomizationFile();
setActionBarColorFromCustomizationFile();
final String existingChannelDescription = getExistingChannelDescriptionFrom(groupInfoUpdate);
editTextChannelDescription.setText(existingChannelDescription);
Button buttonOk = (Button) findViewById(R.id.buttonChannelDescriptionOk);
Button buttonCancel = (Button) findViewById(R.id.buttonChannelDescriptionCancel);
buttonOk.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String editTextString = editTextChannelDescription.getText().toString().trim();
if (editTextString != null && !editTextString.equals(existingChannelDescription)) {
updateGroupInfoUpdateObjectsChannelDescription(editTextString);
Intent intent = new Intent();
intent.putExtra(ChannelInfoActivity.GROUP_UPDTAE_INFO, GsonUtils.getJsonFromObject(groupInfoUpdate, GroupInfoUpdate.class));
setResult(RESULT_OK, intent);
if (editTextString.trim().length() == 0) {
Toast.makeText(EditChannelDescriptionActivity.this, getString(R.string.channel_description_will_be_removed), Toast.LENGTH_SHORT).show();
}
}
EditChannelDescriptionActivity.this.finish();
}
});
buttonCancel.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
EditChannelDescriptionActivity.this.finish();
}
});
connectivityReceiver = new ConnectivityReceiver();
registerReceiver(connectivityReceiver, new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION));
}
@Override
protected void onDestroy() {
super.onDestroy();
try {
if (connectivityReceiver != null) {
unregisterReceiver(connectivityReceiver);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
| 2,487 |
2,059 | <filename>include/HubFramework/HUBComponentCollectionViewCell.h
/*
* Copyright (c) 2016 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#import <UIKit/UIKit.h>
@protocol HUBComponent;
NS_ASSUME_NONNULL_BEGIN
/**
* Collection view cell that can be used to display a Hub Framework component
*
* The Hub Framework uses this collection view cell internally to wrap component views, manage their size,
* reuse, etc. If you're building a component that uses a nested `UICollectionView` to display child components,
* you can use this cell class to easily be able to render your child components.
*/
@interface HUBComponentCollectionViewCell : UICollectionViewCell
/// A unique identifier for the cell, can be used to track this instance in various operations
@property (nonatomic, strong, readonly) NSUUID *identifier;
/**
* The component that the collection view is currently displaying
*
* Set this property to replace the component with a new one. The previous component will be removed from the
* cell's content view, and the new one added.
*
* When a component has been attached to this cell, it will start managing it in terms of resizing and reuse, so
* you don't need to manually send `prepareForReuse` to the component, it will automatically be sent when the cell
* itself gets reused.
*/
@property (nonatomic, strong, nullable) id<HUBComponent> component;
@end
NS_ASSUME_NONNULL_END
| 586 |
5,823 | // Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/win/scoped_variant.h"
#include <propvarutil.h>
#include <wrl/client.h>
#include <algorithm>
#include <functional>
#include "base/logging.h"
#include "base/numerics/ranges.h"
#include "base/win/variant_util.h"
namespace base {
namespace win {
// Global, const instance of an empty variant.
const VARIANT ScopedVariant::kEmptyVariant = {{{VT_EMPTY}}};
ScopedVariant::ScopedVariant(ScopedVariant&& var) {
var_.vt = VT_EMPTY;
Reset(var.Release());
}
ScopedVariant::~ScopedVariant() {
static_assert(sizeof(ScopedVariant) == sizeof(VARIANT), "ScopedVariantSize");
::VariantClear(&var_);
}
ScopedVariant::ScopedVariant(const wchar_t* str) {
var_.vt = VT_EMPTY;
Set(str);
}
ScopedVariant::ScopedVariant(const wchar_t* str, UINT length) {
var_.vt = VT_BSTR;
var_.bstrVal = ::SysAllocStringLen(str, length);
}
ScopedVariant::ScopedVariant(long value, VARTYPE vt) {
var_.vt = vt;
var_.lVal = value;
}
ScopedVariant::ScopedVariant(int value) {
var_.vt = VT_I4;
var_.lVal = value;
}
ScopedVariant::ScopedVariant(bool value) {
var_.vt = VT_BOOL;
var_.boolVal = value ? VARIANT_TRUE : VARIANT_FALSE;
}
ScopedVariant::ScopedVariant(double value, VARTYPE vt) {
BASE_DCHECK(vt == VT_R8 || vt == VT_DATE);
var_.vt = vt;
var_.dblVal = value;
}
ScopedVariant::ScopedVariant(IDispatch* dispatch) {
var_.vt = VT_EMPTY;
Set(dispatch);
}
ScopedVariant::ScopedVariant(IUnknown* unknown) {
var_.vt = VT_EMPTY;
Set(unknown);
}
ScopedVariant::ScopedVariant(SAFEARRAY* safearray) {
var_.vt = VT_EMPTY;
Set(safearray);
}
ScopedVariant::ScopedVariant(const VARIANT& var) {
var_.vt = VT_EMPTY;
Set(var);
}
void ScopedVariant::Reset(const VARIANT& var) {
if (&var != &var_) {
::VariantClear(&var_);
var_ = var;
}
}
VARIANT ScopedVariant::Release() {
VARIANT var = var_;
var_.vt = VT_EMPTY;
return var;
}
void ScopedVariant::Swap(ScopedVariant& var) {
VARIANT tmp = var_;
var_ = var.var_;
var.var_ = tmp;
}
VARIANT* ScopedVariant::Receive() {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "variant leak. type: " << var_.vt;
return &var_;
}
VARIANT ScopedVariant::Copy() const {
VARIANT ret = {{{VT_EMPTY}}};
::VariantCopy(&ret, &var_);
return ret;
}
int ScopedVariant::Compare(const VARIANT& other, bool ignore_case) const {
BASE_DCHECK(!V_ISARRAY(&var_))
<< "Comparison is not supported when |this| owns a SAFEARRAY";
BASE_DCHECK(!V_ISARRAY(&other))
<< "Comparison is not supported when |other| owns a SAFEARRAY";
const bool this_is_empty = var_.vt == VT_EMPTY || var_.vt == VT_NULL;
const bool other_is_empty = other.vt == VT_EMPTY || other.vt == VT_NULL;
// 1. VT_NULL and VT_EMPTY is always considered less-than any other VARTYPE.
if (this_is_empty)
return other_is_empty ? 0 : -1;
if (other_is_empty)
return 1;
// 2. If both VARIANTS have either VT_UNKNOWN or VT_DISPATCH even if the
// VARTYPEs do not match, the address of its IID_IUnknown is compared to
// guarantee a logical ordering even though it is not a meaningful order.
// e.g. (a.Compare(b) != b.Compare(a)) unless (a == b).
const bool this_is_unknown = var_.vt == VT_UNKNOWN || var_.vt == VT_DISPATCH;
const bool other_is_unknown =
other.vt == VT_UNKNOWN || other.vt == VT_DISPATCH;
if (this_is_unknown && other_is_unknown) {
// https://docs.microsoft.com/en-us/windows/win32/com/rules-for-implementing-queryinterface
// Query IID_IUnknown to determine whether the two variants point
// to the same instance of an object
Microsoft::WRL::ComPtr<IUnknown> this_unknown;
Microsoft::WRL::ComPtr<IUnknown> other_unknown;
V_UNKNOWN(&var_)->QueryInterface(IID_PPV_ARGS(&this_unknown));
V_UNKNOWN(&other)->QueryInterface(IID_PPV_ARGS(&other_unknown));
if (this_unknown.Get() == other_unknown.Get())
return 0;
// std::less for any pointer type yields a strict total order even if the
// built-in operator< does not.
return std::less<>{}(this_unknown.Get(), other_unknown.Get()) ? -1 : 1;
}
// 3. If the VARTYPEs do not match, then the value of the VARTYPE is compared.
if (V_VT(&var_) != V_VT(&other))
return (V_VT(&var_) < V_VT(&other)) ? -1 : 1;
const VARTYPE shared_vartype = V_VT(&var_);
// 4. Comparing VT_BSTR values is a lexicographical comparison of the contents
// of the BSTR, taking into account |ignore_case|.
if (shared_vartype == VT_BSTR) {
ULONG flags = ignore_case ? NORM_IGNORECASE : 0;
HRESULT hr =
::VarBstrCmp(V_BSTR(&var_), V_BSTR(&other), LOCALE_USER_DEFAULT, flags);
BASE_DCHECK(SUCCEEDED(hr) && hr != VARCMP_NULL)
<< "unsupported variant comparison: " << var_.vt << " and " << other.vt;
switch (hr) {
case VARCMP_LT:
return -1;
case VARCMP_GT:
case VARCMP_NULL:
return 1;
default:
return 0;
}
}
// 5. Otherwise returns the lexicographical comparison of the values held by
// the two VARIANTS that share the same VARTYPE.
return ::VariantCompare(var_, other);
}
void ScopedVariant::Set(const wchar_t* str) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_BSTR;
var_.bstrVal = ::SysAllocString(str);
}
void ScopedVariant::Set(int8_t i8) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_I1;
var_.cVal = i8;
}
void ScopedVariant::Set(uint8_t ui8) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_UI1;
var_.bVal = ui8;
}
void ScopedVariant::Set(int16_t i16) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_I2;
var_.iVal = i16;
}
void ScopedVariant::Set(uint16_t ui16) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_UI2;
var_.uiVal = ui16;
}
void ScopedVariant::Set(int32_t i32) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_I4;
var_.lVal = i32;
}
void ScopedVariant::Set(uint32_t ui32) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_UI4;
var_.ulVal = ui32;
}
void ScopedVariant::Set(int64_t i64) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_I8;
var_.llVal = i64;
}
void ScopedVariant::Set(uint64_t ui64) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_UI8;
var_.ullVal = ui64;
}
void ScopedVariant::Set(float r32) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_R4;
var_.fltVal = r32;
}
void ScopedVariant::Set(double r64) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_R8;
var_.dblVal = r64;
}
void ScopedVariant::SetDate(DATE date) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_DATE;
var_.date = date;
}
void ScopedVariant::Set(IDispatch* disp) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_DISPATCH;
var_.pdispVal = disp;
if (disp)
disp->AddRef();
}
void ScopedVariant::Set(bool b) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_BOOL;
var_.boolVal = b ? VARIANT_TRUE : VARIANT_FALSE;
}
void ScopedVariant::Set(IUnknown* unk) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
var_.vt = VT_UNKNOWN;
var_.punkVal = unk;
if (unk)
unk->AddRef();
}
void ScopedVariant::Set(SAFEARRAY* array) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
if (SUCCEEDED(::SafeArrayGetVartype(array, &var_.vt))) {
var_.vt |= VT_ARRAY;
var_.parray = array;
} else {
BASE_DCHECK(!array) << "Unable to determine safearray vartype";
var_.vt = VT_EMPTY;
}
}
void ScopedVariant::Set(const VARIANT& var) {
BASE_DCHECK(!IsLeakableVarType(var_.vt)) << "leaking variant: " << var_.vt;
if (FAILED(::VariantCopy(&var_, &var))) {
BASE_DLOG() << "Error: VariantCopy failed";
var_.vt = VT_EMPTY;
}
}
ScopedVariant& ScopedVariant::operator=(ScopedVariant&& var) {
if (var.ptr() != &var_)
Reset(var.Release());
return *this;
}
ScopedVariant& ScopedVariant::operator=(const VARIANT& var) {
if (&var != &var_) {
VariantClear(&var_);
Set(var);
}
return *this;
}
bool ScopedVariant::IsLeakableVarType(VARTYPE vt) {
bool leakable = false;
switch (vt & VT_TYPEMASK) {
case VT_BSTR:
case VT_DISPATCH:
// we treat VT_VARIANT as leakable to err on the safe side.
case VT_VARIANT:
case VT_UNKNOWN:
case VT_SAFEARRAY:
// very rarely used stuff (if ever):
case VT_VOID:
case VT_PTR:
case VT_CARRAY:
case VT_USERDEFINED:
case VT_LPSTR:
case VT_LPWSTR:
case VT_RECORD:
case VT_INT_PTR:
case VT_UINT_PTR:
case VT_FILETIME:
case VT_BLOB:
case VT_STREAM:
case VT_STORAGE:
case VT_STREAMED_OBJECT:
case VT_STORED_OBJECT:
case VT_BLOB_OBJECT:
case VT_VERSIONED_STREAM:
case VT_BSTR_BLOB:
leakable = true;
break;
}
if (!leakable && (vt & VT_ARRAY) != 0) {
leakable = true;
}
return leakable;
}
} // namespace win
} // namespace base
| 3,964 |
17,037 | # This module is automatically generated by autogen.sh. DO NOT EDIT.
from . import _OpenStack
class _Orchestration(_OpenStack):
_type = "orchestration"
_icon_dir = "resources/openstack/orchestration"
class Blazar(_Orchestration):
_icon = "blazar.png"
class Heat(_Orchestration):
_icon = "heat.png"
class Mistral(_Orchestration):
_icon = "mistral.png"
class Senlin(_Orchestration):
_icon = "senlin.png"
class Zaqar(_Orchestration):
_icon = "zaqar.png"
# Aliases
| 187 |
1,857 | import os
import pexpect
from pathlib import Path
from textwrap import dedent
def test_history_not_log_auth(cli):
cli.sendline("AUTH 123")
cli.expect(["Client sent AUTH, but no password is set", "127.0.0.1"])
cli.sendline("set foo bar")
cli.expect("OK")
with open(os.path.expanduser("~/.iredis_history"), "r") as history_file:
content = history_file.read()
assert "set foo bar" in content
assert "AUTH" not in content
def test_history_create_and_writing_with_config():
config_content = dedent(
"""
[main]
history_location = /tmp/iredis_history.txt
"""
)
with open("/tmp/iredisrc", "w+") as etc_config:
etc_config.write(config_content)
cli = pexpect.spawn("iredis -n 15 --iredisrc /tmp/iredisrc", timeout=2)
cli.expect("127.0.0.1")
cli.sendline("set hello world")
cli.expect("OK")
cli.close()
log = Path("/tmp/iredis_history.txt")
assert log.exists()
with open(log, "r") as logfile:
content = logfile.read()
assert "set hello world" in content
| 458 |
4,587 | <reponame>steamboatid/keydb<gh_stars>1000+
#
# Copyright (C) 2016 Intel Corporation.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice(s),
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice(s),
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import pytest
import os
from python_framework import CMD_helper
def _get_lib_path():
for p in ["/usr/lib64", "/usr/lib"]:
if os.path.isdir(p):
return p
raise Exception("Cannot find library path in OS")
class Test_autohbw(object):
binary = "../autohbw_test_helper"
fail_msg = "Test failed with:\n {0}"
test_prefix = "AUTO_HBW_LOG=2 LD_PRELOAD=%s/libautohbw.so.0 " % _get_lib_path()
memkind_malloc_log = "In my memkind malloc"
memkind_calloc_log = "In my memkind calloc"
memkind_realloc_log = "In my memkind realloc"
memkind_posix_memalign_log = "In my memkind align"
memkind_free_log = "In my memkind free"
cmd_helper = CMD_helper()
def test_TC_MEMKIND_autohbw_malloc_and_free(self):
""" This test executes ./autohbw_test_helper with LD_PRELOAD that is overriding malloc() and free() to equivalent autohbw functions"""
command = self.test_prefix + self.cmd_helper.get_command_path(self.binary) + " malloc"
print "Executing command: {0}".format(command)
output, retcode = self.cmd_helper.execute_cmd(command, sudo=False)
assert retcode == 0, self.fail_msg.format("\nError: autohbw_test_helper returned {0} \noutput: {1}".format(retcode,output))
assert self.memkind_malloc_log in output, self.fail_msg.format("\nError: malloc was not overrided by autohbw equivalent \noutput: {0}").format(output)
assert self.memkind_free_log in output, self.fail_msg.format("\nError: free was not overrided by autohbw equivalent \noutput: {0}").format(output)
def test_TC_MEMKIND_autohbw_calloc_and_free(self):
""" This test executes ./autohbw_test_helper with LD_PRELOAD that is overriding calloc() and free() to equivalent autohbw functions"""
command = self.test_prefix + self.cmd_helper.get_command_path(self.binary) + " calloc"
print "Executing command: {0}".format(command)
output, retcode = self.cmd_helper.execute_cmd(command, sudo=False)
assert retcode == 0, self.fail_msg.format("\nError: autohbw_test_helper returned {0} \noutput: {1}".format(retcode,output))
assert self.memkind_calloc_log in output, self.fail_msg.format("\nError: calloc was not overrided by autohbw equivalent \noutput: {0}").format(output)
assert self.memkind_free_log in output, self.fail_msg.format("Error: free was not overrided by autohbw equivalent \noutput: {0}").format(output)
def test_TC_MEMKIND_autohbw_realloc_and_free(self):
""" This test executes ./autohbw_test_helper with LD_PRELOAD that is overriding realloc() and free() to equivalent autohbw functions"""
command = self.test_prefix + self.cmd_helper.get_command_path(self.binary) + " realloc"
print "Executing command: {0}".format(command)
output, retcode = self.cmd_helper.execute_cmd(command, sudo=False)
assert retcode == 0, self.fail_msg.format("\nError: autohbw_test_helper returned {0} \noutput: {1}".format(retcode,output))
assert self.memkind_realloc_log in output, self.fail_msg.format("\nError: realloc was not overrided by autohbw equivalent \noutput: {0}").format(output)
assert self.memkind_free_log in output, self.fail_msg.format("\nError: free was not overrided by autohbw equivalent \noutput: {0}").format(output)
def test_TC_MEMKIND_autohbw_posix_memalign_and_free(self):
""" This test executes ./autohbw_test_helper with LD_PRELOAD that is overriding posix_memalign() and free() to equivalent autohbw functions"""
command = self.test_prefix + self.cmd_helper.get_command_path(self.binary) + " posix_memalign"
print "Executing command: {0}".format(command)
output, retcode = self.cmd_helper.execute_cmd(command, sudo=False)
assert retcode == 0, self.fail_msg.format("\nError: autohbw_test_helper returned {0} \noutput: {1}".format(retcode,output))
assert self.memkind_posix_memalign_log in output, self.fail_msg.format("\nError: posix_memalign was not overrided by autohbw equivalent \noutput: {0}").format(output)
assert self.memkind_free_log in output, self.fail_msg.format("\nError: free was not overrided by autohbw equivalent \noutput: {0}").format(output)
| 2,054 |
11,010 | <gh_stars>1000+
package com.google.inject.internal;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import com.google.inject.internal.util.Classes;
import com.google.inject.internal.util.StackTraceElements;
import com.google.inject.spi.Dependency;
import com.google.inject.spi.ElementSource;
import com.google.inject.spi.InjectionPoint;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.Formatter;
import java.util.List;
/** Formatting a single source in Guice error message. */
final class SourceFormatter {
static final String INDENT = Strings.repeat(" ", 5);
private final Object source;
private final Formatter formatter;
private final boolean omitPreposition;
private final String moduleStack;
SourceFormatter(Object source, Formatter formatter, boolean omitPreposition) {
if (source instanceof ElementSource) {
ElementSource elementSource = (ElementSource) source;
this.source = elementSource.getDeclaringSource();
this.moduleStack = getModuleStack(elementSource);
} else {
this.source = source;
this.moduleStack = "";
}
this.formatter = formatter;
this.omitPreposition = omitPreposition;
}
void format() {
boolean appendModuleSource = !moduleStack.isEmpty();
if (source instanceof Dependency) {
formatDependency((Dependency<?>) source);
} else if (source instanceof InjectionPoint) {
formatInjectionPoint(null, (InjectionPoint) source);
} else if (source instanceof Class) {
formatter.format("%s%s%n", preposition("at "), StackTraceElements.forType((Class<?>) source));
} else if (source instanceof Member) {
formatMember((Member) source);
} else if (source instanceof TypeLiteral) {
formatter.format("%s%s%n", preposition("while locating "), source);
} else if (source instanceof Key) {
formatKey((Key<?>) source);
} else if (source instanceof Thread) {
appendModuleSource = false;
formatter.format("%s%s%n", preposition("in thread "), source);
} else {
formatter.format("%s%s%n", preposition("at "), source);
}
if (appendModuleSource) {
formatter.format("%s \\_ installed by: %s%n", INDENT, moduleStack);
}
}
private String preposition(String prepostition) {
if (omitPreposition) {
return "";
}
return prepostition;
}
private void formatDependency(Dependency<?> dependency) {
InjectionPoint injectionPoint = dependency.getInjectionPoint();
if (injectionPoint != null) {
formatInjectionPoint(dependency, injectionPoint);
} else {
formatKey(dependency.getKey());
}
}
private void formatKey(Key<?> key) {
formatter.format("%s%s%n", preposition("while locating "), Messages.convert(key));
}
private void formatMember(Member member) {
formatter.format("%s%s%n", preposition("at "), StackTraceElements.forMember(member));
}
private void formatInjectionPoint(Dependency<?> dependency, InjectionPoint injectionPoint) {
Member member = injectionPoint.getMember();
Class<? extends Member> memberType = Classes.memberType(member);
formatMember(injectionPoint.getMember());
if (memberType == Field.class) {
formatter.format("%s \\_ for field %s%n", INDENT, Messages.redBold(member.getName()));
} else if (dependency != null) {
formatter.format("%s \\_ for %s%n", INDENT, getParameterName(dependency));
}
}
static String getModuleStack(ElementSource elementSource) {
if (elementSource == null) {
return "";
}
List<String> modules = Lists.newArrayList(elementSource.getModuleClassNames());
// Insert any original element sources w/ module info into the path.
while (elementSource.getOriginalElementSource() != null) {
elementSource = elementSource.getOriginalElementSource();
modules.addAll(0, elementSource.getModuleClassNames());
}
if (modules.size() <= 1) {
return "";
}
return String.join(" -> ", Lists.reverse(modules));
}
static String getParameterName(Dependency<?> dependency) {
int parameterIndex = dependency.getParameterIndex();
int ordinal = parameterIndex + 1;
Member member = dependency.getInjectionPoint().getMember();
Parameter parameter = null;
if (member instanceof Constructor) {
parameter = ((Constructor<?>) member).getParameters()[parameterIndex];
} else if (member instanceof Method) {
parameter = ((Method) member).getParameters()[parameterIndex];
}
String parameterName = "";
if (parameter != null && parameter.isNamePresent()) {
parameterName = parameter.getName();
}
return String.format(
"%s%s parameter%s",
ordinal,
getOrdinalSuffix(ordinal),
parameterName.isEmpty() ? "" : " " + Messages.redBold(parameterName));
}
/**
   * Maps {@code 1} to the string {@code "1st"}, and likewise for all non-negative numbers.
*
* @see <a href="https://en.wikipedia.org/wiki/English_numerals#Ordinal_numbers">
* https://en.wikipedia.org/wiki/English_numerals#Ordinal_numbers</a>
*/
private static String getOrdinalSuffix(int ordinal) {
// negative ordinals don't make sense, we allow zero though because we are programmers
checkArgument(ordinal >= 0);
if ((ordinal / 10) % 10 == 1) {
// all the 'teens' are weird
return "th";
} else {
// could use a lookup table? any better?
switch (ordinal % 10) {
case 1:
return "st";
case 2:
return "nd";
case 3:
return "rd";
default:
return "th";
}
}
}
}
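// Illustrative only (not part of the original file): given the format strings above, a
// member source with a two-module stack is assumed to render roughly as
//
//   at com.example.FooModule.configure(FooModule.java:42)
//         \_ installed by: RootModule -> FooModule
//
// where the member text comes from StackTraceElements.forMember and the leading
// five spaces are the INDENT constant; the exact output depends on those helpers.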
| 2,075 |
678 | <reponame>bzxy/cydia
/**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/iAdCore.framework/iAdCore
*/
#import <iAdCore/XXUnknownSuperclass.h>
@class NSString, NSURL, NSData, NSDate;
@interface _ADWebArchiveCacheNode : XXUnknownSuperclass {
@private
unsigned _numberOfBytes; // 4 = 0x4
NSData *_data; // 8 = 0x8
BOOL _inMemory; // 12 = 0xc
BOOL _hasDiskRepresentation; // 13 = 0xd
NSString *_filename; // 16 = 0x10
NSURL *_URL; // 20 = 0x14
NSDate *_expiration; // 24 = 0x18
}
@property(retain, nonatomic) NSDate *expiration; // G=0x8519; S=0x8529; @synthesize=_expiration
@property(retain, nonatomic) NSURL *URL; // G=0x84e5; S=0x84f5; @synthesize=_URL
@property(retain, nonatomic) NSString *filename; // G=0x84b1; S=0x84c1; @synthesize=_filename
@property(assign) BOOL hasDiskRepresentation; // G=0x8491; S=0x84a1; @synthesize=_hasDiskRepresentation
@property(assign) BOOL inMemory; // G=0x8471; S=0x8481; @synthesize=_inMemory
@property(retain, nonatomic) NSData *data; // G=0x843d; S=0x844d; @synthesize=_data
@property(assign) unsigned numberOfBytes; // G=0x841d; S=0x842d; @synthesize=_numberOfBytes
+ (id)resourceCacheNodeWithDictionary:(id)dictionary; // 0x7bed
// declared property setter: - (void)setExpiration:(id)expiration; // 0x8529
// declared property getter: - (id)expiration; // 0x8519
// declared property setter: - (void)setURL:(id)url; // 0x84f5
// declared property getter: - (id)URL; // 0x84e5
// declared property setter: - (void)setFilename:(id)filename; // 0x84c1
// declared property getter: - (id)filename; // 0x84b1
// declared property setter: - (void)setHasDiskRepresentation:(BOOL)representation; // 0x84a1
// declared property getter: - (BOOL)hasDiskRepresentation; // 0x8491
// declared property setter: - (void)setInMemory:(BOOL)memory; // 0x8481
// declared property getter: - (BOOL)inMemory; // 0x8471
// declared property setter: - (void)setData:(id)data; // 0x844d
// declared property getter: - (id)data; // 0x843d
// declared property setter: - (void)setNumberOfBytes:(unsigned)bytes; // 0x842d
// declared property getter: - (unsigned)numberOfBytes; // 0x841d
- (void)dealloc; // 0x8395
- (BOOL)isExpired; // 0x8341
- (void)removeFromMemory; // 0x8315
- (BOOL)destroy; // 0x81f5
- (BOOL)loadFromDisk; // 0x80a9
- (void)writeToDiskWithCompletionHandler:(id)completionHandler; // 0x7e69
- (id)dictionaryRepresentation; // 0x7d85
- (id)init; // 0x7d2d
@end
| 993 |
2,567 | class SdA {
public:
int N;
int n_ins;
int *hidden_layer_sizes;
int n_outs;
int n_layers;
HiddenLayer **sigmoid_layers;
dA **dA_layers;
LogisticRegression *log_layer;
SdA(int, int, int*, int, int);
~SdA();
void pretrain(int*, double, double, int);
void finetune(int*, int*, double, int);
void predict(int*, double*);
};
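// Minimal usage sketch, not part of the original header. Parameter meanings are
// inferred from the member names and method signatures above, so treat the order
// (N, n_ins, hidden_layer_sizes, n_outs, n_layers) and the train arguments as
// assumptions about this particular implementation:
//
//   int hidden[] = {500, 500};
//   SdA sda(train_N, 784, hidden, 10, 2);
//   sda.pretrain(train_X, 0.1 /*lr*/, 0.3 /*corruption*/, 15 /*epochs*/);
//   sda.finetune(train_X, train_Y, 0.1 /*lr*/, 100 /*epochs*/);
//   double y[10];
//   sda.predict(test_x, y);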
| 146 |
3,748 | <filename>mmcv/ops/csrc/tensorrt/trt_scatternd.hpp<gh_stars>1000+
#ifndef TRT_SCATTERND_HPP
#define TRT_SCATTERND_HPP
#include <cublas_v2.h>
#include <memory>
#include <string>
#include <vector>
#include "trt_plugin_helper.hpp"
class ONNXScatterNDDynamic : public nvinfer1::IPluginV2DynamicExt {
public:
ONNXScatterNDDynamic(const std::string &name);
ONNXScatterNDDynamic(const std::string name, const void *data, size_t length);
ONNXScatterNDDynamic() = delete;
// IPluginV2DynamicExt Methods
nvinfer1::IPluginV2DynamicExt *clone() const override;
nvinfer1::DimsExprs getOutputDimensions(
int outputIndex, const nvinfer1::DimsExprs *inputs, int nbInputs,
nvinfer1::IExprBuilder &exprBuilder) override;
bool supportsFormatCombination(int pos,
const nvinfer1::PluginTensorDesc *inOut,
int nbInputs, int nbOutputs) override;
void configurePlugin(const nvinfer1::DynamicPluginTensorDesc *in,
int nbInputs,
const nvinfer1::DynamicPluginTensorDesc *out,
int nbOutputs) override;
size_t getWorkspaceSize(const nvinfer1::PluginTensorDesc *inputs,
int nbInputs,
const nvinfer1::PluginTensorDesc *outputs,
int nbOutputs) const override;
int enqueue(const nvinfer1::PluginTensorDesc *inputDesc,
const nvinfer1::PluginTensorDesc *outputDesc,
const void *const *inputs, void *const *outputs, void *workspace,
cudaStream_t stream) override;
// IPluginV2Ext Methods
nvinfer1::DataType getOutputDataType(int index,
const nvinfer1::DataType *inputTypes,
int nbInputs) const override;
// IPluginV2 Methods
const char *getPluginType() const override;
const char *getPluginVersion() const override;
int getNbOutputs() const override;
int initialize() override;
void terminate() override;
size_t getSerializationSize() const override;
void serialize(void *buffer) const override;
void destroy() override;
void setPluginNamespace(const char *pluginNamespace) override;
const char *getPluginNamespace() const override;
private:
const std::string mLayerName;
std::string mNamespace;
protected:
// To prevent compiler warnings.
using nvinfer1::IPluginV2DynamicExt::canBroadcastInputAcrossBatch;
using nvinfer1::IPluginV2DynamicExt::configurePlugin;
using nvinfer1::IPluginV2DynamicExt::enqueue;
using nvinfer1::IPluginV2DynamicExt::getOutputDimensions;
using nvinfer1::IPluginV2DynamicExt::getWorkspaceSize;
using nvinfer1::IPluginV2DynamicExt::isOutputBroadcastAcrossBatch;
using nvinfer1::IPluginV2DynamicExt::supportsFormat;
};
class ONNXScatterNDDynamicCreator : public nvinfer1::IPluginCreator {
public:
ONNXScatterNDDynamicCreator();
const char *getPluginName() const override;
const char *getPluginVersion() const override;
const nvinfer1::PluginFieldCollection *getFieldNames() override;
nvinfer1::IPluginV2 *createPlugin(
const char *name, const nvinfer1::PluginFieldCollection *fc) override;
nvinfer1::IPluginV2 *deserializePlugin(const char *name,
const void *serialData,
size_t serialLength) override;
void setPluginNamespace(const char *pluginNamespace) override;
const char *getPluginNamespace() const override;
private:
static nvinfer1::PluginFieldCollection mFC;
static std::vector<nvinfer1::PluginField> mPluginAttributes;
std::string mNamespace;
};
#endif // TRT_SCATTERND_HPP
| 1,501 |
903 | <gh_stars>100-1000
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.smithy.build.transforms;
import java.util.LinkedHashMap;
import java.util.Map;
import software.amazon.smithy.build.SmithyBuildException;
import software.amazon.smithy.build.TransformContext;
import software.amazon.smithy.model.Model;
import software.amazon.smithy.model.shapes.ShapeId;
import software.amazon.smithy.model.shapes.ShapeType;
/**
* {@code changeType} is used to change the type of one or more shapes.
*/
public final class ChangeTypes extends ConfigurableProjectionTransformer<ChangeTypes.Config> {
/**
     * {@code changeTypes} configuration settings.
*/
public static final class Config {
private final Map<ShapeId, ShapeType> shapeTypes = new LinkedHashMap<>();
/**
* Sets the map of shape IDs to shape types to set.
*
* @param shapeTypes Map of shape ID to shape type.
*/
public void setShapeTypes(Map<ShapeId, ShapeType> shapeTypes) {
this.shapeTypes.clear();
this.shapeTypes.putAll(shapeTypes);
}
public Map<ShapeId, ShapeType> getShapeTypes() {
return shapeTypes;
}
}
@Override
public Class<Config> getConfigType() {
return Config.class;
}
@Override
public String getName() {
return "changeTypes";
}
@Override
protected Model transformWithConfig(TransformContext context, Config config) {
if (config.getShapeTypes().isEmpty()) {
throw new SmithyBuildException(getName() + ": shapeTypes must not be empty");
}
return context.getTransformer().changeShapeType(context.getModel(), config.getShapeTypes());
}
}
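// Illustrative usage, not part of the original file: in a smithy-build projection this
// transform is assumed to be configured with a "shapeTypes" map from shape ID to target
// type, e.g. (the ID and type below are hypothetical):
//
//   {"name": "changeTypes", "args": {"shapeTypes": {"smithy.example#Foo": "union"}}}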
| 778 |
608 | // -------------------------------
// Copyright (c) Corman Technologies Inc.
// See LICENSE.txt for license information.
// -------------------------------
//
// File: stdafx.cpp
//  Contents:  Source file that includes just the standard
//             includes; stdafx.pch will be the pre-compiled
//             header; stdafx.obj will contain the pre-compiled
//             type information
#include "stdafx.h"
| 144 |
309 | package com.github.codesniper.poplayer.pop;
import android.content.Context;
import io.reactivex.Observable;
import io.reactivex.Observer;
import io.reactivex.disposables.Disposable;
public class PopSubscriber<T> implements Observer<T> {
private Popi popi;
private Context mContext;
private boolean isRequestSuccess=false;
public PopSubscriber(Context context,Popi popi) {
this.mContext=context;
this.popi = popi;
}
@Override
public void onSubscribe(Disposable d) {
}
@Override
public void onNext(T t) {
if(t!=null){
isRequestSuccess=true;
}
}
@Override
public void onError(Throwable e) {
isRequestSuccess=false;
}
@Override
public void onComplete() {
if(isRequestSuccess){
StickyPopManager.getInstance(mContext).pushToQueue(popi);
}else {
StickyPopManager.getInstance(mContext).clear();
}
}
}
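// Illustrative subscription sketch, not part of the original file; Observable<Popi> is an
// assumption about how the request stream is typed elsewhere in the project:
//
//   observable.subscribe(new PopSubscriber<Popi>(context, popi));
//
// onNext flags the request as successful, and onComplete then either pushes the Popi to
// the sticky queue or clears it via StickyPopManager.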
| 409 |
1,555 | <reponame>LightSun/mir<filename>c-tests/lacc/deref-deep.c
struct typetree {
int type;
struct typetree *next;
};
struct var {
struct typetree *type;
};
int main() {
struct typetree p = {1};
struct typetree q = {3};
struct var root;
p.next = &q;
root.type = &p;
return root.type->next->type;
}
| 130 |
400 | <gh_stars>100-1000
/*
* fisheye_image_file.h - Fisheye image file class
*
* Copyright (c) 2020 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: <NAME> <<EMAIL>>
*/
#ifndef XCAM_FISHEYE_IMAGE_FILE_H
#define XCAM_FISHEYE_IMAGE_FILE_H
#include <xcam_std.h>
#include <image_file.h>
#include <video_buffer.h>
namespace XCam {
class FisheyeImageFile
: public ImageFile
{
public:
FisheyeImageFile ();
virtual ~FisheyeImageFile ();
virtual XCamReturn read_buf (const SmartPtr<VideoBuffer> &buf);
bool set_fisheye_num (uint32_t num);
void set_img_size (uint32_t width, uint32_t height);
void set_center (float cx, float cy, uint32_t idx = 0);
void set_roi_radius (uint32_t roi_radius, uint32_t idx = 0);
private:
XCAM_DEAD_COPY (FisheyeImageFile);
bool gen_roi_pos (uint32_t idx);
XCamReturn read_roi (const SmartPtr<VideoBuffer> &buf, uint32_t idx);
enum { FISHEYE_MAX_NUM = 2 };
private:
uint32_t _fisheye_num;
uint32_t _img_w;
uint32_t _img_h;
float _cx[FISHEYE_MAX_NUM];
float _cy[FISHEYE_MAX_NUM];
uint32_t _roi_radius[FISHEYE_MAX_NUM];
uint32_t *_x_min[FISHEYE_MAX_NUM];
uint32_t *_x_max[FISHEYE_MAX_NUM];
bool _update_roi_pos[FISHEYE_MAX_NUM];
};
}
#endif // XCAM_FISHEYE_IMAGE_FILE_H
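// Rough usage sketch, not part of the original header; open() is assumed to be inherited
// from ImageFile, and the numeric values are placeholders:
//
//   FisheyeImageFile file;
//   file.open ("fisheye.nv12", "rb");
//   file.set_fisheye_num (2);
//   file.set_img_size (3840, 1920);
//   file.set_center (960.0f, 960.0f, 0);
//   file.set_roi_radius (900, 0);
//   XCamReturn ret = file.read_buf (buf);   // buf: a caller-prepared SmartPtr<VideoBuffer>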
| 806 |
13,653 | <reponame>cihan-demir/NineMensMorris
import pytest
from unittest import mock
import torch # noqa I201
from mlagents.torch_utils import set_torch_config, default_device
from mlagents.trainers.settings import TorchSettings
@pytest.mark.parametrize(
"device_str, expected_type, expected_index, expected_tensor_type",
[
("cpu", "cpu", None, torch.FloatTensor),
("cuda", "cuda", None, torch.cuda.FloatTensor),
("cuda:42", "cuda", 42, torch.cuda.FloatTensor),
("opengl", "opengl", None, torch.FloatTensor),
],
)
@mock.patch.object(torch, "set_default_tensor_type")
def test_set_torch_device(
mock_set_default_tensor_type,
device_str,
expected_type,
expected_index,
expected_tensor_type,
):
try:
torch_settings = TorchSettings(device=device_str)
set_torch_config(torch_settings)
assert default_device().type == expected_type
if expected_index is None:
assert default_device().index is None
else:
assert default_device().index == expected_index
mock_set_default_tensor_type.assert_called_once_with(expected_tensor_type)
except Exception:
raise
finally:
# restore the defaults
torch_settings = TorchSettings(device=None)
set_torch_config(torch_settings)
| 548 |
1,062 | // -*- mode: ObjC -*-
// This file is part of class-dump, a utility for examining the Objective-C segment of Mach-O files.
// Copyright (C) 1997-1998, 2000-2001, 2004-2012 <NAME>.
#import <Foundation/Foundation.h>
@class CDType;
@interface CDOCProperty : NSObject
- (id)initWithName:(NSString *)name attributes:(NSString *)attributes;
@property (readonly) NSString *name;
@property (readonly) NSString *attributeString;
@property (readonly) CDType *type;
@property (readonly) NSArray *attributes;
@property (strong) NSString *attributeStringAfterType;
@property (nonatomic, readonly) NSString *defaultGetter;
@property (nonatomic, readonly) NSString *defaultSetter;
@property (strong) NSString *customGetter;
@property (strong) NSString *customSetter;
@property (nonatomic, readonly) NSString *getter;
@property (nonatomic, readonly) NSString *setter;
@property (readonly) BOOL isReadOnly;
@property (readonly) BOOL isDynamic;
- (NSComparisonResult)ascendingCompareByName:(CDOCProperty *)otherProperty;
@end
| 326 |