Datasets:
repo_name | path | size | content | license
---|---|---|---|---|
Socratacom/socrata-europe | wp-content/themes/sage/node_modules/asset-builder/node_modules/main-bower-files/node_modules/vinyl-fs/node_modules/glob-stream/node_modules/unique-stream/node_modules/es6-set/polyfill.js | 2730 | 'use strict';
var clear = require('es5-ext/array/#/clear')
, eIndexOf = require('es5-ext/array/#/e-index-of')
, setPrototypeOf = require('es5-ext/object/set-prototype-of')
, callable = require('es5-ext/object/valid-callable')
, d = require('d')
, ee = require('event-emitter')
, Symbol = require('es6-symbol')
, iterator = require('es6-iterator/valid-iterable')
, forOf = require('es6-iterator/for-of')
, Iterator = require('./lib/iterator')
, isNative = require('./is-native-implemented')
, call = Function.prototype.call, defineProperty = Object.defineProperty
, SetPoly, getValues;
module.exports = SetPoly = function (/*iterable*/) {
var iterable = arguments[0];
if (!(this instanceof SetPoly)) return new SetPoly(iterable);
if (this.__setData__ !== undefined) {
throw new TypeError(this + " cannot be reinitialized");
}
if (iterable != null) iterator(iterable);
defineProperty(this, '__setData__', d('c', []));
if (!iterable) return;
forOf(iterable, function (value) {
if (eIndexOf.call(this, value) !== -1) return;
this.push(value);
}, this.__setData__);
};
if (isNative) {
if (setPrototypeOf) setPrototypeOf(SetPoly, Set);
SetPoly.prototype = Object.create(Set.prototype, {
constructor: d(SetPoly)
});
}
ee(Object.defineProperties(SetPoly.prototype, {
add: d(function (value) {
if (this.has(value)) return this;
this.emit('_add', this.__setData__.push(value) - 1, value);
return this;
}),
clear: d(function () {
if (!this.__setData__.length) return;
clear.call(this.__setData__);
this.emit('_clear');
}),
delete: d(function (value) {
var index = eIndexOf.call(this.__setData__, value);
if (index === -1) return false;
this.__setData__.splice(index, 1);
this.emit('_delete', index, value);
return true;
}),
entries: d(function () { return new Iterator(this, 'key+value'); }),
forEach: d(function (cb/*, thisArg*/) {
var thisArg = arguments[1], iterator, result, value;
callable(cb);
iterator = this.values();
result = iterator._next();
while (result !== undefined) {
value = iterator._resolve(result);
call.call(cb, thisArg, value, value, this);
result = iterator._next();
}
}),
has: d(function (value) {
return (eIndexOf.call(this.__setData__, value) !== -1);
}),
keys: d(getValues = function () { return this.values(); }),
size: d.gs(function () { return this.__setData__.length; }),
values: d(function () { return new Iterator(this); }),
toString: d(function () { return '[object Set]'; })
}));
defineProperty(SetPoly.prototype, Symbol.iterator, d(getValues));
defineProperty(SetPoly.prototype, Symbol.toStringTag, d('c', 'Set'));
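// Example usage (illustrative only, not part of the original file):
//   var set = new SetPoly(['a', 'b']);
//   set.add('c');       // returns the set: SetPoly { 'a', 'b', 'c' }
//   set.has('a');       // true
//   set.delete('b');    // true
//   set.size;           // 2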
| gpl-2.0 |
mshick/velvet | core/classes/data.js | 266 | 'use strict';
const TYPE = Symbol.for('type');
class Data {
constructor(options) {
// File details
this.filepath = options.filepath;
// Type
this[TYPE] = 'data';
// Data
Object.assign(this, options.data);
}
}
module.exports = Data;
| isc |
GoKillers/libsodium-go | sodium/runtime.go | 322 | package sodium
// #cgo pkg-config: libsodium
// #include <stdlib.h>
// #include <sodium.h>
import "C"
func RuntimeHasNeon() bool {
return C.sodium_runtime_has_neon() != 0
}
func RuntimeHasSse2() bool {
return C.sodium_runtime_has_sse2() != 0
}
func RuntimeHasSse3() bool {
return C.sodium_runtime_has_sse3() != 0
}
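// exampleBestImpl is an illustrative sketch (not part of the original file)
// showing how the feature checks above could be used to pick an
// implementation at runtime.
func exampleBestImpl() string {
	if RuntimeHasSse3() {
		return "sse3"
	}
	if RuntimeHasSse2() {
		return "sse2"
	}
	if RuntimeHasNeon() {
		return "neon"
	}
	return "generic"
}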
| isc |
burninggarden/burninggarden | test/unit/resource/accounts.js | 886 |
function collectWithWildcard(test) {
test.expect(4);
var api_server = new Test_ApiServer(function handler(request, callback) {
var url = request.url;
switch (url) {
case '/accounts?username=chariz*':
let account = new Model_Account({
username: 'charizard'
});
return void callback(null, [
account.redact()
]);
default:
let error = new Error('Invalid url: ' + url);
return void callback(error);
}
});
var parameters = {
username: 'chariz*'
};
function handler(error, results) {
test.equals(error, null);
test.equals(results.length, 1);
var account = results[0];
test.equals(account.get('username'), 'charizard');
test.equals(account.get('type'), Enum_AccountTypes.MEMBER);
api_server.destroy();
test.done();
}
Resource_Accounts.collect(parameters, handler);
}
module.exports = {
collectWithWildcard
};
| isc |
guide42/php-immutable | src/base.php | 165 | <?php
interface Container {
/**
* Checks whether $x exists in the container.
*
* @param mixed $x
*
* @return boolean
*/
function contains($x);
} | isc |
markokr/libusual | usual/json.c | 38477 | /*
* Read and write JSON.
*
* Copyright (c) 2014 Marko Kreen
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include <usual/json.h>
#include <usual/cxextra.h>
#include <usual/cbtree.h>
#include <usual/misc.h>
#include <usual/utf8.h>
#include <usual/ctype.h>
#include <usual/bytemap.h>
#include <usual/string.h>
#include <math.h>
#define TYPE_BITS 3
#define TYPE_MASK ((1 << TYPE_BITS) - 1)
#define UNATTACHED ((struct JsonValue *)(1 << TYPE_BITS))
#define JSON_MAX_KEY (1024*1024)
#define NUMBER_BUF 100
#define JSON_MAXINT ((1LL << 53) - 1)
#define JSON_MININT (-(1LL << 53) + 1)
/*
* Common struct for all JSON values
*/
struct JsonValue {
/* actual value for simple types */
union {
double v_float; /* float */
int64_t v_int; /* int */
bool v_bool; /* bool */
size_t v_size; /* str/list/dict */
} u;
/* pointer to next elem and type in low bits */
uintptr_t v_next_and_type;
};
/*
* List container.
*/
struct ValueList {
struct JsonValue *first;
struct JsonValue *last;
struct JsonValue **array;
};
/*
* Extra data for list/dict.
*/
struct JsonContainer {
/* parent container */
struct JsonValue *c_parent;
/* main context for child alloc */
struct JsonContext *c_ctx;
/* child elements */
union {
struct CBTree *c_dict;
struct ValueList c_list;
} u;
};
#define DICT_EXTRA (offsetof(struct JsonContainer, u.c_dict) + sizeof(struct CBTree *))
#define LIST_EXTRA (sizeof(struct JsonContainer))
/*
* Allocation context.
*/
struct JsonContext {
CxMem *pool;
unsigned int options;
/* parse state */
struct JsonValue *parent;
struct JsonValue *cur_key;
struct JsonValue *top;
const char *lasterr;
char errbuf[128];
int64_t linenr;
};
struct RenderState {
struct MBuf *dst;
unsigned int options;
};
/*
* Parser states
*/
enum ParseState {
S_INITIAL_VALUE = 1,
S_LIST_VALUE,
S_LIST_VALUE_OR_CLOSE,
S_LIST_COMMA_OR_CLOSE,
S_DICT_KEY,
S_DICT_KEY_OR_CLOSE,
S_DICT_COLON,
S_DICT_VALUE,
S_DICT_COMMA_OR_CLOSE,
S_PARENT,
S_DONE,
MAX_STATES,
};
/*
* Tokens that change state.
*/
enum TokenTypes {
T_STRING,
T_OTHER,
T_COMMA,
T_COLON,
T_OPEN_DICT,
T_OPEN_LIST,
T_CLOSE_DICT,
T_CLOSE_LIST,
MAX_TOKENS
};
/*
* 4-byte ints for small string tokens.
*/
#define C_NULL FOURCC('n','u','l','l')
#define C_TRUE FOURCC('t','r','u','e')
#define C_ALSE FOURCC('a','l','s','e')
/*
* Signature for render functions.
*/
typedef bool (*render_func_t)(struct RenderState *rs, struct JsonValue *jv);
static bool render_any(struct RenderState *rs, struct JsonValue *jv);
/*
* Header manipulation
*/
static inline enum JsonValueType get_type(struct JsonValue *jv)
{
return jv->v_next_and_type & TYPE_MASK;
}
static inline bool has_type(struct JsonValue *jv, enum JsonValueType type)
{
if (!jv)
return false;
return get_type(jv) == type;
}
static inline struct JsonValue *get_next(struct JsonValue *jv)
{
return (struct JsonValue *)(jv->v_next_and_type & ~(uintptr_t)TYPE_MASK);
}
static inline void set_next(struct JsonValue *jv, struct JsonValue *next)
{
jv->v_next_and_type = (uintptr_t)next | get_type(jv);
}
static inline bool is_unattached(struct JsonValue *jv)
{
return get_next(jv) == UNATTACHED;
}
static inline void *get_extra(struct JsonValue *jv)
{
return (void *)(jv + 1);
}
static inline char *get_cstring(struct JsonValue *jv)
{
enum JsonValueType type = get_type(jv);
if (type != JSON_STRING)
return NULL;
return get_extra(jv);
}
/*
* Collection header manipulation.
*/
static inline struct JsonContainer *get_container(struct JsonValue *jv)
{
enum JsonValueType type = get_type(jv);
if (type != JSON_DICT && type != JSON_LIST)
return NULL;
return get_extra(jv);
}
static inline void set_parent(struct JsonValue *jv, struct JsonValue *parent)
{
struct JsonContainer *c = get_container(jv);
if (c)
c->c_parent = parent;
}
static inline struct JsonContext *get_context(struct JsonValue *jv)
{
struct JsonContainer *c = get_container(jv);
return c ? c->c_ctx : NULL;
}
static inline struct CBTree *get_dict_tree(struct JsonValue *jv)
{
struct JsonContainer *c;
if (has_type(jv, JSON_DICT)) {
c = get_container(jv);
return c->u.c_dict;
}
return NULL;
}
static inline struct ValueList *get_list_vlist(struct JsonValue *jv)
{
struct JsonContainer *c;
if (has_type(jv, JSON_LIST)) {
c = get_container(jv);
return &c->u.c_list;
}
return NULL;
}
/*
* Random helpers
*/
/* copy and return final pointer */
static inline char *plain_copy(char *dst, const char *src, const char *endptr)
{
if (src < endptr) {
memcpy(dst, src, endptr - src);
return dst + (endptr - src);
}
return dst;
}
/* error message on context */
_PRINTF(2,0)
static void format_err(struct JsonContext *ctx, const char *errmsg, va_list ap)
{
char buf[119];
if (ctx->lasterr)
return;
vsnprintf(buf, sizeof(buf), errmsg, ap);
snprintf(ctx->errbuf, sizeof(ctx->errbuf), "Line #%" PRIi64 ": %s", ctx->linenr, buf);
ctx->lasterr = ctx->errbuf;
}
/* set message and return false */
_PRINTF(2,3)
static bool err_false(struct JsonContext *ctx, const char *errmsg, ...)
{
va_list ap;
va_start(ap, errmsg);
format_err(ctx, errmsg, ap);
va_end(ap);
return false;
}
/* set message and return NULL */
_PRINTF(2,3)
static void *err_null(struct JsonContext *ctx, const char *errmsg, ...)
{
va_list ap;
va_start(ap, errmsg);
format_err(ctx, errmsg, ap);
va_end(ap);
return NULL;
}
/* callback for cbtree, returns key bytes */
static size_t get_key_data_cb(void *dictptr, void *keyptr, const void **dst_p)
{
struct JsonValue *key = keyptr;
*dst_p = get_cstring(key);
return key->u.v_size;
}
/* add element to list */
static void real_list_append(struct JsonValue *list, struct JsonValue *elem)
{
struct ValueList *vlist;
vlist = get_list_vlist(list);
if (vlist->last) {
set_next(vlist->last, elem);
} else {
vlist->first = elem;
}
vlist->last = elem;
vlist->array = NULL;
list->u.v_size++;
}
/* add key to tree */
static bool real_dict_add_key(struct JsonContext *ctx, struct JsonValue *dict, struct JsonValue *key)
{
struct CBTree *tree;
tree = get_dict_tree(dict);
if (!tree)
return err_false(ctx, "Expect dict");
if (json_value_size(key) > JSON_MAX_KEY)
return err_false(ctx, "Too large key");
dict->u.v_size++;
if (!cbtree_insert(tree, key))
return err_false(ctx, "Key insertion failed");
return true;
}
/* create basic value struct, link to structures */
static struct JsonValue *mk_value(struct JsonContext *ctx, enum JsonValueType type, size_t extra, bool attach)
{
struct JsonValue *val;
struct JsonContainer *col = NULL;
if (!ctx)
return NULL;
val = cx_alloc(ctx->pool, sizeof(struct JsonValue) + extra);
if (!val)
return err_null(ctx, "No memory");
if ((uintptr_t)val & TYPE_MASK)
return err_null(ctx, "Unaligned pointer");
/* initial value */
val->v_next_and_type = type;
val->u.v_int = 0;
if (type == JSON_DICT || type == JSON_LIST) {
col = get_container(val);
col->c_ctx = ctx;
col->c_parent = NULL;
if (type == JSON_DICT) {
col->u.c_dict = cbtree_create(get_key_data_cb, NULL, val, ctx->pool);
if (!col->u.c_dict)
return err_null(ctx, "No memory");
} else {
memset(&col->u.c_list, 0, sizeof(col->u.c_list));
}
}
/* independent JsonValue? */
if (!attach) {
set_next(val, UNATTACHED);
return val;
}
/* attach to parent */
if (col)
col->c_parent = ctx->parent;
/* attach to previous value */
if (has_type(ctx->parent, JSON_DICT)) {
if (ctx->cur_key) {
set_next(ctx->cur_key, val);
ctx->cur_key = NULL;
} else {
ctx->cur_key = val;
}
} else if (has_type(ctx->parent, JSON_LIST)) {
real_list_append(ctx->parent, val);
} else if (!ctx->top) {
ctx->top = val;
} else {
return err_null(ctx, "Only one top element is allowed");
}
return val;
}
static void prepare_array(struct JsonValue *list)
{
struct JsonContainer *c;
struct JsonValue *val;
struct ValueList *vlist;
size_t i;
vlist = get_list_vlist(list);
if (vlist->array)
return;
c = get_container(list);
vlist->array = cx_alloc(c->c_ctx->pool, list->u.v_size * sizeof(struct JsonValue *));
if (!vlist->array)
return;
val = vlist->first;
for (i = 0; i < list->u.v_size && val; i++) {
vlist->array[i] = val;
val = get_next(val);
}
}
/*
* Parsing code starts
*/
/* create and change context */
static bool open_container(struct JsonContext *ctx, enum JsonValueType type, unsigned int extra)
{
struct JsonValue *jv;
jv = mk_value(ctx, type, extra, true);
if (!jv)
return false;
ctx->parent = jv;
ctx->cur_key = NULL;
return true;
}
/* close and change context */
static enum ParseState close_container(struct JsonContext *ctx, enum ParseState state)
{
struct JsonContainer *c;
if (state != S_PARENT)
return (int)err_false(ctx, "close_container bug");
c = get_container(ctx->parent);
if (!c)
return (int)err_false(ctx, "invalid parent");
ctx->parent = c->c_parent;
ctx->cur_key = NULL;
if (has_type(ctx->parent, JSON_DICT)) {
return S_DICT_COMMA_OR_CLOSE;
} else if (has_type(ctx->parent, JSON_LIST)) {
return S_LIST_COMMA_OR_CLOSE;
}
return S_DONE;
}
/* parse 4-char token */
static bool parse_char4(struct JsonContext *ctx, const char **src_p, const char *end,
uint32_t t_exp, enum JsonValueType type, bool val)
{
const char *src;
uint32_t t_got;
struct JsonValue *jv;
src = *src_p;
if (src + 4 > end)
return err_false(ctx, "Unexpected end of token");
memcpy(&t_got, src, 4);
if (t_exp != t_got)
return err_false(ctx, "Invalid token");
jv = mk_value(ctx, type, 0, true);
if (!jv)
return false;
jv->u.v_bool = val;
*src_p += 4;
return true;
}
/* parse int or float */
static bool parse_number(struct JsonContext *ctx, const char **src_p, const char *end)
{
const char *start, *src;
enum JsonValueType type = JSON_INT;
char *tokend = NULL;
char buf[NUMBER_BUF];
size_t len;
struct JsonValue *jv;
double v_float = 0;
int64_t v_int = 0;
/* scan & copy */
start = src = *src_p;
for (; src < end; src++) {
if (*src >= '0' && *src <= '9') {
} else if (*src == '+' || *src == '-') {
} else if (*src == '.' || *src == 'e' || *src == 'E') {
type = JSON_FLOAT;
} else {
break;
}
}
len = src - start;
if (len >= NUMBER_BUF)
goto failed;
memcpy(buf, start, len);
buf[len] = 0;
/* now parse */
errno = 0;
tokend = buf;
if (type == JSON_FLOAT) {
v_float = strtod_dot(buf, &tokend);
if (*tokend != 0 || errno || !isfinite(v_float))
goto failed;
} else if (len < 8) {
v_int = strtol(buf, &tokend, 10);
if (*tokend != 0 || errno)
goto failed;
} else {
v_int = strtoll(buf, &tokend, 10);
if (*tokend != 0 || errno || v_int < JSON_MININT || v_int > JSON_MAXINT)
goto failed;
}
/* create value struct */
jv = mk_value(ctx, type, 0, true);
if (!jv)
return false;
if (type == JSON_FLOAT) {
jv->u.v_float = v_float;
} else {
jv->u.v_int = v_int;
}
*src_p = src;
return true;
failed:
if (!errno)
errno = EINVAL;
return err_false(ctx, "Number parse failed");
}
/*
* String parsing
*/
static int parse_hex(const char *s, const char *end)
{
int v = 0, c, i, x;
if (s + 4 > end)
return -1;
for (i = 0; i < 4; i++) {
c = s[i];
if (c >= '0' && c <= '9') {
x = c - '0';
} else if (c >= 'a' && c <= 'f') {
x = c - 'a' + 10;
} else if (c >= 'A' && c <= 'F') {
x = c - 'A' + 10;
} else {
return -1;
}
v = (v << 4) | x;
}
return v;
}
/* process \uXXXX escapes, merge surrogates */
static bool parse_uescape(struct JsonContext *ctx, char **dst_p, char *dstend,
const char **src_p, const char *end)
{
int c, c2;
const char *src = *src_p;
c = parse_hex(src, end);
if (c <= 0)
return err_false(ctx, "Invalid hex escape");
src += 4;
if (c >= 0xD800 && c <= 0xDFFF) {
/* first surrogate */
if (c >= 0xDC00)
return err_false(ctx, "Invalid UTF16 escape");
if (src + 6 > end)
return err_false(ctx, "Invalid UTF16 escape");
/* second surrogate */
if (src[0] != '\\' || src[1] != 'u')
return err_false(ctx, "Invalid UTF16 escape");
c2 = parse_hex(src + 2, end);
if (c2 < 0xDC00 || c2 > 0xDFFF)
return err_false(ctx, "Invalid UTF16 escape");
c = 0x10000 + ((c & 0x3FF) << 10) + (c2 & 0x3FF);
src += 6;
}
/* now write char */
if (!utf8_put_char(c, dst_p, dstend))
return err_false(ctx, "Invalid UTF16 escape");
*src_p = src;
return true;
}
#define meta_string(c) (((c) == '"' || (c) == '\\' || (c) == '\0' || \
(c) == '\n' || ((c) & 0x80) != 0) ? 1 : 0)
static const uint8_t string_examine_chars[] = INTMAP256_CONST(meta_string);
/* look for string end, validate contents */
static bool scan_string(struct JsonContext *ctx, const char *src, const char *end,
const char **str_end_p, bool *hasesc_p, int64_t *nlines_p)
{
bool hasesc = false;
int64_t lines = 0;
unsigned int n;
bool check_utf8 = true;
if (ctx->options & JSON_PARSE_IGNORE_ENCODING)
check_utf8 = false;
while (src < end) {
if (!string_examine_chars[(uint8_t)*src]) {
src++;
} else if (*src == '"') {
/* string end */
*hasesc_p = hasesc;
*str_end_p = src;
*nlines_p = lines;
return true;
} else if (*src == '\\') {
hasesc = true;
src++;
if (src < end && (*src == '\\' || *src == '"'))
src++;
} else if (*src & 0x80) {
n = utf8_validate_seq(src, end);
if (n) {
src += n;
} else if (check_utf8) {
goto badutf;
} else {
src++;
}
} else if (*src == '\n') {
lines++;
src++;
} else {
goto badutf;
}
}
return err_false(ctx, "Unexpected end of string");
badutf:
return err_false(ctx, "Invalid UTF8 sequence");
}
/* string boundaries are known, copy and unescape */
static char *process_escapes(struct JsonContext *ctx,
const char *src, const char *end,
char *dst, char *dstend)
{
const char *esc;
/* process escapes */
while (src < end) {
esc = memchr(src, '\\', end - src);
if (!esc) {
dst = plain_copy(dst, src, end);
break;
}
dst = plain_copy(dst, src, esc);
src = esc + 1;
switch (*src++) {
case '"': *dst++ = '"'; break;
case '\\': *dst++ = '\\'; break;
case '/': *dst++ = '/'; break;
case 'b': *dst++ = '\b'; break;
case 'f': *dst++ = '\f'; break;
case 'n': *dst++ = '\n'; break;
case 'r': *dst++ = '\r'; break;
case 't': *dst++ = '\t'; break;
case 'u':
if (!parse_uescape(ctx, &dst, dstend, &src, end))
return NULL;
break;
default:
return err_null(ctx, "Invalid escape code");
}
}
return dst;
}
/* 2-phase string processing */
static bool parse_string(struct JsonContext *ctx, const char **src_p, const char *end)
{
const char *start, *strend = NULL;
bool hasesc = false;
char *dst, *dstend;
size_t len;
struct JsonValue *jv;
int64_t lines = 0;
/* find string boundaries, validate */
start = *src_p;
if (!scan_string(ctx, start, end, &strend, &hasesc, &lines))
return false;
/* create value struct */
len = strend - start;
jv = mk_value(ctx, JSON_STRING, len + 1, true);
if (!jv)
return false;
dst = get_cstring(jv);
dstend = dst + len;
/* copy & process escapes */
if (hasesc) {
dst = process_escapes(ctx, start, strend, dst, dstend);
if (!dst)
return false;
} else {
dst = plain_copy(dst, start, strend);
}
*dst = '\0';
jv->u.v_size = dst - get_cstring(jv);
ctx->linenr += lines;
*src_p = strend + 1;
return true;
}
/*
* Helpers for relaxed parsing
*/
static bool skip_comment(struct JsonContext *ctx, const char **src_p, const char *end)
{
const char *s, *start;
char c;
size_t lnr;
s = start = *src_p;
if (s >= end)
return false;
c = *s++;
if (c == '/') {
s = memchr(s, '\n', end - s);
if (s) {
ctx->linenr++;
*src_p = s + 1;
} else {
*src_p = end;
}
return true;
} else if (c == '*') {
for (lnr = 0; s + 2 <= end; s++) {
if (s[0] == '*' && s[1] == '/') {
ctx->linenr += lnr;
*src_p = s + 2;
return true;
} else if (s[0] == '\n') {
lnr++;
}
}
}
return false;
}
static bool skip_extra_comma(struct JsonContext *ctx, const char **src_p, const char *end, enum ParseState state)
{
bool skip = false;
const char *src = *src_p;
while (src < end && isspace(*src)) {
if (*src == '\n')
ctx->linenr++;
src++;
}
if (src < end) {
if (*src == '}') {
if (state == S_DICT_COMMA_OR_CLOSE || state == S_DICT_KEY_OR_CLOSE)
skip = true;
} else if (*src == ']') {
if (state == S_LIST_COMMA_OR_CLOSE || state == S_LIST_VALUE_OR_CLOSE)
skip = true;
}
}
*src_p = src;
return skip;
}
/*
* Main parser
*/
/* oldstate + token -> newstate */
static const unsigned char STATE_STEPS[MAX_STATES][MAX_TOKENS] = {
[S_INITIAL_VALUE] = {
[T_OPEN_LIST] = S_LIST_VALUE_OR_CLOSE,
[T_OPEN_DICT] = S_DICT_KEY_OR_CLOSE,
[T_STRING] = S_DONE,
[T_OTHER] = S_DONE },
[S_LIST_VALUE] = {
[T_OPEN_LIST] = S_LIST_VALUE_OR_CLOSE,
[T_OPEN_DICT] = S_DICT_KEY_OR_CLOSE,
[T_STRING] = S_LIST_COMMA_OR_CLOSE,
[T_OTHER] = S_LIST_COMMA_OR_CLOSE },
[S_LIST_VALUE_OR_CLOSE] = {
[T_OPEN_LIST] = S_LIST_VALUE_OR_CLOSE,
[T_OPEN_DICT] = S_DICT_KEY_OR_CLOSE,
[T_STRING] = S_LIST_COMMA_OR_CLOSE,
[T_OTHER] = S_LIST_COMMA_OR_CLOSE,
[T_CLOSE_LIST] = S_PARENT },
[S_LIST_COMMA_OR_CLOSE] = {
[T_COMMA] = S_LIST_VALUE,
[T_CLOSE_LIST] = S_PARENT },
[S_DICT_KEY] = {
[T_STRING] = S_DICT_COLON },
[S_DICT_KEY_OR_CLOSE] = {
[T_STRING] = S_DICT_COLON,
[T_CLOSE_DICT] = S_PARENT },
[S_DICT_COLON] = {
[T_COLON] = S_DICT_VALUE },
[S_DICT_VALUE] = {
[T_OPEN_LIST] = S_LIST_VALUE_OR_CLOSE,
[T_OPEN_DICT] = S_DICT_KEY_OR_CLOSE,
[T_STRING] = S_DICT_COMMA_OR_CLOSE,
[T_OTHER] = S_DICT_COMMA_OR_CLOSE },
[S_DICT_COMMA_OR_CLOSE] = {
[T_COMMA] = S_DICT_KEY,
[T_CLOSE_DICT] = S_PARENT },
};
#define MAPSTATE(state, tok) do { \
int newstate = STATE_STEPS[state][tok]; \
if (!newstate) \
return err_false(ctx, "Unexpected symbol: '%c'", c); \
state = newstate; \
} while (0)
/* actual parser */
static bool parse_tokens(struct JsonContext *ctx, const char *src, const char *end)
{
char c;
enum ParseState state = S_INITIAL_VALUE;
bool relaxed = ctx->options & JSON_PARSE_RELAXED;
while (src < end) {
c = *src++;
switch (c) {
case '\n':
ctx->linenr++;
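/* fallthrough */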
case ' ': case '\t': case '\r': case '\f': case '\v':
/* common case - many spaces */
while (src < end && *src == ' ') src++;
break;
case '"':
MAPSTATE(state, T_STRING);
if (!parse_string(ctx, &src, end))
goto failed;
break;
case 'n':
MAPSTATE(state, T_OTHER);
src--;
if (!parse_char4(ctx, &src, end, C_NULL, JSON_NULL, 0))
goto failed;
continue;
case 't':
MAPSTATE(state, T_OTHER);
src--;
if (!parse_char4(ctx, &src, end, C_TRUE, JSON_BOOL, 1))
goto failed;
break;
case 'f':
MAPSTATE(state, T_OTHER);
if (!parse_char4(ctx, &src, end, C_ALSE, JSON_BOOL, 0))
goto failed;
break;
case '-':
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
MAPSTATE(state, T_OTHER);
src--;
if (!parse_number(ctx, &src, end))
goto failed;
break;
case '[':
MAPSTATE(state, T_OPEN_LIST);
if (!open_container(ctx, JSON_LIST, LIST_EXTRA))
goto failed;
break;
case '{':
MAPSTATE(state, T_OPEN_DICT);
if (!open_container(ctx, JSON_DICT, DICT_EXTRA))
goto failed;
break;
case ']':
MAPSTATE(state, T_CLOSE_LIST);
state = close_container(ctx, state);
if (!state)
goto failed;
break;
case '}':
MAPSTATE(state, T_CLOSE_DICT);
state = close_container(ctx, state);
if (!state)
goto failed;
break;
case ':':
MAPSTATE(state, T_COLON);
if (!real_dict_add_key(ctx, ctx->parent, ctx->cur_key))
goto failed;
break;
case ',':
if (relaxed && skip_extra_comma(ctx, &src, end, state))
continue;
MAPSTATE(state, T_COMMA);
break;
case '/':
if (relaxed && skip_comment(ctx, &src, end))
continue;
/* fallthrough */
default:
return err_false(ctx, "Invalid symbol: '%c'", c);
}
}
if (state != S_DONE)
return err_false(ctx, "Container still open");
return true;
failed:
return false;
}
/* parser public api */
struct JsonValue *json_parse(struct JsonContext *ctx, const char *json, size_t len)
{
const char *end = json + len;
/* reset parser */
ctx->linenr = 1;
ctx->parent = NULL;
ctx->cur_key = NULL;
ctx->lasterr = NULL;
ctx->top = NULL;
if (!parse_tokens(ctx, json, end))
return NULL;
return ctx->top;
}
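/*
 * Example use of the parse API (an informal sketch, not part of the
 * original file; it assumes that passing NULL as the CxMem argument of
 * json_new_context() selects the default allocator).
 */
static bool example_parse(const char *js)
{
	struct JsonContext *ctx;
	struct JsonValue *obj;
	int64_t count;
	bool ok = false;

	ctx = json_new_context(NULL, 4096);
	if (!ctx)
		return false;

	obj = json_parse(ctx, js, strlen(js));
	if (obj && json_dict_get_int(obj, "count", &count)) {
		/* "count" exists and is an int */
		ok = true;
	}
	/* on failure, json_strerror(ctx) describes the error and line number */

	json_free_context(ctx);
	return ok;
}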
/*
* Render value as JSON string.
*/
static bool render_null(struct RenderState *rs, struct JsonValue *jv)
{
return mbuf_write(rs->dst, "null", 4);
}
static bool render_bool(struct RenderState *rs, struct JsonValue *jv)
{
if (jv->u.v_bool)
return mbuf_write(rs->dst, "true", 4);
return mbuf_write(rs->dst, "false", 5);
}
static bool render_int(struct RenderState *rs, struct JsonValue *jv)
{
char buf[NUMBER_BUF];
int len;
len = snprintf(buf, sizeof(buf), "%" PRIi64, jv->u.v_int);
if (len < 0 || len >= NUMBER_BUF)
return false;
return mbuf_write(rs->dst, buf, len);
}
static bool render_float(struct RenderState *rs, struct JsonValue *jv)
{
char buf[NUMBER_BUF + 2];
int len;
len = dtostr_dot(buf, NUMBER_BUF, jv->u.v_float);
if (len < 0 || len >= NUMBER_BUF)
return false;
if (!memchr(buf, '.', len) && !memchr(buf, 'e', len)) {
buf[len++] = '.';
buf[len++] = '0';
}
return mbuf_write(rs->dst, buf, len);
}
static bool escape_char(struct MBuf *dst, unsigned int c)
{
char ec;
char buf[10];
/* start escape */
if (!mbuf_write_byte(dst, '\\'))
return false;
/* escape same char */
if (c == '"' || c == '\\')
return mbuf_write_byte(dst, c);
/* low-ascii mess */
switch (c) {
case '\b': ec = 'b'; break;
case '\f': ec = 'f'; break;
case '\n': ec = 'n'; break;
case '\r': ec = 'r'; break;
case '\t': ec = 't'; break;
default:
snprintf(buf, sizeof(buf), "u%04x", c);
return mbuf_write(dst, buf, 5);
}
return mbuf_write_byte(dst, ec);
}
static bool render_string(struct RenderState *rs, struct JsonValue *jv)
{
const char *s, *last;
const char *val = get_cstring(jv);
size_t len = jv->u.v_size;
const char *end = val + len;
unsigned int c;
/* start quote */
if (!mbuf_write_byte(rs->dst, '"'))
return false;
for (s = last = val; s < end; s++) {
if (*s == '"' || *s == '\\' || (unsigned char)*s < 0x20 ||
/* Valid in JSON, but not in JS:
\u2028 - Line separator
\u2029 - Paragraph separator */
((unsigned char)s[0] == 0xE2 && (unsigned char)s[1] == 0x80 &&
((unsigned char)s[2] == 0xA8 || (unsigned char)s[2] == 0xA9)))
{
/* flush */
if (last < s) {
if (!mbuf_write(rs->dst, last, s - last))
return false;
}
if ((unsigned char)s[0] == 0xE2) {
c = 0x2028 + ((unsigned char)s[2] - 0xA8);
last = s + 3;
} else {
c = (unsigned char)*s;
last = s + 1;
}
/* output escaped char */
if (!escape_char(rs->dst, c))
return false;
}
}
/* flush */
if (last < s) {
if (!mbuf_write(rs->dst, last, s - last))
return false;
}
/* final quote */
if (!mbuf_write_byte(rs->dst, '"'))
return false;
return true;
}
/*
* Render complex values
*/
struct ElemWriterState {
struct RenderState *rs;
char sep;
};
static bool list_elem_writer(void *arg, struct JsonValue *elem)
{
struct ElemWriterState *state = arg;
if (state->sep && !mbuf_write_byte(state->rs->dst, state->sep))
return false;
state->sep = ',';
return render_any(state->rs, elem);
}
static bool render_list(struct RenderState *rs, struct JsonValue *list)
{
struct ElemWriterState state;
state.rs = rs;
state.sep = 0;
if (!mbuf_write_byte(rs->dst, '['))
return false;
if (!json_list_iter(list, list_elem_writer, &state))
return false;
if (!mbuf_write_byte(rs->dst, ']'))
return false;
return true;
}
static bool dict_elem_writer(void *ctx, struct JsonValue *key, struct JsonValue *val)
{
struct ElemWriterState *state = ctx;
if (state->sep && !mbuf_write_byte(state->rs->dst, state->sep))
return false;
state->sep = ',';
if (!render_any(state->rs, key))
return false;
if (!mbuf_write_byte(state->rs->dst, ':'))
return false;
return render_any(state->rs, val);
}
static bool render_dict(struct RenderState *rs, struct JsonValue *dict)
{
struct ElemWriterState state;
state.rs = rs;
state.sep = 0;
if (!mbuf_write_byte(rs->dst, '{'))
return false;
if (!json_dict_iter(dict, dict_elem_writer, &state))
return false;
if (!mbuf_write_byte(rs->dst, '}'))
return false;
return true;
}
static bool render_invalid(struct RenderState *rs, struct JsonValue *jv)
{
return false;
}
/*
* Public api
*/
static bool render_any(struct RenderState *rs, struct JsonValue *jv)
{
static const render_func_t rfunc_map[] = {
render_invalid, render_null, render_bool, render_int,
render_float, render_string, render_list, render_dict,
};
return rfunc_map[get_type(jv)](rs, jv);
}
bool json_render(struct MBuf *dst, struct JsonValue *jv)
{
struct RenderState rs;
rs.dst = dst;
rs.options = 0;
return render_any(&rs, jv);
}
/*
* Examine single value
*/
enum JsonValueType json_value_type(struct JsonValue *jv)
{
return get_type(jv);
}
size_t json_value_size(struct JsonValue *jv)
{
if (has_type(jv, JSON_STRING) ||
has_type(jv, JSON_LIST) ||
has_type(jv, JSON_DICT))
return jv->u.v_size;
return 0;
}
bool json_value_as_bool(struct JsonValue *jv, bool *dst_p)
{
if (!has_type(jv, JSON_BOOL))
return false;
*dst_p = jv->u.v_bool;
return true;
}
bool json_value_as_int(struct JsonValue *jv, int64_t *dst_p)
{
if (!has_type(jv, JSON_INT))
return false;
*dst_p = jv->u.v_int;
return true;
}
bool json_value_as_float(struct JsonValue *jv, double *dst_p)
{
if (!has_type(jv, JSON_FLOAT)) {
if (has_type(jv, JSON_INT)) {
*dst_p = jv->u.v_int;
return true;
}
return false;
}
*dst_p = jv->u.v_float;
return true;
}
bool json_value_as_string(struct JsonValue *jv, const char **dst_p, size_t *size_p)
{
if (!has_type(jv, JSON_STRING))
return false;
*dst_p = get_cstring(jv);
if (size_p)
*size_p = jv->u.v_size;
return true;
}
/*
* Load value from dict.
*/
static int dict_getter(struct JsonValue *dict,
const char *key, unsigned int klen,
struct JsonValue **val_p,
enum JsonValueType req_type, bool req_value)
{
struct JsonValue *val, *kjv;
struct CBTree *tree;
tree = get_dict_tree(dict);
if (!tree)
return false;
kjv = cbtree_lookup(tree, key, klen);
if (!kjv) {
if (req_value)
return false;
*val_p = NULL;
return true;
}
val = get_next(kjv);
if (!req_value && json_value_is_null(val)) {
*val_p = NULL;
return true;
}
if (!has_type(val, req_type))
return false;
*val_p = val;
return true;
}
bool json_dict_get_value(struct JsonValue *dict, const char *key, struct JsonValue **val_p)
{
struct CBTree *tree;
struct JsonValue *kjv;
size_t klen;
tree = get_dict_tree(dict);
if (!tree)
return false;
klen = strlen(key);
kjv = cbtree_lookup(tree, key, klen);
if (!kjv)
return false;
*val_p = get_next(kjv);
return true;
}
bool json_dict_is_null(struct JsonValue *dict, const char *key)
{
struct JsonValue *val;
if (!json_dict_get_value(dict, key, &val))
return true;
return has_type(val, JSON_NULL);
}
bool json_dict_get_bool(struct JsonValue *dict, const char *key, bool *dst_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_BOOL, true))
return false;
return json_value_as_bool(val, dst_p);
}
bool json_dict_get_int(struct JsonValue *dict, const char *key, int64_t *dst_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_INT, true))
return false;
return json_value_as_int(val, dst_p);
}
bool json_dict_get_float(struct JsonValue *dict, const char *key, double *dst_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_FLOAT, true))
return false;
return json_value_as_float(val, dst_p);
}
bool json_dict_get_string(struct JsonValue *dict, const char *key, const char **dst_p, size_t *len_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_STRING, true))
return false;
return json_value_as_string(val, dst_p, len_p);
}
bool json_dict_get_list(struct JsonValue *dict, const char *key, struct JsonValue **dst_p)
{
return dict_getter(dict, key, strlen(key), dst_p, JSON_LIST, true);
}
bool json_dict_get_dict(struct JsonValue *dict, const char *key, struct JsonValue **dst_p)
{
return dict_getter(dict, key, strlen(key), dst_p, JSON_DICT, true);
}
/*
* Load optional dict element.
*/
bool json_dict_get_opt_bool(struct JsonValue *dict, const char *key, bool *dst_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_BOOL, false))
return false;
return !val || json_value_as_bool(val, dst_p);
}
bool json_dict_get_opt_int(struct JsonValue *dict, const char *key, int64_t *dst_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_INT, false))
return false;
return !val || json_value_as_int(val, dst_p);
}
bool json_dict_get_opt_float(struct JsonValue *dict, const char *key, double *dst_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_FLOAT, false))
return false;
return !val || json_value_as_float(val, dst_p);
}
bool json_dict_get_opt_string(struct JsonValue *dict, const char *key, const char **dst_p, size_t *len_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_STRING, false))
return false;
return !val || json_value_as_string(val, dst_p, len_p);
}
bool json_dict_get_opt_list(struct JsonValue *dict, const char *key, struct JsonValue **dst_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_LIST, false))
return false;
if (val)
*dst_p = val;
return true;
}
bool json_dict_get_opt_dict(struct JsonValue *dict, const char *key, struct JsonValue **dst_p)
{
struct JsonValue *val;
if (!dict_getter(dict, key, strlen(key), &val, JSON_DICT, false))
return false;
if (val)
*dst_p = val;
return true;
}
/*
* Load value from list.
*/
bool json_list_get_value(struct JsonValue *list, size_t index, struct JsonValue **val_p)
{
struct JsonValue *val;
struct ValueList *vlist;
size_t i;
vlist = get_list_vlist(list);
if (!vlist)
return false;
if (index >= list->u.v_size)
return false;
if (!vlist->array && list->u.v_size > 10)
prepare_array(list);
/* direct fetch */
if (vlist->array) {
*val_p = vlist->array[index];
return true;
}
/* walk */
val = vlist->first;
for (i = 0; val; i++) {
if (i == index) {
*val_p = val;
return true;
}
val = get_next(val);
}
return false;
}
bool json_list_is_null(struct JsonValue *list, size_t n)
{
struct JsonValue *jv;
if (!json_list_get_value(list, n, &jv))
return true;
return has_type(jv, JSON_NULL);
}
bool json_list_get_bool(struct JsonValue *list, size_t index, bool *val_p)
{
struct JsonValue *jv;
if (!json_list_get_value(list, index, &jv))
return false;
return json_value_as_bool(jv, val_p);
}
bool json_list_get_int(struct JsonValue *list, size_t index, int64_t *val_p)
{
struct JsonValue *jv;
if (!json_list_get_value(list, index, &jv))
return false;
return json_value_as_int(jv, val_p);
}
bool json_list_get_float(struct JsonValue *list, size_t index, double *val_p)
{
struct JsonValue *jv;
if (!json_list_get_value(list, index, &jv))
return false;
return json_value_as_float(jv, val_p);
}
bool json_list_get_string(struct JsonValue *list, size_t index, const char **val_p, size_t *len_p)
{
struct JsonValue *jv;
if (!json_list_get_value(list, index, &jv))
return false;
return json_value_as_string(jv, val_p, len_p);
}
bool json_list_get_list(struct JsonValue *list, size_t index, struct JsonValue **val_p)
{
struct JsonValue *jv;
if (!json_list_get_value(list, index, &jv))
return false;
if (!has_type(jv, JSON_LIST))
return false;
*val_p = jv;
return true;
}
bool json_list_get_dict(struct JsonValue *list, size_t index, struct JsonValue **val_p)
{
struct JsonValue *jv;
if (!json_list_get_value(list, index, &jv))
return false;
if (!has_type(jv, JSON_DICT))
return false;
*val_p = jv;
return true;
}
/*
* Iterate over list and dict values.
*/
struct DictIterState {
json_dict_iter_callback_f cb_func;
void *cb_arg;
};
static bool dict_iter_helper(void *arg, void *jv)
{
struct DictIterState *state = arg;
struct JsonValue *key = jv;
struct JsonValue *val = get_next(key);
return state->cb_func(state->cb_arg, key, val);
}
bool json_dict_iter(struct JsonValue *dict, json_dict_iter_callback_f cb_func, void *cb_arg)
{
struct DictIterState state;
struct CBTree *tree;
tree = get_dict_tree(dict);
if (!tree)
return false;
state.cb_func = cb_func;
state.cb_arg = cb_arg;
return cbtree_walk(tree, dict_iter_helper, &state);
}
bool json_list_iter(struct JsonValue *list, json_list_iter_callback_f cb_func, void *cb_arg)
{
struct JsonValue *elem;
struct ValueList *vlist;
vlist = get_list_vlist(list);
if (!vlist)
return false;
for (elem = vlist->first; elem; elem = get_next(elem)) {
if (!cb_func(cb_arg, elem))
return false;
}
return true;
}
/*
* Create new values.
*/
struct JsonValue *json_new_null(struct JsonContext *ctx)
{
return mk_value(ctx, JSON_NULL, 0, false);
}
struct JsonValue *json_new_bool(struct JsonContext *ctx, bool val)
{
struct JsonValue *jv;
jv = mk_value(ctx, JSON_BOOL, 0, false);
if (jv)
jv->u.v_bool = val;
return jv;
}
struct JsonValue *json_new_int(struct JsonContext *ctx, int64_t val)
{
struct JsonValue *jv;
if (val < JSON_MININT || val > JSON_MAXINT) {
errno = ERANGE;
return NULL;
}
jv = mk_value(ctx, JSON_INT, 0, false);
if (jv)
jv->u.v_int = val;
return jv;
}
struct JsonValue *json_new_float(struct JsonContext *ctx, double val)
{
struct JsonValue *jv;
/* check if value survives JSON roundtrip */
if (!isfinite(val))
return NULL;
jv = mk_value(ctx, JSON_FLOAT, 0, false);
if (jv)
jv->u.v_float = val;
return jv;
}
struct JsonValue *json_new_string(struct JsonContext *ctx, const char *val)
{
struct JsonValue *jv;
size_t len;
len = strlen(val);
if (!utf8_validate_string(val, val + len))
return NULL;
jv = mk_value(ctx, JSON_STRING, len + 1, false);
if (jv) {
memcpy(get_cstring(jv), val, len + 1);
jv->u.v_size = len;
}
return jv;
}
struct JsonValue *json_new_list(struct JsonContext *ctx)
{
return mk_value(ctx, JSON_LIST, LIST_EXTRA, false);
}
struct JsonValue *json_new_dict(struct JsonContext *ctx)
{
return mk_value(ctx, JSON_DICT, DICT_EXTRA, false);
}
/*
* Add to containers
*/
bool json_list_append(struct JsonValue *list, struct JsonValue *val)
{
if (!val)
return false;
if (!has_type(list, JSON_LIST))
return false;
if (!is_unattached(val))
return false;
set_parent(val, list);
set_next(val, NULL);
real_list_append(list, val);
return true;
}
bool json_list_append_null(struct JsonValue *list)
{
struct JsonValue *v;
v = json_new_null(get_context(list));
return json_list_append(list, v);
}
bool json_list_append_bool(struct JsonValue *list, bool val)
{
struct JsonValue *v;
v = json_new_bool(get_context(list), val);
return json_list_append(list, v);
}
bool json_list_append_int(struct JsonValue *list, int64_t val)
{
struct JsonValue *v;
v = json_new_int(get_context(list), val);
return json_list_append(list, v);
}
bool json_list_append_float(struct JsonValue *list, double val)
{
struct JsonValue *v;
v = json_new_float(get_context(list), val);
return json_list_append(list, v);
}
bool json_list_append_string(struct JsonValue *list, const char *val)
{
struct JsonValue *v;
v = json_new_string(get_context(list), val);
return json_list_append(list, v);
}
bool json_dict_put(struct JsonValue *dict, const char *key, struct JsonValue *val)
{
struct JsonValue *kjv;
struct JsonContainer *c;
if (!key || !val)
return false;
if (!has_type(dict, JSON_DICT))
return false;
if (!is_unattached(val))
return false;
c = get_container(dict);
kjv = json_new_string(c->c_ctx, key);
if (!kjv)
return false;
if (!real_dict_add_key(c->c_ctx, dict, kjv))
return false;
set_next(kjv, val);
set_next(val, NULL);
set_parent(val, dict);
return true;
}
bool json_dict_put_null(struct JsonValue *dict, const char *key)
{
struct JsonValue *v;
v = json_new_null(get_context(dict));
return json_dict_put(dict, key, v);
}
bool json_dict_put_bool(struct JsonValue *dict, const char *key, bool val)
{
struct JsonValue *v;
v = json_new_bool(get_context(dict), val);
return json_dict_put(dict, key, v);
}
bool json_dict_put_int(struct JsonValue *dict, const char *key, int64_t val)
{
struct JsonValue *v;
v = json_new_int(get_context(dict), val);
return json_dict_put(dict, key, v);
}
bool json_dict_put_float(struct JsonValue *dict, const char *key, double val)
{
struct JsonValue *v;
v = json_new_float(get_context(dict), val);
return json_dict_put(dict, key, v);
}
bool json_dict_put_string(struct JsonValue *dict, const char *key, const char *val)
{
struct JsonValue *v;
v = json_new_string(get_context(dict), val);
return json_dict_put(dict, key, v);
}
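/*
 * Example of building a document programmatically (an informal sketch,
 * not part of the original file).
 */
static struct JsonValue *example_build(struct JsonContext *ctx)
{
	struct JsonValue *dict, *list;

	dict = json_new_dict(ctx);
	list = json_new_list(ctx);
	if (!dict || !list)
		return NULL;

	/* scalar members */
	if (!json_dict_put_string(dict, "name", "test"))
		return NULL;
	if (!json_dict_put_int(dict, "count", 3))
		return NULL;

	/* list member: "values": [1, 2, 3] */
	if (!json_list_append_int(list, 1) ||
	    !json_list_append_int(list, 2) ||
	    !json_list_append_int(list, 3))
		return NULL;
	if (!json_dict_put(dict, "values", list))
		return NULL;

	return dict;
}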
/*
* Main context management
*/
struct JsonContext *json_new_context(const void *cx, size_t initial_mem)
{
struct JsonContext *ctx;
CxMem *pool;
pool = cx_new_pool(cx, initial_mem, 8);
if (!pool)
return NULL;
ctx = cx_alloc0(pool, sizeof(*ctx));
if (!ctx) {
cx_destroy(pool);
return NULL;
}
ctx->pool = pool;
return ctx;
}
void json_free_context(struct JsonContext *ctx)
{
if (ctx) {
CxMem *pool = ctx->pool;
memset(ctx, 0, sizeof(*ctx));
cx_destroy(pool);
}
}
const char *json_strerror(struct JsonContext *ctx)
{
return ctx->lasterr;
}
void json_set_options(struct JsonContext *ctx, unsigned int options)
{
ctx->options = options;
}
| isc |
pikachumetal/cursoangular05 | app/loginModule/services/localstorage.js | 515 | angular.module('appTesting').service("LoginLocalStorage", function () {
"use strict";
var STORE_NAME = "login";
var setUser = function setUser(user) {
localStorage.setItem(STORE_NAME, JSON.stringify(user));
}
var getUser = function getUser() {
var storedTasks = localStorage.getItem(STORE_NAME);
if (storedTasks) {
return JSON.parse(storedTasks);
}
return {};
}
return {
setUser: setUser,
getUser: getUser
}
}); | isc |
arssivka/naomech | xmlrpc-c/src/xmlrpc_build.c | 12744 | /* Copyright information is at end of file */
#include "xmlrpc_config.h"
#include <stddef.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include "stdargx.h"
#include "xmlrpc-c/base.h"
#include "xmlrpc-c/base_int.h"
#include "xmlrpc-c/string_int.h"
static void
getString(xmlrpc_env *const envP,
const char **const formatP,
va_listx *const argsP,
xmlrpc_value **const valPP) {
const char *str;
size_t len;
str = (const char *) va_arg(argsP->v, char*);
if (*(*formatP) == '#') {
++(*formatP);
len = (size_t) va_arg(argsP->v, size_t);
} else
len = strlen(str);
*valPP = xmlrpc_string_new_lp(envP, len, str);
}
static void
getWideString(xmlrpc_env *const envP ATTR_UNUSED,
const char **const formatP ATTR_UNUSED,
va_listx *const argsP ATTR_UNUSED,
xmlrpc_value **const valPP ATTR_UNUSED) {
#if HAVE_UNICODE_WCHAR
wchar_t *wcs;
size_t len;
wcs = (wchar_t*) va_arg(argsP->v, wchar_t*);
if (**formatP == '#') {
(*formatP)++;
len = (size_t) va_arg(argsP->v, size_t);
} else
len = wcslen(wcs);
*valPP = xmlrpc_string_w_new_lp(envP, len, wcs);
#endif /* HAVE_UNICODE_WCHAR */
}
static void
getBase64(xmlrpc_env *const envP,
va_listx *const argsP,
xmlrpc_value **const valPP) {
unsigned char *value;
size_t length;
value = (unsigned char *) va_arg(argsP->v, unsigned char*);
length = (size_t) va_arg(argsP->v, size_t);
*valPP = xmlrpc_base64_new(envP, length, value);
}
static void
getValue(xmlrpc_env *const envP,
const char **const format,
va_listx *const argsP,
xmlrpc_value **const valPP);
static void
getArray(xmlrpc_env *const envP,
const char **const formatP,
char const delimiter,
va_listx *const argsP,
xmlrpc_value **const arrayPP) {
xmlrpc_value *arrayP;
arrayP = xmlrpc_array_new(envP);
/* Add items to the array until we hit our delimiter. */
while (**formatP != delimiter && !envP->fault_occurred) {
xmlrpc_value *itemP;
if (**formatP == '\0')
xmlrpc_env_set_fault(
envP, XMLRPC_INTERNAL_ERROR,
"format string ended before closing ')'.");
else {
getValue(envP, formatP, argsP, &itemP);
if (!envP->fault_occurred) {
xmlrpc_array_append_item(envP, arrayP, itemP);
xmlrpc_DECREF(itemP);
}
}
}
if (envP->fault_occurred)
xmlrpc_DECREF(arrayP);
*arrayPP = arrayP;
}
static void
getStructMember(xmlrpc_env *const envP,
const char **const formatP,
va_listx *const argsP,
xmlrpc_value **const keyPP,
xmlrpc_value **const valuePP) {
/* Get the key */
getValue(envP, formatP, argsP, keyPP);
if (!envP->fault_occurred) {
if (**formatP != ':')
xmlrpc_env_set_fault(
envP, XMLRPC_INTERNAL_ERROR,
"format string does not have ':' after a "
"structure member key.");
else {
/* Skip over colon that separates key from value */
(*formatP)++;
/* Get the value */
getValue(envP, formatP, argsP, valuePP);
}
if (envP->fault_occurred)
xmlrpc_DECREF(*keyPP);
}
}
static void
getStruct(xmlrpc_env *const envP,
const char **const formatP,
char const delimiter,
va_listx *const argsP,
xmlrpc_value **const structPP) {
xmlrpc_value *structP;
structP = xmlrpc_struct_new(envP);
if (!envP->fault_occurred) {
while (**formatP != delimiter && !envP->fault_occurred) {
xmlrpc_value *keyP;
xmlrpc_value *valueP;
getStructMember(envP, formatP, argsP, &keyP, &valueP);
if (!envP->fault_occurred) {
if (**formatP == ',')
(*formatP)++; /* Skip over the comma */
else if (**formatP == delimiter) {
/* End of the line */
} else
xmlrpc_env_set_fault(
envP, XMLRPC_INTERNAL_ERROR,
"format string does not have ',' or ')' after "
"a structure member");
if (!envP->fault_occurred)
/* Add the new member to the struct. */
xmlrpc_struct_set_value_v(envP, structP, keyP, valueP);
xmlrpc_DECREF(valueP);
xmlrpc_DECREF(keyP);
}
}
if (envP->fault_occurred)
xmlrpc_DECREF(structP);
}
*structPP = structP;
}
static void
mkArrayFromVal(xmlrpc_env *const envP,
xmlrpc_value *const value,
xmlrpc_value **const valPP) {
if (xmlrpc_value_type(value) != XMLRPC_TYPE_ARRAY)
xmlrpc_env_set_fault(envP, XMLRPC_INTERNAL_ERROR,
"Array format ('A'), non-array xmlrpc_value");
else
xmlrpc_INCREF(value);
*valPP = value;
}
static void
mkStructFromVal(xmlrpc_env *const envP,
xmlrpc_value *const value,
xmlrpc_value **const valPP) {
if (xmlrpc_value_type(value) != XMLRPC_TYPE_STRUCT)
xmlrpc_env_set_fault(envP, XMLRPC_INTERNAL_ERROR,
"Struct format ('S'), non-struct xmlrpc_value");
else
xmlrpc_INCREF(value);
*valPP = value;
}
static void
getValue(xmlrpc_env *const envP,
const char **const formatP,
va_listx *const argsP,
xmlrpc_value **const valPP) {
/*----------------------------------------------------------------------------
Get the next value from the list. *formatP points to the specifier
for the next value in the format string (i.e. to the type code
character) and we move *formatP past the whole specifier for the
next value. We read the required arguments from 'argsP'. We return
the value as *valPP with a reference to it.
For example, if *formatP points to the "i" in the string "sis",
we read one argument from 'argsP' and return as *valPP an integer whose
value is the argument we read. We advance *formatP to point to the
last 's' and advance 'argsP' to point to the argument that belongs to
that 's'.
-----------------------------------------------------------------------------*/
char const formatChar = *(*formatP)++;
switch (formatChar) {
case 'i':
*valPP =
xmlrpc_int_new(envP, (xmlrpc_int32) va_arg(argsP->v,
xmlrpc_int32));
break;
case 'b':
*valPP =
xmlrpc_bool_new(envP, (xmlrpc_bool) va_arg(argsP->v,
xmlrpc_bool));
break;
case 'd':
*valPP =
xmlrpc_double_new(envP, (double) va_arg(argsP->v, double));
break;
case 's':
getString(envP, formatP, argsP, valPP);
break;
case 'w':
getWideString(envP, formatP, argsP, valPP);
break;
case 't':
*valPP = xmlrpc_datetime_new_sec(envP, va_arg(argsP->v, time_t));
break;
case '8':
*valPP = xmlrpc_datetime_new_str(envP, va_arg(argsP->v, char*));
break;
case '6':
getBase64(envP, argsP, valPP);
break;
case 'n':
*valPP =
xmlrpc_nil_new(envP);
break;
case 'I':
*valPP =
xmlrpc_i8_new(envP, (xmlrpc_int64) va_arg(argsP->v,
xmlrpc_int64));
break;
case 'p':
*valPP =
xmlrpc_cptr_new(envP, (void *) va_arg(argsP->v, void*));
break;
case 'A':
mkArrayFromVal(envP,
(xmlrpc_value *) va_arg(argsP->v, xmlrpc_value*),
valPP);
break;
case 'S':
mkStructFromVal(envP,
(xmlrpc_value *) va_arg(argsP->v, xmlrpc_value*),
valPP);
break;
case 'V':
*valPP = (xmlrpc_value *) va_arg(argsP->v, xmlrpc_value*);
xmlrpc_INCREF(*valPP);
break;
case '(':
getArray(envP, formatP, ')', argsP, valPP);
if (!envP->fault_occurred) {
XMLRPC_ASSERT(**formatP == ')');
(*formatP)++; /* Skip over closing parenthesis */
}
break;
case '{':
getStruct(envP, formatP, '}', argsP, valPP);
if (!envP->fault_occurred) {
XMLRPC_ASSERT(**formatP == '}');
(*formatP)++; /* Skip over closing brace */
}
break;
default: {
const char *const badCharacter = xmlrpc_makePrintableChar(
formatChar);
xmlrpc_env_set_fault_formatted(
envP, XMLRPC_INTERNAL_ERROR,
"Unexpected character '%s' in format string", badCharacter);
xmlrpc_strfree(badCharacter);
}
}
}
void
xmlrpc_build_value_va(xmlrpc_env *const envP,
const char *const format,
va_list const args,
xmlrpc_value **const valPP,
const char **const tailP) {
XMLRPC_ASSERT_ENV_OK(envP);
XMLRPC_ASSERT(format != NULL);
if (strlen(format) == 0)
xmlrpc_faultf(envP, "Format string is empty.");
else {
va_listx currentArgs;
const char *formatCursor;
init_va_listx(¤tArgs, args);
formatCursor = &format[0];
getValue(envP, &formatCursor, ¤tArgs, valPP);
if (!envP->fault_occurred)
XMLRPC_ASSERT_VALUE_OK(*valPP);
*tailP = formatCursor;
}
}
xmlrpc_value *
xmlrpc_build_value(xmlrpc_env *const envP,
const char *const format,
...) {
va_list args;
xmlrpc_value *retval;
const char *suffix;
va_start(args, format);
xmlrpc_build_value_va(envP, format, args, &retval, &suffix);
va_end(args);
if (!envP->fault_occurred) {
if (*suffix != '\0')
xmlrpc_faultf(envP, "Junk after the format specifier: '%s'. "
"The format string must describe exactly "
"one XML-RPC value "
"(but it might be a compound value "
"such as an array)",
suffix);
if (envP->fault_occurred)
xmlrpc_DECREF(retval);
}
return retval;
}
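/*
 * Example call (an informal sketch, not part of the original file):
 * build the XML-RPC equivalent of {"count": 3, "name": "example"}
 * using the format characters handled by getValue() above.
 */
static void
exampleBuildValue(xmlrpc_env *const envP) {

    xmlrpc_value *const valP =
        xmlrpc_build_value(envP, "{s:i,s:s}",
                           "count", (xmlrpc_int32) 3,
                           "name", "example");

    if (!envP->fault_occurred)
        xmlrpc_DECREF(valP);
}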
/* Copyright (C) 2001 by First Peer, Inc. All rights reserved.
** Copyright (C) 2001 by Eric Kidd. All rights reserved.
**
** Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions
** are met:
** 1. Redistributions of source code must retain the above copyright
** notice, this list of conditions and the following disclaimer.
** 2. Redistributions in binary form must reproduce the above copyright
** notice, this list of conditions and the following disclaimer in the
** documentation and/or other materials provided with the distribution.
** 3. The name of the author may not be used to endorse or promote products
** derived from this software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
** ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
** IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
** ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
** FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
** DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
** OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
** HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
** LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
** OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
** SUCH DAMAGE. */
| isc |
julien-noblet/cad-killer | flow-typed/npm/ua-parser-js_vx.x.x.js | 1660 | // flow-typed signature: d37503430b92ad584be6e2c6f8d1fc08
// flow-typed version: <<STUB>>/ua-parser-js_v1.0.2/flow_v0.171.0
/**
* This is an autogenerated libdef stub for:
*
* 'ua-parser-js'
*
* Fill this stub out by replacing all the `any` types.
*
* Once filled out, we encourage you to share your work with the
* community by sending a pull request to:
* https://github.com/flowtype/flow-typed
*/
declare module 'ua-parser-js' {
declare module.exports: any;
}
/**
* We include stubs for each file inside this npm package in case you need to
* require those files directly. Feel free to delete any files that aren't
* needed.
*/
declare module 'ua-parser-js/dist/ua-parser.min' {
declare module.exports: any;
}
declare module 'ua-parser-js/dist/ua-parser.pack' {
declare module.exports: any;
}
declare module 'ua-parser-js/package' {
declare module.exports: any;
}
declare module 'ua-parser-js/src/ua-parser' {
declare module.exports: any;
}
declare module 'ua-parser-js/test/test' {
declare module.exports: any;
}
// Filename aliases
declare module 'ua-parser-js/dist/ua-parser.min.js' {
declare module.exports: $Exports<'ua-parser-js/dist/ua-parser.min'>;
}
declare module 'ua-parser-js/dist/ua-parser.pack.js' {
declare module.exports: $Exports<'ua-parser-js/dist/ua-parser.pack'>;
}
declare module 'ua-parser-js/package.js' {
declare module.exports: $Exports<'ua-parser-js/package'>;
}
declare module 'ua-parser-js/src/ua-parser.js' {
declare module.exports: $Exports<'ua-parser-js/src/ua-parser'>;
}
declare module 'ua-parser-js/test/test.js' {
declare module.exports: $Exports<'ua-parser-js/test/test'>;
}
| isc |
skarnet/s6-networking | src/sbearssl/sbearssl_skey_to.c | 438 | /* ISC license. */
#include <bearssl.h>
#include <s6-networking/sbearssl.h>
int sbearssl_skey_to (sbearssl_skey const *l, br_skey *k, char *s)
{
switch (l->type)
{
case BR_KEYTYPE_RSA :
sbearssl_rsa_skey_to(&l->data.rsa, &k->data.rsa, s) ;
break ;
case BR_KEYTYPE_EC :
sbearssl_ec_skey_to(&l->data.ec, &k->data.ec, s) ;
break ;
default :
return 0 ;
}
k->type = l->type ;
return 1 ;
}
| isc |
sauban/angular-boilerplate | vendor/dropzone/dropzone.css | 12777 | /*
* The MIT License
* Copyright (c) 2012 Matias Meno <[email protected]>
*/
@-webkit-keyframes passing-through {
0% {
opacity: 0;
-webkit-transform: translateY(40px);
-moz-transform: translateY(40px);
-ms-transform: translateY(40px);
-o-transform: translateY(40px);
transform: translateY(40px);
}
30%,
70% {
opacity: 1;
-webkit-transform: translateY(0px);
-moz-transform: translateY(0px);
-ms-transform: translateY(0px);
-o-transform: translateY(0px);
transform: translateY(0px);
}
100% {
opacity: 0;
-webkit-transform: translateY(-40px);
-moz-transform: translateY(-40px);
-ms-transform: translateY(-40px);
-o-transform: translateY(-40px);
transform: translateY(-40px);
}
}
@-moz-keyframes passing-through {
0% {
opacity: 0;
-webkit-transform: translateY(40px);
-moz-transform: translateY(40px);
-ms-transform: translateY(40px);
-o-transform: translateY(40px);
transform: translateY(40px);
}
30%,
70% {
opacity: 1;
-webkit-transform: translateY(0px);
-moz-transform: translateY(0px);
-ms-transform: translateY(0px);
-o-transform: translateY(0px);
transform: translateY(0px);
}
100% {
opacity: 0;
-webkit-transform: translateY(-40px);
-moz-transform: translateY(-40px);
-ms-transform: translateY(-40px);
-o-transform: translateY(-40px);
transform: translateY(-40px);
}
}
@keyframes passing-through {
0% {
opacity: 0;
-webkit-transform: translateY(40px);
-moz-transform: translateY(40px);
-ms-transform: translateY(40px);
-o-transform: translateY(40px);
transform: translateY(40px);
}
30%,
70% {
opacity: 1;
-webkit-transform: translateY(0px);
-moz-transform: translateY(0px);
-ms-transform: translateY(0px);
-o-transform: translateY(0px);
transform: translateY(0px);
}
100% {
opacity: 0;
-webkit-transform: translateY(-40px);
-moz-transform: translateY(-40px);
-ms-transform: translateY(-40px);
-o-transform: translateY(-40px);
transform: translateY(-40px);
}
}
@-webkit-keyframes slide-in {
0% {
opacity: 0;
-webkit-transform: translateY(40px);
-moz-transform: translateY(40px);
-ms-transform: translateY(40px);
-o-transform: translateY(40px);
transform: translateY(40px);
}
30% {
opacity: 1;
-webkit-transform: translateY(0px);
-moz-transform: translateY(0px);
-ms-transform: translateY(0px);
-o-transform: translateY(0px);
transform: translateY(0px);
}
}
@-moz-keyframes slide-in {
0% {
opacity: 0;
-webkit-transform: translateY(40px);
-moz-transform: translateY(40px);
-ms-transform: translateY(40px);
-o-transform: translateY(40px);
transform: translateY(40px);
}
30% {
opacity: 1;
-webkit-transform: translateY(0px);
-moz-transform: translateY(0px);
-ms-transform: translateY(0px);
-o-transform: translateY(0px);
transform: translateY(0px);
}
}
@keyframes slide-in {
0% {
opacity: 0;
-webkit-transform: translateY(40px);
-moz-transform: translateY(40px);
-ms-transform: translateY(40px);
-o-transform: translateY(40px);
transform: translateY(40px);
}
30% {
opacity: 1;
-webkit-transform: translateY(0px);
-moz-transform: translateY(0px);
-ms-transform: translateY(0px);
-o-transform: translateY(0px);
transform: translateY(0px);
}
}
@-webkit-keyframes pulse {
0% {
-webkit-transform: scale(1);
-moz-transform: scale(1);
-ms-transform: scale(1);
-o-transform: scale(1);
transform: scale(1);
}
10% {
-webkit-transform: scale(1.1);
-moz-transform: scale(1.1);
-ms-transform: scale(1.1);
-o-transform: scale(1.1);
transform: scale(1.1);
}
20% {
-webkit-transform: scale(1);
-moz-transform: scale(1);
-ms-transform: scale(1);
-o-transform: scale(1);
transform: scale(1);
}
}
@-moz-keyframes pulse {
0% {
-webkit-transform: scale(1);
-moz-transform: scale(1);
-ms-transform: scale(1);
-o-transform: scale(1);
transform: scale(1);
}
10% {
-webkit-transform: scale(1.1);
-moz-transform: scale(1.1);
-ms-transform: scale(1.1);
-o-transform: scale(1.1);
transform: scale(1.1);
}
20% {
-webkit-transform: scale(1);
-moz-transform: scale(1);
-ms-transform: scale(1);
-o-transform: scale(1);
transform: scale(1);
}
}
@keyframes pulse {
0% {
-webkit-transform: scale(1);
-moz-transform: scale(1);
-ms-transform: scale(1);
-o-transform: scale(1);
transform: scale(1);
}
10% {
-webkit-transform: scale(1.1);
-moz-transform: scale(1.1);
-ms-transform: scale(1.1);
-o-transform: scale(1.1);
transform: scale(1.1);
}
20% {
-webkit-transform: scale(1);
-moz-transform: scale(1);
-ms-transform: scale(1);
-o-transform: scale(1);
transform: scale(1);
}
}
.dropzone,
.dropzone * {
box-sizing: border-box;
}
.dropzone {
min-height: 150px;
height: 100%;
border: 2px dashed #0087F7;
border-radius: 5px;
background: white;
padding: 20px 20px;
}
.dropzone.dz-clickable {
cursor: pointer;
}
.dropzone.dz-clickable * {
cursor: default;
}
.dropzone.dz-clickable .dz-message,
.dropzone.dz-clickable .dz-message * {
cursor: pointer;
}
.dropzone.dz-started .dz-message {
display: none;
}
.dropzone.dz-drag-hover {
border-style: solid;
}
.dropzone.dz-drag-hover .dz-message {
opacity: 0.5;
}
.dropzone .dz-message {
text-align: center;
margin: 2em 0;
}
.dropzone .dz-preview {
position: relative;
display: inline-block;
vertical-align: top;
margin: 16px;
min-height: 100px;
}
.dropzone .dz-preview:hover {
z-index: 1000;
}
.dropzone .dz-preview:hover .dz-details {
opacity: 1;
}
.dropzone .dz-preview.dz-file-preview .dz-image {
border-radius: 20px;
background: #999;
background: linear-gradient(to bottom, #eee, #ddd);
}
.dropzone .dz-preview.dz-file-preview .dz-details {
opacity: 1;
}
.dropzone .dz-preview.dz-image-preview {
background: white;
}
.dropzone .dz-preview.dz-image-preview .dz-details {
-webkit-transition: opacity 0.2s linear;
-moz-transition: opacity 0.2s linear;
-ms-transition: opacity 0.2s linear;
-o-transition: opacity 0.2s linear;
transition: opacity 0.2s linear;
}
.dropzone .dz-preview .dz-remove {
font-size: 14px;
text-align: center;
display: block;
cursor: pointer;
border: none;
}
.dropzone .dz-preview .dz-remove:hover {
text-decoration: underline;
}
.dropzone .dz-preview:hover .dz-details {
opacity: 1;
}
.dropzone .dz-preview .dz-details {
z-index: 20;
position: absolute;
top: 0;
left: 0;
opacity: 0;
font-size: 13px;
min-width: 100%;
max-width: 100%;
padding: 2em 1em;
text-align: center;
color: rgba(0, 0, 0, 0.9);
line-height: 150%;
}
.dropzone .dz-preview .dz-details .dz-size {
margin-bottom: 1em;
font-size: 16px;
}
.dropzone .dz-preview .dz-details .dz-filename {
white-space: nowrap;
}
.dropzone .dz-preview .dz-details .dz-filename:hover span {
border: 1px solid rgba(200, 200, 200, 0.8);
background-color: rgba(255, 255, 255, 0.8);
}
.dropzone .dz-preview .dz-details .dz-filename:not(:hover) {
overflow: hidden;
text-overflow: ellipsis;
}
.dropzone .dz-preview .dz-details .dz-filename:not(:hover) span {
border: 1px solid transparent;
}
.dropzone .dz-preview .dz-details .dz-filename span,
.dropzone .dz-preview .dz-details .dz-size span {
background-color: rgba(255, 255, 255, 0.4);
padding: 0 0.4em;
border-radius: 3px;
}
.dropzone .dz-preview:hover .dz-image img {
-webkit-transform: scale(1.05, 1.05);
-moz-transform: scale(1.05, 1.05);
-ms-transform: scale(1.05, 1.05);
-o-transform: scale(1.05, 1.05);
transform: scale(1.05, 1.05);
-webkit-filter: blur(8px);
filter: blur(8px);
}
.dropzone .dz-preview .dz-image {
border-radius: 20px;
overflow: hidden;
width: 120px;
height: 120px;
position: relative;
display: block;
z-index: 10;
}
.dropzone .dz-preview .dz-image img {
display: block;
}
.dropzone .dz-preview.dz-success .dz-success-mark {
-webkit-animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
-moz-animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
-ms-animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
-o-animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
}
.dropzone .dz-preview.dz-error .dz-error-mark {
opacity: 1;
-webkit-animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
-moz-animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
-ms-animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
-o-animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
}
.dropzone .dz-preview .dz-success-mark,
.dropzone .dz-preview .dz-error-mark {
pointer-events: none;
opacity: 0;
z-index: 500;
position: absolute;
display: block;
top: 50%;
left: 50%;
margin-left: -27px;
margin-top: -27px;
}
.dropzone .dz-preview .dz-success-mark svg,
.dropzone .dz-preview .dz-error-mark svg {
display: block;
width: 54px;
height: 54px;
}
.dropzone .dz-preview.dz-processing .dz-progress {
opacity: 1;
-webkit-transition: all 0.2s linear;
-moz-transition: all 0.2s linear;
-ms-transition: all 0.2s linear;
-o-transition: all 0.2s linear;
transition: all 0.2s linear;
}
.dropzone .dz-preview.dz-complete .dz-progress {
opacity: 0;
-webkit-transition: opacity 0.4s ease-in;
-moz-transition: opacity 0.4s ease-in;
-ms-transition: opacity 0.4s ease-in;
-o-transition: opacity 0.4s ease-in;
transition: opacity 0.4s ease-in;
}
.dropzone .dz-preview:not(.dz-processing) .dz-progress {
-webkit-animation: pulse 6s ease infinite;
-moz-animation: pulse 6s ease infinite;
-ms-animation: pulse 6s ease infinite;
-o-animation: pulse 6s ease infinite;
animation: pulse 6s ease infinite;
}
.dropzone .dz-preview .dz-progress {
opacity: 1;
z-index: 1000;
pointer-events: none;
position: absolute;
height: 16px;
left: 50%;
top: 50%;
margin-top: -8px;
width: 80px;
margin-left: -40px;
background: rgba(255, 255, 255, 0.9);
-webkit-transform: scale(1);
border-radius: 8px;
overflow: hidden;
}
.dropzone .dz-preview .dz-progress .dz-upload {
background: #333;
background: linear-gradient(to bottom, #666, #444);
position: absolute;
top: 0;
left: 0;
bottom: 0;
width: 0;
-webkit-transition: width 300ms ease-in-out;
-moz-transition: width 300ms ease-in-out;
-ms-transition: width 300ms ease-in-out;
-o-transition: width 300ms ease-in-out;
transition: width 300ms ease-in-out;
}
.dropzone .dz-preview.dz-error .dz-error-message {
display: block;
}
.dropzone .dz-preview.dz-error:hover .dz-error-message {
opacity: 1;
pointer-events: auto;
}
.dropzone .dz-preview .dz-error-message {
pointer-events: none;
z-index: 1000;
position: absolute;
display: block;
display: none;
opacity: 0;
-webkit-transition: opacity 0.3s ease;
-moz-transition: opacity 0.3s ease;
-ms-transition: opacity 0.3s ease;
-o-transition: opacity 0.3s ease;
transition: opacity 0.3s ease;
border-radius: 8px;
font-size: 13px;
top: 130px;
left: -10px;
width: 140px;
background: #be2626;
background: linear-gradient(to bottom, #be2626, #a92222);
padding: 0.5em 1.2em;
color: white;
}
.dropzone .dz-preview .dz-error-message:after {
content: '';
position: absolute;
top: -6px;
left: 64px;
width: 0;
height: 0;
border-left: 6px solid transparent;
border-right: 6px solid transparent;
border-bottom: 6px solid #be2626;
}
| isc |
kartta-labs/iD | build.js | 591 | /* eslint-disable no-console */
const buildData = require('./build_data');
const buildSrc = require('./build_src');
const buildCSS = require('./build_css');
let _currBuild = null;
// if called directly, do the thing.
buildAll();
function buildAll() {
if (_currBuild) return _currBuild;
return _currBuild =
Promise.resolve()
.then(() => buildCSS())
.then(() => buildData())
.then(() => buildSrc())
.then(() => _currBuild = null)
.catch((err) => {
console.error(err);
_currBuild = null;
process.exit(1);
});
}
module.exports = buildAll;
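// Illustrative usage sketch (the './build' path is an assumption): requiring the
// module starts a build immediately; calling the export returns the in-flight
// promise, or kicks off a fresh build once the previous one has settled.
//
//   const buildAll = require('./build');
//   buildAll().then(() => console.log('build finished'));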
| isc |
wvuRc2/rc2client | source/rc2/model/RCWorkspaceCache.h | 639 | //
// RCWorkspaceCache.h
//
// Created by Mark Lilback on 12/12/11.
// Copyright (c) 2011 . All rights reserved.
//
#import "_RCWorkspaceCache.h"
@interface RCWorkspaceCache : _RCWorkspaceCache
// If multiple values are to be set, it is best to get properties, set them, and then call setProperties.
// Each call to setProperties serializes a plist.
@property (nonatomic, strong) NSMutableDictionary *properties;
-(id)propertyForKey:(NSString*)key;
//removes property if value is nil
-(void)setProperty:(id)value forKey:(NSString*)key;
-(BOOL)boolPropertyForKey:(NSString*)key;
-(void)setBoolProperty:(BOOL)val forKey:(NSString*)key;
@end
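// Illustrative usage sketch (the cache instance and key names are assumptions):
// batch property writes as the note above suggests, so the plist is serialized
// once rather than on every change.
//
//   NSMutableDictionary *props = cache.properties;
//   [props setObject:@"analysis.R" forKey:@"lastFile"];
//   [props setObject:[NSNumber numberWithBool:YES] forKey:@"autosave"];
//   cache.properties = props; // serializes the plist once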
| isc |
damienmortini/dlib | node_modules/lottie-web/player/js/utils/text/LetterProps.js | 1212 | function LetterProps(o, sw, sc, fc, m, p) {
this.o = o;
this.sw = sw;
this.sc = sc;
this.fc = fc;
this.m = m;
this.p = p;
this._mdf = {
o: true,
sw: !!sw,
sc: !!sc,
fc: !!fc,
m: true,
p: true,
};
}
LetterProps.prototype.update = function (o, sw, sc, fc, m, p) {
this._mdf.o = false;
this._mdf.sw = false;
this._mdf.sc = false;
this._mdf.fc = false;
this._mdf.m = false;
this._mdf.p = false;
var updated = false;
if (this.o !== o) {
this.o = o;
this._mdf.o = true;
updated = true;
}
if (this.sw !== sw) {
this.sw = sw;
this._mdf.sw = true;
updated = true;
}
if (this.sc !== sc) {
this.sc = sc;
this._mdf.sc = true;
updated = true;
}
if (this.fc !== fc) {
this.fc = fc;
this._mdf.fc = true;
updated = true;
}
if (this.m !== m) {
this.m = m;
this._mdf.m = true;
updated = true;
}
if (p.length && (this.p[0] !== p[0] || this.p[1] !== p[1] || this.p[4] !== p[4] || this.p[5] !== p[5] || this.p[12] !== p[12] || this.p[13] !== p[13])) {
this.p = p;
this._mdf.p = true;
updated = true;
}
return updated;
};
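// Illustrative usage sketch (the surrounding renderer objects are assumptions):
// callers typically pass the latest animated values to update() and only
// restyle the glyph when it reports a change, consulting _mdf for the fields
// that actually moved.
//
//   var changed = letterProps.update(o, sw, sc, fc, m, p);
//   if (changed && letterProps._mdf.fc) {
//     // re-apply the fill colour to the rendered letter
//   }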
| isc |
becm/mpt-solver | modules/daesolv/libinfo.c | 379 | /*!
* DASSL solver library description
*/
#include "libinfo.h"
extern void _start()
{
_library_ident("DAE solver library");
_library_task("interfaces to generic IVP solver");
_library_task("operations on data for included solvers");
_library_task("DASSL solver backend");
_library_task("RADAU solver backend");
_library_task("MEBDFI solver backend");
_exit(0);
}
| isc |
ibc/MediaSoup | worker/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h | 2032 | /*
* WARNING: do not edit!
* Generated by util/mkbuildinf.pl
*
* Copyright 2014-2017 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the OpenSSL license (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
* in the file LICENSE in the source distribution or at
* https://www.openssl.org/source/license.html
*/
#define PLATFORM "platform: linux-armv4"
#define DATE "built on: Fri Sep 13 15:59:17 2019 UTC"
/*
* Generate compiler_flags as an array of individual characters. This is a
* workaround for the situation where CFLAGS gets too long for a C90 string
* literal
*/
static const char compiler_flags[] = {
'c','o','m','p','i','l','e','r',':',' ','.','.','/','c','o','n',
'f','i','g','/','f','a','k','e','_','g','c','c','.','p','l',' ',
'-','f','P','I','C',' ','-','p','t','h','r','e','a','d',' ','-',
'W','a',',','-','-','n','o','e','x','e','c','s','t','a','c','k',
' ','-','W','a','l','l',' ','-','O','3',' ','-','D','O','P','E',
'N','S','S','L','_','U','S','E','_','N','O','D','E','L','E','T',
'E',' ','-','D','O','P','E','N','S','S','L','_','P','I','C',' ',
'-','D','O','P','E','N','S','S','L','_','C','P','U','I','D','_',
'O','B','J',' ','-','D','O','P','E','N','S','S','L','_','B','N',
'_','A','S','M','_','M','O','N','T',' ','-','D','O','P','E','N',
'S','S','L','_','B','N','_','A','S','M','_','G','F','2','m',' ',
'-','D','S','H','A','1','_','A','S','M',' ','-','D','S','H','A',
'2','5','6','_','A','S','M',' ','-','D','S','H','A','5','1','2',
'_','A','S','M',' ','-','D','K','E','C','C','A','K','1','6','0',
'0','_','A','S','M',' ','-','D','A','E','S','_','A','S','M',' ',
'-','D','B','S','A','E','S','_','A','S','M',' ','-','D','G','H',
'A','S','H','_','A','S','M',' ','-','D','E','C','P','_','N','I',
'S','T','Z','2','5','6','_','A','S','M',' ','-','D','P','O','L',
'Y','1','3','0','5','_','A','S','M',' ','-','D','N','D','E','B',
'U','G','\0'
};
| isc |
denzp/pacman | application/lib/angular2/test/core/compiler/dynamic_component_loader_spec.js | 13760 | System.register(["angular2/test_lib", "angular2/src/test_lib/test_bed", "angular2/src/core/annotations_impl/annotations", "angular2/src/core/annotations_impl/view", "angular2/src/core/compiler/dynamic_component_loader", "angular2/src/core/compiler/element_ref", "angular2/src/directives/if", "angular2/src/render/dom/direct_dom_renderer", "angular2/src/dom/dom_adapter"], function($__export) {
"use strict";
var AsyncTestCompleter,
beforeEach,
ddescribe,
xdescribe,
describe,
el,
dispatchEvent,
expect,
iit,
inject,
beforeEachBindings,
it,
xit,
TestBed,
Component,
View,
DynamicComponentLoader,
ElementRef,
If,
DirectDomRenderer,
DOM,
ImperativeViewComponentUsingNgComponent,
ChildComp,
DynamicallyCreatedComponentService,
DynamicComp,
DynamicallyCreatedCmp,
DynamicallyLoaded,
DynamicallyLoaded2,
DynamicallyLoadedWithHostProps,
Location,
MyComp;
function main() {
describe('DynamicComponentLoader', function() {
describe("loading into existing location", (function() {
it('should work', inject([TestBed, AsyncTestCompleter], (function(tb, async) {
tb.overrideView(MyComp, new View({
template: '<dynamic-comp #dynamic></dynamic-comp>',
directives: [DynamicComp]
}));
tb.createView(MyComp).then((function(view) {
var dynamicComponent = view.rawView.locals.get("dynamic");
expect(dynamicComponent).toBeAnInstanceOf(DynamicComp);
dynamicComponent.done.then((function(_) {
view.detectChanges();
expect(view.rootNodes).toHaveText('hello');
async.done();
}));
}));
})));
it('should inject dependencies of the dynamically-loaded component', inject([TestBed, AsyncTestCompleter], (function(tb, async) {
tb.overrideView(MyComp, new View({
template: '<dynamic-comp #dynamic></dynamic-comp>',
directives: [DynamicComp]
}));
tb.createView(MyComp).then((function(view) {
var dynamicComponent = view.rawView.locals.get("dynamic");
dynamicComponent.done.then((function(ref) {
expect(ref.instance.dynamicallyCreatedComponentService).toBeAnInstanceOf(DynamicallyCreatedComponentService);
async.done();
}));
}));
})));
it('should allow to destroy and create them via viewcontainer directives', inject([TestBed, AsyncTestCompleter], (function(tb, async) {
tb.overrideView(MyComp, new View({
template: '<div><dynamic-comp #dynamic template="if: ctxBoolProp"></dynamic-comp></div>',
directives: [DynamicComp, If]
}));
tb.createView(MyComp).then((function(view) {
view.context.ctxBoolProp = true;
view.detectChanges();
var dynamicComponent = view.rawView.viewContainers[0].views[0].locals.get("dynamic");
dynamicComponent.done.then((function(_) {
view.detectChanges();
expect(view.rootNodes).toHaveText('hello');
view.context.ctxBoolProp = false;
view.detectChanges();
expect(view.rawView.viewContainers[0].views.length).toBe(0);
expect(view.rootNodes).toHaveText('');
view.context.ctxBoolProp = true;
view.detectChanges();
var dynamicComponent = view.rawView.viewContainers[0].views[0].locals.get("dynamic");
return dynamicComponent.done;
})).then((function(_) {
view.detectChanges();
expect(view.rootNodes).toHaveText('hello');
async.done();
}));
}));
})));
}));
describe("loading next to an existing location", (function() {
it('should work', inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (function(loader, tb, async) {
tb.overrideView(MyComp, new View({
template: '<div><location #loc></location></div>',
directives: [Location]
}));
tb.createView(MyComp).then((function(view) {
var location = view.rawView.locals.get("loc");
loader.loadNextToExistingLocation(DynamicallyLoaded, location.elementRef).then((function(ref) {
expect(view.rootNodes).toHaveText("Location;DynamicallyLoaded;");
async.done();
}));
}));
})));
it('should return a disposable component ref', inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (function(loader, tb, async) {
tb.overrideView(MyComp, new View({
template: '<div><location #loc></location></div>',
directives: [Location]
}));
tb.createView(MyComp).then((function(view) {
var location = view.rawView.locals.get("loc");
loader.loadNextToExistingLocation(DynamicallyLoaded, location.elementRef).then((function(ref) {
loader.loadNextToExistingLocation(DynamicallyLoaded2, location.elementRef).then((function(ref2) {
expect(view.rootNodes).toHaveText("Location;DynamicallyLoaded;DynamicallyLoaded2;");
ref2.dispose();
expect(view.rootNodes).toHaveText("Location;DynamicallyLoaded;");
async.done();
}));
}));
}));
})));
it('should update host properties', inject([DynamicComponentLoader, TestBed, AsyncTestCompleter], (function(loader, tb, async) {
tb.overrideView(MyComp, new View({
template: '<div><location #loc></location></div>',
directives: [Location]
}));
tb.createView(MyComp).then((function(view) {
var location = view.rawView.locals.get("loc");
loader.loadNextToExistingLocation(DynamicallyLoadedWithHostProps, location.elementRef).then((function(ref) {
ref.instance.id = "new value";
view.detectChanges();
var newlyInsertedElement = DOM.childNodesAsList(view.rootNodes[0])[1];
expect(newlyInsertedElement.id).toEqual("new value");
async.done();
}));
}));
})));
}));
describe('loading into a new location', (function() {
it('should allow to create, update and destroy components', inject([TestBed, AsyncTestCompleter], (function(tb, async) {
tb.overrideView(MyComp, new View({
template: '<imp-ng-cmp #impview></imp-ng-cmp>',
directives: [ImperativeViewComponentUsingNgComponent]
}));
tb.createView(MyComp).then((function(view) {
var userViewComponent = view.rawView.locals.get("impview");
userViewComponent.done.then((function(childComponentRef) {
view.detectChanges();
expect(view.rootNodes).toHaveText('hello');
childComponentRef.instance.ctxProp = 'new';
view.detectChanges();
expect(view.rootNodes).toHaveText('new');
childComponentRef.dispose();
expect(view.rootNodes).toHaveText('');
async.done();
}));
}));
})));
}));
});
}
$__export("main", main);
return {
setters: [function($__m) {
AsyncTestCompleter = $__m.AsyncTestCompleter;
beforeEach = $__m.beforeEach;
ddescribe = $__m.ddescribe;
xdescribe = $__m.xdescribe;
describe = $__m.describe;
el = $__m.el;
dispatchEvent = $__m.dispatchEvent;
expect = $__m.expect;
iit = $__m.iit;
inject = $__m.inject;
beforeEachBindings = $__m.beforeEachBindings;
it = $__m.it;
xit = $__m.xit;
}, function($__m) {
TestBed = $__m.TestBed;
}, function($__m) {
Component = $__m.Component;
}, function($__m) {
View = $__m.View;
}, function($__m) {
DynamicComponentLoader = $__m.DynamicComponentLoader;
}, function($__m) {
ElementRef = $__m.ElementRef;
}, function($__m) {
If = $__m.If;
}, function($__m) {
DirectDomRenderer = $__m.DirectDomRenderer;
}, function($__m) {
DOM = $__m.DOM;
}],
execute: function() {
ImperativeViewComponentUsingNgComponent = (function() {
var ImperativeViewComponentUsingNgComponent = function ImperativeViewComponentUsingNgComponent(self, dynamicComponentLoader, renderer) {
var div = el('<div></div>');
renderer.setImperativeComponentRootNodes(self.parentView.render, self.boundElementIndex, [div]);
this.done = dynamicComponentLoader.loadIntoNewLocation(ChildComp, self, div, null);
};
return ($traceurRuntime.createClass)(ImperativeViewComponentUsingNgComponent, {}, {});
}());
Object.defineProperty(ImperativeViewComponentUsingNgComponent, "annotations", {get: function() {
return [new Component({selector: 'imp-ng-cmp'}), new View({renderer: 'imp-ng-cmp-renderer'})];
}});
Object.defineProperty(ImperativeViewComponentUsingNgComponent, "parameters", {get: function() {
return [[ElementRef], [DynamicComponentLoader], [DirectDomRenderer]];
}});
ChildComp = (function() {
var ChildComp = function ChildComp() {
this.ctxProp = 'hello';
};
return ($traceurRuntime.createClass)(ChildComp, {}, {});
}());
Object.defineProperty(ChildComp, "annotations", {get: function() {
return [new Component({selector: 'child-cmp'}), new View({template: '{{ctxProp}}'})];
}});
DynamicallyCreatedComponentService = (function() {
var DynamicallyCreatedComponentService = function DynamicallyCreatedComponentService() {
;
};
return ($traceurRuntime.createClass)(DynamicallyCreatedComponentService, {}, {});
}());
DynamicComp = (function() {
var DynamicComp = function DynamicComp(loader, location) {
this.done = loader.loadIntoExistingLocation(DynamicallyCreatedCmp, location);
};
return ($traceurRuntime.createClass)(DynamicComp, {}, {});
}());
Object.defineProperty(DynamicComp, "annotations", {get: function() {
return [new Component({selector: 'dynamic-comp'})];
}});
Object.defineProperty(DynamicComp, "parameters", {get: function() {
return [[DynamicComponentLoader], [ElementRef]];
}});
DynamicallyCreatedCmp = (function() {
var DynamicallyCreatedCmp = function DynamicallyCreatedCmp(a) {
this.greeting = "hello";
this.dynamicallyCreatedComponentService = a;
};
return ($traceurRuntime.createClass)(DynamicallyCreatedCmp, {}, {});
}());
Object.defineProperty(DynamicallyCreatedCmp, "annotations", {get: function() {
return [new Component({
selector: 'hello-cmp',
injectables: [DynamicallyCreatedComponentService]
}), new View({template: "{{greeting}}"})];
}});
Object.defineProperty(DynamicallyCreatedCmp, "parameters", {get: function() {
return [[DynamicallyCreatedComponentService]];
}});
DynamicallyLoaded = (function() {
var DynamicallyLoaded = function DynamicallyLoaded() {
;
};
return ($traceurRuntime.createClass)(DynamicallyLoaded, {}, {});
}());
Object.defineProperty(DynamicallyLoaded, "annotations", {get: function() {
return [new Component({selector: 'dummy'}), new View({template: "DynamicallyLoaded;"})];
}});
DynamicallyLoaded2 = (function() {
var DynamicallyLoaded2 = function DynamicallyLoaded2() {
;
};
return ($traceurRuntime.createClass)(DynamicallyLoaded2, {}, {});
}());
Object.defineProperty(DynamicallyLoaded2, "annotations", {get: function() {
return [new Component({selector: 'dummy'}), new View({template: "DynamicallyLoaded2;"})];
}});
DynamicallyLoadedWithHostProps = (function() {
var DynamicallyLoadedWithHostProps = function DynamicallyLoadedWithHostProps() {
this.id = "default";
};
return ($traceurRuntime.createClass)(DynamicallyLoadedWithHostProps, {}, {});
}());
Object.defineProperty(DynamicallyLoadedWithHostProps, "annotations", {get: function() {
return [new Component({
selector: 'dummy',
hostProperties: {'id': 'id'}
}), new View({template: "DynamicallyLoadedWithHostProps;"})];
}});
Location = (function() {
var Location = function Location(elementRef) {
this.elementRef = elementRef;
};
return ($traceurRuntime.createClass)(Location, {}, {});
}());
Object.defineProperty(Location, "annotations", {get: function() {
return [new Component({selector: 'location'}), new View({template: "Location;"})];
}});
Object.defineProperty(Location, "parameters", {get: function() {
return [[ElementRef]];
}});
MyComp = (function() {
var MyComp = function MyComp() {
this.ctxBoolProp = false;
};
return ($traceurRuntime.createClass)(MyComp, {}, {});
}());
Object.defineProperty(MyComp, "annotations", {get: function() {
return [new Component({selector: 'my-comp'}), new View({directives: []})];
}});
}
};
});
//# sourceMappingURL=dynamic_component_loader_spec.es6.map
//# sourceMappingURL=./dynamic_component_loader_spec.js.map | isc |
dsusco/tp.net-armageddon.org | _site/_units/dreadnought.md | 409 | ---
name: Dreadnought
type: AV
speed: 15cm
armour: 3+
cc: 4+
ff: 4+
special_rules:
- walker
notes:
|
Armed with either a Missile Launcher and Twin Lascannon, or an Assault Cannon and Power Fist.
weapons:
-
id: missile-launcher
multiplier: 0–1
-
id: twin-lascannon
multiplier: 0–1
-
id: assault-cannon
multiplier: 0–1
-
id: power-fist
multiplier: 0–1
--- | isc |
DanielAeolusLaude/DPF-NTK | distrho/src/DistrhoPluginLV2export.cpp | 16790 | /*
* DISTRHO Plugin Framework (DPF)
* Copyright (C) 2012-2014 Filipe Coelho <[email protected]>
*
* Permission to use, copy, modify, and/or distribute this software for any purpose with
* or without fee is hereby granted, provided that the above copyright notice and this
* permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
* TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN
* NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
* DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
* IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "DistrhoPluginInternal.hpp"
#include "lv2/atom.h"
#include "lv2/buf-size.h"
#include "lv2/data-access.h"
#include "lv2/instance-access.h"
#include "lv2/midi.h"
#include "lv2/options.h"
#include "lv2/port-props.h"
#include "lv2/resize-port.h"
#include "lv2/state.h"
#include "lv2/time.h"
#include "lv2/ui.h"
#include "lv2/units.h"
#include "lv2/urid.h"
#include "lv2/worker.h"
#include "lv2/lv2_kxstudio_properties.h"
#include "lv2/lv2_programs.h"
#include <fstream>
#include <iostream>
#ifndef DISTRHO_PLUGIN_URI
# error DISTRHO_PLUGIN_URI undefined!
#endif
#ifndef DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE
# define DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE 2048
#endif
#define DISTRHO_LV2_USE_EVENTS_IN (DISTRHO_PLUGIN_HAS_MIDI_INPUT || DISTRHO_PLUGIN_WANT_TIMEPOS || (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI))
#define DISTRHO_LV2_USE_EVENTS_OUT (DISTRHO_PLUGIN_HAS_MIDI_OUTPUT || (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI))
// -----------------------------------------------------------------------
DISTRHO_PLUGIN_EXPORT
void lv2_generate_ttl(const char* const basename)
{
USE_NAMESPACE_DISTRHO
// Dummy plugin to get data from
d_lastBufferSize = 512;
d_lastSampleRate = 44100.0;
PluginExporter plugin;
d_lastBufferSize = 0;
d_lastSampleRate = 0.0;
d_string pluginDLL(basename);
d_string pluginTTL(pluginDLL + ".ttl");
// ---------------------------------------------
{
std::cout << "Writing manifest.ttl..."; std::cout.flush();
std::fstream manifestFile("manifest.ttl", std::ios::out);
d_string manifestString;
manifestString += "@prefix lv2: <" LV2_CORE_PREFIX "> .\n";
manifestString += "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n";
#if DISTRHO_PLUGIN_HAS_UI
manifestString += "@prefix ui: <" LV2_UI_PREFIX "> .\n";
#endif
manifestString += "\n";
manifestString += "<" DISTRHO_PLUGIN_URI ">\n";
manifestString += " a lv2:Plugin ;\n";
manifestString += " lv2:binary <" + pluginDLL + "." DISTRHO_DLL_EXTENSION "> ;\n";
manifestString += " rdfs:seeAlso <" + pluginTTL + "> .\n";
manifestString += "\n";
#if DISTRHO_PLUGIN_HAS_UI
manifestString += "<" DISTRHO_UI_URI ">\n";
# if DISTRHO_OS_HAIKU
manifestString += " a ui:BeUI ;\n";
# elif DISTRHO_OS_MAC
manifestString += " a ui:CocoaUI ;\n";
# elif DISTRHO_OS_WINDOWS
manifestString += " a ui:WindowsUI ;\n";
# else
manifestString += " a ui:X11UI ;\n";
# endif
# if ! DISTRHO_PLUGIN_WANT_DIRECT_ACCESS
d_string pluginUI(pluginDLL);
pluginUI.truncate(pluginDLL.rfind("_dsp"));
pluginUI += "_ui";
manifestString += " ui:binary <" + pluginUI + "." DISTRHO_DLL_EXTENSION "> ;\n";
# else
manifestString += " ui:binary <" + pluginDLL + "." DISTRHO_DLL_EXTENSION "> ;\n";
#endif
manifestString += " lv2:extensionData ui:idleInterface ,\n";
# if DISTRHO_PLUGIN_WANT_PROGRAMS
manifestString += " ui:showInterface ,\n";
manifestString += " <" LV2_PROGRAMS__Interface "> ;\n";
# else
manifestString += " ui:showInterface ;\n";
# endif
manifestString += " lv2:optionalFeature ui:noUserResize ,\n";
manifestString += " ui:resize ,\n";
manifestString += " ui:touch ;\n";
# if DISTRHO_PLUGIN_WANT_DIRECT_ACCESS
manifestString += " lv2:requiredFeature <" LV2_DATA_ACCESS_URI "> ,\n";
manifestString += " <" LV2_INSTANCE_ACCESS_URI "> ,\n";
manifestString += " <" LV2_OPTIONS__options "> ,\n";
# else
manifestString += " lv2:requiredFeature <" LV2_OPTIONS__options "> ,\n";
# endif
manifestString += " <" LV2_URID__map "> .\n";
#endif
manifestFile << manifestString << std::endl;
manifestFile.close();
std::cout << " done!" << std::endl;
}
// ---------------------------------------------
{
std::cout << "Writing " << pluginTTL << "..."; std::cout.flush();
std::fstream pluginFile(pluginTTL, std::ios::out);
d_string pluginString;
// header
#if DISTRHO_LV2_USE_EVENTS_IN
pluginString += "@prefix atom: <" LV2_ATOM_PREFIX "> .\n";
#endif
pluginString += "@prefix doap: <http://usefulinc.com/ns/doap#> .\n";
pluginString += "@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n";
pluginString += "@prefix lv2: <" LV2_CORE_PREFIX "> .\n";
pluginString += "@prefix rsz: <" LV2_RESIZE_PORT_PREFIX "> .\n";
#if DISTRHO_PLUGIN_HAS_UI
pluginString += "@prefix ui: <" LV2_UI_PREFIX "> .\n";
#endif
pluginString += "@prefix unit: <" LV2_UNITS_PREFIX "> .\n";
pluginString += "\n";
// plugin
pluginString += "<" DISTRHO_PLUGIN_URI ">\n";
#if DISTRHO_PLUGIN_IS_SYNTH
pluginString += " a lv2:InstrumentPlugin, lv2:Plugin ;\n";
#else
pluginString += " a lv2:Plugin ;\n";
#endif
pluginString += "\n";
// extensionData
pluginString += " lv2:extensionData <" LV2_STATE__interface "> ";
#if DISTRHO_PLUGIN_WANT_STATE
pluginString += ",\n <" LV2_OPTIONS__interface "> ";
pluginString += ",\n <" LV2_WORKER__interface "> ";
#endif
#if DISTRHO_PLUGIN_WANT_PROGRAMS
pluginString += ",\n <" LV2_PROGRAMS__Interface "> ";
#endif
pluginString += ";\n\n";
// optionalFeatures
#if DISTRHO_PLUGIN_IS_RT_SAFE
pluginString += " lv2:optionalFeature <" LV2_CORE__hardRTCapable "> ,\n";
pluginString += " <" LV2_BUF_SIZE__boundedBlockLength "> ;\n";
#else
pluginString += " lv2:optionalFeature <" LV2_BUF_SIZE__boundedBlockLength "> ;\n";
#endif
pluginString += "\n";
// requiredFeatures
pluginString += " lv2:requiredFeature <" LV2_OPTIONS__options "> ";
pluginString += ",\n <" LV2_URID__map "> ";
#if DISTRHO_PLUGIN_WANT_STATE
pluginString += ",\n <" LV2_WORKER__schedule "> ";
#endif
pluginString += ";\n\n";
// UI
#if DISTRHO_PLUGIN_HAS_UI
pluginString += " ui:ui <" DISTRHO_UI_URI "> ;\n";
pluginString += "\n";
#endif
{
uint32_t portIndex = 0;
#if DISTRHO_PLUGIN_NUM_INPUTS > 0
for (uint32_t i=0; i < DISTRHO_PLUGIN_NUM_INPUTS; ++i, ++portIndex)
{
if (i == 0)
pluginString += " lv2:port [\n";
else
pluginString += " [\n";
pluginString += " a lv2:InputPort, lv2:AudioPort ;\n";
pluginString += " lv2:index " + d_string(portIndex) + " ;\n";
pluginString += " lv2:symbol \"lv2_audio_in_" + d_string(i+1) + "\" ;\n";
pluginString += " lv2:name \"Audio Input " + d_string(i+1) + "\" ;\n";
if (i+1 == DISTRHO_PLUGIN_NUM_INPUTS)
pluginString += " ] ;\n\n";
else
pluginString += " ] ,\n";
}
pluginString += "\n";
#endif
#if DISTRHO_PLUGIN_NUM_OUTPUTS > 0
for (uint32_t i=0; i < DISTRHO_PLUGIN_NUM_OUTPUTS; ++i, ++portIndex)
{
if (i == 0)
pluginString += " lv2:port [\n";
else
pluginString += " [\n";
pluginString += " a lv2:OutputPort, lv2:AudioPort ;\n";
pluginString += " lv2:index " + d_string(portIndex) + " ;\n";
pluginString += " lv2:symbol \"lv2_audio_out_" + d_string(i+1) + "\" ;\n";
pluginString += " lv2:name \"Audio Output " + d_string(i+1) + "\" ;\n";
if (i+1 == DISTRHO_PLUGIN_NUM_OUTPUTS)
pluginString += " ] ;\n\n";
else
pluginString += " ] ,\n";
}
pluginString += "\n";
#endif
#if DISTRHO_LV2_USE_EVENTS_IN
pluginString += " lv2:port [\n";
pluginString += " a lv2:InputPort, atom:AtomPort ;\n";
pluginString += " lv2:index " + d_string(portIndex) + " ;\n";
pluginString += " lv2:name \"Events Input\" ;\n";
pluginString += " lv2:symbol \"lv2_events_in\" ;\n";
pluginString += " rsz:minimumSize " + d_string(DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE) + " ;\n";
pluginString += " atom:bufferType atom:Sequence ;\n";
# if (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI)
pluginString += " atom:supports <" LV2_ATOM__String "> ;\n";
# endif
# if DISTRHO_PLUGIN_HAS_MIDI_INPUT
pluginString += " atom:supports <" LV2_MIDI__MidiEvent "> ;\n";
# endif
# if DISTRHO_PLUGIN_WANT_TIMEPOS
pluginString += " atom:supports <" LV2_TIME__Position "> ;\n";
# endif
pluginString += " ] ;\n\n";
++portIndex;
#endif
#if DISTRHO_LV2_USE_EVENTS_OUT
pluginString += " lv2:port [\n";
pluginString += " a lv2:OutputPort, atom:AtomPort ;\n";
pluginString += " lv2:index " + d_string(portIndex) + " ;\n";
pluginString += " lv2:name \"Events Output\" ;\n";
pluginString += " lv2:symbol \"lv2_events_out\" ;\n";
pluginString += " rsz:minimumSize " + d_string(DISTRHO_PLUGIN_MINIMUM_BUFFER_SIZE) + " ;\n";
pluginString += " atom:bufferType atom:Sequence ;\n";
# if (DISTRHO_PLUGIN_WANT_STATE && DISTRHO_PLUGIN_HAS_UI)
pluginString += " atom:supports <" LV2_ATOM__String "> ;\n";
# endif
# if DISTRHO_PLUGIN_HAS_MIDI_OUTPUT
pluginString += " atom:supports <" LV2_MIDI__MidiEvent "> ;\n";
# endif
pluginString += " ] ;\n\n";
++portIndex;
#endif
#if DISTRHO_PLUGIN_WANT_LATENCY
pluginString += " lv2:port [\n";
pluginString += " a lv2:OutputPort, lv2:ControlPort ;\n";
pluginString += " lv2:index " + d_string(portIndex) + " ;\n";
pluginString += " lv2:name \"Latency\" ;\n";
pluginString += " lv2:symbol \"lv2_latency\" ;\n";
pluginString += " lv2:designation lv2:latency ;\n";
pluginString += " lv2:portProperty lv2:reportsLatency, lv2:integer ;\n";
pluginString += " ] ;\n\n";
++portIndex;
#endif
for (uint32_t i=0, count=plugin.getParameterCount(); i < count; ++i, ++portIndex)
{
if (i == 0)
pluginString += " lv2:port [\n";
else
pluginString += " [\n";
if (plugin.isParameterOutput(i))
pluginString += " a lv2:OutputPort, lv2:ControlPort ;\n";
else
pluginString += " a lv2:InputPort, lv2:ControlPort ;\n";
pluginString += " lv2:index " + d_string(portIndex) + " ;\n";
pluginString += " lv2:name \"" + plugin.getParameterName(i) + "\" ;\n";
// symbol
{
d_string symbol(plugin.getParameterSymbol(i));
if (symbol.isEmpty())
symbol = "lv2_port_" + d_string(portIndex-1);
pluginString += " lv2:symbol \"" + symbol + "\" ;\n";
}
// ranges
{
const ParameterRanges& ranges(plugin.getParameterRanges(i));
if (plugin.getParameterHints(i) & kParameterIsInteger)
{
pluginString += " lv2:default " + d_string(int(plugin.getParameterValue(i))) + " ;\n";
pluginString += " lv2:minimum " + d_string(int(ranges.min)) + " ;\n";
pluginString += " lv2:maximum " + d_string(int(ranges.max)) + " ;\n";
}
else
{
pluginString += " lv2:default " + d_string(plugin.getParameterValue(i)) + " ;\n";
pluginString += " lv2:minimum " + d_string(ranges.min) + " ;\n";
pluginString += " lv2:maximum " + d_string(ranges.max) + " ;\n";
}
}
// unit
{
const d_string& unit(plugin.getParameterUnit(i));
if (! unit.isEmpty())
{
if (unit == "db" || unit == "dB")
{
pluginString += " unit:unit unit:db ;\n";
}
else if (unit == "hz" || unit == "Hz")
{
pluginString += " unit:unit unit:hz ;\n";
}
else if (unit == "khz" || unit == "kHz")
{
pluginString += " unit:unit unit:khz ;\n";
}
else if (unit == "mhz" || unit == "mHz")
{
pluginString += " unit:unit unit:mhz ;\n";
}
else if (unit == "%")
{
pluginString += " unit:unit unit:pc ;\n";
}
else
{
pluginString += " unit:unit [\n";
pluginString += " a unit:Unit ;\n";
pluginString += " unit:name \"" + unit + "\" ;\n";
pluginString += " unit:symbol \"" + unit + "\" ;\n";
pluginString += " unit:render \"%f " + unit + "\" ;\n";
pluginString += " ] ;\n";
}
}
}
// hints
{
const uint32_t hints(plugin.getParameterHints(i));
if (hints & kParameterIsBoolean)
pluginString += " lv2:portProperty lv2:toggled ;\n";
if (hints & kParameterIsInteger)
pluginString += " lv2:portProperty lv2:integer ;\n";
if (hints & kParameterIsLogarithmic)
pluginString += " lv2:portProperty <" LV2_PORT_PROPS__logarithmic "> ;\n";
if ((hints & kParameterIsAutomable) == 0 && ! plugin.isParameterOutput(i))
{
pluginString += " lv2:portProperty <" LV2_PORT_PROPS__expensive "> ,\n";
pluginString += " <" LV2_KXSTUDIO_PROPERTIES__NonAutomable "> ;\n";
}
}
if (i+1 == count)
pluginString += " ] ;\n\n";
else
pluginString += " ] ,\n";
}
}
pluginString += " doap:name \"" + d_string(plugin.getName()) + "\" ;\n";
pluginString += " doap:maintainer [ foaf:name \"" + d_string(plugin.getMaker()) + "\" ] .\n";
pluginFile << pluginString << std::endl;
pluginFile.close();
std::cout << " done!" << std::endl;
}
}
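// Illustrative note (tool and file names are assumptions): this exported symbol
// is typically resolved and called by a small generator utility that dlopens the
// freshly built plugin binary, e.g.
//
//   lv2_generate_ttl("my_plugin_dsp"); // writes manifest.ttl and my_plugin_dsp.ttl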
| isc |
rhaphazard/moebooru | lib/assets/javascripts/moe-legacy/notes.js | 21067 | // The following are instance methods and variables
var Note = Class.create({
initialize: function(id, is_new, raw_body) {
if (Note.debug) {
console.debug("Note#initialize (id=%d)", id)
}
this.id = id
this.is_new = is_new
this.document_observers = [];
// Cache the elements
this.elements = {
box: $('note-box-' + this.id),
corner: $('note-corner-' + this.id),
body: $('note-body-' + this.id),
image: $('image')
}
// Cache the dimensions
this.fullsize = {
left: this.elements.box.offsetLeft,
top: this.elements.box.offsetTop,
width: this.elements.box.clientWidth,
height: this.elements.box.clientHeight
}
// Store the original values (in case the user clicks Cancel)
this.old = {
raw_body: raw_body,
formatted_body: this.elements.body.innerHTML
}
    for (var p in this.fullsize) {
this.old[p] = this.fullsize[p]
}
// Make the note translucent
if (is_new) {
this.elements.box.setOpacity(0.2)
} else {
this.elements.box.setOpacity(0.5)
}
if (is_new && raw_body == '') {
this.bodyfit = true
this.elements.body.style.height = "100px"
}
// Attach the event listeners
this.elements.box.observe("mousedown", this.dragStart.bindAsEventListener(this))
this.elements.box.observe("mouseout", this.bodyHideTimer.bindAsEventListener(this))
this.elements.box.observe("mouseover", this.bodyShow.bindAsEventListener(this))
this.elements.corner.observe("mousedown", this.resizeStart.bindAsEventListener(this))
this.elements.body.observe("mouseover", this.bodyShow.bindAsEventListener(this))
this.elements.body.observe("mouseout", this.bodyHideTimer.bindAsEventListener(this))
this.elements.body.observe("click", this.showEditBox.bindAsEventListener(this))
this.adjustScale()
},
// Returns the raw text value of this note
textValue: function() {
if (Note.debug) {
console.debug("Note#textValue (id=%d)", this.id)
}
return this.old.raw_body.strip()
},
// Removes the edit box
hideEditBox: function(e) {
if (Note.debug) {
console.debug("Note#hideEditBox (id=%d)", this.id)
}
var editBox = $('edit-box')
if (editBox != null) {
var boxid = editBox.noteid
$("edit-box").stopObserving()
$("note-save-" + boxid).stopObserving()
$("note-cancel-" + boxid).stopObserving()
$("note-remove-" + boxid).stopObserving()
$("note-history-" + boxid).stopObserving()
$("edit-box").remove()
}
},
// Shows the edit box
showEditBox: function(e) {
if (Note.debug) {
console.debug("Note#showEditBox (id=%d)", this.id)
}
this.hideEditBox(e)
var insertionPosition = Note.getInsertionPosition()
var top = insertionPosition[0]
var left = insertionPosition[1]
var html = ""
html += '<div id="edit-box" style="top: '+top+'px; left: '+left+'px; position: absolute; visibility: visible; z-index: 100; background: white; border: 1px solid black; padding: 12px;">'
html += '<form onsubmit="return false;" style="padding: 0; margin: 0;">'
html += '<textarea rows="7" id="edit-box-text" style="width: 350px; margin: 2px 2px 12px 2px;">' + this.textValue() + '</textarea>'
html += '<input type="submit" value="Save" name="save" id="note-save-' + this.id + '">'
html += '<input type="submit" value="Cancel" name="cancel" id="note-cancel-' + this.id + '">'
html += '<input type="submit" value="Remove" name="remove" id="note-remove-' + this.id + '">'
html += '<input type="submit" value="History" name="history" id="note-history-' + this.id + '">'
html += '</form>'
html += '</div>'
$("note-container").insert({bottom: html})
$('edit-box').noteid = this.id
$("edit-box").observe("mousedown", this.editDragStart.bindAsEventListener(this))
$("note-save-" + this.id).observe("click", this.save.bindAsEventListener(this))
$("note-cancel-" + this.id).observe("click", this.cancel.bindAsEventListener(this))
$("note-remove-" + this.id).observe("click", this.remove.bindAsEventListener(this))
$("note-history-" + this.id).observe("click", this.history.bindAsEventListener(this))
$("edit-box-text").focus()
},
// Shows the body text for the note
bodyShow: function(e) {
if (Note.debug) {
console.debug("Note#bodyShow (id=%d)", this.id)
}
if (this.dragging) {
return
}
if (this.hideTimer) {
clearTimeout(this.hideTimer)
this.hideTimer = null
}
if (Note.noteShowingBody == this) {
return
}
if (Note.noteShowingBody) {
Note.noteShowingBody.bodyHide()
}
Note.noteShowingBody = this
if (Note.zindex >= 9) {
/* don't use more than 10 layers (+1 for the body, which will always be above all notes) */
Note.zindex = 0
for (var i=0; i< Note.all.length; ++i) {
Note.all[i].elements.box.style.zIndex = 0
}
}
this.elements.box.style.zIndex = ++Note.zindex
this.elements.body.style.zIndex = 10
this.elements.body.style.top = 0 + "px"
this.elements.body.style.left = 0 + "px"
var dw = document.documentElement.scrollWidth
this.elements.body.style.visibility = "hidden"
this.elements.body.style.display = "block"
if (!this.bodyfit) {
this.elements.body.style.height = "auto"
this.elements.body.style.minWidth = "140px"
var w = null, h = null, lo = null, hi = null, x = null, last = null
w = this.elements.body.offsetWidth
h = this.elements.body.offsetHeight
if (w/h < 1.6180339887) {
/* for tall notes (lots of text), find more pleasant proportions */
lo = 140, hi = 400
do {
last = w
x = (lo+hi)/2
this.elements.body.style.minWidth = x + "px"
w = this.elements.body.offsetWidth
h = this.elements.body.offsetHeight
if (w/h < 1.6180339887) lo = x
else hi = x
} while ((lo < hi) && (w > last))
} else if (this.elements.body.scrollWidth <= this.elements.body.clientWidth) {
/* for short notes (often a single line), make the box no wider than necessary */
// scroll test necessary for Firefox
lo = 20, hi = w
do {
x = (lo+hi)/2
this.elements.body.style.minWidth = x + "px"
if (this.elements.body.offsetHeight > h) lo = x
else hi = x
} while ((hi - lo) > 4)
if (this.elements.body.offsetHeight > h)
this.elements.body.style.minWidth = hi + "px"
}
if (Prototype.Browser.IE) {
// IE7 adds scrollbars if the box is too small, obscuring the text
if (this.elements.body.offsetHeight < 35) {
this.elements.body.style.minHeight = "35px"
}
if (this.elements.body.offsetWidth < 47) {
this.elements.body.style.minWidth = "47px"
}
}
this.bodyfit = true
}
this.elements.body.style.top = (this.elements.box.offsetTop + this.elements.box.clientHeight + 5) + "px"
// keep the box within the document's width
var l = 0, e = this.elements.box
do { l += e.offsetLeft } while (e = e.offsetParent)
l += this.elements.body.offsetWidth + 10 - dw
if (l > 0)
this.elements.body.style.left = this.elements.box.offsetLeft - l + "px"
else
this.elements.body.style.left = this.elements.box.offsetLeft + "px"
this.elements.body.style.visibility = "visible"
},
// Creates a timer that will hide the body text for the note
bodyHideTimer: function(e) {
if (Note.debug) {
console.debug("Note#bodyHideTimer (id=%d)", this.id)
}
this.hideTimer = setTimeout(this.bodyHide.bindAsEventListener(this), 250)
},
// Hides the body text for the note
bodyHide: function(e) {
if (Note.debug) {
console.debug("Note#bodyHide (id=%d)", this.id)
}
this.elements.body.hide()
if (Note.noteShowingBody == this) {
Note.noteShowingBody = null
}
},
addDocumentObserver: function(name, func)
{
document.observe(name, func);
this.document_observers.push([name, func]);
},
clearDocumentObservers: function(name, handler)
{
for(var i = 0; i < this.document_observers.length; ++i)
{
var observer = this.document_observers[i];
document.stopObserving(observer[0], observer[1]);
}
this.document_observers = [];
},
// Start dragging the note
dragStart: function(e) {
if (Note.debug) {
console.debug("Note#dragStart (id=%d)", this.id)
}
this.addDocumentObserver("mousemove", this.drag.bindAsEventListener(this))
this.addDocumentObserver("mouseup", this.dragStop.bindAsEventListener(this))
this.addDocumentObserver("selectstart", function() {return false})
this.cursorStartX = e.pointerX()
this.cursorStartY = e.pointerY()
this.boxStartX = this.elements.box.offsetLeft
this.boxStartY = this.elements.box.offsetTop
this.boundsX = new ClipRange(5, this.elements.image.clientWidth - this.elements.box.clientWidth - 5)
this.boundsY = new ClipRange(5, this.elements.image.clientHeight - this.elements.box.clientHeight - 5)
this.dragging = true
this.bodyHide()
},
// Stop dragging the note
dragStop: function(e) {
if (Note.debug) {
console.debug("Note#dragStop (id=%d)", this.id)
}
this.clearDocumentObservers()
this.cursorStartX = null
this.cursorStartY = null
this.boxStartX = null
this.boxStartY = null
this.boundsX = null
this.boundsY = null
this.dragging = false
this.bodyShow()
},
ratio: function() {
return this.elements.image.width / this.elements.image.getAttribute("large_width")
// var ratio = this.elements.image.width / this.elements.image.getAttribute("large_width")
// if (this.elements.image.scale_factor != null)
// ratio *= this.elements.image.scale_factor;
// return ratio
},
// Scale the notes for when the image gets resized
adjustScale: function() {
if (Note.debug) {
console.debug("Note#adjustScale (id=%d)", this.id)
}
var ratio = this.ratio()
    for (var p in this.fullsize) {
this.elements.box.style[p] = this.fullsize[p] * ratio + 'px'
}
},
// Update the note's position as it gets dragged
drag: function(e) {
var left = this.boxStartX + e.pointerX() - this.cursorStartX
var top = this.boxStartY + e.pointerY() - this.cursorStartY
left = this.boundsX.clip(left)
top = this.boundsY.clip(top)
this.elements.box.style.left = left + 'px'
this.elements.box.style.top = top + 'px'
var ratio = this.ratio()
this.fullsize.left = left / ratio
this.fullsize.top = top / ratio
e.stop()
},
// Start dragging the edit box
editDragStart: function(e) {
if (Note.debug) {
console.debug("Note#editDragStart (id=%d)", this.id)
}
var node = e.element().nodeName
if (node != 'FORM' && node != 'DIV') {
return
}
this.addDocumentObserver("mousemove", this.editDrag.bindAsEventListener(this))
this.addDocumentObserver("mouseup", this.editDragStop.bindAsEventListener(this))
this.addDocumentObserver("selectstart", function() {return false})
this.elements.editBox = $('edit-box');
this.cursorStartX = e.pointerX()
this.cursorStartY = e.pointerY()
this.editStartX = this.elements.editBox.offsetLeft
this.editStartY = this.elements.editBox.offsetTop
this.dragging = true
},
// Stop dragging the edit box
editDragStop: function(e) {
if (Note.debug) {
console.debug("Note#editDragStop (id=%d)", this.id)
}
this.clearDocumentObservers()
this.cursorStartX = null
this.cursorStartY = null
this.editStartX = null
this.editStartY = null
this.dragging = false
},
// Update the edit box's position as it gets dragged
editDrag: function(e) {
var left = this.editStartX + e.pointerX() - this.cursorStartX
var top = this.editStartY + e.pointerY() - this.cursorStartY
this.elements.editBox.style.left = left + 'px'
this.elements.editBox.style.top = top + 'px'
e.stop()
},
// Start resizing the note
resizeStart: function(e) {
if (Note.debug) {
console.debug("Note#resizeStart (id=%d)", this.id)
}
this.cursorStartX = e.pointerX()
this.cursorStartY = e.pointerY()
this.boxStartWidth = this.elements.box.clientWidth
this.boxStartHeight = this.elements.box.clientHeight
this.boxStartX = this.elements.box.offsetLeft
this.boxStartY = this.elements.box.offsetTop
this.boundsX = new ClipRange(10, this.elements.image.clientWidth - this.boxStartX - 5)
this.boundsY = new ClipRange(10, this.elements.image.clientHeight - this.boxStartY - 5)
this.dragging = true
this.clearDocumentObservers()
this.addDocumentObserver("mousemove", this.resize.bindAsEventListener(this))
this.addDocumentObserver("mouseup", this.resizeStop.bindAsEventListener(this))
e.stop()
this.bodyHide()
},
  // Stop resizing the note
resizeStop: function(e) {
if (Note.debug) {
console.debug("Note#resizeStop (id=%d)", this.id)
}
this.clearDocumentObservers()
this.boxCursorStartX = null
this.boxCursorStartY = null
this.boxStartWidth = null
this.boxStartHeight = null
this.boxStartX = null
this.boxStartY = null
this.boundsX = null
this.boundsY = null
this.dragging = false
e.stop()
},
// Update the note's dimensions as it gets resized
resize: function(e) {
var width = this.boxStartWidth + e.pointerX() - this.cursorStartX
var height = this.boxStartHeight + e.pointerY() - this.cursorStartY
width = this.boundsX.clip(width)
height = this.boundsY.clip(height)
this.elements.box.style.width = width + "px"
this.elements.box.style.height = height + "px"
var ratio = this.ratio()
this.fullsize.width = width / ratio
this.fullsize.height = height / ratio
e.stop()
},
// Save the note to the database
save: function(e) {
if (Note.debug) {
console.debug("Note#save (id=%d)", this.id)
}
var note = this
    for (var p in this.fullsize) {
this.old[p] = this.fullsize[p]
}
this.old.raw_body = $('edit-box-text').value
this.old.formatted_body = this.textValue()
// FIXME: this is not quite how the note will look (filtered elems, <tn>...). the user won't input a <script> that only damages him, but it might be nice to "preview" the <tn> here
this.elements.body.update(this.textValue())
this.hideEditBox(e)
this.bodyHide()
this.bodyfit = false
var params = {
"id": this.id,
"note[x]": this.old.left,
"note[y]": this.old.top,
"note[width]": this.old.width,
"note[height]": this.old.height,
"note[body]": this.old.raw_body
}
if (this.is_new) {
params["note[post_id]"] = Note.post_id
}
notice("Saving note...")
new Ajax.Request('/note/update.json', {
parameters: params,
onComplete: function(resp) {
var resp = resp.responseJSON
if (resp.success) {
notice("Note saved")
var note = Note.find(resp.old_id)
if (resp.old_id < 0) {
note.is_new = false
note.id = resp.new_id
note.elements.box.id = 'note-box-' + note.id
note.elements.body.id = 'note-body-' + note.id
note.elements.corner.id = 'note-corner-' + note.id
}
note.elements.body.innerHTML = resp.formatted_body
note.elements.box.setOpacity(0.5)
note.elements.box.removeClassName('unsaved')
} else {
notice("Error: " + resp.reason)
note.elements.box.addClassName('unsaved')
}
}
})
e.stop()
},
// Revert the note to the last saved state
cancel: function(e) {
if (Note.debug) {
console.debug("Note#cancel (id=%d)", this.id)
}
this.hideEditBox(e)
this.bodyHide()
var ratio = this.ratio()
    for (var p in this.fullsize) {
this.fullsize[p] = this.old[p]
this.elements.box.style[p] = this.fullsize[p] * ratio + 'px'
}
this.elements.body.innerHTML = this.old.formatted_body
e.stop()
},
// Remove all references to the note from the page
removeCleanup: function() {
if (Note.debug) {
console.debug("Note#removeCleanup (id=%d)", this.id)
}
this.elements.box.remove()
this.elements.body.remove()
var allTemp = []
    for (var i=0; i<Note.all.length; ++i) {
if (Note.all[i].id != this.id) {
allTemp.push(Note.all[i])
}
}
Note.all = allTemp
Note.updateNoteCount()
},
// Removes a note from the database
remove: function(e) {
if (Note.debug) {
console.debug("Note#remove (id=%d)", this.id)
}
this.hideEditBox(e)
this.bodyHide()
    var this_note = this
if (this.is_new) {
this.removeCleanup()
notice("Note removed")
} else {
notice("Removing note...")
new Ajax.Request('/note/update.json', {
parameters: {
"id": this.id,
"note[is_active]": "0"
},
onComplete: function(resp) {
var resp = resp.responseJSON
if (resp.success) {
notice("Note removed")
this_note.removeCleanup()
} else {
notice("Error: " + resp.reason)
}
}
})
}
e.stop()
},
// Redirect to the note's history
history: function(e) {
if (Note.debug) {
console.debug("Note#history (id=%d)", this.id)
}
this.hideEditBox(e)
if (this.is_new) {
notice("This note has no history")
} else {
location.href = '/history?search=notes:' + this.id
}
e.stop()
}
})
// The following are class methods and variables
Object.extend(Note, {
zindex: 0,
counter: -1,
all: [],
display: true,
debug: false,
// Show all notes
show: function() {
if (Note.debug) {
console.debug("Note.show")
}
$("note-container").show()
},
// Hide all notes
hide: function() {
if (Note.debug) {
console.debug("Note.hide")
}
$("note-container").hide()
},
// Find a note instance based on the id number
find: function(id) {
if (Note.debug) {
console.debug("Note.find")
}
for (var i=0; i<Note.all.size(); ++i) {
if (Note.all[i].id == id) {
return Note.all[i]
}
}
return null
},
// Toggle the display of all notes
toggle: function() {
if (Note.debug) {
console.debug("Note.toggle")
}
if (Note.display) {
Note.hide()
Note.display = false
} else {
Note.show()
Note.display = true
}
},
// Update the text displaying the number of notes a post has
updateNoteCount: function() {
if (Note.debug) {
console.debug("Note.updateNoteCount")
}
if (Note.all.length > 0) {
var label = ""
if (Note.all.length == 1)
label = "note"
else
label = "notes"
$('note-count').innerHTML = "This post has <a href=\"/note/history?post_id=" + Note.post_id + "\">" + Note.all.length + " " + label + "</a>"
} else {
$('note-count').innerHTML = ""
}
},
// Create a new note
create: function() {
if (Note.debug) {
console.debug("Note.create")
}
Note.show()
var insertion_position = Note.getInsertionPosition()
var top = insertion_position[0]
var left = insertion_position[1]
var html = ''
html += '<div class="note-box unsaved" style="width: 150px; height: 150px; '
html += 'top: ' + top + 'px; '
html += 'left: ' + left + 'px;" '
html += 'id="note-box-' + Note.counter + '">'
html += '<div class="note-corner" id="note-corner-' + Note.counter + '"></div>'
html += '</div>'
html += '<div class="note-body" title="Click to edit" id="note-body-' + Note.counter + '"></div>'
$("note-container").insert({bottom: html})
var note = new Note(Note.counter, true, "")
Note.all.push(note)
Note.counter -= 1
},
// Find a suitable position to insert new notes
getInsertionPosition: function() {
if (Note.debug) {
console.debug("Note.getInsertionPosition")
}
// We want to show the edit box somewhere on the screen, but not outside the image.
var scroll_x = $("image").cumulativeScrollOffset()[0]
var scroll_y = $("image").cumulativeScrollOffset()[1]
var image_left = $("image").positionedOffset()[0]
var image_top = $("image").positionedOffset()[1]
var image_right = image_left + $("image").width
var image_bottom = image_top + $("image").height
var left = 0
var top = 0
if (scroll_x > image_left) {
left = scroll_x
} else {
left = image_left
}
if (scroll_y > image_top) {
top = scroll_y
} else {
top = image_top + 20
}
if (top > image_bottom) {
top = image_top + 20
}
return [top, left]
}
})
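// Illustrative usage sketch (element ids and the post id are assumptions; the
// page is expected to already contain #image, #note-container and #note-count):
//
//   Note.post_id = 1234;   // post the notes belong to
//   Note.create();         // add a new, unsaved note box for editing
//   Note.toggle();         // show or hide all note overlays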
| isc |
decred/dcrd | blockchain/indexers/indexsubscriber.go | 10267 | // Copyright (c) 2021 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package indexers
import (
"context"
"fmt"
"sync"
"sync/atomic"
"github.com/decred/dcrd/blockchain/v4/internal/progresslog"
"github.com/decred/dcrd/database/v3"
"github.com/decred/dcrd/dcrutil/v4"
)
// IndexNtfnType represents an index notification type.
type IndexNtfnType int
const (
// ConnectNtfn indicates the index notification signals a block
// connected to the main chain.
ConnectNtfn IndexNtfnType = iota
// DisconnectNtfn indicates the index notification signals a block
// disconnected from the main chain.
DisconnectNtfn
)
var (
// bufferSize represents the index notification buffer size.
bufferSize = 128
// noPrereqs indicates no index prerequisites.
noPrereqs = "none"
)
// IndexNtfn represents an index notification detailing a block connection
// or disconnection.
type IndexNtfn struct {
NtfnType IndexNtfnType
Block *dcrutil.Block
Parent *dcrutil.Block
PrevScripts PrevScripter
IsTreasuryEnabled bool
Done chan bool
}
// IndexSubscription represents a subscription for index updates.
type IndexSubscription struct {
id string
idx Indexer
subscriber *IndexSubscriber
mtx sync.Mutex
// prerequisite defines the notification processing hierarchy for this
// subscription. It is expected that the subscriber associated with the
// prerequisite provided processes notifications before they are
// delivered by this subscription to its subscriber. An empty string
// indicates the subscription has no prerequisite.
prerequisite string
// dependent defines the index subscription that requires the subscriber
// associated with this subscription to have processed incoming
// notifications before it does. A nil dependency indicates the subscription
// has no dependencies.
dependent *IndexSubscription
}
// newIndexSubscription initializes a new index subscription.
func newIndexSubscription(subber *IndexSubscriber, indexer Indexer, prereq string) *IndexSubscription {
return &IndexSubscription{
id: indexer.Name(),
idx: indexer,
prerequisite: prereq,
subscriber: subber,
}
}
// stop prevents any future index updates from being delivered and
// unsubscribes the associated subscription.
func (s *IndexSubscription) stop() error {
// If the subscription has a prerequisite, find it and remove the
// subscription as a dependency.
if s.prerequisite != noPrereqs {
s.mtx.Lock()
prereq, ok := s.subscriber.subscriptions[s.prerequisite]
s.mtx.Unlock()
if !ok {
return fmt.Errorf("no subscription found with id %s", s.prerequisite)
}
prereq.mtx.Lock()
prereq.dependent = nil
prereq.mtx.Unlock()
return nil
}
// If the subscription has a dependent, stop it as well.
if s.dependent != nil {
err := s.dependent.stop()
if err != nil {
return err
}
}
// If the subscription is independent, remove it from the
// index subscriber's subscriptions.
s.mtx.Lock()
delete(s.subscriber.subscriptions, s.id)
s.mtx.Unlock()
return nil
}
// IndexSubscriber subscribes clients for index updates.
type IndexSubscriber struct {
subscribers uint32 // update atomically.
c chan IndexNtfn
subscriptions map[string]*IndexSubscription
mtx sync.Mutex
ctx context.Context
cancel context.CancelFunc
quit chan struct{}
}
// NewIndexSubscriber creates a new index subscriber. It also starts the
// handler for incoming index update subscriptions.
func NewIndexSubscriber(sCtx context.Context) *IndexSubscriber {
ctx, cancel := context.WithCancel(sCtx)
s := &IndexSubscriber{
c: make(chan IndexNtfn, bufferSize),
subscriptions: make(map[string]*IndexSubscription),
ctx: ctx,
cancel: cancel,
quit: make(chan struct{}),
}
return s
}
// Subscribe subscribes an index for updates. The returned index subscription
// has functions to retrieve a channel that produces a stream of index updates
// and to stop the stream when the caller no longer wishes to receive updates.
func (s *IndexSubscriber) Subscribe(index Indexer, prerequisite string) (*IndexSubscription, error) {
sub := newIndexSubscription(s, index, prerequisite)
// If the subscription has a prerequisite, find it and set the subscription
// as a dependency.
if prerequisite != noPrereqs {
s.mtx.Lock()
prereq, ok := s.subscriptions[prerequisite]
s.mtx.Unlock()
if !ok {
return nil, fmt.Errorf("no subscription found with id %s", prerequisite)
}
prereq.mtx.Lock()
defer prereq.mtx.Unlock()
if prereq.dependent != nil {
return nil, fmt.Errorf("%s already has a dependent set: %s",
prereq.id, prereq.dependent.id)
}
prereq.dependent = sub
atomic.AddUint32(&s.subscribers, 1)
return sub, nil
}
// If the subscription does not have a prerequisite, add it to the index
// subscriber's subscriptions.
s.mtx.Lock()
s.subscriptions[sub.id] = sub
s.mtx.Unlock()
atomic.AddUint32(&s.subscribers, 1)
return sub, nil
}
// Notify relays an index notification to subscribed indexes for processing.
func (s *IndexSubscriber) Notify(ntfn *IndexNtfn) {
subscribers := atomic.LoadUint32(&s.subscribers)
// Only relay notifications when there are subscribed indexes
// to be notified.
if subscribers > 0 {
select {
case <-s.quit:
case s.c <- *ntfn:
}
}
}
// findLowestIndexTipHeight determines the lowest index tip height among
// subscribed indexes and their dependencies.
func (s *IndexSubscriber) findLowestIndexTipHeight(queryer ChainQueryer) (int64, int64, error) {
// Find the lowest tip height to catch up among subscribed indexes.
bestHeight, _ := queryer.Best()
lowestHeight := bestHeight
for _, sub := range s.subscriptions {
tipHeight, tipHash, err := sub.idx.Tip()
if err != nil {
return 0, bestHeight, err
}
// Ensure the index tip is on the main chain.
if !queryer.MainChainHasBlock(tipHash) {
return 0, bestHeight, fmt.Errorf("%s: index tip (%s) is not on the "+
"main chain", sub.idx.Name(), tipHash)
}
if tipHeight < lowestHeight {
lowestHeight = tipHeight
}
// Update the lowest tip height if a dependent has a lower tip height.
dependent := sub.dependent
for dependent != nil {
tipHeight, _, err := sub.dependent.idx.Tip()
if err != nil {
return 0, bestHeight, err
}
if tipHeight < lowestHeight {
lowestHeight = tipHeight
}
dependent = dependent.dependent
}
}
return lowestHeight, bestHeight, nil
}
// CatchUp syncs all subscribed indexes to the main chain by connecting
// blocks from after the lowest index tip to the current main chain tip.
//
// This should be called after all indexes have subscribed for updates.
func (s *IndexSubscriber) CatchUp(ctx context.Context, db database.DB, queryer ChainQueryer) error {
lowestHeight, bestHeight, err := s.findLowestIndexTipHeight(queryer)
if err != nil {
return err
}
// Nothing to do if all indexes are synced.
if bestHeight == lowestHeight {
return nil
}
// Create a progress logger for the indexing process below.
progressLogger := progresslog.NewBlockProgressLogger("Indexed", log)
// At this point, one or more of the subscribed indexes are behind the current
// tip and need to be caught up, so log the details and loop through
// each block that needs to be indexed.
log.Infof("Catching up from height %d to %d", lowestHeight,
bestHeight)
var cachedParent *dcrutil.Block
for height := lowestHeight + 1; height <= bestHeight; height++ {
if interruptRequested(ctx) {
return indexerError(ErrInterruptRequested, interruptMsg)
}
hash, err := queryer.BlockHashByHeight(height)
if err != nil {
return err
}
// Ensure the next tip hash is on the main chain.
if !queryer.MainChainHasBlock(hash) {
msg := fmt.Sprintf("the next block being synced to (%s) "+
"at height %d is not on the main chain", hash, height)
return indexerError(ErrBlockNotOnMainChain, msg)
}
var parent *dcrutil.Block
if cachedParent == nil && height > 0 {
parentHash, err := queryer.BlockHashByHeight(height - 1)
if err != nil {
return err
}
parent, err = queryer.BlockByHash(parentHash)
if err != nil {
return err
}
} else {
parent = cachedParent
}
child, err := queryer.BlockByHash(hash)
if err != nil {
return err
}
// Construct and send the index notification.
var prevScripts PrevScripter
err = db.View(func(dbTx database.Tx) error {
if interruptRequested(ctx) {
return indexerError(ErrInterruptRequested, interruptMsg)
}
prevScripts, err = queryer.PrevScripts(dbTx, child)
if err != nil {
return err
}
return nil
})
if err != nil {
return err
}
isTreasuryEnabled, err := queryer.IsTreasuryAgendaActive(parent.Hash())
if err != nil {
return err
}
ntfn := &IndexNtfn{
NtfnType: ConnectNtfn,
Block: child,
Parent: parent,
PrevScripts: prevScripts,
IsTreasuryEnabled: isTreasuryEnabled,
}
// Relay the index update to subscribed indexes.
for _, sub := range s.subscriptions {
err := updateIndex(ctx, sub.idx, ntfn)
if err != nil {
s.cancel()
return err
}
}
cachedParent = child
progressLogger.LogBlockHeight(child.MsgBlock(), parent.MsgBlock())
}
log.Infof("Caught up to height %d", bestHeight)
return nil
}
// Run relays index notifications to subscribed indexes.
//
// This should be run as a goroutine.
func (s *IndexSubscriber) Run(ctx context.Context) {
for {
select {
case ntfn := <-s.c:
// Relay the index update to subscribed indexes.
for _, sub := range s.subscriptions {
err := updateIndex(ctx, sub.idx, &ntfn)
if err != nil {
log.Error(err)
s.cancel()
break
}
}
if ntfn.Done != nil {
close(ntfn.Done)
}
case <-ctx.Done():
log.Infof("Index subscriber shutting down")
close(s.quit)
// Stop all updates to subscribed indexes and terminate their
// processes.
for _, sub := range s.subscriptions {
err := sub.stop()
if err != nil {
log.Error("unable to stop index subscription: %v", err)
}
}
s.cancel()
return
}
}
}
| isc |
Brawl345/WiiDataDownloader-Module | HackMii Installer herunterladen.bat | 521 | @echo off
CLS
%header%
echo.
if not exist "Programme\HackMii Installer" mkdir "Programme\HackMii Installer"
echo Downloade den HackMii Installer...
start /min/wait Support\wget -c -l1 -r -nd --retr-symlinks -t10 -T30 --random-wait --reject "*.html" --reject "%2A" --reject "get.php@file=hackmii_installer_v1.0*" "http://bootmii.org/download/"
move get.php* hackmii-installer_v1.2.zip >NUL
start /min/wait Support\7za e -aoa hackmii-installer_v1.2.zip -o"Programme\HackMii Installer" *.elf -r
del hackmii*
:endedesmoduls | isc |
decred/decrediton | app/i18n/docs/pl/InfoModals/GapLimit.md | 1305 | # Gap Limit
**Warning! The gap limit setting should not be changed.** Increasing the gap limit can cause a significant drop in performance.
The gap limit determines the number of addresses that the wallet will generate and look ahead through to determine usage. By default, the gap limit is set to 20. This means two things:
1. When the wallet loads for the first time, it scans for addresses in use and expects the largest gap between addresses to be 20;
2. When the user receives newly generated addresses, only 20 of them are issued at a time, after which the wallet repeats the operation, so the gaps between addresses are never larger than 20.
There are really only two cases in which this value should be changed:
1. If your wallet was created and used heavily before v1.0, it may have large address gaps. If you are restoring a wallet from seed and notice that funds are missing, you can increase the setting to 100 (then 1000 if the problem persists) and then restart Decrediton. Once the funds are restored, you can go back to 20.
2. If you want to be able to generate more than 20 addresses at a time, without queuing.
| isc |
k0gaMSX/scc | cc2/target/qbe/arch.h | 1127 | enum asmop {
ASNOP = 0,
ASSTB,
ASSTH,
ASSTW,
ASSTL,
ASSTM,
ASSTS,
ASSTD,
ASLDSB,
ASLDUB,
ASLDSH,
ASLDUH,
ASLDSW,
ASLDUW,
ASLDL,
ASLDS,
ASLDD,
ASADDW,
ASSUBW,
ASMULW,
ASMODW,
ASUMODW,
ASDIVW,
ASUDIVW,
ASSHLW,
ASSHRW,
ASUSHRW,
ASLTW,
ASULTW,
ASGTW,
ASUGTW,
ASLEW,
ASULEW,
ASGEW,
ASUGEW,
ASEQW,
ASNEW,
ASBANDW,
ASBORW,
ASBXORW,
ASADDL,
ASSUBL,
ASMULL,
ASMODL,
ASUMODL,
ASDIVL,
ASUDIVL,
ASSHLL,
ASSHRL,
ASUSHRL,
ASLTL,
ASULTL,
ASGTL,
ASUGTL,
ASLEL,
ASULEL,
ASGEL,
ASUGEL,
ASEQL,
ASNEL,
ASBANDL,
ASBORL,
ASBXORL,
ASADDS,
ASSUBS,
ASMULS,
ASDIVS,
ASLTS,
ASGTS,
ASLES,
ASGES,
ASEQS,
ASNES,
ASADDD,
ASSUBD,
ASMULD,
ASDIVD,
ASLTD,
ASGTD,
ASLED,
ASGED,
ASEQD,
ASNED,
ASEXTBW,
ASUEXTBW,
ASEXTBL,
ASUEXTBL,
ASEXTHW,
ASUEXTHW,
ASEXTHL,
ASUEXTHL,
ASEXTWL,
ASUEXTWL,
ASSTOL,
ASSTOW,
ASDTOL,
ASDTOW,
ASSWTOD,
ASSWTOS,
ASSLTOD,
ASSLTOS,
ASEXTS,
ASTRUNCD,
ASJMP,
ASBRANCH,
ASRET,
ASCALL,
ASCALLE,
ASCALLEX,
ASPAR,
ASPARE,
ASALLOC,
ASFORM,
ASCOPYB,
ASCOPYH,
ASCOPYW,
ASCOPYL,
ASCOPYS,
ASCOPYD,
ASVSTAR,
ASVARG,
};
| isc |
Dirbaio/btcd | wire/msgblock_test.go | 28011 | // Copyright (c) 2013-2015 The btcsuite developers
// Copyright (c) 2015 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package wire_test
import (
"bytes"
"io"
"reflect"
"testing"
"time"
"github.com/davecgh/go-spew/spew"
"github.com/decred/dcrd/chaincfg/chainhash"
"github.com/decred/dcrd/wire"
"github.com/decred/dcrutil"
)
// TestBlock tests the MsgBlock API.
func TestBlock(t *testing.T) {
pver := wire.ProtocolVersion
// Test block header.
bh := wire.NewBlockHeader(
int32(pver), // Version
&testBlock.Header.PrevBlock, // PrevHash
&testBlock.Header.MerkleRoot, // MerkleRoot
&testBlock.Header.StakeRoot, // StakeRoot
uint16(0x0000), // VoteBits
[6]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, // FinalState
uint16(0x0000), // Voters
uint8(0x00), // FreshStake
uint8(0x00), // Revocations
uint32(0), // Poolsize
testBlock.Header.Bits, // Bits
int64(0x0000000000000000), // Sbits
uint32(1), // Height
uint32(1), // Size
testBlock.Header.Nonce, // Nonce
[36]byte{}, // ExtraData
)
// Ensure the command is expected value.
wantCmd := "block"
msg := wire.NewMsgBlock(bh)
if cmd := msg.Command(); cmd != wantCmd {
t.Errorf("NewMsgBlock: wrong command - got %v want %v",
cmd, wantCmd)
}
// Ensure max payload is expected value for latest protocol version.
// Num addresses (varInt) + max allowed addresses.
wantPayload := uint32(1000000)
maxPayload := msg.MaxPayloadLength(pver)
if maxPayload != wantPayload {
t.Errorf("MaxPayloadLength: wrong max payload length for "+
"protocol version %d - got %v, want %v", pver,
maxPayload, wantPayload)
}
// Ensure we get the same block header data back out.
if !reflect.DeepEqual(&msg.Header, bh) {
t.Errorf("NewMsgBlock: wrong block header - got %v, want %v",
spew.Sdump(&msg.Header), spew.Sdump(bh))
}
// Ensure transactions are added properly.
tx := testBlock.Transactions[0].Copy()
msg.AddTransaction(tx)
if !reflect.DeepEqual(msg.Transactions, testBlock.Transactions) {
t.Errorf("AddTransaction: wrong transactions - got %v, want %v",
spew.Sdump(msg.Transactions),
spew.Sdump(testBlock.Transactions))
}
// Ensure transactions are properly cleared.
msg.ClearTransactions()
if len(msg.Transactions) != 0 {
t.Errorf("ClearTransactions: wrong transactions - got %v, want %v",
len(msg.Transactions), 0)
}
// Ensure stake transactions are added properly.
stx := testBlock.STransactions[0].Copy()
msg.AddSTransaction(stx)
if !reflect.DeepEqual(msg.STransactions, testBlock.STransactions) {
t.Errorf("AddSTransaction: wrong transactions - got %v, want %v",
spew.Sdump(msg.STransactions),
spew.Sdump(testBlock.STransactions))
}
// Ensure transactions are properly cleared.
msg.ClearSTransactions()
if len(msg.STransactions) != 0 {
t.Errorf("ClearTransactions: wrong transactions - got %v, want %v",
len(msg.STransactions), 0)
}
return
}
// TestBlockTxShas tests the ability to generate a slice of all transaction
// hashes from a block accurately.
func TestBlockTxShas(t *testing.T) {
// Block 1, transaction 1 hash.
hashStr := "55a25248c04dd8b6599ca2a708413c00d79ae90ce075c54e8a967a647d7e4bea"
wantHash, err := chainhash.NewHashFromStr(hashStr)
if err != nil {
t.Errorf("NewShaHashFromStr: %v", err)
return
}
wantShas := []chainhash.Hash{*wantHash}
shas := testBlock.TxShas()
if !reflect.DeepEqual(shas, wantShas) {
t.Errorf("TxShas: wrong transaction hashes - got %v, want %v",
spew.Sdump(shas), spew.Sdump(wantShas))
}
}
// TestBlockSTxShas tests the ability to generate a slice of all stake transaction
// hashes from a block accurately.
func TestBlockSTxShas(t *testing.T) {
// Block 1, transaction 1 hash.
hashStr := "ae208a69f3ee088d0328126e3d9bef7652b108d1904f27b166c5999233a801d4"
wantHash, err := chainhash.NewHashFromStr(hashStr)
if err != nil {
t.Errorf("NewShaHashFromStr: %v", err)
return
}
wantShas := []chainhash.Hash{*wantHash}
shas := testBlock.STxShas()
if !reflect.DeepEqual(shas, wantShas) {
t.Errorf("STxShas: wrong transaction hashes - got %v, want %v",
spew.Sdump(shas), spew.Sdump(wantShas))
}
}
// TestBlockSha tests the ability to generate the hash of a block accurately.
func TestBlockSha(t *testing.T) {
// Block 1 hash.
hashStr := "152437dada95368c42b19febc1702939fa9c1ccdb6fd7284e5b7a19d8fe6df7a"
wantHash, err := chainhash.NewHashFromStr(hashStr)
if err != nil {
t.Errorf("NewShaHashFromStr: %v", err)
}
// Ensure the hash produced is expected.
blockHash := testBlock.BlockSha()
if !blockHash.IsEqual(wantHash) {
t.Errorf("BlockSha: wrong hash - got %v, want %v",
spew.Sprint(blockHash), spew.Sprint(wantHash))
}
}
// TestBlockWire tests the MsgBlock wire encode and decode for various numbers
// of transaction inputs and outputs and protocol versions.
func TestBlockWire(t *testing.T) {
tests := []struct {
in *wire.MsgBlock // Message to encode
out *wire.MsgBlock // Expected decoded message
buf []byte // Wire encoding
txLocs []wire.TxLoc // Expected transaction locations
sTxLocs []wire.TxLoc // Expected stake transaction locations
pver uint32 // Protocol version for wire encoding
}{
// Latest protocol version.
{
&testBlock,
&testBlock,
testBlockBytes,
testBlockTxLocs,
testBlockSTxLocs,
wire.ProtocolVersion,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Encode the message to wire format.
var buf bytes.Buffer
err := test.in.BtcEncode(&buf, test.pver)
if err != nil {
t.Errorf("BtcEncode #%d error %v", i, err)
continue
}
if !bytes.Equal(buf.Bytes(), test.buf) {
t.Errorf("BtcEncode #%d\n got: %s want: %s", i,
spew.Sdump(buf.Bytes()), spew.Sdump(test.buf))
continue
}
// Decode the message from wire format.
var msg wire.MsgBlock
rbuf := bytes.NewReader(test.buf)
err = msg.BtcDecode(rbuf, test.pver)
if err != nil {
t.Errorf("BtcDecode #%d error %v", i, err)
continue
}
if !reflect.DeepEqual(&msg, test.out) {
t.Errorf("BtcDecode #%d\n got: %s want: %s", i,
spew.Sdump(&msg), spew.Sdump(test.out))
continue
}
}
}
// TestBlockWireErrors performs negative tests against wire encode and decode
// of MsgBlock to confirm error paths work correctly.
func TestBlockWireErrors(t *testing.T) {
// Use protocol version 60002 specifically here instead of the latest
// because the test data is using bytes encoded with that protocol
// version.
pver := uint32(60002)
tests := []struct {
in *wire.MsgBlock // Value to encode
buf []byte // Wire encoding
pver uint32 // Protocol version for wire encoding
max int // Max size of fixed buffer to induce errors
writeErr error // Expected write error
readErr error // Expected read error
}{ // Force error in version.
{&testBlock, testBlockBytes, pver, 0, io.ErrShortWrite, io.EOF}, // 0
// Force error in prev block hash.
{&testBlock, testBlockBytes, pver, 4, io.ErrShortWrite, io.EOF}, // 1
// Force error in merkle root.
{&testBlock, testBlockBytes, pver, 36, io.ErrShortWrite, io.EOF}, // 2
// Force error in stake root.
{&testBlock, testBlockBytes, pver, 68, io.ErrShortWrite, io.EOF}, // 3
// Force error in vote bits.
{&testBlock, testBlockBytes, pver, 100, io.ErrShortWrite, io.EOF}, // 4
// Force error in finalState.
{&testBlock, testBlockBytes, pver, 102, io.ErrShortWrite, io.EOF}, // 5
// Force error in voters.
{&testBlock, testBlockBytes, pver, 108, io.ErrShortWrite, io.EOF}, // 6
// Force error in freshstake.
{&testBlock, testBlockBytes, pver, 110, io.ErrShortWrite, io.EOF}, // 7
// Force error in revocations.
{&testBlock, testBlockBytes, pver, 111, io.ErrShortWrite, io.EOF}, // 8
// Force error in poolsize.
{&testBlock, testBlockBytes, pver, 112, io.ErrShortWrite, io.EOF}, // 9
// Force error in difficulty bits.
{&testBlock, testBlockBytes, pver, 116, io.ErrShortWrite, io.EOF}, // 10
// Force error in stake difficulty bits.
{&testBlock, testBlockBytes, pver, 120, io.ErrShortWrite, io.EOF}, // 11
// Force error in height.
{&testBlock, testBlockBytes, pver, 128, io.ErrShortWrite, io.EOF}, // 12
// Force error in size.
{&testBlock, testBlockBytes, pver, 132, io.ErrShortWrite, io.EOF}, // 13
// Force error in timestamp.
{&testBlock, testBlockBytes, pver, 136, io.ErrShortWrite, io.EOF}, // 14
// Force error in nonce.
{&testBlock, testBlockBytes, pver, 140, io.ErrShortWrite, io.EOF}, // 15
// Force error in tx count.
{&testBlock, testBlockBytes, pver, 180, io.ErrShortWrite, io.EOF}, // 16
// Force error in tx.
{&testBlock, testBlockBytes, pver, 181, io.ErrShortWrite, io.EOF}, // 17
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Encode to wire format.
w := newFixedWriter(test.max)
err := test.in.BtcEncode(w, test.pver)
if err != test.writeErr {
t.Errorf("BtcEncode #%d wrong error got: %v, want: %v",
i, err, test.writeErr)
continue
}
// Decode from wire format.
var msg wire.MsgBlock
r := newFixedReader(test.max, test.buf)
err = msg.BtcDecode(r, test.pver)
if err != test.readErr {
t.Errorf("BtcDecode #%d wrong error got: %v, want: %v",
i, err, test.readErr)
continue
}
}
}
// TestBlockSerialize tests MsgBlock serialize and deserialize.
func TestBlockSerialize(t *testing.T) {
tests := []struct {
in *wire.MsgBlock // Message to encode
out *wire.MsgBlock // Expected decoded message
buf []byte // Serialized data
txLocs []wire.TxLoc // Expected transaction locations
sTxLocs []wire.TxLoc // Expected stake transaction locations
}{
{
&testBlock,
&testBlock,
testBlockBytes,
testBlockTxLocs,
testBlockSTxLocs,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Serialize the block.
var buf bytes.Buffer
err := test.in.Serialize(&buf)
if err != nil {
t.Errorf("Serialize #%d error %v", i, err)
continue
}
if !bytes.Equal(buf.Bytes(), test.buf) {
t.Errorf("Serialize #%d\n got: %s want: %s", i,
spew.Sdump(buf.Bytes()), spew.Sdump(test.buf))
continue
}
// Deserialize the block.
var block wire.MsgBlock
rbuf := bytes.NewReader(test.buf)
err = block.Deserialize(rbuf)
if err != nil {
t.Errorf("Deserialize #%d error %v", i, err)
continue
}
if !reflect.DeepEqual(&block, test.out) {
t.Errorf("Deserialize #%d\n got: %s want: %s", i,
spew.Sdump(&block), spew.Sdump(test.out))
continue
}
// Deserialize the block while gathering transaction location
// information.
var txLocBlock wire.MsgBlock
br := bytes.NewBuffer(test.buf)
txLocs, sTxLocs, err := txLocBlock.DeserializeTxLoc(br)
if err != nil {
t.Errorf("DeserializeTxLoc #%d error %v", i, err)
continue
}
if !reflect.DeepEqual(&txLocBlock, test.out) {
t.Errorf("DeserializeTxLoc #%d\n got: %s want: %s", i,
spew.Sdump(&txLocBlock), spew.Sdump(test.out))
continue
}
if !reflect.DeepEqual(txLocs, test.txLocs) {
t.Errorf("DeserializeTxLoc #%d\n got: %s want: %s", i,
spew.Sdump(txLocs), spew.Sdump(test.txLocs))
continue
}
if !reflect.DeepEqual(sTxLocs, test.sTxLocs) {
t.Errorf("DeserializeTxLoc, sTxLocs #%d\n got: %s want: %s", i,
spew.Sdump(sTxLocs), spew.Sdump(test.sTxLocs))
continue
}
}
}
// TestBlockSerializeErrors performs negative tests against wire encode and
// decode of MsgBlock to confirm error paths work correctly.
func TestBlockSerializeErrors(t *testing.T) {
tests := []struct {
in *wire.MsgBlock // Value to encode
buf []byte // Serialized data
max int // Max size of fixed buffer to induce errors
writeErr error // Expected write error
readErr error // Expected read error
}{
{&testBlock, testBlockBytes, 0, io.ErrShortWrite, io.EOF}, // 0
// Force error in prev block hash.
{&testBlock, testBlockBytes, 4, io.ErrShortWrite, io.EOF}, // 1
// Force error in merkle root.
{&testBlock, testBlockBytes, 36, io.ErrShortWrite, io.EOF}, // 2
// Force error in stake root.
{&testBlock, testBlockBytes, 68, io.ErrShortWrite, io.EOF}, // 3
// Force error in vote bits.
{&testBlock, testBlockBytes, 100, io.ErrShortWrite, io.EOF}, // 4
// Force error in finalState.
{&testBlock, testBlockBytes, 102, io.ErrShortWrite, io.EOF}, // 5
// Force error in voters.
{&testBlock, testBlockBytes, 108, io.ErrShortWrite, io.EOF}, // 8
// Force error in freshstake.
{&testBlock, testBlockBytes, 110, io.ErrShortWrite, io.EOF}, // 9
// Force error in revocations.
{&testBlock, testBlockBytes, 111, io.ErrShortWrite, io.EOF}, // 10
// Force error in poolsize.
{&testBlock, testBlockBytes, 112, io.ErrShortWrite, io.EOF}, // 11
// Force error in difficulty bits.
{&testBlock, testBlockBytes, 116, io.ErrShortWrite, io.EOF}, // 12
// Force error in stake difficulty bits.
{&testBlock, testBlockBytes, 120, io.ErrShortWrite, io.EOF}, // 13
// Force error in height.
{&testBlock, testBlockBytes, 128, io.ErrShortWrite, io.EOF}, // 14
// Force error in size.
{&testBlock, testBlockBytes, 132, io.ErrShortWrite, io.EOF}, // 15
// Force error in timestamp.
{&testBlock, testBlockBytes, 136, io.ErrShortWrite, io.EOF}, // 16
// Force error in nonce.
{&testBlock, testBlockBytes, 140, io.ErrShortWrite, io.EOF}, // 17
// Force error in tx count.
{&testBlock, testBlockBytes, 180, io.ErrShortWrite, io.EOF}, // 18
// Force error in tx.
{&testBlock, testBlockBytes, 181, io.ErrShortWrite, io.EOF}, // 19
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Serialize the block.
w := newFixedWriter(test.max)
err := test.in.Serialize(w)
if err != test.writeErr {
t.Errorf("Serialize #%d wrong error got: %v, want: %v",
i, err, test.writeErr)
continue
}
// Deserialize the block.
var block wire.MsgBlock
r := newFixedReader(test.max, test.buf)
err = block.Deserialize(r)
if err != test.readErr {
t.Errorf("Deserialize #%d wrong error got: %v, want: %v",
i, err, test.readErr)
continue
}
var txLocBlock wire.MsgBlock
br := bytes.NewBuffer(test.buf[0:test.max])
_, _, err = txLocBlock.DeserializeTxLoc(br)
if err != test.readErr {
t.Errorf("DeserializeTxLoc #%d wrong error got: %v, want: %v",
i, err, test.readErr)
continue
}
}
}
// TestBlockOverflowErrors performs tests to ensure deserializing blocks which
// are intentionally crafted to use large values for the number of transactions
// are handled properly. This could otherwise potentially be used as an attack
// vector.
func TestBlockOverflowErrors(t *testing.T) {
// Use protocol version 1 specifically here instead of the latest
// protocol version because the test data is using bytes encoded with
// that version.
pver := uint32(1)
tests := []struct {
buf []byte // Wire encoding
pver uint32 // Protocol version for wire encoding
err error // Expected error
}{
// Block that claims to have ~uint64(0) transactions.
{
[]byte{
0x01, 0x00, 0x00, 0x00, // Version 1
0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72,
0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f,
0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c,
0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, // PrevBlock
0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44,
0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67,
0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1,
0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // MerkleRoot
0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44,
0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67,
0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1,
0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // StakeRoot
0x00, 0x00, // VoteBits
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // FinalState
0x00, 0x00, // Voters
0x00, // FreshStake
0x00, // Revocations
0x00, 0x00, 0x00, 0x00, // Poolsize
0xff, 0xff, 0x00, 0x1d, // Bits
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // SBits
0x01, 0x00, 0x00, 0x00, // Height
0x01, 0x00, 0x00, 0x00, // Size
0x61, 0xbc, 0x66, 0x49, // Timestamp
0x01, 0xe3, 0x62, 0x99, // Nonce
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // ExtraData
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, // TxnCount
}, pver, &wire.MessageError{},
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Decode from wire format.
var msg wire.MsgBlock
r := bytes.NewReader(test.buf)
err := msg.BtcDecode(r, test.pver)
if reflect.TypeOf(err) != reflect.TypeOf(test.err) {
t.Errorf("BtcDecode #%d wrong error got: %v, want: %v",
i, err, reflect.TypeOf(test.err))
continue
}
// Deserialize from wire format.
r = bytes.NewReader(test.buf)
err = msg.Deserialize(r)
if reflect.TypeOf(err) != reflect.TypeOf(test.err) {
t.Errorf("Deserialize #%d wrong error got: %v, want: %v",
i, err, reflect.TypeOf(test.err))
continue
}
// Deserialize with transaction location info from wire format.
br := bytes.NewBuffer(test.buf)
_, _, err = msg.DeserializeTxLoc(br)
if reflect.TypeOf(err) != reflect.TypeOf(test.err) {
t.Errorf("DeserializeTxLoc #%d wrong error got: %v, "+
"want: %v", i, err, reflect.TypeOf(test.err))
continue
}
}
}
// TestBlockSerializeSize performs tests to ensure the serialize size for
// various blocks is accurate.
func TestBlockSerializeSize(t *testing.T) {
// Block with no transactions.
noTxBlock := wire.NewMsgBlock(&testBlock.Header)
tests := []struct {
in *wire.MsgBlock // Block to encode
size int // Expected serialized size
}{
// Block with no transactions (header + 2x numtx)
{noTxBlock, 182},
// First block in the mainnet block chain.
{&testBlock, len(testBlockBytes)},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
serializedSize := test.in.SerializeSize()
if serializedSize != test.size {
t.Errorf("MsgBlock.SerializeSize: #%d got: %d, want: "+
"%d", i, serializedSize, test.size)
continue
}
}
}
// testBlock is a basic normative block that is used throughout tests.
var testBlock = wire.MsgBlock{
Header: wire.BlockHeader{
Version: 1,
PrevBlock: chainhash.Hash([chainhash.HashSize]byte{ // Make go vet happy.
0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72,
0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f,
0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c,
0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00,
}),
MerkleRoot: chainhash.Hash([chainhash.HashSize]byte{ // Make go vet happy.
0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44,
0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67,
0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1,
0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e,
}),
StakeRoot: chainhash.Hash([chainhash.HashSize]byte{ // Make go vet happy.
0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44,
0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67,
0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1,
0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e,
}),
VoteBits: uint16(0x0000),
FinalState: [6]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
Voters: uint16(0x0000),
FreshStake: uint8(0x00),
Revocations: uint8(0x00),
PoolSize: uint32(0x00000000), // Poolsize
Bits: 0x1d00ffff, // 486604799
SBits: int64(0x0000000000000000),
Height: uint32(1),
Size: uint32(1),
Timestamp: time.Unix(0x4966bc61, 0), // 2009-01-08 20:54:25 -0600 CST
Nonce: 0x9962e301, // 2573394689
ExtraData: [36]byte{},
},
Transactions: []*wire.MsgTx{
{
Version: 1,
TxIn: []*wire.TxIn{
{
PreviousOutPoint: wire.OutPoint{
Hash: chainhash.Hash{},
Index: 0xffffffff,
Tree: dcrutil.TxTreeRegular,
},
Sequence: 0xffffffff,
ValueIn: 0x1616161616161616,
BlockHeight: 0x17171717,
BlockIndex: 0x18181818,
SignatureScript: []byte{
0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0xf2,
},
},
},
TxOut: []*wire.TxOut{
{
Value: 0x3333333333333333,
Version: 0x9898,
PkScript: []byte{
0x41, // OP_DATA_65
0x04, 0x96, 0xb5, 0x38, 0xe8, 0x53, 0x51, 0x9c,
0x72, 0x6a, 0x2c, 0x91, 0xe6, 0x1e, 0xc1, 0x16,
0x00, 0xae, 0x13, 0x90, 0x81, 0x3a, 0x62, 0x7c,
0x66, 0xfb, 0x8b, 0xe7, 0x94, 0x7b, 0xe6, 0x3c,
0x52, 0xda, 0x75, 0x89, 0x37, 0x95, 0x15, 0xd4,
0xe0, 0xa6, 0x04, 0xf8, 0x14, 0x17, 0x81, 0xe6,
0x22, 0x94, 0x72, 0x11, 0x66, 0xbf, 0x62, 0x1e,
0x73, 0xa8, 0x2c, 0xbf, 0x23, 0x42, 0xc8, 0x58,
0xee, // 65-byte signature
0xac, // OP_CHECKSIG
},
},
},
LockTime: 0x11111111,
Expiry: 0x22222222,
},
},
STransactions: []*wire.MsgTx{
{
Version: 1,
TxIn: []*wire.TxIn{
{
PreviousOutPoint: wire.OutPoint{
Hash: chainhash.Hash{},
Index: 0xffffffff,
Tree: dcrutil.TxTreeStake,
},
Sequence: 0xffffffff,
ValueIn: 0x1313131313131313,
BlockHeight: 0x14141414,
BlockIndex: 0x15151515,
SignatureScript: []byte{
0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0xf2,
},
},
},
TxOut: []*wire.TxOut{
{
Value: 0x3333333333333333,
Version: 0x1212,
PkScript: []byte{
0x41, // OP_DATA_65
0x04, 0x96, 0xb5, 0x38, 0xe8, 0x53, 0x51, 0x9c,
0x72, 0x6a, 0x2c, 0x91, 0xe6, 0x1e, 0xc1, 0x16,
0x00, 0xae, 0x13, 0x90, 0x81, 0x3a, 0x62, 0x7c,
0x66, 0xfb, 0x8b, 0xe7, 0x94, 0x7b, 0xe6, 0x3c,
0x52, 0xda, 0x75, 0x89, 0x37, 0x95, 0x15, 0xd4,
0xe0, 0xa6, 0x04, 0xf8, 0x14, 0x17, 0x81, 0xe6,
0x22, 0x94, 0x72, 0x11, 0x66, 0xbf, 0x62, 0x1e,
0x73, 0xa8, 0x2c, 0xbf, 0x23, 0x42, 0xc8, 0x58,
0xee, // 65-byte signature
0xac, // OP_CHECKSIG
},
},
},
LockTime: 0x11111111,
Expiry: 0x22222222,
},
},
}
// testBlockBytes is the serialized bytes for the above test block (testBlock).
var testBlockBytes = []byte{
// Begin block header
0x01, 0x00, 0x00, 0x00, // Version 1 [0]
0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72,
0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f,
0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c,
0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, // PrevBlock [4]
0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44,
0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67,
0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1,
0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // MerkleRoot [36]
0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44,
0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67,
0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1,
0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // StakeRoot [68]
0x00, 0x00, // VoteBits [100]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // FinalState [102]
0x00, 0x00, // Voters [108]
0x00, // FreshStake [110]
0x00, // Revocations [111]
0x00, 0x00, 0x00, 0x00, // Poolsize [112]
0xff, 0xff, 0x00, 0x1d, // Bits [116]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // SBits [120]
0x01, 0x00, 0x00, 0x00, // Height [128]
0x01, 0x00, 0x00, 0x00, // Size [132]
0x61, 0xbc, 0x66, 0x49, // Timestamp [136]
0x01, 0xe3, 0x62, 0x99, // Nonce [140]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // ExtraData [144]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
// Announce number of txs
0x01, // TxnCount [180]
// Begin bogus normal txs
0x01, 0x00, 0x00, 0x00, // Version [181]
0x01, // Varint for number of transaction inputs [185]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Previous output hash [186]
0xff, 0xff, 0xff, 0xff, // Prevous output index [218]
0x00, // Previous output tree [222]
0xff, 0xff, 0xff, 0xff, // Sequence [223]
0x01, // Varint for number of transaction outputs [227]
0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, // Transaction amount [228]
0x98, 0x98, // Script version
0x43, // Varint for length of pk script
0x41, // OP_DATA_65
0x04, 0x96, 0xb5, 0x38, 0xe8, 0x53, 0x51, 0x9c,
0x72, 0x6a, 0x2c, 0x91, 0xe6, 0x1e, 0xc1, 0x16,
0x00, 0xae, 0x13, 0x90, 0x81, 0x3a, 0x62, 0x7c,
0x66, 0xfb, 0x8b, 0xe7, 0x94, 0x7b, 0xe6, 0x3c,
0x52, 0xda, 0x75, 0x89, 0x37, 0x95, 0x15, 0xd4,
0xe0, 0xa6, 0x04, 0xf8, 0x14, 0x17, 0x81, 0xe6,
0x22, 0x94, 0x72, 0x11, 0x66, 0xbf, 0x62, 0x1e,
0x73, 0xa8, 0x2c, 0xbf, 0x23, 0x42, 0xc8, 0x58,
0xee, // 65-byte signature
0xac, // OP_CHECKSIG
0x11, 0x11, 0x11, 0x11, // Lock time
0x22, 0x22, 0x22, 0x22, // Expiry
0x01, // Varint for number of signatures
0x16, 0x16, 0x16, 0x16, 0x16, 0x16, 0x16, 0x16, // ValueIn
0x17, 0x17, 0x17, 0x17, // BlockHeight
0x18, 0x18, 0x18, 0x18, // BlockIndex
0x07, // SigScript length
0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0xf2, // Signature script (coinbase)
// Announce number of stake txs
0x01, // TxnCount for stake tx
// Begin bogus stake txs
0x01, 0x00, 0x00, 0x00, // Version
0x01, // Varint for number of transaction inputs
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Previous output hash
0xff, 0xff, 0xff, 0xff, // Prevous output index
0x01, // Previous output tree
0xff, 0xff, 0xff, 0xff, // Sequence
0x01, // Varint for number of transaction outputs
0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, // Transaction amount
0x12, 0x12, // Script version
0x43, // Varint for length of pk script
0x41, // OP_DATA_65
0x04, 0x96, 0xb5, 0x38, 0xe8, 0x53, 0x51, 0x9c,
0x72, 0x6a, 0x2c, 0x91, 0xe6, 0x1e, 0xc1, 0x16,
0x00, 0xae, 0x13, 0x90, 0x81, 0x3a, 0x62, 0x7c,
0x66, 0xfb, 0x8b, 0xe7, 0x94, 0x7b, 0xe6, 0x3c,
0x52, 0xda, 0x75, 0x89, 0x37, 0x95, 0x15, 0xd4,
0xe0, 0xa6, 0x04, 0xf8, 0x14, 0x17, 0x81, 0xe6,
0x22, 0x94, 0x72, 0x11, 0x66, 0xbf, 0x62, 0x1e,
0x73, 0xa8, 0x2c, 0xbf, 0x23, 0x42, 0xc8, 0x58,
0xee, // 65-byte signature
0xac, // OP_CHECKSIG
0x11, 0x11, 0x11, 0x11, // Lock time
0x22, 0x22, 0x22, 0x22, // Expiry
0x01, // Varint for number of signatures
0x13, 0x13, 0x13, 0x13, 0x13, 0x13, 0x13, 0x13, // ValueIn
0x14, 0x14, 0x14, 0x14, // BlockHeight
0x15, 0x15, 0x15, 0x15, // BlockIndex
0x07, // SigScript length
0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0xf2, // Signature script (coinbase)
}
// Transaction location information for the test block transactions.
var testBlockTxLocs = []wire.TxLoc{
{TxStart: 181, TxLen: 158},
}
// Transaction location information for the test block stake transactions.
var testBlockSTxLocs = []wire.TxLoc{
{TxStart: 340, TxLen: 158},
}
| isc |
bsander/dJSON | test/dJSON.spec.js | 3391 | describe('dJSON', function () {
'use strict';
var chai = require('chai');
var expect = chai.expect;
var dJSON = require('../lib/dJSON');
var path = 'x.y["q.{r}"].z';
var obj;
beforeEach(function () {
obj = {
x: {
y: {
'q.{r}': {
z: 635
},
q: {
r: {
z: 1
}
}
}
},
'x-y': 5,
falsy: false
};
});
it('gets a value from an object with a path containing properties which contain a period', function () {
expect(dJSON.get(obj, path)).to.equal(635);
expect(dJSON.get(obj, 'x.y.q.r.z')).to.equal(1);
});
it('sets a value from an object with a path containing properties which contain a period', function () {
dJSON.set(obj, path, 17771);
expect(dJSON.get(obj, path)).to.equal(17771);
expect(dJSON.get(obj, 'x.y.q.r.z')).to.equal(1);
});
it('will return undefined when requesting a property with a dash directly', function () {
expect(dJSON.get(obj, 'x-y')).to.be.undefined;
});
it('will return the proper value when requesting a property with a dash by square bracket notation', function () {
expect(dJSON.get(obj, '["x-y"]')).to.equal(5);
});
it('returns a value that is falsy', function () {
expect(dJSON.get(obj, 'falsy')).to.equal(false);
});
it('sets a value that is falsy', function () {
dJSON.set(obj, 'new', false);
expect(dJSON.get(obj, 'new')).to.equal(false);
});
it('uses an empty object as default for the value in the set method', function () {
var newObj = {};
dJSON.set(newObj, 'foo.bar.lorem');
expect(newObj).to.deep.equal({
foo: {
bar: {
lorem: {}
}
}
});
});
it('does not create an object when a path exists as empty string', function () {
var newObj = {
nestedObject: {
anArray: [
'i have a value',
''
]
}
};
var newPath = 'nestedObject.anArray[1]';
dJSON.set(newObj, newPath, 17771);
expect(newObj).to.deep.equal({
nestedObject: {
anArray: [
'i have a value',
17771
]
}
});
});
it('creates an object from a path with a left curly brace', function () {
var newObj = {};
dJSON.set(newObj, path.replace('}', ''), 'foo');
expect(newObj).to.be.deep.equal({
x: {
y: {
'q.{r': {
z: 'foo'
}
}
}
});
});
it('creates an object from a path with a right curly brace', function () {
var newObj = {};
dJSON.set(newObj, path.replace('{', ''), 'foo');
expect(newObj).to.be.deep.equal({
x: {
y: {
'q.r}': {
z: 'foo'
}
}
}
});
});
it('creates an object from a path with curly braces', function () {
var newObj = {};
dJSON.set(newObj, path, 'foo');
expect(newObj).to.be.deep.equal({
x: {
y: {
'q.{r}': {
z: 'foo'
}
}
}
});
});
it('creates an object from a path without curly braces', function () {
var newObj = {};
dJSON.set(newObj, path.replace('{', '').replace('}', ''), 'foo');
expect(newObj).to.be.deep.equal({
x: {
y: {
'q.r': {
z: 'foo'
}
}
}
});
});
});
| isc |
ibara/chargend | chargend.h | 7754 | /*
* 94 shifted lines of 72 ASCII characters.
*/
static const char *characters[] = {
"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefgh",
"\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghi",
"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghij",
"$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijk",
"%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijkl",
"&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklm",
"'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmn",
"()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmno",
")*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnop",
"*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopq",
"+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqr",
",-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrs",
"-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrst",
"./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstu",
"/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuv",
"0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvw",
"123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwx",
"23456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxy",
"3456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz",
"456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{",
"56789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|",
"6789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}",
"789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~",
"89:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!",
"9:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"",
":;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#",
";<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$",
"<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%",
"=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&",
">?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'",
"?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'(",
"@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()",
"ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*",
"BCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+",
"CDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,",
"DEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-",
"EFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-.",
"FGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./",
"GHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0",
"HIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./01",
"IJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./012",
"JKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123",
"KLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./01234",
"LMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./012345",
"MNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456",
"NOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./01234567",
"OPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./012345678",
"PQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789",
"QRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:",
"RSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;",
"STUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<",
"TUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=",
"UVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>",
"VWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?",
"WXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@",
"XYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@A",
"YZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@AB",
"Z[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABC",
"[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCD",
"\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDE",
"]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEF",
"^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFG",
"_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGH",
"`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHI",
"abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJ",
"bcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJK",
"cdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKL",
"defghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLM",
"efghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMN",
"fghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNO",
"ghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOP",
"hijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQ",
"ijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQR",
"jklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRS",
"klmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRST",
"lmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTU",
"mnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUV",
"nopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVW",
"opqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWX",
"pqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXY",
"qrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ",
"rstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[",
"stuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\",
"tuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]",
"uvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^",
"vwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_",
"wxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`",
"xyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`a",
"yz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`ab",
"z{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abc",
"{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcd",
"|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcde",
"}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdef",
"~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefg"
};
| isc |
GitHub Code Dataset
Dataset Description
The GitHub Code dataset consists of 115M code files from GitHub in 30 programming languages with over 60 extensions, totaling 1TB of data. The dataset was created from the public GitHub dataset on Google BigQuery.
How to use it
The GitHub Code dataset is a very large dataset, so for most use cases it is recommended to make use of the streaming API of `datasets`. You can load and iterate through the dataset with the following two lines of code:
from datasets import load_dataset
ds = load_dataset("codeparrot/github-code", streaming=True, split="train")
print(next(iter(ds)))
#OUTPUT:
{
'code': "import mod189 from './mod189';\nvar value=mod189+1;\nexport default value;\n",
'repo_name': 'MirekSz/webpack-es6-ts',
'path': 'app/mods/mod190.js',
'language': 'JavaScript',
'license': 'isc',
'size': 73
}
You can see that besides the code, repo name, and path, the programming language, license, and size of the file are also part of the dataset. You can also filter the dataset for any subset of the 30 included languages (see the full list below) by passing them as a list. E.g. if your dream is to build a Codex model for Dockerfiles, use the following configuration:
ds = load_dataset("codeparrot/github-code", streaming=True, split="train", languages=["Dockerfile"])
print(next(iter(ds))["code"])
#OUTPUT:
"""\
FROM rockyluke/ubuntu:precise
ENV DEBIAN_FRONTEND="noninteractive" \
TZ="Europe/Amsterdam"
...
"""
We also have access to the license of the origin repo of a file so we can filter for licenses in the same way we filtered for languages:
ds = load_dataset("codeparrot/github-code", streaming=True, split="train", licenses=["mit", "isc"])
licenses = []
for element in iter(ds).take(10_000):
licenses.append(element["license"])
print(Counter(licenses))
#OUTPUT:
Counter({'mit': 9896, 'isc': 104})
Naturally, you can also download the full dataset. Note that this will download ~300GB compressed text data and the uncompressed dataset will take up ~1TB of storage:
ds = load_dataset("codeparrot/github-code", split="train")
Data Structure
Data Instances
{
'code': "import mod189 from './mod189';\nvar value=mod189+1;\nexport default value;\n",
'repo_name': 'MirekSz/webpack-es6-ts',
'path': 'app/mods/mod190.js',
'language': 'JavaScript',
'license': 'isc',
'size': 73
}
Data Fields
Field | Type | Description |
---|---|---|
code | string | content of source file |
repo_name | string | name of the GitHub repository |
path | string | path of file in GitHub repository |
language | string | programming language as inferred by extension |
license | string | license of GitHub repository |
size | int | size of source file in bytes |
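As a small usage sketch (not part of the original examples), these fields can be used to filter the stream on the fly; the choice of Python and the 10 kB size cut-off below are arbitrary illustration values:
from datasets import load_dataset
ds = load_dataset("codeparrot/github-code", streaming=True, split="train")
# Keep only small Python files, using the `language` and `size` fields described above.
small_py = (ex for ex in ds if ex["language"] == "Python" and ex["size"] < 10_000)
example = next(small_py)
print(example["repo_name"], example["path"], example["size"])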
Data Splits
The dataset only contains a train split.
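If a held-out set is needed, one way to carve it out of the streaming train split is with `take` and `skip`; the 10,000-example cut-off below is an arbitrary choice, not something defined by the dataset:
from datasets import load_dataset
ds = load_dataset("codeparrot/github-code", streaming=True, split="train")
validation_ds = ds.take(10_000)  # first 10,000 streamed examples as a makeshift validation set
train_ds = ds.skip(10_000)       # skip those examples and use the rest for training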
Languages
The dataset contains 30 programming languages with over 60 extensions:
{
"Assembly": [".asm"],
"Batchfile": [".bat", ".cmd"],
"C": [".c", ".h"],
"C#": [".cs"],
"C++": [".cpp", ".hpp", ".c++", ".h++", ".cc", ".hh", ".C", ".H"],
"CMake": [".cmake"],
"CSS": [".css"],
"Dockerfile": [".dockerfile", "Dockerfile"],
"FORTRAN": ['.f90', '.f', '.f03', '.f08', '.f77', '.f95', '.for', '.fpp'],
"GO": [".go"],
"Haskell": [".hs"],
"HTML":[".html"],
"Java": [".java"],
"JavaScript": [".js"],
"Julia": [".jl"],
"Lua": [".lua"],
"Makefile": ["Makefile"],
"Markdown": [".md", ".markdown"],
"PHP": [".php", ".php3", ".php4", ".php5", ".phps", ".phpt"],
"Perl": [".pl", ".pm", ".pod", ".perl"],
"PowerShell": ['.ps1', '.psd1', '.psm1'],
"Python": [".py"],
"Ruby": [".rb"],
"Rust": [".rs"],
"SQL": [".sql"],
"Scala": [".scala"],
"Shell": [".sh", ".bash", ".command", ".zsh"],
"TypeScript": [".ts", ".tsx"],
"TeX": [".tex"],
"Visual Basic": [".vb"]
}
Licenses
Each example is also annotated with the license of the associated repository. There are in total 15 licenses:
[
'mit',
'apache-2.0',
'gpl-3.0',
'gpl-2.0',
'bsd-3-clause',
'agpl-3.0',
'lgpl-3.0',
'lgpl-2.1',
'bsd-2-clause',
'cc0-1.0',
'epl-1.0',
'mpl-2.0',
'unlicense',
'isc',
'artistic-2.0'
]
Dataset Statistics
The dataset contains 115M files and the sum of all the source code file sizes is 873 GB (note that the size of the dataset is larger due to the extra fields). A breakdown per language is given in the table below:
| | Language | File Count | Size (GB) |
---|---|---|---|
0 | Java | 19548190 | 107.70 |
1 | C | 14143113 | 183.83 |
2 | JavaScript | 11839883 | 87.82 |
3 | HTML | 11178557 | 118.12 |
4 | PHP | 11177610 | 61.41 |
5 | Markdown | 8464626 | 23.09 |
6 | C++ | 7380520 | 87.73 |
7 | Python | 7226626 | 52.03 |
8 | C# | 6811652 | 36.83 |
9 | Ruby | 4473331 | 10.95 |
10 | GO | 2265436 | 19.28 |
11 | TypeScript | 1940406 | 24.59 |
12 | CSS | 1734406 | 22.67 |
13 | Shell | 1385648 | 3.01 |
14 | Scala | 835755 | 3.87 |
15 | Makefile | 679430 | 2.92 |
16 | SQL | 656671 | 5.67 |
17 | Lua | 578554 | 2.81 |
18 | Perl | 497949 | 4.70 |
19 | Dockerfile | 366505 | 0.71 |
20 | Haskell | 340623 | 1.85 |
21 | Rust | 322431 | 2.68 |
22 | TeX | 251015 | 2.15 |
23 | Batchfile | 236945 | 0.70 |
24 | CMake | 175282 | 0.54 |
25 | Visual Basic | 155652 | 1.91 |
26 | FORTRAN | 142038 | 1.62 |
27 | PowerShell | 136846 | 0.69 |
28 | Assembly | 82905 | 0.78 |
29 | Julia | 58317 | 0.29 |
Dataset Creation
The dataset was created in two steps:
- Files with the extensions given in the list above were retrieved from the GitHub dataset on BigQuery (full query here). The query was executed on Mar 16, 2022, 6:23:39 PM UTC+1.
- Files with lines longer than 1000 characters and duplicates (exact duplicates ignoring whitespaces) were dropped (full preprocessing script here); a rough sketch of this filtering step is shown below.
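The linked preprocessing script is authoritative; the snippet below is only a rough sketch of the two filters described above, and the hash-based duplicate check on whitespace-stripped content is an assumption about how the deduplication works, not the exact implementation:
import hashlib
seen_hashes = set()
def keep_file(code):
    # Drop files that contain any line longer than 1000 characters.
    if any(len(line) > 1000 for line in code.splitlines()):
        return False
    # Drop exact duplicates, ignoring whitespace differences.
    digest = hashlib.md5("".join(code.split()).encode("utf-8")).hexdigest()
    if digest in seen_hashes:
        return False
    seen_hashes.add(digest)
    return True
print([keep_file(c) for c in ["print('a')\n", "print ( 'a' )\n", "x" * 2000]])  # [True, False, False]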
Considerations for Using the Data
The dataset consists of source code from a wide range of repositories. As such, the files can potentially include harmful or biased code, as well as sensitive information like passwords or usernames.
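Before training on or redistributing files, a naive screen for obvious credentials can help; the regular expression below is only an illustrative heuristic and will certainly not catch every kind of secret:
import re
SECRET_PATTERN = re.compile(r"(password|passwd|secret|api[_-]?key|token)\s*[:=]", re.IGNORECASE)
def looks_sensitive(code):
    # Flag files that appear to assign a value to a password/key/token-like name.
    return bool(SECRET_PATTERN.search(code))
print(looks_sensitive("API_KEY = 'abc123'"))  # True
print(looks_sensitive("x = 1"))               # False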
Releases
You can load any older version of the dataset with the `revision` argument:
ds = load_dataset("codeparrot/github-code", revision="v1.0")
v1.0
- Initial release of dataset
- The query was executed on Feb 14, 2022, 12:03:16 PM UTC+1
v1.1
- Fix missing Scala/TypeScript
- Fix deduplication issue with inconsistent Python `hash`
- The query was executed on Mar 16, 2022, 6:23:39 PM UTC+1