hexsha (string, len 40) | size (int64, 4-1.05M) | content (string, len 4-1.05M) | avg_line_length (float64, 1.33-100) | max_line_length (int64, 1-1k) | alphanum_fraction (float64, 0.25-1)
---|---|---|---|---|---
ab6992c1283bc3f1bbc5b6b3842a016a1cbe02f5 | 43,469 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Metadata encoding
use util::ppaux::ty_to_str;
use std::{ebml, map};
use std::map::HashMap;
use io::WriterUtil;
use writer = std::ebml::writer;
use syntax::ast::*;
use syntax::print::pprust;
use syntax::{ast_util, visit};
use syntax::ast_util::*;
use metadata::common::*;
use middle::ty;
use middle::ty::node_id_to_type;
use middle::resolve;
use syntax::ast_map;
use syntax::attr;
use str::to_bytes;
use syntax::ast;
use syntax::diagnostic::span_handler;
use hash::{Hash, HashUtil};
use to_bytes::IterBytes;
export encode_parms;
export encode_metadata;
export encoded_ty;
export reachable;
export encode_inlined_item;
export metadata_encoding_version;
// used by astencode:
export def_to_str;
export encode_ctxt;
export write_type;
export write_vstore;
export encode_def_id;
type abbrev_map = map::HashMap<ty::t, tyencode::ty_abbrev>;
type encode_inlined_item = fn@(ecx: @encode_ctxt,
ebml_w: writer::Encoder,
path: ast_map::path,
ii: ast::inlined_item);
type encode_parms = {
diag: span_handler,
tcx: ty::ctxt,
reachable: HashMap<ast::node_id, ()>,
reexports2: middle::resolve::ExportMap2,
item_symbols: HashMap<ast::node_id, ~str>,
discrim_symbols: HashMap<ast::node_id, ~str>,
link_meta: link_meta,
cstore: cstore::CStore,
encode_inlined_item: encode_inlined_item
};
type stats = {
mut inline_bytes: uint,
mut attr_bytes: uint,
mut dep_bytes: uint,
mut item_bytes: uint,
mut index_bytes: uint,
mut zero_bytes: uint,
mut total_bytes: uint,
mut n_inlines: uint
};
enum encode_ctxt = {
diag: span_handler,
tcx: ty::ctxt,
stats: stats,
reachable: HashMap<ast::node_id, ()>,
reexports2: middle::resolve::ExportMap2,
item_symbols: HashMap<ast::node_id, ~str>,
discrim_symbols: HashMap<ast::node_id, ~str>,
link_meta: link_meta,
cstore: cstore::CStore,
encode_inlined_item: encode_inlined_item,
type_abbrevs: abbrev_map
};
fn reachable(ecx: @encode_ctxt, id: node_id) -> bool {
ecx.reachable.contains_key(id)
}
fn encode_name(ecx: @encode_ctxt, ebml_w: writer::Encoder, name: ident) {
ebml_w.wr_tagged_str(tag_paths_data_name, ecx.tcx.sess.str_of(name));
}
fn encode_impl_type_basename(ecx: @encode_ctxt, ebml_w: writer::Encoder,
name: ident) {
ebml_w.wr_tagged_str(tag_item_impl_type_basename,
ecx.tcx.sess.str_of(name));
}
fn encode_def_id(ebml_w: writer::Encoder, id: def_id) {
ebml_w.wr_tagged_str(tag_def_id, def_to_str(id));
}
fn encode_region_param(ecx: @encode_ctxt, ebml_w: writer::Encoder,
it: @ast::item) {
let opt_rp = ecx.tcx.region_paramd_items.find(it.id);
for opt_rp.each |rp| {
do ebml_w.wr_tag(tag_region_param) {
(*rp).encode(&ebml_w);
}
}
}
fn encode_mutability(ebml_w: writer::Encoder, mt: struct_mutability) {
do ebml_w.wr_tag(tag_struct_mut) {
let val = match mt {
struct_immutable => 'a',
struct_mutable => 'm'
};
ebml_w.writer.write(&[val as u8]);
}
}
type entry<T> = {val: T, pos: uint};
fn add_to_index(ecx: @encode_ctxt, ebml_w: writer::Encoder, path: &[ident],
index: &mut ~[entry<~str>], name: ident) {
let mut full_path = ~[];
full_path.push_all(path);
full_path.push(name);
index.push(
{val: ast_util::path_name_i(full_path,
ecx.tcx.sess.parse_sess.interner),
pos: ebml_w.writer.tell()});
}
fn encode_trait_ref(ebml_w: writer::Encoder, ecx: @encode_ctxt,
t: @trait_ref) {
ebml_w.start_tag(tag_impl_trait);
encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, t.ref_id));
ebml_w.end_tag();
}
// Item info table encoding
fn encode_family(ebml_w: writer::Encoder, c: char) {
ebml_w.start_tag(tag_items_data_item_family);
ebml_w.writer.write(&[c as u8]);
ebml_w.end_tag();
}
fn def_to_str(did: def_id) -> ~str { fmt!("%d:%d", did.crate, did.node) }
fn encode_ty_type_param_bounds(ebml_w: writer::Encoder, ecx: @encode_ctxt,
params: @~[ty::param_bounds]) {
let ty_str_ctxt = @{diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| reachable(ecx, a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
for params.each |param| {
ebml_w.start_tag(tag_items_data_item_ty_param_bounds);
tyencode::enc_bounds(ebml_w.writer, ty_str_ctxt, *param);
ebml_w.end_tag();
}
}
fn encode_type_param_bounds(ebml_w: writer::Encoder, ecx: @encode_ctxt,
params: ~[ty_param]) {
let ty_param_bounds =
@params.map(|param| ecx.tcx.ty_param_bounds.get(param.id));
encode_ty_type_param_bounds(ebml_w, ecx, ty_param_bounds);
}
fn encode_variant_id(ebml_w: writer::Encoder, vid: def_id) {
ebml_w.start_tag(tag_items_data_item_variant);
ebml_w.writer.write(str::to_bytes(def_to_str(vid)));
ebml_w.end_tag();
}
fn write_type(ecx: @encode_ctxt, ebml_w: writer::Encoder, typ: ty::t) {
let ty_str_ctxt =
@{diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| reachable(ecx, a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
tyencode::enc_ty(ebml_w.writer, ty_str_ctxt, typ);
}
fn write_vstore(ecx: @encode_ctxt, ebml_w: writer::Encoder,
vstore: ty::vstore) {
let ty_str_ctxt =
@{diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| reachable(ecx, a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
tyencode::enc_vstore(ebml_w.writer, ty_str_ctxt, vstore);
}
fn encode_type(ecx: @encode_ctxt, ebml_w: writer::Encoder, typ: ty::t) {
ebml_w.start_tag(tag_items_data_item_type);
write_type(ecx, ebml_w, typ);
ebml_w.end_tag();
}
fn encode_symbol(ecx: @encode_ctxt, ebml_w: writer::Encoder, id: node_id) {
ebml_w.start_tag(tag_items_data_item_symbol);
let sym = match ecx.item_symbols.find(id) {
Some(ref x) => (*x),
None => {
ecx.diag.handler().bug(
fmt!("encode_symbol: id not found %d", id));
}
};
ebml_w.writer.write(str::to_bytes(sym));
ebml_w.end_tag();
}
fn encode_discriminant(ecx: @encode_ctxt, ebml_w: writer::Encoder,
id: node_id) {
ebml_w.start_tag(tag_items_data_item_symbol);
ebml_w.writer.write(str::to_bytes(ecx.discrim_symbols.get(id)));
ebml_w.end_tag();
}
fn encode_disr_val(_ecx: @encode_ctxt, ebml_w: writer::Encoder,
disr_val: int) {
ebml_w.start_tag(tag_disr_val);
ebml_w.writer.write(str::to_bytes(int::to_str(disr_val, 10u)));
ebml_w.end_tag();
}
fn encode_parent_item(ebml_w: writer::Encoder, id: def_id) {
ebml_w.start_tag(tag_items_data_parent_item);
ebml_w.writer.write(str::to_bytes(def_to_str(id)));
ebml_w.end_tag();
}
fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: writer::Encoder,
id: node_id, variants: ~[variant],
path: ast_map::path, index: @mut ~[entry<int>],
ty_params: ~[ty_param]) {
let mut disr_val = 0;
let mut i = 0;
let vi = ty::enum_variants(ecx.tcx, {crate: local_crate, node: id});
for variants.each |variant| {
index.push({val: variant.node.id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(variant.node.id));
encode_family(ebml_w, 'v');
encode_name(ecx, ebml_w, variant.node.name);
encode_parent_item(ebml_w, local_def(id));
encode_type(ecx, ebml_w,
node_id_to_type(ecx.tcx, variant.node.id));
match variant.node.kind {
ast::tuple_variant_kind(args)
if args.len() > 0 && ty_params.len() == 0 => {
encode_symbol(ecx, ebml_w, variant.node.id);
}
ast::tuple_variant_kind(_) | ast::struct_variant_kind(_) |
ast::enum_variant_kind(_) => {}
}
encode_discriminant(ecx, ebml_w, variant.node.id);
if vi[i].disr_val != disr_val {
encode_disr_val(ecx, ebml_w, vi[i].disr_val);
disr_val = vi[i].disr_val;
}
encode_type_param_bounds(ebml_w, ecx, ty_params);
encode_path(ecx, ebml_w, path, ast_map::path_name(variant.node.name));
ebml_w.end_tag();
disr_val += 1;
i += 1;
}
}
fn encode_path(ecx: @encode_ctxt, ebml_w: writer::Encoder,
path: ast_map::path, name: ast_map::path_elt) {
fn encode_path_elt(ecx: @encode_ctxt, ebml_w: writer::Encoder,
elt: ast_map::path_elt) {
let (tag, name) = match elt {
ast_map::path_mod(name) => (tag_path_elt_mod, name),
ast_map::path_name(name) => (tag_path_elt_name, name)
};
ebml_w.wr_tagged_str(tag, ecx.tcx.sess.str_of(name));
}
do ebml_w.wr_tag(tag_path) {
ebml_w.wr_tagged_u32(tag_path_len, (vec::len(path) + 1u) as u32);
for vec::each(path) |pe| {
encode_path_elt(ecx, ebml_w, *pe);
}
encode_path_elt(ecx, ebml_w, name);
}
}
fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: writer::Encoder,
md: _mod, id: node_id, path: ast_map::path,
name: ident) {
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(id));
encode_family(ebml_w, 'm');
encode_name(ecx, ebml_w, name);
debug!("(encoding info for module) encoding info for module ID %d", id);
// Encode info about all the module children.
for md.items.each |item| {
match item.node {
item_impl(*) | item_struct(*) => {
let (ident, did) = (item.ident, item.id);
debug!("(encoding info for module) ... encoding impl %s \
(%?/%?), exported? %?",
ecx.tcx.sess.str_of(ident),
did,
ast_map::node_id_to_str(ecx.tcx.items, did, ecx.tcx
.sess.parse_sess.interner),
ast_util::is_exported(ident, md));
ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did)));
ebml_w.end_tag();
}
_ => {} // XXX: Encode these too.
}
}
encode_path(ecx, ebml_w, path, ast_map::path_mod(name));
// Encode the reexports of this module.
debug!("(encoding info for module) encoding reexports for %d", id);
match ecx.reexports2.find(id) {
Some(ref exports) => {
debug!("(encoding info for module) found reexports for %d", id);
for (*exports).each |exp| {
debug!("(encoding info for module) reexport '%s' for %d",
exp.name, id);
ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(exp.def_id));
ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(exp.name);
ebml_w.end_tag();
ebml_w.end_tag();
}
}
None => {
debug!("(encoding info for module) found no reexports for %d",
id);
}
}
ebml_w.end_tag();
}
fn encode_visibility(ebml_w: writer::Encoder, visibility: visibility) {
encode_family(ebml_w, match visibility {
public => 'g',
private => 'j',
inherited => 'N'
});
}
fn encode_self_type(ebml_w: writer::Encoder, self_type: ast::self_ty_) {
ebml_w.start_tag(tag_item_trait_method_self_ty);
// Encode the base self type.
let ch;
match self_type {
sty_static => { ch = 's' as u8; }
sty_by_ref => { ch = 'r' as u8; }
sty_value => { ch = 'v' as u8; }
sty_region(_) => { ch = '&' as u8; }
sty_box(_) => { ch = '@' as u8; }
sty_uniq(_) => { ch = '~' as u8; }
}
ebml_w.writer.write(&[ ch ]);
// Encode mutability.
match self_type {
sty_static | sty_by_ref | sty_value => { /* No-op. */ }
sty_region(m_imm) | sty_box(m_imm) | sty_uniq(m_imm) => {
ebml_w.writer.write(&[ 'i' as u8 ]);
}
sty_region(m_mutbl) | sty_box(m_mutbl) | sty_uniq(m_mutbl) => {
ebml_w.writer.write(&[ 'm' as u8 ]);
}
sty_region(m_const) | sty_box(m_const) | sty_uniq(m_const) => {
ebml_w.writer.write(&[ 'c' as u8 ]);
}
}
ebml_w.end_tag();
}
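// For example, `&mut self` is encoded as the byte '&' followed by 'm', and a
// static method is encoded as the single byte 's' (no mutability byte).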
fn encode_method_sort(ebml_w: writer::Encoder, sort: char) {
ebml_w.start_tag(tag_item_trait_method_sort);
ebml_w.writer.write(&[ sort as u8 ]);
ebml_w.end_tag();
}
/* Returns an index of items in this class */
fn encode_info_for_struct(ecx: @encode_ctxt, ebml_w: writer::Encoder,
path: ast_map::path,
fields: ~[@struct_field],
global_index: @mut~[entry<int>]) -> ~[entry<int>] {
/* Each class has its own index, since different classes
may have fields with the same name */
let index = @mut ~[];
let tcx = ecx.tcx;
/* We encode both private and public fields -- need to include
private fields to get the offsets right */
for fields.each |field| {
match field.node.kind {
named_field(nm, mt, vis) => {
let id = field.node.id;
index.push({val: id, pos: ebml_w.writer.tell()});
global_index.push({val: id,
pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
debug!("encode_info_for_struct: doing %s %d",
tcx.sess.str_of(nm), id);
encode_visibility(ebml_w, vis);
encode_name(ecx, ebml_w, nm);
encode_path(ecx, ebml_w, path, ast_map::path_name(nm));
encode_type(ecx, ebml_w, node_id_to_type(tcx, id));
encode_mutability(ebml_w, mt);
encode_def_id(ebml_w, local_def(id));
ebml_w.end_tag();
}
unnamed_field => {}
}
}
*index
}
// This is for encoding info for ctors and dtors
fn encode_info_for_ctor(ecx: @encode_ctxt, ebml_w: writer::Encoder,
id: node_id, ident: ident, path: ast_map::path,
item: Option<inlined_item>, tps: ~[ty_param]) {
ebml_w.start_tag(tag_items_data_item);
encode_name(ecx, ebml_w, ident);
encode_def_id(ebml_w, local_def(id));
encode_family(ebml_w, purity_fn_family(ast::impure_fn));
encode_type_param_bounds(ebml_w, ecx, tps);
let its_ty = node_id_to_type(ecx.tcx, id);
debug!("fn name = %s ty = %s its node id = %d",
ecx.tcx.sess.str_of(ident),
util::ppaux::ty_to_str(ecx.tcx, its_ty), id);
encode_type(ecx, ebml_w, its_ty);
encode_path(ecx, ebml_w, path, ast_map::path_name(ident));
match item {
Some(ref it) => {
(ecx.encode_inlined_item)(ecx, ebml_w, path, (*it));
}
None => {
encode_symbol(ecx, ebml_w, id);
}
}
ebml_w.end_tag();
}
fn encode_info_for_method(ecx: @encode_ctxt, ebml_w: writer::Encoder,
impl_path: ast_map::path, should_inline: bool,
parent_id: node_id,
m: @method, all_tps: ~[ty_param]) {
debug!("encode_info_for_method: %d %s %u", m.id,
ecx.tcx.sess.str_of(m.ident), all_tps.len());
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(m.id));
match m.self_ty.node {
ast::sty_static => {
encode_family(ebml_w, purity_static_method_family(m.purity));
}
_ => encode_family(ebml_w, purity_fn_family(m.purity))
}
encode_type_param_bounds(ebml_w, ecx, all_tps);
encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, m.id));
encode_name(ecx, ebml_w, m.ident);
encode_path(ecx, ebml_w, impl_path, ast_map::path_name(m.ident));
encode_self_type(ebml_w, m.self_ty.node);
if all_tps.len() > 0u || should_inline {
(ecx.encode_inlined_item)(
ecx, ebml_w, impl_path,
ii_method(local_def(parent_id), m));
} else {
encode_symbol(ecx, ebml_w, m.id);
}
ebml_w.end_tag();
}
fn purity_fn_family(p: purity) -> char {
match p {
unsafe_fn => 'u',
pure_fn => 'p',
impure_fn => 'f',
extern_fn => 'e'
}
}
fn purity_static_method_family(p: purity) -> char {
match p {
unsafe_fn => 'U',
pure_fn => 'P',
impure_fn => 'F',
_ => fail ~"extern fn can't be static"
}
}
fn should_inline(attrs: ~[attribute]) -> bool {
match attr::find_inline_attr(attrs) {
attr::ia_none | attr::ia_never => false,
attr::ia_hint | attr::ia_always => true
}
}
fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: writer::Encoder,
item: @item, index: @mut ~[entry<int>],
path: ast_map::path) {
let tcx = ecx.tcx;
let must_write =
match item.node {
item_enum(_, _) | item_impl(*)
| item_trait(*) | item_struct(*) => true,
_ => false
};
if !must_write && !reachable(ecx, item.id) { return; }
fn add_to_index_(item: @item, ebml_w: writer::Encoder,
index: @mut ~[entry<int>]) {
index.push({val: item.id, pos: ebml_w.writer.tell()});
}
let add_to_index = |copy ebml_w| add_to_index_(item, ebml_w, index);
debug!("encoding info for item at %s",
ecx.tcx.sess.codemap.span_to_str(item.span));
match item.node {
item_const(_, _) => {
add_to_index();
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'c');
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_symbol(ecx, ebml_w, item.id);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
ebml_w.end_tag();
}
item_fn(_, purity, tps, _) => {
add_to_index();
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, purity_fn_family(purity));
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
encode_attributes(ebml_w, item.attrs);
if tps.len() > 0u || should_inline(item.attrs) {
(ecx.encode_inlined_item)(ecx, ebml_w, path, ii_item(item));
} else {
encode_symbol(ecx, ebml_w, item.id);
}
ebml_w.end_tag();
}
item_mod(m) => {
add_to_index();
encode_info_for_mod(ecx, ebml_w, m, item.id, path, item.ident);
}
item_foreign_mod(_) => {
add_to_index();
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'n');
encode_name(ecx, ebml_w, item.ident);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
ebml_w.end_tag();
}
item_ty(_, tps) => {
add_to_index();
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'y');
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ecx, ebml_w, item.ident);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
encode_region_param(ecx, ebml_w, item);
ebml_w.end_tag();
}
item_enum(ref enum_definition, tps) => {
add_to_index();
do ebml_w.wr_tag(tag_items_data_item) {
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 't');
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ecx, ebml_w, item.ident);
for (*enum_definition).variants.each |v| {
encode_variant_id(ebml_w, local_def(v.node.id));
}
(ecx.encode_inlined_item)(ecx, ebml_w, path, ii_item(item));
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
encode_region_param(ecx, ebml_w, item);
}
encode_enum_variant_info(ecx,
ebml_w,
item.id,
(*enum_definition).variants,
path,
index,
tps);
}
item_struct(struct_def, tps) => {
/* First, encode the fields
These come first because we need to write them to make
the index, and the index needs to be in the item for the
class itself */
let idx = encode_info_for_struct(ecx, ebml_w, path,
struct_def.fields, index);
/* Encode the dtor */
do struct_def.dtor.iter |dtor| {
index.push({val: dtor.node.id, pos: ebml_w.writer.tell()});
encode_info_for_ctor(ecx, ebml_w, dtor.node.id,
ecx.tcx.sess.ident_of(
ecx.tcx.sess.str_of(item.ident) +
~"_dtor"),
path, if tps.len() > 0u {
Some(ii_dtor(*dtor, item.ident, tps,
local_def(item.id))) }
else { None }, tps);
}
/* Index the class */
add_to_index();
/* Now, make an item for the class itself */
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'S');
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ecx, ebml_w, item.ident);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
encode_region_param(ecx, ebml_w, item);
/* Encode the dtor */
/* Encode id for dtor */
do struct_def.dtor.iter |dtor| {
do ebml_w.wr_tag(tag_item_dtor) {
encode_def_id(ebml_w, local_def(dtor.node.id));
}
};
/* Encode def_ids for each field and method;
for methods, write all the stuff get_trait_method
needs to know */
for struct_def.fields.each |f| {
match f.node.kind {
named_field(ident, _, vis) => {
ebml_w.start_tag(tag_item_field);
encode_visibility(ebml_w, vis);
encode_name(ecx, ebml_w, ident);
encode_def_id(ebml_w, local_def(f.node.id));
ebml_w.end_tag();
}
unnamed_field => {}
}
}
/* Each class has its own index -- encode it */
let bkts = create_index(idx);
encode_index(ebml_w, bkts, write_int);
ebml_w.end_tag();
}
item_impl(tps, opt_trait, ty, methods) => {
add_to_index();
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'i');
encode_region_param(ecx, ebml_w, item);
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ecx, ebml_w, item.ident);
encode_attributes(ebml_w, item.attrs);
match ty.node {
ast::ty_path(path, _) if path.idents.len() == 1 => {
encode_impl_type_basename(ecx, ebml_w,
ast_util::path_to_ident(path));
}
_ => {}
}
for methods.each |m| {
ebml_w.start_tag(tag_item_impl_method);
let method_def_id = local_def(m.id);
ebml_w.writer.write(str::to_bytes(def_to_str(method_def_id)));
ebml_w.end_tag();
}
do opt_trait.iter() |associated_trait| {
encode_trait_ref(ebml_w, ecx, *associated_trait);
}
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
ebml_w.end_tag();
let impl_path = vec::append_one(path,
ast_map::path_name(item.ident));
for methods.each |m| {
index.push({val: m.id, pos: ebml_w.writer.tell()});
encode_info_for_method(ecx, ebml_w, impl_path,
should_inline(m.attrs), item.id, *m,
vec::append(tps, m.tps));
}
}
item_trait(tps, traits, ref ms) => {
let provided_methods = dvec::DVec();
add_to_index();
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'I');
encode_region_param(ecx, ebml_w, item);
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ecx, ebml_w, item.ident);
encode_attributes(ebml_w, item.attrs);
let mut i = 0u;
for vec::each(*ty::trait_methods(tcx, local_def(item.id))) |mty| {
match (*ms)[i] {
required(ref ty_m) => {
ebml_w.start_tag(tag_item_trait_method);
encode_def_id(ebml_w, local_def((*ty_m).id));
encode_name(ecx, ebml_w, mty.ident);
encode_type_param_bounds(ebml_w, ecx, (*ty_m).tps);
encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty));
encode_family(ebml_w, purity_fn_family(mty.fty.meta.purity));
encode_self_type(ebml_w, mty.self_ty);
encode_method_sort(ebml_w, 'r');
ebml_w.end_tag();
}
provided(m) => {
provided_methods.push(m);
ebml_w.start_tag(tag_item_trait_method);
encode_def_id(ebml_w, local_def(m.id));
encode_name(ecx, ebml_w, mty.ident);
encode_type_param_bounds(ebml_w, ecx, m.tps);
encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty));
encode_family(ebml_w, purity_fn_family(mty.fty.meta.purity));
encode_self_type(ebml_w, mty.self_ty);
encode_method_sort(ebml_w, 'p');
ebml_w.end_tag();
}
}
i += 1u;
}
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
for traits.each |associated_trait| {
encode_trait_ref(ebml_w, ecx, *associated_trait)
}
ebml_w.end_tag();
// Now, output all of the static methods as items. Note that for the
// method info, we output static methods with type signatures as
// written. Here, we output the *real* type signatures. I feel like
// maybe we should only ever handle the real type signatures.
for vec::each((*ms)) |m| {
let ty_m = ast_util::trait_method_to_ty_method(*m);
if ty_m.self_ty.node != ast::sty_static { loop; }
index.push({val: ty_m.id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(ty_m.id));
encode_parent_item(ebml_w, local_def(item.id));
encode_name(ecx, ebml_w, ty_m.ident);
encode_family(ebml_w,
purity_static_method_family(ty_m.purity));
let polyty = ecx.tcx.tcache.get(local_def(ty_m.id));
encode_ty_type_param_bounds(ebml_w, ecx, polyty.bounds);
encode_type(ecx, ebml_w, polyty.ty);
let m_path = vec::append_one(path,
ast_map::path_name(item.ident));
encode_path(ecx, ebml_w, m_path, ast_map::path_name(ty_m.ident));
ebml_w.end_tag();
}
// Finally, output all the provided methods as items.
for provided_methods.each |m| {
index.push({val: m.id, pos: ebml_w.writer.tell()});
encode_info_for_method(ecx, ebml_w, path, true, item.id, *m,
m.tps);
}
}
item_mac(*) => fail ~"item macros unimplemented"
}
}
fn encode_info_for_foreign_item(ecx: @encode_ctxt, ebml_w: writer::Encoder,
nitem: @foreign_item,
index: @mut ~[entry<int>],
path: ast_map::path, abi: foreign_abi) {
if !reachable(ecx, nitem.id) { return; }
index.push({val: nitem.id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
match nitem.node {
foreign_item_fn(_, purity, tps) => {
encode_def_id(ebml_w, local_def(nitem.id));
encode_family(ebml_w, purity_fn_family(purity));
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, nitem.id));
if abi == foreign_abi_rust_intrinsic {
(ecx.encode_inlined_item)(ecx, ebml_w, path,
ii_foreign(nitem));
} else {
encode_symbol(ecx, ebml_w, nitem.id);
}
encode_path(ecx, ebml_w, path, ast_map::path_name(nitem.ident));
}
foreign_item_const(*) => {
encode_def_id(ebml_w, local_def(nitem.id));
encode_family(ebml_w, 'c');
encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, nitem.id));
encode_symbol(ecx, ebml_w, nitem.id);
encode_path(ecx, ebml_w, path, ast_map::path_name(nitem.ident));
}
}
ebml_w.end_tag();
}
fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: writer::Encoder,
crate: @crate) -> ~[entry<int>] {
let index = @mut ~[];
ebml_w.start_tag(tag_items_data);
index.push({val: crate_node_id, pos: ebml_w.writer.tell()});
encode_info_for_mod(ecx, ebml_w, crate.node.module,
crate_node_id, ~[],
syntax::parse::token::special_idents::invalid);
visit::visit_crate(*crate, (), visit::mk_vt(@{
visit_expr: |_e, _cx, _v| { },
visit_item: |i, cx, v, copy ebml_w| {
visit::visit_item(i, cx, v);
match ecx.tcx.items.get(i.id) {
ast_map::node_item(_, pt) => {
encode_info_for_item(ecx, ebml_w, i, index, *pt);
}
_ => fail ~"bad item"
}
},
visit_foreign_item: |ni, cx, v, copy ebml_w| {
visit::visit_foreign_item(ni, cx, v);
match ecx.tcx.items.get(ni.id) {
ast_map::node_foreign_item(_, abi, pt) => {
encode_info_for_foreign_item(ecx, ebml_w, ni,
index, *pt, abi);
}
// case for separate item and foreign-item tables
_ => fail ~"bad foreign item"
}
},
.. *visit::default_visitor()
}));
ebml_w.end_tag();
return *index;
}
// Path and definition ID indexing
fn create_index<T: Copy Hash IterBytes>(index: ~[entry<T>]) ->
~[@~[entry<T>]] {
let mut buckets: ~[@mut ~[entry<T>]] = ~[];
for uint::range(0u, 256u) |_i| { buckets.push(@mut ~[]); };
for index.each |elt| {
let h = elt.val.hash() as uint;
buckets[h % 256].push(*elt);
}
let mut buckets_frozen = ~[];
for buckets.each |bucket| {
buckets_frozen.push(@**bucket);
}
return buckets_frozen;
}
fn encode_index<T>(ebml_w: writer::Encoder, buckets: ~[@~[entry<T>]],
write_fn: fn(io::Writer, T)) {
let writer = ebml_w.writer;
ebml_w.start_tag(tag_index);
let mut bucket_locs: ~[uint] = ~[];
ebml_w.start_tag(tag_index_buckets);
for buckets.each |bucket| {
bucket_locs.push(ebml_w.writer.tell());
ebml_w.start_tag(tag_index_buckets_bucket);
for vec::each(**bucket) |elt| {
ebml_w.start_tag(tag_index_buckets_bucket_elt);
assert elt.pos < 0xffff_ffff;
writer.write_be_u32(elt.pos as u32);
write_fn(writer, elt.val);
ebml_w.end_tag();
}
ebml_w.end_tag();
}
ebml_w.end_tag();
ebml_w.start_tag(tag_index_table);
for bucket_locs.each |pos| {
assert *pos < 0xffff_ffff;
writer.write_be_u32(*pos as u32);
}
ebml_w.end_tag();
ebml_w.end_tag();
}
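// The layout produced above (a sketch of the format, derived from the code):
// 256 tagged buckets, each holding (be_u32 position, value) elements, followed
// by a table of 256 be_u32 bucket start offsets, so a reader can hash a key,
// jump straight to its bucket, and scan only that bucket.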
fn write_str(writer: io::Writer, &&s: ~str) { writer.write_str(s); }
fn write_int(writer: io::Writer, &&n: int) {
assert n < 0x7fff_ffff;
writer.write_be_u32(n as u32);
}
fn encode_meta_item(ebml_w: writer::Encoder, mi: meta_item) {
match mi.node {
meta_word(ref name) => {
ebml_w.start_tag(tag_meta_item_word);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::to_bytes((*name)));
ebml_w.end_tag();
ebml_w.end_tag();
}
meta_name_value(ref name, value) => {
match value.node {
lit_str(value) => {
ebml_w.start_tag(tag_meta_item_name_value);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::to_bytes((*name)));
ebml_w.end_tag();
ebml_w.start_tag(tag_meta_item_value);
ebml_w.writer.write(str::to_bytes(*value));
ebml_w.end_tag();
ebml_w.end_tag();
}
_ => {/* FIXME (#623): encode other variants */ }
}
}
meta_list(ref name, items) => {
ebml_w.start_tag(tag_meta_item_list);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::to_bytes((*name)));
ebml_w.end_tag();
for items.each |inner_item| {
encode_meta_item(ebml_w, **inner_item);
}
ebml_w.end_tag();
}
}
}
fn encode_attributes(ebml_w: writer::Encoder, attrs: ~[attribute]) {
ebml_w.start_tag(tag_attributes);
for attrs.each |attr| {
ebml_w.start_tag(tag_attribute);
encode_meta_item(ebml_w, attr.node.value);
ebml_w.end_tag();
}
ebml_w.end_tag();
}
// So there's a special crate attribute called 'link' which defines the
// metadata that Rust cares about for linking crates. This attribute requires
// 'name' and 'vers' items, so if the user didn't provide them we will throw
// them in anyway with default values.
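// For illustration (a hedged sketch of this era's syntax, not copied from a
// real crate), a user-supplied link attribute might look like:
//
//     #[link(name = "mycrate",
//            vers = "0.3")];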
fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] {
fn synthesize_link_attr(ecx: @encode_ctxt, items: ~[@meta_item]) ->
attribute {
assert (ecx.link_meta.name != ~"");
assert (ecx.link_meta.vers != ~"");
let name_item =
attr::mk_name_value_item_str(~"name", ecx.link_meta.name);
let vers_item =
attr::mk_name_value_item_str(~"vers", ecx.link_meta.vers);
let other_items =
{
let tmp = attr::remove_meta_items_by_name(items, ~"name");
attr::remove_meta_items_by_name(tmp, ~"vers")
};
let meta_items = vec::append(~[name_item, vers_item], other_items);
let link_item = attr::mk_list_item(~"link", meta_items);
return attr::mk_attr(link_item);
}
let mut attrs: ~[attribute] = ~[];
let mut found_link_attr = false;
for crate.node.attrs.each |attr| {
attrs.push(
if attr::get_attr_name(*attr) != ~"link" {
*attr
} else {
match attr.node.value.node {
meta_list(_, l) => {
found_link_attr = true;
synthesize_link_attr(ecx, l)
}
_ => *attr
}
});
}
if !found_link_attr { attrs.push(synthesize_link_attr(ecx, ~[])); }
return attrs;
}
fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: writer::Encoder,
cstore: cstore::CStore) {
fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::CStore)
-> ~[decoder::crate_dep] {
type hashkv = @{key: crate_num, val: cstore::crate_metadata};
type numdep = decoder::crate_dep;
// Pull the cnums and name,vers,hash out of cstore
let mut deps: ~[numdep] = ~[];
do cstore::iter_crate_data(cstore) |key, val| {
let dep = {cnum: key, name: ecx.tcx.sess.ident_of(val.name),
vers: decoder::get_crate_vers(val.data),
hash: decoder::get_crate_hash(val.data)};
deps.push(dep);
};
// Sort by cnum
pure fn lteq(kv1: &numdep, kv2: &numdep) -> bool {
kv1.cnum <= kv2.cnum
}
std::sort::quick_sort(deps, lteq);
// Sanity-check the crate numbers
let mut expected_cnum = 1;
for deps.each |n| {
assert (n.cnum == expected_cnum);
expected_cnum += 1;
}
// mut -> immutable hack for vec::map
return vec::slice(deps, 0u, vec::len(deps));
}
// We're just going to write a list of crate 'name-hash-version's, with
// the assumption that they are numbered 1 to n.
// FIXME (#2166): This is not nearly enough to support correct versioning
// but is enough to get transitive crate dependencies working.
ebml_w.start_tag(tag_crate_deps);
for get_ordered_deps(ecx, cstore).each |dep| {
encode_crate_dep(ecx, ebml_w, *dep);
}
ebml_w.end_tag();
}
fn encode_crate_dep(ecx: @encode_ctxt, ebml_w: writer::Encoder,
dep: decoder::crate_dep) {
ebml_w.start_tag(tag_crate_dep);
ebml_w.start_tag(tag_crate_dep_name);
ebml_w.writer.write(str::to_bytes(ecx.tcx.sess.str_of(dep.name)));
ebml_w.end_tag();
ebml_w.start_tag(tag_crate_dep_vers);
ebml_w.writer.write(str::to_bytes(dep.vers));
ebml_w.end_tag();
ebml_w.start_tag(tag_crate_dep_hash);
ebml_w.writer.write(str::to_bytes(dep.hash));
ebml_w.end_tag();
ebml_w.end_tag();
}
fn encode_hash(ebml_w: writer::Encoder, hash: ~str) {
ebml_w.start_tag(tag_crate_hash);
ebml_w.writer.write(str::to_bytes(hash));
ebml_w.end_tag();
}
// NB: Increment this as you change the metadata encoding version.
const metadata_encoding_version : &[u8] = &[0x72, //'r' as u8,
0x75, //'u' as u8,
0x73, //'s' as u8,
0x74, //'t' as u8,
0, 0, 0, 1 ];
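// (The eight bytes above spell "rust" followed by a big-endian version
// number, currently 1.)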
fn encode_metadata(parms: encode_parms, crate: @crate) -> ~[u8] {
let wr = @io::BytesWriter();
let stats =
{mut inline_bytes: 0,
mut attr_bytes: 0,
mut dep_bytes: 0,
mut item_bytes: 0,
mut index_bytes: 0,
mut zero_bytes: 0,
mut total_bytes: 0,
mut n_inlines: 0};
let ecx: @encode_ctxt = @encode_ctxt({
diag: parms.diag,
tcx: parms.tcx,
stats: move stats,
reachable: parms.reachable,
reexports2: parms.reexports2,
item_symbols: parms.item_symbols,
discrim_symbols: parms.discrim_symbols,
link_meta: parms.link_meta,
cstore: parms.cstore,
encode_inlined_item: parms.encode_inlined_item,
type_abbrevs: ty::new_ty_hash()
});
let ebml_w = writer::Encoder(wr as io::Writer);
encode_hash(ebml_w, ecx.link_meta.extras_hash);
let mut i = wr.pos;
let crate_attrs = synthesize_crate_attrs(ecx, crate);
encode_attributes(ebml_w, crate_attrs);
ecx.stats.attr_bytes = wr.pos - i;
i = wr.pos;
encode_crate_deps(ecx, ebml_w, ecx.cstore);
ecx.stats.dep_bytes = wr.pos - i;
// Encode and index the items.
ebml_w.start_tag(tag_items);
i = wr.pos;
let items_index = encode_info_for_items(ecx, ebml_w, crate);
ecx.stats.item_bytes = wr.pos - i;
i = wr.pos;
let items_buckets = create_index(items_index);
encode_index(ebml_w, items_buckets, write_int);
ecx.stats.index_bytes = wr.pos - i;
ebml_w.end_tag();
ecx.stats.total_bytes = wr.pos;
if (parms.tcx.sess.meta_stats()) {
do wr.bytes.borrow |v| {
do v.each |e| {
if *e == 0 {
ecx.stats.zero_bytes += 1;
}
true
}
}
io::println("metadata stats:");
io::println(fmt!(" inline bytes: %u", ecx.stats.inline_bytes));
io::println(fmt!(" attribute bytes: %u", ecx.stats.attr_bytes));
io::println(fmt!(" dep bytes: %u", ecx.stats.dep_bytes));
io::println(fmt!(" item bytes: %u", ecx.stats.item_bytes));
io::println(fmt!(" index bytes: %u", ecx.stats.index_bytes));
io::println(fmt!(" zero bytes: %u", ecx.stats.zero_bytes));
io::println(fmt!(" total bytes: %u", ecx.stats.total_bytes));
}
// Pad this, since something (LLVM, presumably) is cutting off the
// trailing (length % 4) bytes.
wr.write(&[0u8, 0u8, 0u8, 0u8]);
// FIXME #3396: weird bug here, for reasons unclear this emits random
// looking bytes (mostly 0x1) if we use the version byte-array constant
// above; so we use a string constant inline instead.
//
// Should be:
//
// vec::from_slice(metadata_encoding_version) +
(do str::as_bytes(&~"rust\x00\x00\x00\x01") |bytes| {
vec::slice(*bytes, 0, 8)
}) + flate::deflate_bytes(wr.bytes.check_out(|buf| buf))
}
// Get the encoded string for a type
fn encoded_ty(tcx: ty::ctxt, t: ty::t) -> ~str {
let cx = @{diag: tcx.diag,
ds: def_to_str,
tcx: tcx,
reachable: |_id| false,
abbrevs: tyencode::ac_no_abbrevs};
do io::with_str_writer |wr| {
tyencode::enc_ty(wr, cx, t);
}
}
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
| 35.776955 | 78 | 0.571465 |
f5e951dc1a381189e90ce4da813a90d6882eedf3 | 3,486 | #[doc = r" Value read from the register"]
pub struct R {
bits: u16,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u16,
}
impl super::PACKET_RAM_0_318 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct LSBYTER {
bits: u8,
}
impl LSBYTER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct MSBYTER {
bits: u8,
}
impl MSBYTER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _LSBYTEW<'a> {
w: &'a mut W,
}
impl<'a> _LSBYTEW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 255;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u16) << OFFSET);
self.w.bits |= ((value & MASK) as u16) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _MSBYTEW<'a> {
w: &'a mut W,
}
impl<'a> _MSBYTEW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 255;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u16) << OFFSET);
self.w.bits |= ((value & MASK) as u16) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u16 {
self.bits
}
#[doc = "Bits 0:7 - LSBYTE"]
#[inline]
pub fn lsbyte(&self) -> LSBYTER {
let bits = {
const MASK: u8 = 255;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u16) as u8
};
LSBYTER { bits }
}
#[doc = "Bits 8:15 - MSBYTE"]
#[inline]
pub fn msbyte(&self) -> MSBYTER {
let bits = {
const MASK: u8 = 255;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u16) as u8
};
MSBYTER { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u16) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:7 - LSBYTE"]
#[inline]
pub fn lsbyte(&mut self) -> _LSBYTEW {
_LSBYTEW { w: self }
}
#[doc = "Bits 8:15 - MSBYTE"]
#[inline]
pub fn msbyte(&mut self) -> _MSBYTEW {
_MSBYTEW { w: self }
}
}
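// Example read-modify-write usage (a sketch; `p` is assumed to be a
// peripherals handle exposing this register as `packet_ram_0_318`, which is
// defined elsewhere in the generated crate):
//
//     // Swap the register's two bytes in one read-modify-write cycle.
//     p.packet_ram_0_318.modify(|r, w| unsafe {
//         w.lsbyte().bits(r.msbyte().bits())
//          .msbyte().bits(r.lsbyte().bits())
//     });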
| 23.714286 | 59 | 0.49082 |
16d8ada9f24c0857f93c2c29f662da0bfbd1d76c | 21,798 | //! List of the active feature gates.
use super::{State, Feature};
use syntax_pos::edition::Edition;
use syntax_pos::Span;
use syntax_pos::symbol::{Symbol, sym};
macro_rules! set {
($field: ident) => {{
fn f(features: &mut Features, _: Span) {
features.$field = true;
}
f as fn(&mut Features, Span)
}}
}
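// Sketch of what `set!` expands to in use (hypothetical standalone call, for
// illustration only):
//
//     let set_fn = set!(box_syntax);  // fn(&mut Features, Span)
//     set_fn(&mut features, span);    // features.box_syntax is now true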
macro_rules! declare_features {
($(
$(#[doc = $doc:tt])* (active, $feature:ident, $ver:expr, $issue:expr, $edition:expr),
)+) => {
/// Represents active features that are currently being implemented or
/// currently being considered for addition/removal.
pub const ACTIVE_FEATURES:
&[Feature] =
&[$(
// (sym::$feature, $ver, $issue, $edition, set!($feature))
Feature {
state: State::Active { set: set!($feature) },
name: sym::$feature,
since: $ver,
issue: $issue,
edition: $edition,
description: concat!($($doc,)*),
}
),+];
/// A set of features to be used by later passes.
#[derive(Clone, Default)]
pub struct Features {
/// `#![feature]` attrs for language features, for error reporting.
pub declared_lang_features: Vec<(Symbol, Span, Option<Symbol>)>,
/// `#![feature]` attrs for non-language (library) features.
pub declared_lib_features: Vec<(Symbol, Span)>,
$(
$(#[doc = $doc])*
pub $feature: bool
),+
}
impl Features {
pub fn walk_feature_fields(&self, mut f: impl FnMut(&str, bool)) {
$(f(stringify!($feature), self.$feature);)+
}
}
};
}
impl Feature {
/// Sets this feature in `Features`. Panics if called on a non-active feature.
pub fn set(&self, features: &mut Features, span: Span) {
match self.state {
State::Active { set } => set(features, span),
_ => panic!("called `set` on feature `{}` which is not `active`", self.name)
}
}
}
// If you change this, please modify `src/doc/unstable-book` as well.
//
// Don't ever remove anything from this list; move them to `removed.rs`.
//
// The version numbers here correspond to the version in which the current status
// was set. This is most important for knowing when a particular feature became
// stable (active).
//
// Note that the features are grouped into internal/user-facing and then
// sorted by version inside those groups. This is enforced with tidy.
//
// N.B., `tools/tidy/src/features.rs` parses this information directly out of the
// source, so take care when modifying it.
declare_features! (
// -------------------------------------------------------------------------
// feature-group-start: internal feature gates
// -------------------------------------------------------------------------
// no-tracking-issue-start
/// Allows using compiler's own crates.
(active, rustc_private, "1.0.0", Some(27812), None),
/// Allows using the `rust-intrinsic`'s "ABI".
(active, intrinsics, "1.0.0", None, None),
/// Allows using `#[lang = ".."]` attribute for linking items to special compiler logic.
(active, lang_items, "1.0.0", None, None),
/// Allows using the `#[stable]` and `#[unstable]` attributes.
(active, staged_api, "1.0.0", None, None),
/// Allows using `#[allow_internal_unstable]`. This is an
/// attribute on `macro_rules!` and can't use the attribute handling
/// below (it has to be checked before expansion possibly makes
/// macros disappear).
(active, allow_internal_unstable, "1.0.0", None, None),
/// Allows using `#[allow_internal_unsafe]`. This is an
/// attribute on `macro_rules!` and can't use the attribute handling
/// below (it has to be checked before expansion possibly makes
/// macros disappear).
(active, allow_internal_unsafe, "1.0.0", None, None),
/// Allows using `#[rustc_const_unstable(feature = "foo", ..)]` which
/// lets a function to be `const` when opted into with `#![feature(foo)]`.
(active, rustc_const_unstable, "1.0.0", None, None),
// no-tracking-issue-end
/// Allows using `#[link_name="llvm.*"]`.
(active, link_llvm_intrinsics, "1.0.0", Some(29602), None),
/// Allows using `rustc_*` attributes (RFC 572).
(active, rustc_attrs, "1.0.0", Some(29642), None),
/// Allows using the `box $expr` syntax.
(active, box_syntax, "1.0.0", Some(49733), None),
/// Allows using `#[main]` to replace the entrypoint `#[lang = "start"]` calls.
(active, main, "1.0.0", Some(29634), None),
/// Allows using `#[start]` on a function indicating that it is the program entrypoint.
(active, start, "1.0.0", Some(29633), None),
/// Allows using the `#[fundamental]` attribute.
(active, fundamental, "1.0.0", Some(29635), None),
/// Allows using the `rust-call` ABI.
(active, unboxed_closures, "1.0.0", Some(29625), None),
/// Allows using the `#[linkage = ".."]` attribute.
(active, linkage, "1.0.0", Some(29603), None),
/// Allows features specific to OIBIT (auto traits).
(active, optin_builtin_traits, "1.0.0", Some(13231), None),
/// Allows using `box` in patterns (RFC 469).
(active, box_patterns, "1.0.0", Some(29641), None),
// no-tracking-issue-start
/// Allows using `#[prelude_import]` on glob `use` items.
(active, prelude_import, "1.2.0", None, None),
// no-tracking-issue-end
// no-tracking-issue-start
/// Allows using `#[omit_gdb_pretty_printer_section]`.
(active, omit_gdb_pretty_printer_section, "1.5.0", None, None),
/// Allows using the `vectorcall` ABI.
(active, abi_vectorcall, "1.7.0", None, None),
// no-tracking-issue-end
/// Allows using `#[structural_match]` which indicates that a type is structurally matchable.
(active, structural_match, "1.8.0", Some(31434), None),
/// Allows using the `may_dangle` attribute (RFC 1327).
(active, dropck_eyepatch, "1.10.0", Some(34761), None),
/// Allows using the `#![panic_runtime]` attribute.
(active, panic_runtime, "1.10.0", Some(32837), None),
/// Allows declaring with `#![needs_panic_runtime]` that a panic runtime is needed.
(active, needs_panic_runtime, "1.10.0", Some(32837), None),
// no-tracking-issue-start
/// Allows identifying the `compiler_builtins` crate.
(active, compiler_builtins, "1.13.0", None, None),
/// Allows using the `unadjusted` ABI; perma-unstable.
(active, abi_unadjusted, "1.16.0", None, None),
/// Allows identifying crates that contain sanitizer runtimes.
(active, sanitizer_runtime, "1.17.0", None, None),
/// Used to identify crates that contain the profiler runtime.
(active, profiler_runtime, "1.18.0", None, None),
/// Allows using the `thiscall` ABI.
(active, abi_thiscall, "1.19.0", None, None),
/// Allows using `#![needs_allocator]`, an implementation detail of `#[global_allocator]`.
(active, allocator_internals, "1.20.0", None, None),
/// Added for testing E0705; perma-unstable.
(active, test_2018_feature, "1.31.0", None, Some(Edition::Edition2018)),
// no-tracking-issue-end
// -------------------------------------------------------------------------
// feature-group-end: internal feature gates
// -------------------------------------------------------------------------
// -------------------------------------------------------------------------
// feature-group-start: actual feature gates (target features)
// -------------------------------------------------------------------------
// FIXME: Document these and merge with the list below.
// Unstable `#[target_feature]` directives.
(active, arm_target_feature, "1.27.0", Some(44839), None),
(active, aarch64_target_feature, "1.27.0", Some(44839), None),
(active, hexagon_target_feature, "1.27.0", Some(44839), None),
(active, powerpc_target_feature, "1.27.0", Some(44839), None),
(active, mips_target_feature, "1.27.0", Some(44839), None),
(active, avx512_target_feature, "1.27.0", Some(44839), None),
(active, mmx_target_feature, "1.27.0", Some(44839), None),
(active, sse4a_target_feature, "1.27.0", Some(44839), None),
(active, tbm_target_feature, "1.27.0", Some(44839), None),
(active, wasm_target_feature, "1.30.0", Some(44839), None),
(active, adx_target_feature, "1.32.0", Some(44839), None),
(active, cmpxchg16b_target_feature, "1.32.0", Some(44839), None),
(active, movbe_target_feature, "1.34.0", Some(44839), None),
(active, rtm_target_feature, "1.35.0", Some(44839), None),
(active, f16c_target_feature, "1.36.0", Some(44839), None),
// -------------------------------------------------------------------------
// feature-group-end: actual feature gates (target features)
// -------------------------------------------------------------------------
// -------------------------------------------------------------------------
// feature-group-start: actual feature gates
// -------------------------------------------------------------------------
/// Allows using the `#[link_args]` attribute.
(active, link_args, "1.0.0", Some(29596), None),
/// Allows defining identifiers beyond ASCII.
(active, non_ascii_idents, "1.0.0", Some(55467), None),
/// Allows using `#[plugin_registrar]` on functions.
(active, plugin_registrar, "1.0.0", Some(29597), None),
/// Allows using `#![plugin(myplugin)]`.
(active, plugin, "1.0.0", Some(29597), None),
/// Allows using `#[thread_local]` on `static` items.
(active, thread_local, "1.0.0", Some(29594), None),
/// Allows the use of SIMD types in functions declared in `extern` blocks.
(active, simd_ffi, "1.0.0", Some(27731), None),
/// Allows using non lexical lifetimes (RFC 2094).
(active, nll, "1.0.0", Some(43234), None),
/// Allows using slice patterns.
(active, slice_patterns, "1.0.0", Some(62254), None),
/// Allows the definition of `const` functions with some advanced features.
(active, const_fn, "1.2.0", Some(57563), None),
/// Allows associated type defaults.
(active, associated_type_defaults, "1.2.0", Some(29661), None),
/// Allows `#![no_core]`.
(active, no_core, "1.3.0", Some(29639), None),
/// Allows default type parameters to influence type inference.
(active, default_type_parameter_fallback, "1.3.0", Some(27336), None),
/// Allows `repr(simd)` and importing the various simd intrinsics.
(active, repr_simd, "1.4.0", Some(27731), None),
/// Allows `extern "platform-intrinsic" { ... }`.
(active, platform_intrinsics, "1.4.0", Some(27731), None),
/// Allows `#[unwind(..)]`.
///
/// Permits specifying whether a function should permit unwinding or abort on unwind.
(active, unwind_attributes, "1.4.0", Some(58760), None),
/// Allows `#[no_debug]`.
(active, no_debug, "1.5.0", Some(29721), None),
/// Allows attributes on expressions and non-item statements.
(active, stmt_expr_attributes, "1.6.0", Some(15701), None),
/// Allows the use of type ascription in expressions.
(active, type_ascription, "1.6.0", Some(23416), None),
/// Allows `cfg(target_thread_local)`.
(active, cfg_target_thread_local, "1.7.0", Some(29594), None),
/// Allows specialization of implementations (RFC 1210).
(active, specialization, "1.7.0", Some(31844), None),
/// Allows using `#[naked]` on functions.
(active, naked_functions, "1.9.0", Some(32408), None),
/// Allows `cfg(target_has_atomic = "...")`.
(active, cfg_target_has_atomic, "1.9.0", Some(32976), None),
/// Allows `X..Y` patterns.
(active, exclusive_range_pattern, "1.11.0", Some(37854), None),
/// Allows exhaustive pattern matching on types that contain uninhabited types.
(active, exhaustive_patterns, "1.13.0", Some(51085), None),
/// Allows `union`s to implement `Drop`. Moreover, `union`s may now include fields
/// that don't implement `Copy` as long as they don't have any drop glue.
/// This is checked recursively. On encountering type variable where no progress can be made,
/// `T: Copy` is used as a substitute for "no drop glue".
///
/// NOTE: A limited form of `union U { ... }` was accepted in 1.19.0.
(active, untagged_unions, "1.13.0", Some(55149), None),
/// Allows `#[link(..., cfg(..))]`.
(active, link_cfg, "1.14.0", Some(37406), None),
/// Allows `extern "ptx-*" fn()`.
(active, abi_ptx, "1.15.0", Some(38788), None),
/// Allows the `#[repr(i128)]` attribute for enums.
(active, repr128, "1.16.0", Some(35118), None),
/// Allows `#[link(kind="static-nobundle"...)]`.
(active, static_nobundle, "1.16.0", Some(37403), None),
/// Allows `extern "msp430-interrupt" fn()`.
(active, abi_msp430_interrupt, "1.16.0", Some(38487), None),
/// Allows declarative macros 2.0 (`macro`).
(active, decl_macro, "1.17.0", Some(39412), None),
/// Allows `extern "x86-interrupt" fn()`.
(active, abi_x86_interrupt, "1.17.0", Some(40180), None),
/// Allows overlapping impls of marker traits.
(active, overlapping_marker_traits, "1.18.0", Some(29864), None),
/// Allows a test to fail without failing the whole suite.
(active, allow_fail, "1.19.0", Some(46488), None),
/// Allows unsized tuple coercion.
(active, unsized_tuple_coercion, "1.20.0", Some(42877), None),
/// Allows defining generators.
(active, generators, "1.21.0", Some(43122), None),
/// Allows `#[doc(cfg(...))]`.
(active, doc_cfg, "1.21.0", Some(43781), None),
/// Allows `#[doc(masked)]`.
(active, doc_masked, "1.21.0", Some(44027), None),
/// Allows `#[doc(spotlight)]`.
(active, doc_spotlight, "1.22.0", Some(45040), None),
/// Allows `#[doc(include = "some-file")]`.
(active, external_doc, "1.22.0", Some(44732), None),
/// Allows using `crate` as visibility modifier, synonymous with `pub(crate)`.
(active, crate_visibility_modifier, "1.23.0", Some(53120), None),
/// Allows defining `extern type`s.
(active, extern_types, "1.23.0", Some(43467), None),
/// Allows trait methods with arbitrary self types.
(active, arbitrary_self_types, "1.23.0", Some(44874), None),
/// Allows in-band quantification of lifetime bindings (e.g., `fn foo(x: &'a u8) -> &'a u8`).
(active, in_band_lifetimes, "1.23.0", Some(44524), None),
/// Allows associated types to be generic, e.g., `type Foo<T>;` (RFC 1598).
(active, generic_associated_types, "1.23.0", Some(44265), None),
/// Allows defining `trait X = A + B;` alias items.
(active, trait_alias, "1.24.0", Some(41517), None),
/// Allows inferring `'static` outlives requirements (RFC 2093).
(active, infer_static_outlives_requirements, "1.26.0", Some(54185), None),
/// Allows accessing fields of unions inside `const` functions.
(active, const_fn_union, "1.27.0", Some(51909), None),
/// Allows casting raw pointers to `usize` during const eval.
(active, const_raw_ptr_to_usize_cast, "1.27.0", Some(51910), None),
/// Allows dereferencing raw pointers during const eval.
(active, const_raw_ptr_deref, "1.27.0", Some(51911), None),
/// Allows comparing raw pointers during const eval.
(active, const_compare_raw_pointers, "1.27.0", Some(53020), None),
/// Allows `#[doc(alias = "...")]`.
(active, doc_alias, "1.27.0", Some(50146), None),
/// Allows inconsistent bounds in where clauses.
(active, trivial_bounds, "1.28.0", Some(48214), None),
/// Allows `'a: { break 'a; }`.
(active, label_break_value, "1.28.0", Some(48594), None),
/// Allows using `#[doc(keyword = "...")]`.
(active, doc_keyword, "1.28.0", Some(51315), None),
/// Allows reinterpretation of the bits of a value of one type as another
/// type during const eval.
(active, const_transmute, "1.29.0", Some(53605), None),
/// Allows using `try {...}` expressions.
(active, try_blocks, "1.29.0", Some(31436), None),
/// Allows defining an `#[alloc_error_handler]`.
(active, alloc_error_handler, "1.29.0", Some(51540), None),
/// Allows using the `amdgpu-kernel` ABI.
(active, abi_amdgpu_kernel, "1.29.0", Some(51575), None),
/// Allows panicking during const eval (producing compile-time errors).
(active, const_panic, "1.30.0", Some(51999), None),
/// Allows `#[marker]` on certain traits allowing overlapping implementations.
(active, marker_trait_attr, "1.30.0", Some(29864), None),
/// Allows macro invocations on modules expressions and statements and
/// procedural macros to expand to non-items.
(active, proc_macro_hygiene, "1.30.0", Some(54727), None),
/// Allows unsized rvalues at arguments and parameters.
(active, unsized_locals, "1.30.0", Some(48055), None),
/// Allows custom test frameworks with `#![test_runner]` and `#[test_case]`.
(active, custom_test_frameworks, "1.30.0", Some(50297), None),
/// Allows non-builtin attributes in inner attribute position.
(active, custom_inner_attributes, "1.30.0", Some(54726), None),
/// Allows `impl Trait` in bindings (`let`, `const`, `static`).
(active, impl_trait_in_bindings, "1.30.0", Some(63065), None),
/// Allows using `reason` in lint attributes and the `#[expect(lint)]` lint check.
(active, lint_reasons, "1.31.0", Some(54503), None),
/// Allows exhaustive integer pattern matching on `usize` and `isize`.
(active, precise_pointer_size_matching, "1.32.0", Some(56354), None),
/// Allows using `#[ffi_returns_twice]` on foreign functions.
(active, ffi_returns_twice, "1.34.0", Some(58314), None),
/// Allows const generic types (e.g. `struct Foo<const N: usize>(...);`).
(active, const_generics, "1.34.0", Some(44580), None),
/// Allows using `#[optimize(X)]`.
(active, optimize_attribute, "1.34.0", Some(54882), None),
/// Allows using C-variadics.
(active, c_variadic, "1.34.0", Some(44930), None),
/// Allows the use of associated type bounds.
(active, associated_type_bounds, "1.34.0", Some(52662), None),
/// Allows `if/while p && let q = r && ...` chains.
(active, let_chains, "1.37.0", Some(53667), None),
/// Allows #[repr(transparent)] on enums (RFC 2645).
(active, transparent_enums, "1.37.0", Some(60405), None),
/// Allows #[repr(transparent)] on unions (RFC 2645).
(active, transparent_unions, "1.37.0", Some(60405), None),
/// Allows explicit discriminants on non-unit enum variants.
(active, arbitrary_enum_discriminant, "1.37.0", Some(60553), None),
/// Allows `impl Trait` with multiple unrelated lifetimes.
(active, member_constraints, "1.37.0", Some(61977), None),
/// Allows `async || body` closures.
(active, async_closure, "1.37.0", Some(62290), None),
/// Allows `[x; N]` where `x` is a constant (RFC 2203).
(active, const_in_array_repeat_expressions, "1.37.0", Some(49147), None),
/// Allows `impl Trait` to be used inside type aliases (RFC 2515).
(active, type_alias_impl_trait, "1.38.0", Some(63063), None),
/// Allows the use of or-patterns (e.g., `0 | 1`).
(active, or_patterns, "1.38.0", Some(54883), None),
/// Allows the definition of `const extern fn` and `const unsafe extern fn`.
(active, const_extern_fn, "1.40.0", Some(64926), None),
/// Allows the use of raw-dylibs (RFC 2627).
(active, raw_dylib, "1.40.0", Some(58713), None),
/// Allows `#[track_caller]` to be used which provides
/// accurate caller location reporting during panic (RFC 2091).
(active, track_caller, "1.40.0", Some(47809), None),
/// Allows making `dyn Trait` well-formed even if `Trait` is not object safe.
/// In that case, `dyn Trait: Trait` does not hold. Moreover, coercions and
/// casts in safe Rust to `dyn Trait` for such a `Trait` is also forbidden.
(active, object_safe_for_dispatch, "1.40.0", Some(43561), None),
/// Allows using the `efiapi` ABI.
(active, abi_efiapi, "1.40.0", Some(65815), None),
/// Allows `&raw const $place_expr` and `&raw mut $place_expr` expressions.
(active, raw_ref_op, "1.41.0", Some(64490), None),
/// Allows diverging expressions to fall back to `!` rather than `()`.
(active, never_type_fallback, "1.41.0", Some(65992), None),
/// Allows using the `#[register_attr]` attribute.
(active, register_attr, "1.41.0", Some(66080), None),
/// Allows using the `#[register_tool]` attribute.
(active, register_tool, "1.41.0", Some(66079), None),
/// Allows the use of `if` and `match` in constants.
(active, const_if_match, "1.41.0", Some(49146), None),
/// Allows the use of `#[cfg(sanitize = "option")]`; set when -Zsanitizer is used.
(active, cfg_sanitize, "1.41.0", Some(39699), None),
// -------------------------------------------------------------------------
// feature-group-end: actual feature gates
// -------------------------------------------------------------------------
);
/// Some features are known to be incomplete and using them is likely to have
/// unanticipated results, such as compiler crashes. We warn the user about these
/// to alert them.
pub const INCOMPLETE_FEATURES: &[Symbol] = &[
sym::impl_trait_in_bindings,
sym::generic_associated_types,
sym::const_generics,
sym::or_patterns,
sym::let_chains,
sym::raw_dylib,
sym::track_caller,
];
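// For example, a crate that enables one of these gates:
//
//     #![feature(const_generics)]
//
// gets a warning that the feature is incomplete and may cause compiler
// crashes (the exact wording depends on the compiler version).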
| 39.850091 | 97 | 0.60111 |
561435f1b728aea34ebf3d05e589884d2574399e | 102 | pub mod configuration;
pub mod domain;
pub mod gql;
pub mod routes;
pub mod session;
pub mod startup;
| 14.571429 | 22 | 0.764706 |
e477687ac114053c722dc820d3c2a32f52a4b2c8 | 9,210 | //! An asynchronous, pipelined, PostgreSQL client.
//!
//! # Example
//!
//! ```no_run
//! use tokio_postgres::{NoTls, Error};
//!
//! # #[cfg(not(feature = "runtime"))] fn main() {}
//! # #[cfg(feature = "runtime")]
//! #[tokio::main] // By default, tokio_postgres uses the tokio crate as its runtime.
//! async fn main() -> Result<(), Error> {
//! // Connect to the database.
//! let (client, connection) =
//! tokio_postgres::connect("host=localhost user=postgres", NoTls).await?;
//!
//! // The connection object performs the actual communication with the database,
//! // so spawn it off to run on its own.
//! tokio::spawn(async move {
//! if let Err(e) = connection.await {
//! eprintln!("connection error: {}", e);
//! }
//! });
//!
//! // Now we can execute a simple statement that just returns its parameter.
//! let rows = client
//! .query("SELECT $1::TEXT", &[&"hello world"])
//! .await?;
//!
//! // And then check that we got back the same string we sent over.
//! let value: &str = rows[0].get(0);
//! assert_eq!(value, "hello world");
//!
//! Ok(())
//! }
//! ```
//!
//! # Behavior
//!
//! Calling a method like `Client::query` on its own does nothing. The associated request is not sent to the database
//! until the future returned by the method is first polled. Requests are executed in the order that they are first
//! polled, not in the order that their futures are created.
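//!
//! For example (a minimal sketch assuming an already-connected `client`), the
//! future returned by `query` sends nothing until it is awaited:
//!
//! ```no_run
//! # use tokio_postgres::{Client, Error};
//! # async fn example(client: &Client) -> Result<(), Error> {
//! // Creating the future does not contact the server yet.
//! let pending = client.query("SELECT 1", &[]);
//! // The request is sent when the future is first polled, i.e. awaited here.
//! let rows = pending.await?;
//! # drop(rows);
//! # Ok(())
//! # }
//! ```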
//!
//! # Pipelining
//!
//! The client supports *pipelined* requests. Pipelining can improve performance in use cases in which multiple,
//! independent queries need to be executed. In a traditional workflow, each query is sent to the server after the
//! previous query completes. In contrast, pipelining allows the client to send all of the queries to the server up
//! front, minimizing time spent by one side waiting for the other to finish sending data:
//!
//! ```not_rust
//! Sequential Pipelined
//! | Client | Server | | Client | Server |
//! |----------------|-----------------| |----------------|-----------------|
//! | send query 1 | | | send query 1 | |
//! | | process query 1 | | send query 2 | process query 1 |
//! | receive rows 1 | | | send query 3 | process query 2 |
//! | send query 2 | | | receive rows 1 | process query 3 |
//! | | process query 2 | | receive rows 2 | |
//! | receive rows 2 | | | receive rows 3 | |
//! | send query 3 | |
//! | | process query 3 |
//! | receive rows 3 | |
//! ```
//!
//! In both cases, the PostgreSQL server is executing the queries sequentially - pipelining just allows both sides of
//! the connection to work concurrently when possible.
//!
//! Pipelining happens automatically when futures are polled concurrently (for example, by using the futures `join`
//! combinator):
//!
//! ```rust
//! use futures::future;
//! use std::future::Future;
//! use tokio_postgres::{Client, Error, Statement};
//!
//! async fn pipelined_prepare(
//! client: &Client,
//! ) -> Result<(Statement, Statement), Error>
//! {
//! future::try_join(
//! client.prepare("SELECT * FROM foo"),
//! client.prepare("INSERT INTO bar (id, name) VALUES ($1, $2)")
//! ).await
//! }
//! ```
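//!
//! The same technique applies to queries themselves; a sketch, assuming two
//! prepared statements are already in scope:
//!
//! ```no_run
//! # use tokio_postgres::{Client, Error, Statement};
//! # async fn run(client: &Client, s1: &Statement, s2: &Statement) -> Result<(), Error> {
//! // Awaiting the joined future polls both queries concurrently, so the
//! // client pipelines the two requests on the connection.
//! let (rows1, rows2) = futures::future::try_join(
//!     client.query(s1, &[]),
//!     client.query(s2, &[]),
//! ).await?;
//! # drop((rows1, rows2));
//! # Ok(())
//! # }
//! ```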
//!
//! # Runtime
//!
//! The client works with arbitrary `AsyncRead + AsyncWrite` streams. Convenience APIs are provided to handle the
//! connection process, but these are gated by the `runtime` Cargo feature, which is enabled by default. If disabled,
//! all dependence on the tokio runtime is removed.
//!
//! # SSL/TLS support
//!
//! TLS support is implemented via external libraries. `Client::connect` and `Config::connect` take a TLS implementation
//! as an argument. The `NoTls` type in this crate can be used when TLS is not required. Otherwise, the
//! `postgres-openssl` and `postgres-native-tls` crates provide implementations backed by the `openssl` and `native-tls`
//! crates, respectively.
//!
//! # Features
//!
//! The following features can be enabled from `Cargo.toml`:
//!
//! | Feature | Description | Extra dependencies | Default |
//! | ------- | ----------- | ------------------ | ------- |
//! | `runtime` | Enable convenience API for the connection process based on the `tokio` crate. | [tokio](https://crates.io/crates/tokio) 1.0 with the features `net` and `time` | yes |
//! | `with-bit-vec-0_6` | Enable support for the `bit-vec` crate. | [bit-vec](https://crates.io/crates/bit-vec) 0.6 | no |
//! | `with-chrono-0_4` | Enable support for the `chrono` crate. | [chrono](https://crates.io/crates/chrono) 0.4 | no |
//! | `with-eui48-0_4` | Enable support for the `eui48` crate. | [eui48](https://crates.io/crates/eui48) 0.4 | no |
//! | `with-geo-types-0_4` | Enable support for the 0.4 version of the `geo-types` crate. | [geo-types](https://crates.io/crates/geo-types/0.4.0) 0.4 | no |
//! | `with-geo-types-0_5` | Enable support for the 0.5 version of the `geo-types` crate. | [geo-types](https://crates.io/crates/geo-types/0.5.0) 0.5 | no |
//! | `with-serde_json-1` | Enable support for the `serde_json` crate. | [serde_json](https://crates.io/crates/serde_json) 1.0 | no |
//! | `with-uuid-0_8` | Enable support for the `uuid` crate. | [uuid](https://crates.io/crates/uuid) 0.8 | no |
//! | `with-time-0_2` | Enable support for the `time` crate. | [time](https://crates.io/crates/time) 0.2 | no |
#![doc(html_root_url = "https://docs.rs/tokio-postgres/0.7")]
#![warn(rust_2018_idioms, clippy::all, missing_docs)]
pub use crate::cancel_token::CancelToken;
pub use crate::client::Client;
pub use crate::config::Config;
pub use crate::connection::Connection;
pub use crate::copy_in::CopyInSink;
pub use crate::copy_out::CopyOutStream;
use crate::error::DbError;
pub use crate::error::Error;
pub use crate::generic_client::GenericClient;
pub use crate::portal::Portal;
pub use crate::query::RowStream;
pub use crate::row::{Row, SimpleQueryRow};
pub use crate::simple_query::SimpleQueryStream;
#[cfg(feature = "runtime")]
pub use crate::socket::Socket;
pub use crate::statement::{Column, Statement};
#[cfg(feature = "runtime")]
use crate::tls::MakeTlsConnect;
pub use crate::tls::NoTls;
pub use crate::to_statement::ToStatement;
pub use crate::transaction::Transaction;
pub use crate::transaction_builder::{IsolationLevel, TransactionBuilder};
use crate::types::ToSql;
pub mod binary_copy;
mod bind;
#[cfg(feature = "runtime")]
mod cancel_query;
mod cancel_query_raw;
mod cancel_token;
mod client;
mod codec;
pub mod config;
#[cfg(feature = "runtime")]
mod connect;
mod connect_raw;
#[cfg(feature = "runtime")]
mod connect_socket;
mod connect_tls;
mod connection;
mod copy_in;
mod copy_out;
pub mod error;
mod generic_client;
mod maybe_tls_stream;
mod portal;
mod prepare;
mod query;
pub mod row;
mod simple_query;
#[cfg(feature = "runtime")]
mod socket;
mod statement;
pub mod tls;
mod to_statement;
mod transaction;
mod transaction_builder;
pub mod types;
/// A convenience function which parses a connection string and connects to the database.
///
/// See the documentation for [`Config`] for details on the connection string format.
///
/// Requires the `runtime` Cargo feature (enabled by default).
///
/// [`Config`]: config/struct.Config.html
#[cfg(feature = "runtime")]
pub async fn connect<T>(
config: &str,
tls: T,
) -> Result<(Client, Connection<Socket, T::Stream>), Error>
where
T: MakeTlsConnect<Socket>,
{
let config = config.parse::<Config>()?;
config.connect(tls).await
}
/// An asynchronous notification.
#[derive(Clone, Debug)]
pub struct Notification {
process_id: i32,
channel: String,
payload: String,
}
impl Notification {
/// The process ID of the notifying backend process.
pub fn process_id(&self) -> i32 {
self.process_id
}
    /// The name of the channel that the notification was raised on.
pub fn channel(&self) -> &str {
&self.channel
}
/// The "payload" string passed from the notifying process.
pub fn payload(&self) -> &str {
&self.payload
}
}
/// An asynchronous message from the server.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone)]
#[non_exhaustive]
pub enum AsyncMessage {
/// A notice.
///
    /// Notices use the same format as errors, but aren't "errors" per se.
Notice(DbError),
/// A notification.
///
/// Connections can subscribe to notifications with the `LISTEN` command.
Notification(Notification),
}
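// A minimal sketch of draining `AsyncMessage`s (notices and notifications):
// `Connection::poll_message` is wrapped into a stream and notifications are
// logged. The helper name is illustrative, not part of the public API, and
// the `futures` crate (a dev-dependency here) is assumed to be available.
#[cfg(test)]
#[allow(dead_code)]
async fn consume_messages<S, T>(mut connection: Connection<S, T>)
where
    S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin,
    T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin,
{
    use futures::stream::StreamExt;

    // `poll_message` yields notices and notifications interleaved with the
    // connection's normal work.
    let mut messages = futures::stream::poll_fn(move |cx| connection.poll_message(cx));
    while let Some(Ok(message)) = messages.next().await {
        if let AsyncMessage::Notification(n) = message {
            eprintln!("notification on {}: {}", n.channel(), n.payload());
        }
    }
}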
/// Message returned by the `SimpleQuery` stream.
#[non_exhaustive]
pub enum SimpleQueryMessage {
/// A row of data.
Row(SimpleQueryRow),
/// A statement in the query has completed.
///
/// The number of rows modified or selected is returned.
CommandComplete(u64),
}
fn slice_iter<'a>(
s: &'a [&'a (dyn ToSql + Sync)],
) -> impl ExactSizeIterator<Item = &'a dyn ToSql> + 'a {
s.iter().map(|s| *s as _)
}
| 36.547619 | 184 | 0.632573 |
f8ad962bc5fef6d6e6cd7397c60c9155abba929c | 1,539 | // Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
use eui48;
use std::fmt;
use crate::fail::Fail;
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct MacAddress(eui48::MacAddress);
impl MacAddress {
pub const fn new(bytes: [u8; 6]) -> Self {
MacAddress(eui48::MacAddress::new(bytes))
}
pub fn from_bytes(bytes: &[u8]) -> Self {
MacAddress(eui48::MacAddress::from_bytes(bytes).unwrap())
}
pub fn octets(&self) -> [u8; 6] {
self.0.to_array()
}
pub fn broadcast() -> MacAddress {
MacAddress(eui48::MacAddress::broadcast())
}
pub fn nil() -> MacAddress {
MacAddress(eui48::MacAddress::nil())
}
pub fn is_nil(self) -> bool {
self.0.is_nil()
}
pub fn is_broadcast(self) -> bool {
self.0.is_broadcast()
}
pub fn is_unicast(self) -> bool {
self.0.is_unicast()
}
pub fn to_canonical(self) -> String {
self.0.to_canonical()
}
pub fn as_bytes(&self) -> &[u8] {
self.0.as_bytes()
}
pub fn parse_str(s: &str) -> Result<Self, Fail> {
Ok(Self(eui48::MacAddress::parse_str(s)?))
}
pub fn to_array(self) -> [u8; 6] {
self.0.to_array()
}
}
impl fmt::Display for MacAddress {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::Debug for MacAddress {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "MacAddress({})", &self.to_canonical())
}
}
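// A small usage sketch for the wrapper above: parsing and formatting
// round-trip through eui48's canonical form (lowercase, hyphen-separated).
#[cfg(test)]
mod tests {
    use super::MacAddress;

    #[test]
    fn parse_and_format_round_trip() {
        let mac = match MacAddress::parse_str("01-23-45-67-89-ab") {
            Ok(mac) => mac,
            Err(_) => panic!("failed to parse a canonical MAC address"),
        };
        assert_eq!(mac.octets(), [0x01, 0x23, 0x45, 0x67, 0x89, 0xab]);
        assert_eq!(mac.to_canonical(), "01-23-45-67-89-ab");
    }
}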
| 21.676056 | 65 | 0.568551 |
569063b15f7af871eab3299315f9dc8bdcc3545b | 726 | use std::fmt::{Debug, Formatter, Result};
#[derive(Copy, Clone, Eq, PartialEq)]
pub enum ParseErrKind {
BeginWithIllegalChar,
IllegalCharEncounter,
InvalidParameters,
}
impl Debug for ParseErrKind {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        match self {
            ParseErrKind::BeginWithIllegalChar => {
                write!(f, "The string to be parsed begins with an illegal character")
            },
            ParseErrKind::IllegalCharEncounter => {
                write!(f, "Illegal character encountered during parsing")
            },
            ParseErrKind::InvalidParameters => {
                write!(f, "Invalid parameters")
            }
        }
}
} | 30.25 | 90 | 0.560606 |
29540b3e5159f8f0e4879fa5aa32d569891e5637 | 757 | // vec2.rs
// A Vec of even numbers is given. Your task is to complete the loop
// so that each number in the Vec is multiplied by 2.
//
// Make me pass the test!
//
// Execute the command `rustlings hint collections2` if you need
// hints.
fn vec_loop(mut v: Vec<i32>) -> Vec<i32> {
for i in v.iter_mut() {
        *i *= 2;
}
// At this point, `v` should be equal to [4, 8, 12, 16, 20].
v
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_vec_loop() {
let v: Vec<i32> = (1..).filter(|x| x % 2 == 0).take(5).collect();
let ans = vec_loop(v.clone());
assert_eq!(
ans,
v.iter()
.map(|x| x * 2)
.collect::<Vec<i32>>()
);
}
}
| 20.459459 | 73 | 0.500661 |
8ac4a107353cc90f0ee2421245fffbbb05c98375 | 2,350 | // Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use format::Format;
/// Describes an individual `Vertex`; in other words, a collection of attributes that can be read
/// from a vertex shader.
///
/// At this stage, the vertex is in a "raw" format. For example a `[f32; 4]` can match both a
/// `vec4` or a `float[4]`. The way things are bound depends on the shader.
pub unsafe trait Vertex: 'static + Send + Sync {
/// Returns the characteristics of a vertex member by its name.
fn member(name: &str) -> Option<VertexMemberInfo>;
}
unsafe impl Vertex for () {
#[inline]
fn member(_: &str) -> Option<VertexMemberInfo> {
None
}
}
/// Information about a member of a vertex struct.
pub struct VertexMemberInfo {
/// Offset of the member in bytes from the start of the struct.
pub offset: usize,
/// Type of data. This is used to check that the interface is matching.
pub ty: VertexMemberTy,
/// Number of consecutive elements of that type.
pub array_size: usize,
}
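// A hand-written sketch of implementing `Vertex` for a position-only vertex
// type. Real code would normally use the crate's `impl_vertex!` macro; the
// manual form is spelled out here to make the contract explicit.
#[allow(dead_code)]
#[derive(Clone, Copy)]
struct ExampleVertex {
    position: [f32; 3],
}

unsafe impl Vertex for ExampleVertex {
    fn member(name: &str) -> Option<VertexMemberInfo> {
        match name {
            // `position` starts at byte offset 0 and is three consecutive
            // f32s, so it can match e.g. a `vec3` input in the vertex shader.
            "position" => Some(VertexMemberInfo {
                offset: 0,
                ty: VertexMemberTy::F32,
                array_size: 3,
            }),
            _ => None,
        }
    }
}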
/// Type of a member of a vertex struct.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[allow(missing_docs)]
pub enum VertexMemberTy {
I8,
U8,
I16,
U16,
I32,
U32,
F32,
F64,
}
impl VertexMemberTy {
/// Returns true if a combination of `(type, array_size)` matches a format.
#[inline]
pub fn matches(&self, array_size: usize, format: Format, num_locs: u32) -> bool {
// TODO: implement correctly
let my_size = match *self {
VertexMemberTy::I8 => 1,
VertexMemberTy::U8 => 1,
VertexMemberTy::I16 => 2,
VertexMemberTy::U16 => 2,
VertexMemberTy::I32 => 4,
VertexMemberTy::U32 => 4,
VertexMemberTy::F32 => 4,
VertexMemberTy::F64 => 8,
};
let format_size = match format.size() {
None => return false,
Some(s) => s,
};
array_size * my_size == format_size * num_locs as usize
}
}
| 30.519481 | 96 | 0.628085 |
fc44b882e799633b29d10239820ce4141a9037ce | 17,372 | #[doc = "Reader of register P5_4"]
pub type R = crate::R<u32, super::P5_4>;
#[doc = "Writer for register P5_4"]
pub type W = crate::W<u32, super::P5_4>;
#[doc = "Register P5_4 `reset()`'s with value 0x30"]
impl crate::ResetValue for super::P5_4 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0x30
}
}
#[doc = "Selects pin function for pin P5\\[4\\]\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum FUNC_A {
#[doc = "0: General purpose digital input/output\r\n pin."]
P5_4 = 0,
#[doc = "1: RS-485/EIA-485 output enable signal for UART0."]
U0_OE = 1,
#[doc = "3: Match output for Timer 3, channel 3."]
T3_MAT3 = 3,
#[doc = "4: Transmitter output for USART4 (input/output in smart card mode)."]
U4_TXD = 4,
}
impl From<FUNC_A> for u8 {
#[inline(always)]
fn from(variant: FUNC_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `FUNC`"]
pub type FUNC_R = crate::R<u8, FUNC_A>;
impl FUNC_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, FUNC_A> {
use crate::Variant::*;
match self.bits {
0 => Val(FUNC_A::P5_4),
1 => Val(FUNC_A::U0_OE),
3 => Val(FUNC_A::T3_MAT3),
4 => Val(FUNC_A::U4_TXD),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `P5_4`"]
#[inline(always)]
pub fn is_p5_4(&self) -> bool {
*self == FUNC_A::P5_4
}
#[doc = "Checks if the value of the field is `U0_OE`"]
#[inline(always)]
pub fn is_u0_oe(&self) -> bool {
*self == FUNC_A::U0_OE
}
#[doc = "Checks if the value of the field is `T3_MAT3`"]
#[inline(always)]
pub fn is_t3_mat3(&self) -> bool {
*self == FUNC_A::T3_MAT3
}
#[doc = "Checks if the value of the field is `U4_TXD`"]
#[inline(always)]
pub fn is_u4_txd(&self) -> bool {
*self == FUNC_A::U4_TXD
}
}
#[doc = "Write proxy for field `FUNC`"]
pub struct FUNC_W<'a> {
w: &'a mut W,
}
impl<'a> FUNC_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: FUNC_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "General purpose digital input/output pin."]
#[inline(always)]
pub fn p5_4(self) -> &'a mut W {
self.variant(FUNC_A::P5_4)
}
#[doc = "RS-485/EIA-485 output enable signal for UART0."]
#[inline(always)]
pub fn u0_oe(self) -> &'a mut W {
self.variant(FUNC_A::U0_OE)
}
#[doc = "Match output for Timer 3, channel 3."]
#[inline(always)]
pub fn t3_mat3(self) -> &'a mut W {
self.variant(FUNC_A::T3_MAT3)
}
#[doc = "Transmitter output for USART4 (input/output in smart card mode)."]
#[inline(always)]
pub fn u4_txd(self) -> &'a mut W {
self.variant(FUNC_A::U4_TXD)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
self.w
}
}
#[doc = "Selects function mode (on-chip pull-up/pull-down resistor control).\n\nValue on reset: 2"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum MODE_A {
#[doc = "0: Inactive (no pull-down/pull-up resistor\r\n enabled)."]
INACTIVE_NO_PULL_DO = 0,
#[doc = "1: Pull-down resistor enabled."]
PULL_DOWN_RESISTOR_E = 1,
#[doc = "2: Pull-up resistor enabled."]
PULL_UP_RESISTOR_ENA = 2,
#[doc = "3: Repeater mode."]
REPEATER_MODE_ = 3,
}
impl From<MODE_A> for u8 {
#[inline(always)]
fn from(variant: MODE_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `MODE`"]
pub type MODE_R = crate::R<u8, MODE_A>;
impl MODE_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> MODE_A {
match self.bits {
0 => MODE_A::INACTIVE_NO_PULL_DO,
1 => MODE_A::PULL_DOWN_RESISTOR_E,
2 => MODE_A::PULL_UP_RESISTOR_ENA,
3 => MODE_A::REPEATER_MODE_,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `INACTIVE_NO_PULL_DO`"]
#[inline(always)]
pub fn is_inactive_no_pull_do(&self) -> bool {
*self == MODE_A::INACTIVE_NO_PULL_DO
}
#[doc = "Checks if the value of the field is `PULL_DOWN_RESISTOR_E`"]
#[inline(always)]
pub fn is_pull_down_resistor_e(&self) -> bool {
*self == MODE_A::PULL_DOWN_RESISTOR_E
}
#[doc = "Checks if the value of the field is `PULL_UP_RESISTOR_ENA`"]
#[inline(always)]
pub fn is_pull_up_resistor_ena(&self) -> bool {
*self == MODE_A::PULL_UP_RESISTOR_ENA
}
#[doc = "Checks if the value of the field is `REPEATER_MODE_`"]
#[inline(always)]
pub fn is_repeater_mode_(&self) -> bool {
*self == MODE_A::REPEATER_MODE_
}
}
#[doc = "Write proxy for field `MODE`"]
pub struct MODE_W<'a> {
w: &'a mut W,
}
impl<'a> MODE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: MODE_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Inactive (no pull-down/pull-up resistor enabled)."]
#[inline(always)]
pub fn inactive_no_pull_do(self) -> &'a mut W {
self.variant(MODE_A::INACTIVE_NO_PULL_DO)
}
#[doc = "Pull-down resistor enabled."]
#[inline(always)]
pub fn pull_down_resistor_e(self) -> &'a mut W {
self.variant(MODE_A::PULL_DOWN_RESISTOR_E)
}
#[doc = "Pull-up resistor enabled."]
#[inline(always)]
pub fn pull_up_resistor_ena(self) -> &'a mut W {
self.variant(MODE_A::PULL_UP_RESISTOR_ENA)
}
#[doc = "Repeater mode."]
#[inline(always)]
pub fn repeater_mode_(self) -> &'a mut W {
self.variant(MODE_A::REPEATER_MODE_)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 3)) | (((value as u32) & 0x03) << 3);
self.w
}
}
#[doc = "Hysteresis.\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HYS_A {
#[doc = "0: Disable."]
DISABLE_ = 0,
#[doc = "1: Enable."]
ENABLE_ = 1,
}
impl From<HYS_A> for bool {
#[inline(always)]
fn from(variant: HYS_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `HYS`"]
pub type HYS_R = crate::R<bool, HYS_A>;
impl HYS_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> HYS_A {
match self.bits {
false => HYS_A::DISABLE_,
true => HYS_A::ENABLE_,
}
}
#[doc = "Checks if the value of the field is `DISABLE_`"]
#[inline(always)]
pub fn is_disable_(&self) -> bool {
*self == HYS_A::DISABLE_
}
#[doc = "Checks if the value of the field is `ENABLE_`"]
#[inline(always)]
pub fn is_enable_(&self) -> bool {
*self == HYS_A::ENABLE_
}
}
#[doc = "Write proxy for field `HYS`"]
pub struct HYS_W<'a> {
w: &'a mut W,
}
impl<'a> HYS_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: HYS_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disable."]
#[inline(always)]
pub fn disable_(self) -> &'a mut W {
self.variant(HYS_A::DISABLE_)
}
#[doc = "Enable."]
#[inline(always)]
pub fn enable_(self) -> &'a mut W {
self.variant(HYS_A::ENABLE_)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Invert input\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum INV_A {
#[doc = "0: Input not inverted (HIGH on pin reads as 1, LOW on pin\r\n reads as 0)."]
INPUT_NOT_INVERTED_ = 0,
#[doc = "1: Input inverted (HIGH on pin reads as 0, LOW on pin reads as\r\n 1)."]
INPUT_INVERTED_HIGH = 1,
}
impl From<INV_A> for bool {
#[inline(always)]
fn from(variant: INV_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `INV`"]
pub type INV_R = crate::R<bool, INV_A>;
impl INV_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> INV_A {
match self.bits {
false => INV_A::INPUT_NOT_INVERTED_,
true => INV_A::INPUT_INVERTED_HIGH,
}
}
#[doc = "Checks if the value of the field is `INPUT_NOT_INVERTED_`"]
#[inline(always)]
pub fn is_input_not_inverted_(&self) -> bool {
*self == INV_A::INPUT_NOT_INVERTED_
}
#[doc = "Checks if the value of the field is `INPUT_INVERTED_HIGH`"]
#[inline(always)]
pub fn is_input_inverted_high(&self) -> bool {
*self == INV_A::INPUT_INVERTED_HIGH
}
}
#[doc = "Write proxy for field `INV`"]
pub struct INV_W<'a> {
w: &'a mut W,
}
impl<'a> INV_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: INV_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Input not inverted (HIGH on pin reads as 1, LOW on pin reads as 0)."]
#[inline(always)]
pub fn input_not_inverted_(self) -> &'a mut W {
self.variant(INV_A::INPUT_NOT_INVERTED_)
}
#[doc = "Input inverted (HIGH on pin reads as 0, LOW on pin reads as 1)."]
#[inline(always)]
pub fn input_inverted_high(self) -> &'a mut W {
self.variant(INV_A::INPUT_INVERTED_HIGH)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Driver slew rate\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SLEW_A {
#[doc = "0: Standard mode, output slew rate control is enabled. More\r\n outputs can be switched simultaneously."]
STANDARD = 0,
#[doc = "1: Fast mode, slew rate control is disabled. Refer to the\r\n appropriate specific device data sheet for details."]
FAST = 1,
}
impl From<SLEW_A> for bool {
#[inline(always)]
fn from(variant: SLEW_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `SLEW`"]
pub type SLEW_R = crate::R<bool, SLEW_A>;
impl SLEW_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SLEW_A {
match self.bits {
false => SLEW_A::STANDARD,
true => SLEW_A::FAST,
}
}
#[doc = "Checks if the value of the field is `STANDARD`"]
#[inline(always)]
pub fn is_standard(&self) -> bool {
*self == SLEW_A::STANDARD
}
#[doc = "Checks if the value of the field is `FAST`"]
#[inline(always)]
pub fn is_fast(&self) -> bool {
*self == SLEW_A::FAST
}
}
#[doc = "Write proxy for field `SLEW`"]
pub struct SLEW_W<'a> {
w: &'a mut W,
}
impl<'a> SLEW_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SLEW_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Standard mode, output slew rate control is enabled. More outputs can be switched simultaneously."]
#[inline(always)]
pub fn standard(self) -> &'a mut W {
self.variant(SLEW_A::STANDARD)
}
#[doc = "Fast mode, slew rate control is disabled. Refer to the appropriate specific device data sheet for details."]
#[inline(always)]
pub fn fast(self) -> &'a mut W {
self.variant(SLEW_A::FAST)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Open-drain mode.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum OD_A {
#[doc = "0: Disable."]
DISABLE_ = 0,
#[doc = "1: Open-drain mode enabled. This is not a true open-drain\r\n mode. Input cannot be pulled up above VDD."]
OPEN_DRAIN_MODE_ENAB = 1,
}
impl From<OD_A> for bool {
#[inline(always)]
fn from(variant: OD_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `OD`"]
pub type OD_R = crate::R<bool, OD_A>;
impl OD_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> OD_A {
match self.bits {
false => OD_A::DISABLE_,
true => OD_A::OPEN_DRAIN_MODE_ENAB,
}
}
#[doc = "Checks if the value of the field is `DISABLE_`"]
#[inline(always)]
pub fn is_disable_(&self) -> bool {
*self == OD_A::DISABLE_
}
#[doc = "Checks if the value of the field is `OPEN_DRAIN_MODE_ENAB`"]
#[inline(always)]
pub fn is_open_drain_mode_enab(&self) -> bool {
*self == OD_A::OPEN_DRAIN_MODE_ENAB
}
}
#[doc = "Write proxy for field `OD`"]
pub struct OD_W<'a> {
w: &'a mut W,
}
impl<'a> OD_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: OD_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disable."]
#[inline(always)]
pub fn disable_(self) -> &'a mut W {
self.variant(OD_A::DISABLE_)
}
#[doc = "Open-drain mode enabled. This is not a true open-drain mode. Input cannot be pulled up above VDD."]
#[inline(always)]
pub fn open_drain_mode_enab(self) -> &'a mut W {
self.variant(OD_A::OPEN_DRAIN_MODE_ENAB)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
impl R {
#[doc = "Bits 0:2 - Selects pin function for pin P5\\[4\\]"]
#[inline(always)]
pub fn func(&self) -> FUNC_R {
FUNC_R::new((self.bits & 0x07) as u8)
}
#[doc = "Bits 3:4 - Selects function mode (on-chip pull-up/pull-down resistor control)."]
#[inline(always)]
pub fn mode(&self) -> MODE_R {
MODE_R::new(((self.bits >> 3) & 0x03) as u8)
}
#[doc = "Bit 5 - Hysteresis."]
#[inline(always)]
pub fn hys(&self) -> HYS_R {
HYS_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - Invert input"]
#[inline(always)]
pub fn inv(&self) -> INV_R {
INV_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 9 - Driver slew rate"]
#[inline(always)]
pub fn slew(&self) -> SLEW_R {
SLEW_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - Open-drain mode."]
#[inline(always)]
pub fn od(&self) -> OD_R {
OD_R::new(((self.bits >> 10) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:2 - Selects pin function for pin P5\\[4\\]"]
#[inline(always)]
pub fn func(&mut self) -> FUNC_W {
FUNC_W { w: self }
}
#[doc = "Bits 3:4 - Selects function mode (on-chip pull-up/pull-down resistor control)."]
#[inline(always)]
pub fn mode(&mut self) -> MODE_W {
MODE_W { w: self }
}
#[doc = "Bit 5 - Hysteresis."]
#[inline(always)]
pub fn hys(&mut self) -> HYS_W {
HYS_W { w: self }
}
#[doc = "Bit 6 - Invert input"]
#[inline(always)]
pub fn inv(&mut self) -> INV_W {
INV_W { w: self }
}
#[doc = "Bit 9 - Driver slew rate"]
#[inline(always)]
pub fn slew(&mut self) -> SLEW_W {
SLEW_W { w: self }
}
#[doc = "Bit 10 - Open-drain mode."]
#[inline(always)]
pub fn od(&mut self) -> OD_W {
OD_W { w: self }
}
}
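// Usage sketch for the generated API above, assuming the usual svd2rust
// peripheral singletons (the peripheral and binding names below are
// illustrative):
//
//     // Read-modify-write: select the U4_TXD function with a pull-up.
//     peripherals.IOCON.p5_4.modify(|_, w| {
//         w.func().u4_txd().mode().pull_up_resistor_ena()
//     });
//
// `modify` reads the register, hands the current value and a write proxy to
// the closure, and writes the result back.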
| 30.911032 | 159 | 0.548757 |
d9fef64826adee07cbdc4643f0da076a5801ff96 | 16,310 | use crate::{
config, id,
stake_state::{StakeAccount, StakeState},
};
use bincode::deserialize;
use log::*;
use serde_derive::{Deserialize, Serialize};
use solana_sdk::{
account::KeyedAccount,
instruction::{AccountMeta, Instruction, InstructionError},
pubkey::Pubkey,
system_instruction, sysvar,
};
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub enum StakeInstruction {
/// `Delegate` a stake to a particular node
///
    /// Expects 4 Accounts:
/// 0 - Uninitialized StakeAccount to be delegated <= must have this signature
/// 1 - VoteAccount to which this Stake will be delegated
/// 2 - Clock sysvar Account that carries clock bank epoch
/// 3 - Config Account that carries stake config
///
/// The u64 is the portion of the Stake account balance to be activated,
/// must be less than StakeAccount.lamports
///
DelegateStake(u64),
/// Redeem credits in the stake account
///
    /// Expects 5 Accounts:
/// 0 - Delegate StakeAccount to be updated with rewards
/// 1 - VoteAccount to which the Stake is delegated,
/// 2 - RewardsPool Stake Account from which to redeem credits
/// 3 - Rewards sysvar Account that carries points values
/// 4 - StakeHistory sysvar that carries stake warmup/cooldown history
RedeemVoteCredits,
/// Withdraw unstaked lamports from the stake account
///
    /// Expects 4 Accounts:
/// 0 - Delegate StakeAccount
/// 1 - System account to which the lamports will be transferred,
/// 2 - Syscall Account that carries epoch
/// 3 - StakeHistory sysvar that carries stake warmup/cooldown history
///
/// The u64 is the portion of the Stake account balance to be withdrawn,
/// must be <= StakeAccount.lamports - staked lamports
Withdraw(u64),
/// Deactivates the stake in the account
///
    /// Expects 3 Accounts:
/// 0 - Delegate StakeAccount
/// 1 - VoteAccount to which the Stake is delegated
/// 2 - Syscall Account that carries epoch
Deactivate,
}
pub fn create_stake_account(
from_pubkey: &Pubkey,
staker_pubkey: &Pubkey,
lamports: u64,
) -> Vec<Instruction> {
vec![system_instruction::create_account(
from_pubkey,
staker_pubkey,
lamports,
std::mem::size_of::<StakeState>() as u64,
&id(),
)]
}
pub fn create_stake_account_and_delegate_stake(
from_pubkey: &Pubkey,
staker_pubkey: &Pubkey,
vote_pubkey: &Pubkey,
lamports: u64,
) -> Vec<Instruction> {
let mut instructions = create_stake_account(from_pubkey, staker_pubkey, lamports);
instructions.push(delegate_stake(staker_pubkey, vote_pubkey, lamports));
instructions
}
pub fn redeem_vote_credits(stake_pubkey: &Pubkey, vote_pubkey: &Pubkey) -> Instruction {
let account_metas = vec![
AccountMeta::new(*stake_pubkey, false),
AccountMeta::new_credit_only(*vote_pubkey, false),
AccountMeta::new(crate::rewards_pools::random_id(), false),
AccountMeta::new_credit_only(sysvar::rewards::id(), false),
AccountMeta::new_credit_only(sysvar::stake_history::id(), false),
];
Instruction::new(id(), &StakeInstruction::RedeemVoteCredits, account_metas)
}
pub fn delegate_stake(stake_pubkey: &Pubkey, vote_pubkey: &Pubkey, stake: u64) -> Instruction {
let account_metas = vec![
AccountMeta::new(*stake_pubkey, true),
AccountMeta::new_credit_only(*vote_pubkey, false),
AccountMeta::new_credit_only(sysvar::clock::id(), false),
AccountMeta::new_credit_only(crate::config::id(), false),
];
Instruction::new(id(), &StakeInstruction::DelegateStake(stake), account_metas)
}
pub fn withdraw(stake_pubkey: &Pubkey, to_pubkey: &Pubkey, lamports: u64) -> Instruction {
let account_metas = vec![
AccountMeta::new(*stake_pubkey, true),
AccountMeta::new_credit_only(*to_pubkey, false),
AccountMeta::new_credit_only(sysvar::clock::id(), false),
AccountMeta::new_credit_only(sysvar::stake_history::id(), false),
];
Instruction::new(id(), &StakeInstruction::Withdraw(lamports), account_metas)
}
pub fn deactivate_stake(stake_pubkey: &Pubkey, vote_pubkey: &Pubkey) -> Instruction {
let account_metas = vec![
AccountMeta::new(*stake_pubkey, true),
AccountMeta::new_credit_only(*vote_pubkey, false),
AccountMeta::new_credit_only(sysvar::clock::id(), false),
];
Instruction::new(id(), &StakeInstruction::Deactivate, account_metas)
}
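// Client-side sketch: the helpers above only build `Instruction`s; callers
// pack them into a transaction. Assuming the SDK's `Transaction` API of the
// same era (names may differ across versions):
//
//     let ixs = create_stake_account_and_delegate_stake(
//         &from.pubkey(), &staker.pubkey(), &vote_pubkey, lamports);
//     let tx = Transaction::new_signed_instructions(&[&from, &staker], ixs, recent_blockhash);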
pub fn process_instruction(
_program_id: &Pubkey,
keyed_accounts: &mut [KeyedAccount],
data: &[u8],
) -> Result<(), InstructionError> {
solana_logger::setup();
trace!("process_instruction: {:?}", data);
trace!("keyed_accounts: {:?}", keyed_accounts);
if keyed_accounts.is_empty() {
Err(InstructionError::InvalidInstructionData)?;
}
let (me, rest) = &mut keyed_accounts.split_at_mut(1);
let me = &mut me[0];
// TODO: data-driven unpack and dispatch of KeyedAccounts
match deserialize(data).map_err(|_| InstructionError::InvalidInstructionData)? {
StakeInstruction::DelegateStake(stake) => {
if rest.len() != 3 {
Err(InstructionError::InvalidInstructionData)?;
}
let vote = &rest[0];
me.delegate_stake(
vote,
stake,
&sysvar::clock::from_keyed_account(&rest[1])?,
&config::from_keyed_account(&rest[2])?,
)
}
StakeInstruction::RedeemVoteCredits => {
if rest.len() != 4 {
Err(InstructionError::InvalidInstructionData)?;
}
let (vote, rest) = rest.split_at_mut(1);
let vote = &mut vote[0];
let (rewards_pool, rest) = rest.split_at_mut(1);
let rewards_pool = &mut rewards_pool[0];
me.redeem_vote_credits(
vote,
rewards_pool,
&sysvar::rewards::from_keyed_account(&rest[0])?,
&sysvar::stake_history::from_keyed_account(&rest[1])?,
)
}
StakeInstruction::Withdraw(lamports) => {
if rest.len() != 3 {
Err(InstructionError::InvalidInstructionData)?;
}
let (to, sysvar) = &mut rest.split_at_mut(1);
let mut to = &mut to[0];
me.withdraw(
lamports,
&mut to,
&sysvar::clock::from_keyed_account(&sysvar[0])?,
&sysvar::stake_history::from_keyed_account(&sysvar[1])?,
)
}
StakeInstruction::Deactivate => {
if rest.len() != 2 {
Err(InstructionError::InvalidInstructionData)?;
}
let (vote, rest) = rest.split_at_mut(1);
let vote = &mut vote[0];
let clock = &rest[0];
me.deactivate_stake(vote, &sysvar::clock::from_keyed_account(&clock)?)
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use bincode::serialize;
use solana_sdk::{account::Account, sysvar::stake_history::StakeHistory};
fn process_instruction(instruction: &Instruction) -> Result<(), InstructionError> {
let mut accounts: Vec<_> = instruction
.accounts
.iter()
.map(|meta| {
if sysvar::clock::check_id(&meta.pubkey) {
sysvar::clock::create_account(1, 0, 0, 0, 0)
} else if sysvar::rewards::check_id(&meta.pubkey) {
sysvar::rewards::create_account(1, 0.0, 0.0)
} else if sysvar::stake_history::check_id(&meta.pubkey) {
sysvar::stake_history::create_account(1, &StakeHistory::default())
} else if config::check_id(&meta.pubkey) {
config::create_account(1, &config::Config::default())
} else {
Account::default()
}
})
.collect();
{
let mut keyed_accounts: Vec<_> = instruction
.accounts
.iter()
.zip(accounts.iter_mut())
.map(|(meta, account)| KeyedAccount::new(&meta.pubkey, meta.is_signer, account))
.collect();
super::process_instruction(&Pubkey::default(), &mut keyed_accounts, &instruction.data)
}
}
#[test]
fn test_stake_process_instruction() {
assert_eq!(
process_instruction(&redeem_vote_credits(&Pubkey::default(), &Pubkey::default())),
Err(InstructionError::InvalidAccountData),
);
assert_eq!(
process_instruction(&delegate_stake(&Pubkey::default(), &Pubkey::default(), 0)),
Err(InstructionError::InvalidAccountData),
);
assert_eq!(
process_instruction(&withdraw(&Pubkey::default(), &Pubkey::new_rand(), 100)),
Err(InstructionError::InvalidAccountData),
);
assert_eq!(
process_instruction(&deactivate_stake(&Pubkey::default(), &Pubkey::default())),
Err(InstructionError::InvalidAccountData),
);
}
#[test]
fn test_stake_process_instruction_decode_bail() {
        // these will not call stake_state; they have bogus contents
// gets the first check
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [KeyedAccount::new(
&Pubkey::default(),
false,
&mut Account::default(),
)],
&serialize(&StakeInstruction::DelegateStake(0)).unwrap(),
),
Err(InstructionError::InvalidInstructionData),
);
// gets the sub-check for number of args
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [KeyedAccount::new(
&Pubkey::default(),
false,
&mut Account::default()
),],
&serialize(&StakeInstruction::DelegateStake(0)).unwrap(),
),
Err(InstructionError::InvalidInstructionData),
);
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
],
&serialize(&StakeInstruction::RedeemVoteCredits).unwrap(),
),
Err(InstructionError::InvalidInstructionData),
);
// gets the check non-deserialize-able account in delegate_stake
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [
KeyedAccount::new(&Pubkey::default(), true, &mut Account::default()),
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(
&sysvar::clock::id(),
false,
&mut sysvar::clock::create_account(1, 0, 0, 0, 0)
),
KeyedAccount::new(
&config::id(),
false,
&mut config::create_account(1, &config::Config::default())
),
],
&serialize(&StakeInstruction::DelegateStake(0)).unwrap(),
),
Err(InstructionError::InvalidAccountData),
);
// gets the deserialization checks in redeem_vote_credits
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(
&sysvar::rewards::id(),
false,
&mut sysvar::rewards::create_account(1, 0.0, 0.0)
),
KeyedAccount::new(
&sysvar::stake_history::id(),
false,
&mut sysvar::stake_history::create_account(1, &StakeHistory::default())
),
],
&serialize(&StakeInstruction::RedeemVoteCredits).unwrap(),
),
Err(InstructionError::InvalidAccountData),
);
// Tests 3rd keyed account is of correct type (Clock instead of rewards) in withdraw
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(
&sysvar::rewards::id(),
false,
&mut sysvar::rewards::create_account(1, 0.0, 0.0)
),
KeyedAccount::new(
&sysvar::stake_history::id(),
false,
&mut sysvar::stake_history::create_account(1, &StakeHistory::default())
),
],
&serialize(&StakeInstruction::Withdraw(42)).unwrap(),
),
Err(InstructionError::InvalidArgument),
);
// Tests correct number of accounts are provided in withdraw
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(
&sysvar::clock::id(),
false,
&mut sysvar::rewards::create_account(1, 0.0, 0.0)
),
KeyedAccount::new(
&sysvar::stake_history::id(),
false,
&mut sysvar::stake_history::create_account(1, &StakeHistory::default())
),
],
&serialize(&StakeInstruction::Withdraw(42)).unwrap(),
),
Err(InstructionError::InvalidInstructionData),
);
// Tests 2nd keyed account is of correct type (Clock instead of rewards) in deactivate
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(
&sysvar::rewards::id(),
false,
&mut sysvar::rewards::create_account(1, 0.0, 0.0)
),
],
&serialize(&StakeInstruction::Deactivate).unwrap(),
),
Err(InstructionError::InvalidArgument),
);
// Tests correct number of accounts are provided in deactivate
assert_eq!(
super::process_instruction(
&Pubkey::default(),
&mut [
KeyedAccount::new(&Pubkey::default(), false, &mut Account::default()),
KeyedAccount::new(
&sysvar::clock::id(),
false,
&mut sysvar::rewards::create_account(1, 0.0, 0.0)
),
],
&serialize(&StakeInstruction::Deactivate).unwrap(),
),
Err(InstructionError::InvalidInstructionData),
);
}
}
| 37.930233 | 98 | 0.539117 |
6aac80e67454f137bea7fd62fe81f70da8b024d5 | 4,681 | extern crate specs;
use specs::prelude::*;
use crate::{MyTurn, Faction, Position, Map, raws::Reaction, Viewshed, WantsToFlee,
WantsToApproach, Chasing, SpecialAbilities, WantsToCastSpell, Name, SpellTemplate};
pub struct VisibleAI {}
impl<'a> System<'a> for VisibleAI {
#[allow(clippy::type_complexity)]
type SystemData = (
ReadStorage<'a, MyTurn>,
ReadStorage<'a, Faction>,
ReadStorage<'a, Position>,
ReadExpect<'a, Map>,
WriteStorage<'a, WantsToApproach>,
WriteStorage<'a, WantsToFlee>,
Entities<'a>,
ReadExpect<'a, Entity>,
ReadStorage<'a, Viewshed>,
WriteStorage<'a, Chasing>,
ReadStorage<'a, SpecialAbilities>,
WriteExpect<'a, rltk::RandomNumberGenerator>,
WriteStorage<'a, WantsToCastSpell>,
ReadStorage<'a, Name>,
ReadStorage<'a, SpellTemplate>
);
fn run(&mut self, data : Self::SystemData) {
let (turns, factions, positions, map, mut want_approach, mut want_flee, entities, player,
viewsheds, mut chasing, abilities, mut rng, mut casting, names, spells) = data;
for (entity, _turn, my_faction, pos, viewshed) in (&entities, &turns, &factions, &positions, &viewsheds).join() {
if entity != *player {
let my_idx = map.xy_idx(pos.x, pos.y);
let mut reactions : Vec<(usize, Reaction, Entity)> = Vec::new();
let mut flee : Vec<i32> = Vec::new();
for visible_tile in viewshed.visible_tiles.iter() {
let idx = map.xy_idx(visible_tile.x, visible_tile.y);
if my_idx != idx {
evaluate(idx, &map, &factions, &my_faction.name, &mut reactions);
}
}
let mut done = false;
for reaction in reactions.iter() {
match reaction.1 {
Reaction::Attack => {
if let Some(abilities) = abilities.get(entity) {
let range = rltk::DistanceAlg::Pythagoras.distance2d(
rltk::Point::new(pos.x, pos.y),
rltk::Point::new(reaction.0 as i32 % map.width, reaction.0 as i32 / map.width)
);
for ability in abilities.abilities.iter() {
if range >= ability.min_range && range <= ability.range &&
rng.roll_dice(1,100) <= (ability.chance * 100.0) as i32
{
use crate::raws::find_spell_entity_by_name;
casting.insert(
entity,
WantsToCastSpell{
spell : find_spell_entity_by_name(&ability.spell, &names, &spells, &entities).unwrap(),
target : Some(rltk::Point::new(reaction.0 as i32 % map.width, reaction.0 as i32 / map.width))}
).expect("Unable to insert");
done = true;
}
}
}
if !done {
want_approach.insert(entity, WantsToApproach{ idx: reaction.0 as i32 }).expect("Unable to insert");
chasing.insert(entity, Chasing{ target: reaction.2}).expect("Unable to insert");
done = true;
}
}
Reaction::Flee => {
flee.push(reaction.0 as i32);
}
_ => {}
}
}
if !done && !flee.is_empty() {
want_flee.insert(entity, WantsToFlee{ indices : flee }).expect("Unable to insert");
}
}
}
}
}
fn evaluate(idx : usize, map : &Map, factions : &ReadStorage<Faction>, my_faction : &str, reactions : &mut Vec<(usize, Reaction, Entity)>) {
for other_entity in map.tile_content[idx].iter() {
if let Some(faction) = factions.get(*other_entity) {
reactions.push((
idx,
crate::raws::faction_reaction(my_faction, &faction.name, &crate::raws::RAWS.lock().unwrap()),
*other_entity
));
}
}
}
| 46.346535 | 142 | 0.462722 |
fbfbbefb66b80a0086cef868e15227407f6bc555 | 8,805 | // Copyright 2022 OpenStax Poland
// Licensed under the MIT license. See LICENSE file in the project root for
// full license text.
use std::str;
use crate::{syntax::*, mail::syntax as mail, mime::encoding::Charset};
use super::encoding::CharsetError;
// -------------- RFC 2045: MIME Part One: Format of Internet Message Bodies ---
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum MimeVersion {
/// MIME 1.0
Mime10,
}
pub fn version(buf: &mut Buffer) -> Result<MimeVersion> {
// version := "MIME-Version" ":" 1*DIGIT "." 1*DIGIT
    // Note that although comments and spaces are not present in the grammar,
// examples in the same section include them.
buf.atomic(|buf| {
let location = buf.location();
buf.maybe(mail::cfws);
let major = read_number(buf, 10, 1, 1)?;
buf.maybe(mail::comment);
buf.expect(b".")?;
buf.maybe(mail::comment);
let minor = read_number(buf, 10, 1, 1)?;
buf.maybe(mail::cfws);
match (major, minor) {
(1, 0) => Ok(MimeVersion::Mime10),
_ => Err(Located::new(location, format!("unsupported MIME version {major}.{minor}"))),
}
})
}
#[derive(Clone, Copy, Debug)]
pub struct ContentType<'a> {
pub type_: &'a str,
pub subtype: &'a str,
pub parameters: &'a [u8],
}
impl Default for ContentType<'_> {
fn default() -> Self {
ContentType {
type_: "text",
subtype: "plain",
parameters: b";charset=us-ascii",
}
}
}
impl<'a> ContentType<'a> {
pub fn parameters(&self) -> impl Iterator<Item = Parameter<'a>> {
let mut buf = Buffer::new(self.parameters);
std::iter::from_fn(move || {
if buf.expect(b";").is_ok() {
Some(parameter(&mut buf).unwrap())
} else {
None
}
})
}
}
pub fn content_type<'a>(buf: &mut Buffer<'a>) -> Result<ContentType<'a>> {
// content := "Content-Type" ":" type "/" subtype *(";" parameter)
buf.atomic(|buf| {
buf.maybe(mail::cfws);
let type_ = token(buf)?;
buf.expect(b"/")?;
let subtype = token(buf)?;
let parameters = buf.take_matching(|buf| {
while buf.expect(b";").is_ok() {
parameter(buf)?;
}
Ok(())
})?;
Ok(ContentType { type_, subtype, parameters })
})
}
#[derive(Clone, Copy, Debug)]
pub struct Parameter<'a> {
pub attribute: &'a str,
pub value: mail::Quoted<'a>,
}
impl<'a> Parse<'a> for Parameter<'a> {
fn parse(from: &mut Buffer<'a>) -> Result<Self> {
parameter(from)
}
}
fn parameter<'a>(buf: &mut Buffer<'a>) -> Result<Parameter<'a>> {
// parameter := attribute "=" value
// attribute := token
// value := token / quoted-string
buf.atomic(|buf| {
buf.maybe(mail::cfws);
let attribute = token(buf)?;
buf.expect(b"=")?;
let value = token(buf).map(mail::Quoted).or_else(|_| mail::quoted_string(buf))?;
buf.maybe(mail::cfws);
Ok(Parameter { attribute, value })
})
}
fn token<'a>(buf: &mut Buffer<'a>) -> Result<&'a str> {
// token := 1*<any (US-ASCII) CHAR except SPACE, CTLs, or tspecials>
buf.atomic(|buf| {
let value = buf.take_while(
|b, _| b.is_ascii() && !b.is_ascii_control() && b != b' ' && !is_tspecial(b));
if value.is_empty() {
buf.error("expected a token")
} else {
Ok(str::from_utf8(value).unwrap())
}
})
}
fn is_tspecial(ch: u8) -> bool {
// tspecials := "(" / ")" / "<" / ">" / "@" / "," / ";" / ":" / "\" / <"> "/" / "[" / "]"
// / "?" / "="
matches!(ch, b'(' | b')' | b'<' | b'>' | b'@' | b',' | b';' | b':' | b'\\' | b'"' | b'/' | b'['
| b']' | b'?' | b'=')
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum TransferEncoding {
_7Bit,
_8Bit,
Binary,
QuotedPrintable,
Base64,
}
impl Default for TransferEncoding {
fn default() -> Self {
TransferEncoding::_7Bit
}
}
pub fn content_transfer_encoding(buf: &mut Buffer) -> Result<TransferEncoding> {
// encoding := "Content-Transfer-Encoding" ":" mechanism
// mechanism := "7bit" / "8bit" / "binary" / "quoted-printable" / "base64" /
buf.atomic(|buf| {
buf.maybe(mail::cfws);
let location = buf.location();
let mechanism = token(buf)?;
if mechanism.eq_ignore_ascii_case("7bit") {
Ok(TransferEncoding::_7Bit)
} else if mechanism.eq_ignore_ascii_case("8bit") {
Ok(TransferEncoding::_8Bit)
} else if mechanism.eq_ignore_ascii_case("binary") {
Ok(TransferEncoding::Binary)
} else if mechanism.eq_ignore_ascii_case("quoted-printable") {
Ok(TransferEncoding::QuotedPrintable)
} else if mechanism.eq_ignore_ascii_case("base64") {
Ok(TransferEncoding::Base64)
} else {
Err(Located::new(location, format!("unsupported transfer encoding {mechanism}")))
}
})
}
#[derive(Clone, Copy, Debug)]
pub enum Header<'a> {
Version(MimeVersion),
ContentType(ContentType<'a>),
ContentTransferEncoding(TransferEncoding),
ContentId(mail::MessageIdRef<'a>),
ContentDescription(mail::Folded<'a>),
}
pub fn header<'a>(name: &str, buf: &mut Buffer<'a>) -> Result<Option<Header<'a>>> {
Ok(Some(if name.eq_ignore_ascii_case("MIME-Version") {
Header::Version(version(buf)?)
} else if name.eq_ignore_ascii_case("Content-Type") {
Header::ContentType(content_type(buf)?)
} else if name.eq_ignore_ascii_case("Content-Transfer-Encoding") {
Header::ContentTransferEncoding(content_transfer_encoding(buf)?)
} else if name.eq_ignore_ascii_case("Content-ID") {
Header::ContentId(mail::msg_id(buf)?)
} else if name.eq_ignore_ascii_case("Content-Description") {
Header::ContentDescription(mail::unstructured(buf)?)
} else {
return Ok(None);
}))
}
// --- RFC 2047: MIME Part Three: Message Header Extensions for Non-ASCII Text -
#[derive(Clone, Copy, Debug)]
pub struct EncodedWord<'a> {
pub charset: &'a str,
pub encoding: WordEncoding,
pub encoded_text: &'a [u8],
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum WordEncoding {
Base64,
Quoted,
}
impl<'a> EncodedWord<'a> {
pub fn decode(self) -> Result<String, CharsetError> {
let charset = Charset::by_name(self.charset).ok_or(CharsetError)?;
let data = match self.encoding {
WordEncoding::Base64 => base64::decode(self.encoded_text).map_err(|_| CharsetError)?,
WordEncoding::Quoted => {
let mut result = Vec::with_capacity(self.encoded_text.len());
let mut rest = self.encoded_text;
while let Some(inx) = rest.iter().position(|b| matches!(b, b'=' | b'_')) {
result.extend_from_slice(&rest[..inx]);
match rest[inx] {
                        b'_' => {
                            // In Q encoding, an underscore encodes a space (0x20).
                            result.push(b' ');
                            rest = &rest[inx + 1..];
                        }
b'=' if rest.len() > inx + 3 => {
let byte = std::str::from_utf8(&rest[inx + 1..inx + 3])
.map_err(|_| CharsetError)?;
let byte = u8::from_str_radix(byte, 16)
.map_err(|_| CharsetError)?;
result.push(byte);
rest = &rest[inx + 3..];
}
_ => return Err(CharsetError),
}
}
result.extend_from_slice(rest);
result
}
};
charset.decode(&data).map(|d| d.into_owned())
}
}
pub fn encoded_word<'a>(buf: &mut Buffer<'a>) -> Result<EncodedWord<'a>> {
buf.atomic(|buf| {
let start = buf.location();
buf.expect(b"=?")?;
let charset = token(buf)?;
buf.expect(b"?")?;
let encoding = token(buf)?;
let encoding = match_ignore_ascii_case! { encoding;
"B" => WordEncoding::Base64,
"Q" => WordEncoding::Quoted,
_ => return buf.error(format!("unknown encoding {encoding:?}")),
};
buf.expect(b"?")?;
let encoded_text = buf.take_while(|b, _| b.is_ascii_graphic() && b != b' ' && b != b'?');
buf.expect(b"?=")?;
let len = buf.location().offset - start.offset;
if len > 76 {
buf.error("too long encoded-word")
} else {
Ok(EncodedWord { charset, encoding, encoded_text })
}
})
}
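// Small sketches exercising the parsers above. They assume `Buffer::new`
// builds a parse buffer over raw bytes (as used elsewhere in this module)
// and that the charset table recognizes "UTF-8".
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parses_content_type_with_parameter() {
        let mut buf = Buffer::new(b"text/plain; charset=utf-8");
        let ct = match content_type(&mut buf) {
            Ok(ct) => ct,
            Err(_) => panic!("expected a valid content type"),
        };
        assert_eq!(ct.type_, "text");
        assert_eq!(ct.subtype, "plain");
        let param = ct.parameters().next().expect("one parameter");
        assert_eq!(param.attribute, "charset");
    }

    #[test]
    fn decodes_q_encoded_word() {
        // In Q encoding, underscores decode to spaces.
        let mut buf = Buffer::new(b"=?UTF-8?Q?hello_world?=");
        let word = match encoded_word(&mut buf) {
            Ok(word) => word,
            Err(_) => panic!("expected a valid encoded-word"),
        };
        assert_eq!(word.decode().ok(), Some("hello world".to_string()));
    }
}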
| 31.003521 | 99 | 0.53322 |
f4c422547be60928a6a6a41ad0fe524c6a0f6a68 | 4,435 | use lazycell;
extern crate racer_cargo_metadata as metadata;
use self::lazycell::LazyCell;
use self::metadata::mapping::{Edition as Ed, PackageIdx, PackageMap};
use crate::project_model::{Edition, ProjectModelProvider};
use std::cell::Cell;
use std::path::{Path, PathBuf};
struct MetadataCache {
pkg_map: LazyCell<PackageMap>,
manifest_path: Option<PathBuf>,
failed_to_fill: Cell<bool>,
}
impl MetadataCache {
fn new(manifest_path: Option<PathBuf>) -> Self {
MetadataCache {
pkg_map: LazyCell::new(),
manifest_path,
failed_to_fill: Cell::new(false),
}
}
fn fill_impl(&self, manifest: &Path) -> Result<(), ()> {
let meta = metadata::run(manifest, true)
.or_else(|e| {
if let metadata::ErrorKind::Subprocess(ref s) = e {
// HACK: if --frozen failed, try again without --frozen
// see https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/registry.rs#L344
if s.contains("--frozen") {
info!("MetadataCache: try again without --frozen");
return metadata::run(manifest, false);
}
}
Err(e)
})
.map_err(|e| {
warn!("Error in cargo metadata: {}", e);
})?;
let pkg_map = PackageMap::from_metadata(meta);
self.pkg_map.fill(pkg_map).map_err(|_| {
warn!("Error in initialize lazy cell");
})
}
fn fill(&self, manifest: &Path) {
if !self.pkg_map.filled() && !self.failed_to_fill.get() {
self.failed_to_fill.set(self.fill_impl(manifest).is_err());
}
}
fn setup(&self, manifest: &Path) -> Option<(&PackageMap, PackageIdx)> {
self.fill(manifest);
let pkg_map: &PackageMap = self.pkg_map.borrow().unwrap();
let idx = if manifest.is_relative() {
let path = manifest.canonicalize().ok()?;
pkg_map.get_idx(&path)?
} else {
pkg_map.get_idx(manifest)?
};
Some((pkg_map, idx))
}
}
impl ProjectModelProvider for MetadataCache {
fn edition(&self, manifest: &Path) -> Option<Edition> {
let (pkg_map, idx) = self.setup(manifest)?;
let edition = pkg_map.get_edition(idx);
Some(match edition {
Ed::Ed2015 => Edition::Ed2015,
Ed::Ed2018 => Edition::Ed2018,
})
}
fn discover_project_manifest(&self, path: &Path) -> Option<PathBuf> {
let cur_manifest = metadata::find_manifest(path)?;
let manifest = self.manifest_path.as_ref()?;
self.fill(manifest);
Some(cur_manifest)
}
fn search_dependencies(
&self,
manifest: &Path,
search_fn: Box<dyn Fn(&str) -> bool>,
) -> Vec<(String, PathBuf)> {
let (pkg_map, idx) = match self.setup(manifest) {
Some(x) => x,
None => return vec![],
};
let deps = pkg_map
.get_dependencies(idx)
.iter()
.filter(|(s, _)| search_fn(s))
.map(|(s, p)| (s.to_string(), p.to_path_buf()));
let lib = pkg_map
.get_lib(idx)
.filter(|t| search_fn(&t.name))
.map(|t| (t.name.to_string(), t.src_path.to_path_buf()));
deps.chain(lib).collect()
}
fn resolve_dependency(&self, manifest: &Path, libname: &str) -> Option<PathBuf> {
debug!(
"MetadataCache::resolve_dependency manifest: {:?} libname: {}",
manifest, libname
);
let (pkg_map, idx) = self.setup(manifest)?;
pkg_map
.get_src_path_from_libname(idx, libname)
.or_else(|| {
                // Crates named with hyphens are imported with underscores.
                let hyphenated = libname.replace('_', "-");
                pkg_map.get_src_path_from_libname(idx, &hyphenated)
})
.or_else(|| {
let target = pkg_map.get_lib(idx)?;
if target.name.replace('-', "_") == libname {
Some(&target.src_path)
} else {
None
}
})
.map(|p| p.to_owned())
}
}
pub fn project_model(project_path: Option<&Path>) -> Box<dyn ProjectModelProvider> {
let manifest = project_path.and_then(|p| metadata::find_manifest(p));
Box::new(MetadataCache::new(manifest))
}
| 35.48 | 104 | 0.53664 |
e4d65e9d7ee07c6ff1daba15cdbb3f43d1d10450 | 43 | pub mod source;
#[macro_use]
pub mod error; | 14.333333 | 15 | 0.744186 |
8a485e15e7370a4f3ed147cc82d2eeac25acf2c1 | 4,795 | use alloc::boxed::Box;
use alloc::string::String;
use alloc::vec::Vec;
use core::any::Any;
use crate::rsp::rsp::RSP;
use crate::rsp::rsp_assembler::{E, Element, GPR, RSPAssembler, VR, VSARAccumulator};
use crate::rsp::spmem::SPMEM;
use crate::tests::{Level, Test};
use crate::tests::soft_asserts::soft_assert_eq;
fn run_test(e: Element, expected_result: [u16; 8], expected_acc_top: [u16; 8], expected_acc_mid: [u16; 8], expected_acc_low: [u16; 8]) -> Result<(), String> {
// Prepare input data
SPMEM::write_vector16_into_dmem(0x00, &[0x0000, 0x0000, 0x0000, 0xE000, 0x8001, 0x8000, 0x7FFF, 0x8000]);
SPMEM::write_vector16_into_dmem(0x10, &[0x0000, 0x0001, 0xFFFF, 0xFFFF, 0x8000, 0x7FFF, 0x7FFF, 0x8000]);
// Assemble RSP program
let mut assembler = RSPAssembler::new(0);
assembler.write_lqv(VR::V0, E::_0, 0x000, GPR::R0);
assembler.write_lqv(VR::V1, E::_0, 0x010, GPR::R0);
assembler.write_lqv(VR::V6, E::_0, 0x000, GPR::R0);
assembler.write_lqv(VR::V7, E::_0, 0x010, GPR::R0);
assembler.write_vmulf(VR::V2, VR::V0, VR::V1, e);
assembler.write_vsar(VR::V3, VSARAccumulator::High);
assembler.write_vsar(VR::V4, VSARAccumulator::Mid);
assembler.write_vsar(VR::V5, VSARAccumulator::Low);
    // Run the multiplies again, this time destructively overwriting a source register
assembler.write_vmulf(VR::V6, VR::V6, VR::V1, e);
assembler.write_vmulf(VR::V7, VR::V0, VR::V7, e);
assembler.write_sqv(VR::V2, E::_0, 0x100, GPR::R0);
assembler.write_sqv(VR::V3, E::_0, 0x110, GPR::R0);
assembler.write_sqv(VR::V4, E::_0, 0x120, GPR::R0);
assembler.write_sqv(VR::V5, E::_0, 0x130, GPR::R0);
assembler.write_sqv(VR::V6, E::_0, 0x140, GPR::R0);
assembler.write_sqv(VR::V7, E::_0, 0x150, GPR::R0);
assembler.write_break();
RSP::run_and_wait(0);
soft_assert_eq(SPMEM::read_vector16_from_dmem(0x100), expected_result, "VMULF result")?;
soft_assert_eq(SPMEM::read_vector16_from_dmem(0x110), expected_acc_top, "VMULF Acc[32..48]")?;
soft_assert_eq(SPMEM::read_vector16_from_dmem(0x120), expected_acc_mid, "VMULF Acc[16..32]")?;
soft_assert_eq(SPMEM::read_vector16_from_dmem(0x130), expected_acc_low, "VMULF Acc[0..16]")?;
soft_assert_eq(SPMEM::read_vector16_from_dmem(0x140), expected_result, "VMULF result when doing VMULF V6, V6, V1")?;
soft_assert_eq(SPMEM::read_vector16_from_dmem(0x150), expected_result, "VMULF result when doing VMULF V7, V0, V7")?;
Ok(())
}
pub struct VMULFAll {}
impl Test for VMULFAll {
fn name(&self) -> &str { "RSP VMULF" }
fn level(&self) -> Level { Level::BasicFunctionality }
fn values(&self) -> Vec<Box<dyn Any>> { Vec::new() }
fn run(&self, _value: &Box<dyn Any>) -> Result<(), String> {
run_test(
Element::All,
[0, 0, 0, 0, 0x7fff, 0x8001, 0x7ffe, 0x7fff],
[0, 0, 0, 0, 0, 0xffff, 0, 0],
[0, 0, 0, 0, 0x7fff, 0x8001, 0x7ffe, 0x8000],
[0x8000, 0x8000, 0x8000, 0xc000, 0x8000, 0x8000, 0x8002, 0x8000],
)
}
}
pub struct VMULFAll1 {}
impl Test for VMULFAll1 {
fn name(&self) -> &str { "RSP VMULF (e={1})" }
fn level(&self) -> Level { Level::BasicFunctionality }
fn values(&self) -> Vec<Box<dyn Any>> { Vec::new() }
fn run(&self, _value: &Box<dyn Any>) -> Result<(), String> {
run_test(
Element::All1,
[0, 0, 0, 0, 0x7fff, 0x8001, 0x7ffe, 0x7fff],
[0, 0, 0, 0, 0, 0xffff, 0, 0],
[0, 0, 0, 0, 0x7fff, 0x8001, 0x7ffe, 0x8000],
[0x8000, 0x8000, 0x8000, 0xc000, 0x8000, 0x8000, 0x8002, 0x8000],
)
}
}
pub struct VMULFH0 {}
impl Test for VMULFH0 {
fn name(&self) -> &str { "RSP VMULF (e=H0)" }
fn level(&self) -> Level { Level::BasicFunctionality }
fn values(&self) -> Vec<Box<dyn Any>> { Vec::new() }
fn run(&self, _value: &Box<dyn Any>) -> Result<(), String> {
run_test(
Element::H0,
[0, 0, 0, 0, 0x7fff, 0x8002, 0x8002, 0x7fff],
[0, 0, 0, 0, 0, 0xffff, 0xffff, 0],
[0, 0, 0, 0, 0x7fff, 0x8002, 0x8002, 0x7fff],
[0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x7ffe, 0x7ffe, 0x8000],
)
}
}
pub struct VMULFH1 {}
impl Test for VMULFH1 {
fn name(&self) -> &str { "RSP VMULF (e=H1)" }
fn level(&self) -> Level { Level::BasicFunctionality }
fn values(&self) -> Vec<Box<dyn Any>> { Vec::new() }
fn run(&self, _value: &Box<dyn Any>) -> Result<(), String> {
run_test(
Element::H1,
[0, 0, 0, 0, 0x7fff, 0x8001, 0x8001, 0x7fff],
[0, 0, 0, 0, 0, 0xffff, 0xffff, 0],
[0, 0, 0, 0, 0x8000, 0x8001, 0x8001, 0x8000],
[0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000, 0x8000],
)
}
}
| 35.257353 | 158 | 0.606674 |
bf446fa522802f21ad1661d4d756f2c1330aa5da | 604 | use marketplacetf_api::{MarketplaceAPI, error::Error};
#[tokio::main]
async fn main() -> Result<(), Error> {
let key = "key";
let marketplacetf = MarketplaceAPI::new(key);
let sales = marketplacetf.get_sales(10, None).await?;
if let Some(sale) = sales.first() {
let names = sale.items
.iter()
.map(|item| item.name.as_str())
.collect::<Vec<_>>();
println!("Sold {} for ${:.2}!", names.join(", "), sale.earned_credit as f32 / 100.0);
} else {
println!("You have not sold anything :(");
}
Ok(())
}
| 27.454545 | 93 | 0.533113 |
fe93f8f46eaa6762a7d0092cbfe174627efa9045 | 25,142 | // Copyright 2016 Matthew Collins
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::cell::RefCell;
use std::fs;
use std::rc::Rc;
use std::sync::Arc;
use std::thread;
use crate::format;
use crate::format::{Component, ComponentType};
use crate::paths;
use crate::protocol;
use crate::render;
use crate::ui;
use crate::render::hud::{Hud, HudContext};
use crate::render::Renderer;
use crate::screen::{Screen, ScreenSystem};
use crate::ui::Container;
use crossbeam_channel::unbounded;
use crossbeam_channel::{Receiver, TryRecvError};
use instant::Duration;
use parking_lot::RwLock;
use rand::Rng;
use serde_json::Value;
use std::collections::BTreeMap;
pub struct ServerList {
elements: Option<UIElements>,
disconnect_reason: Option<Component>,
needs_reload: Rc<RefCell<bool>>,
}
impl Clone for ServerList {
fn clone(&self) -> Self {
ServerList {
elements: None,
disconnect_reason: self.disconnect_reason.clone(),
needs_reload: Rc::new(RefCell::new(false)),
}
}
}
struct UIElements {
logo: ui::logo::Logo,
servers: Vec<Server>,
_add_btn: ui::ButtonRef,
_refresh_btn: ui::ButtonRef,
_options_btn: ui::ButtonRef,
_disclaimer: ui::TextRef,
_disconnected: Option<ui::ImageRef>,
}
struct Server {
back: ui::ImageRef,
offset: f64,
y: f64,
motd: ui::FormattedRef,
ping: ui::ImageRef,
players: ui::TextRef,
version: ui::FormattedRef,
icon: ui::ImageRef,
icon_texture: Option<String>,
done_ping: bool,
recv: Receiver<PingInfo>,
}
struct PingInfo {
motd: format::Component,
ping: Duration,
exists: bool,
online: i32,
max: i32,
protocol_version: i32,
protocol_name: String,
forge_mods: Vec<crate::protocol::forge::ForgeMod>,
favicon: Option<image::DynamicImage>,
}
impl Server {
    fn update_position(&mut self) {
        // Entries scrolled above the top of the list (negative offset) are
        // spread out twice as far as the regular 100px row spacing.
        if self.offset < 0.0 {
            self.y = self.offset * 200.0;
} else {
self.y = self.offset * 100.0;
}
}
}
impl ServerList {
pub fn new(disconnect_reason: Option<Component>) -> ServerList {
ServerList {
elements: None,
disconnect_reason,
needs_reload: Rc::new(RefCell::new(false)),
}
}
fn reload_server_list(
&mut self,
renderer: Arc<render::Renderer>,
ui_container: &mut ui::Container,
) {
let elements = self.elements.as_mut().unwrap();
*self.needs_reload.borrow_mut() = false;
{
// Clean up previous list icons.
let mut tex = renderer.get_textures_ref().write();
for server in &mut elements.servers {
if let Some(ref icon) = server.icon_texture {
tex.remove_dynamic(icon);
}
}
}
elements.servers.clear();
let file = match fs::File::open(paths::get_data_dir().join("servers.json")) {
Ok(val) => val,
Err(_) => return,
};
let servers_info: serde_json::Value = serde_json::from_reader(file).unwrap();
let servers = servers_info.get("servers").unwrap().as_array().unwrap();
let mut offset = 0.0;
for (index, svr) in servers.iter().enumerate() {
let name = svr.get("name").unwrap().as_str().unwrap().to_owned();
let address = svr.get("address").unwrap().as_str().unwrap().to_owned();
// Everything is attached to this
let back = ui::ImageBuilder::new()
.texture("leafish:solid")
.position(0.0, offset * 100.0)
.size(700.0, 100.0)
.colour((0, 0, 0, 100))
.alignment(ui::VAttach::Middle, ui::HAttach::Center)
.create(ui_container);
let (send, recv) = unbounded();
// Make whole entry interactable
{
let mut backr = back.borrow_mut();
let address = address.clone();
backr.add_hover_func(move |this, over, _| {
this.colour.3 = if over { 200 } else { 100 };
false
});
backr.add_click_func(move |_, game| {
game.screen_sys
.clone()
.replace_screen(Box::new(super::connecting::Connecting::new(&address)));
let hud_context = Arc::new(RwLock::new(HudContext::new()));
let result = game.connect_to(&address, hud_context.clone());
game.screen_sys.clone().pop_screen();
if let Err(error) = result {
game.screen_sys
.clone()
.add_screen(Box::new(ServerList::new(Some(Component::new(
ComponentType::new(&error.to_string(), None),
)))));
} else {
game.screen_sys
.clone()
.add_screen(Box::new(Hud::new(hud_context)));
}
true
});
}
// Server name
ui::TextBuilder::new()
.text(name.clone())
.position(100.0, 5.0)
.attach(&mut *back.borrow_mut());
// Server icon
let icon = ui::ImageBuilder::new()
.texture("misc/unknown_server")
.position(5.0, 5.0)
.size(90.0, 90.0)
.attach(&mut *back.borrow_mut());
// Ping indicator
let ping = ui::ImageBuilder::new()
.texture("gui/icons")
.position(5.0, 5.0)
.size(20.0, 16.0)
.texture_coords((0.0, 56.0 / 256.0, 10.0 / 256.0, 8.0 / 256.0))
.alignment(ui::VAttach::Top, ui::HAttach::Right)
.attach(&mut *back.borrow_mut());
// Player count
let players = ui::TextBuilder::new()
.text("???")
.position(30.0, 5.0)
.alignment(ui::VAttach::Top, ui::HAttach::Right)
.attach(&mut *back.borrow_mut());
// Server's message of the day
let motd = ui::FormattedBuilder::new()
.text(Component::new(ComponentType::new("Connecting...", None)))
.position(100.0, 23.0)
.max_width(700.0 - (90.0 + 10.0 + 5.0))
.attach(&mut *back.borrow_mut());
// Version information
let version = ui::FormattedBuilder::new()
.text(Component::new(ComponentType::new("", None)))
.position(100.0, 5.0)
.max_width(700.0 - (90.0 + 10.0 + 5.0))
.alignment(ui::VAttach::Bottom, ui::HAttach::Left)
.attach(&mut *back.borrow_mut());
// Delete entry button
let delete_entry = ui::ButtonBuilder::new()
.position(0.0, 0.0)
.size(25.0, 25.0)
.alignment(ui::VAttach::Bottom, ui::HAttach::Right)
.attach(&mut *back.borrow_mut());
{
let mut btn = delete_entry.borrow_mut();
let txt = ui::TextBuilder::new()
.text("X")
.alignment(ui::VAttach::Middle, ui::HAttach::Center)
.attach(&mut *btn);
btn.add_text(txt);
let index = index;
let sname = name.clone();
let saddr = address.clone();
btn.add_click_func(move |_, game| {
let text = format!("Are you sure you wish to delete {} {}?", &sname, &saddr);
game.screen_sys.clone().add_screen(Box::new(
super::confirm_box::ConfirmBox::new(
text,
Rc::new(|game| {
game.screen_sys.pop_screen();
}),
Rc::new(move |game| {
game.screen_sys.pop_screen();
Self::delete_server(index);
}),
),
));
true
})
}
// Edit entry button
let edit_entry = ui::ButtonBuilder::new()
.position(25.0, 0.0)
.size(25.0, 25.0)
.alignment(ui::VAttach::Bottom, ui::HAttach::Right)
.attach(&mut *back.borrow_mut());
{
let mut btn = edit_entry.borrow_mut();
let txt = ui::TextBuilder::new()
.text("E")
.alignment(ui::VAttach::Middle, ui::HAttach::Center)
.attach(&mut *btn);
btn.add_text(txt);
let index = index;
let sname = name.clone();
let saddr = address.clone();
btn.add_click_func(move |_, game| {
game.screen_sys.clone().replace_screen(Box::new(
super::edit_server::EditServerEntry::new(Some((
index,
sname.clone(),
saddr.clone(),
))),
));
true
})
}
let mut server = Server {
back,
offset,
y: 0.0,
done_ping: false,
recv,
motd,
ping,
players,
version,
icon,
icon_texture: None,
};
server.update_position();
elements.servers.push(server);
offset += 1.0;
// Don't block the main thread whilst pinging the server
thread::spawn(move || {
match protocol::Conn::new(&address, protocol::SUPPORTED_PROTOCOLS[0])
.and_then(|conn| conn.do_status())
{
Ok(res) => {
let desc = res.0.description;
let favicon = if let Some(icon) = res.0.favicon {
let data_base64 = &icon["data:image/png;base64,".len()..];
let data_base64: String =
data_base64.chars().filter(|c| !c.is_whitespace()).collect();
let data = base64::decode(data_base64).unwrap();
Some(image::load_from_memory(&data).unwrap())
} else {
None
};
drop(send.send(PingInfo {
motd: desc,
ping: res.1,
exists: true,
online: res.0.players.online,
max: res.0.players.max,
protocol_version: res.0.version.protocol,
protocol_name: res.0.version.name,
forge_mods: res.0.forge_mods,
favicon,
}));
}
Err(err) => {
let e = format!("{}", err);
let msg = ComponentType::new(&e, Some(format::Color::Red));
let _ = send.send(PingInfo {
motd: Component::new(msg),
ping: Duration::new(99999, 0),
exists: false,
online: 0,
max: 0,
protocol_version: 0,
protocol_name: "".to_owned(),
forge_mods: vec![],
favicon: None,
});
}
}
});
}
}
fn delete_server(index: usize) {
let mut servers_info = match fs::File::open(paths::get_data_dir().join("servers.json")) {
Ok(val) => serde_json::from_reader(val).unwrap(),
Err(_) => {
let mut info = BTreeMap::default();
info.insert("servers".to_owned(), Value::Array(vec![]));
Value::Object(info.into_iter().collect())
}
};
{
let servers = servers_info
.as_object_mut()
.unwrap()
.get_mut("servers")
.unwrap()
.as_array_mut()
.unwrap();
servers.remove(index);
}
let mut out = fs::File::create(paths::get_data_dir().join("servers.json")).unwrap();
serde_json::to_writer_pretty(&mut out, &servers_info).unwrap();
}
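    // For reference, the `servers.json` layout implied by the reads and
    // writes in this file (field names come from the code; the values are
    // illustrative):
    //
    //     {
    //       "servers": [
    //         { "name": "My Server", "address": "127.0.0.1:25565" }
    //       ]
    //     }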
fn init_list(&mut self, renderer: Arc<render::Renderer>, ui_container: &mut ui::Container) {
let logo = ui::logo::Logo::new(renderer.resources.clone(), ui_container);
// Refresh the server list
let refresh = ui::ButtonBuilder::new()
.position(300.0, -50.0 - 15.0)
.size(100.0, 30.0)
.alignment(ui::VAttach::Middle, ui::HAttach::Center)
.draw_index(2)
.create(ui_container);
{
let mut refresh = refresh.borrow_mut();
let txt = ui::TextBuilder::new()
.text("Refresh")
.alignment(ui::VAttach::Middle, ui::HAttach::Center)
.attach(&mut *refresh);
refresh.add_text(txt);
let nr = self.needs_reload.clone();
refresh.add_click_func(move |_, _| {
*nr.borrow_mut() = true;
true
})
}
// Add a new server to the list
let add = ui::ButtonBuilder::new()
.position(200.0, -50.0 - 15.0)
.size(100.0, 30.0)
.alignment(ui::VAttach::Middle, ui::HAttach::Center)
.draw_index(2)
.create(ui_container);
{
let mut add = add.borrow_mut();
let txt = ui::TextBuilder::new()
.text("Add")
.alignment(ui::VAttach::Middle, ui::HAttach::Center)
.attach(&mut *add);
add.add_text(txt);
add.add_click_func(move |_, game| {
game.screen_sys
.clone()
.replace_screen(Box::new(super::edit_server::EditServerEntry::new(None)));
true
})
}
// Options menu
let options = ui::ButtonBuilder::new()
.position(5.0, 25.0)
.size(40.0, 40.0)
.draw_index(1)
.alignment(ui::VAttach::Bottom, ui::HAttach::Right)
.create(ui_container);
{
let mut options = options.borrow_mut();
ui::ImageBuilder::new()
.texture("leafish:gui/cog")
.position(0.0, 0.0)
.size(40.0, 40.0)
.alignment(ui::VAttach::Middle, ui::HAttach::Center)
.attach(&mut *options);
options.add_click_func(|_, game| {
game.screen_sys
.clone()
.add_screen(Box::new(super::SettingsMenu::new(game.vars.clone(), false)));
true
});
}
// Disclaimer
let disclaimer = ui::TextBuilder::new()
.text("Not affiliated with Mojang/Minecraft")
.position(5.0, 5.0)
.colour((255, 200, 200, 255))
.draw_index(1)
.alignment(ui::VAttach::Bottom, ui::HAttach::Right)
.create(ui_container);
// If we are kicked from a server display the reason
let disconnected = if let Some(ref disconnect_reason) = self.disconnect_reason {
let (width, height) = ui::Formatted::compute_size(
renderer.clone(),
disconnect_reason,
600.0,
1.0,
1.0,
1.0,
);
let background = ui::ImageBuilder::new()
.texture("leafish:solid")
.position(0.0, 3.0)
.size(
width.max(renderer.ui.lock().size_of_string("Disconnected")) + 4.0,
height + 4.0 + 16.0,
)
.colour((0, 0, 0, 100))
.alignment(ui::VAttach::Top, ui::HAttach::Center)
.draw_index(10)
.create(ui_container);
ui::TextBuilder::new()
.text("Disconnected")
.position(0.0, 2.0)
.colour((255, 0, 0, 255))
.alignment(ui::VAttach::Top, ui::HAttach::Center)
.attach(&mut *background.borrow_mut());
ui::FormattedBuilder::new()
.text(disconnect_reason.clone())
.position(0.0, 18.0)
.max_width(600.0)
.alignment(ui::VAttach::Top, ui::HAttach::Center)
.attach(&mut *background.borrow_mut());
Some(background)
} else {
None
};
self.elements = Some(UIElements {
logo,
servers: vec![],
_add_btn: add,
_refresh_btn: refresh,
_options_btn: options,
_disclaimer: disclaimer,
_disconnected: disconnected,
});
}
}
impl super::Screen for ServerList {
fn on_active(
&mut self,
_screen_sys: &ScreenSystem,
renderer: Arc<render::Renderer>,
ui_container: &mut ui::Container,
) {
self.init_list(renderer, ui_container);
*self.needs_reload.borrow_mut() = true;
}
fn on_deactive(
&mut self,
_screen_sys: &ScreenSystem,
renderer: Arc<render::Renderer>,
_ui_container: &mut ui::Container,
) {
// Clean up
{
let elements = self.elements.as_mut().unwrap();
let mut tex = renderer.get_textures_ref().write();
for server in &mut elements.servers {
if let Some(ref icon) = server.icon_texture {
tex.remove_dynamic(icon);
}
}
}
self.elements = None
}
fn tick(
&mut self,
_screen_sys: &ScreenSystem,
renderer: Arc<render::Renderer>,
ui_container: &mut ui::Container,
delta: f64,
) {
if *self.needs_reload.borrow() {
self.reload_server_list(renderer.clone(), ui_container);
}
let elements = self.elements.as_mut().unwrap();
elements.logo.tick(renderer.clone());
for s in &mut elements.servers {
// Animate the entries
{
let mut back = s.back.borrow_mut();
let dy = s.y - back.y;
if dy * dy > 1.0 {
let y = back.y;
back.y = y + delta * dy * 0.1;
} else {
back.y = s.y;
}
}
#[allow(clippy::if_same_then_else)]
if s.y < elements._add_btn.borrow().y {
// TODO: Make button invisible!
} else {
// TODO: Make button visible.
}
// Keep checking to see if the server has finished being
// pinged
if !s.done_ping {
match s.recv.try_recv() {
Ok(res) => {
s.done_ping = true;
s.motd.borrow_mut().set_text(res.motd);
// Selects the icon for the given ping range
                        // TODO: switch to `as_millis()` (stabilized in Rust 1.33).
let ping_ms = (res.ping.subsec_nanos() as f64) / 1000000.0
+ (res.ping.as_secs() as f64) * 1000.0;
let y = match ping_ms.round() as u64 {
_x @ 0..=75 => 16.0 / 256.0,
_x @ 76..=150 => 24.0 / 256.0,
_x @ 151..=225 => 32.0 / 256.0,
_x @ 226..=350 => 40.0 / 256.0,
_x @ 351..=999 => 48.0 / 256.0,
_ => 56.0 / 256.0,
};
s.ping.borrow_mut().texture_coords.1 = y;
if res.exists {
{
let mut players = s.players.borrow_mut();
let txt = if protocol::SUPPORTED_PROTOCOLS
.contains(&res.protocol_version)
{
players.colour.1 = 255;
players.colour.2 = 255;
format!("{}/{}", res.online, res.max)
} else {
players.colour.1 = 85;
players.colour.2 = 85;
format!("Out of date {}/{}", res.online, res.max)
};
players.text = txt;
}
let sm =
format!("{} mods + {}", res.forge_mods.len(), res.protocol_name);
let st = if !res.forge_mods.is_empty() {
&sm
} else {
&res.protocol_name
};
let msg_component =
Component::new(ComponentType::new(st, Some(format::Color::Yellow)));
s.version.borrow_mut().set_text(msg_component);
}
if let Some(favicon) = res.favicon {
let name: String = std::iter::repeat(())
.map(|()| {
rand::thread_rng().sample(&rand::distributions::Alphanumeric)
as char
})
.take(30)
.collect();
let tex = renderer.get_textures_ref();
s.icon_texture = Some(name.clone());
let icon_tex = tex.write().put_dynamic(&name, favicon);
s.icon.borrow_mut().texture = icon_tex.name;
}
}
Err(TryRecvError::Disconnected) => {
s.done_ping = true;
s.motd
.borrow_mut()
.set_text(Component::new(ComponentType::new(
"Channel dropped",
Some(format::Color::Red),
)));
}
_ => {}
}
}
}
}
fn on_scroll(&mut self, _: f64, y: f64) {
let elements = self.elements.as_mut().unwrap();
if elements.servers.is_empty() {
return;
}
        let mut diff = y;
        {
            // Clamp the scroll delta so the last entry never rises above
            // slot 2.0 and the first entry never drops below slot 0.0.
            let last = elements.servers.last().unwrap();
if last.offset + diff <= 2.0 {
diff = 2.0 - last.offset;
}
let first = elements.servers.first().unwrap();
if first.offset + diff >= 0.0 {
diff = -first.offset;
}
}
for s in &mut elements.servers {
s.offset += diff;
s.update_position();
}
}
fn on_resize(
&mut self,
screen_sys: &ScreenSystem,
renderer: Arc<Renderer>,
ui_container: &mut Container,
) {
// TODO: Don't ping the servers on resize!
self.on_deactive(screen_sys, renderer.clone(), ui_container);
self.on_active(screen_sys, renderer, ui_container);
}
fn clone_screen(&self) -> Box<dyn Screen> {
Box::new(self.clone())
}
}
| 36.33237 | 107 | 0.446027 |
1a22c687473b75796e427498648fc40edf53f9dd | 5,003 | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
//! The concurrency manager is responsible for concurrency control of
//! transactions.
//!
//! The concurrency manager contains a lock table in memory. Lock information
//! can be stored in it and reading requests can check if these locks block
//! the read.
//!
//! In order to mutate the lock of a key stored in the lock table, it needs
//! to be locked first using `lock_key` or `lock_keys`.
mod key_handle;
mod lock_table;
pub use self::key_handle::{KeyHandle, KeyHandleGuard};
pub use self::lock_table::LockTable;
use std::{
mem::{self, MaybeUninit},
sync::{
atomic::{AtomicU64, Ordering},
Arc,
},
};
use txn_types::{Key, Lock, TimeStamp};
// TODO: Currently we are using a Mutex<BTreeMap> to implement the handle table.
// In the future we should replace it with a concurrent ordered map.
// Pay attention that the async functions of ConcurrencyManager should not hold
// the mutex.
#[derive(Clone)]
pub struct ConcurrencyManager {
max_read_ts: Arc<AtomicU64>,
lock_table: LockTable,
}
impl ConcurrencyManager {
pub fn new(latest_ts: TimeStamp) -> Self {
ConcurrencyManager {
max_read_ts: Arc::new(AtomicU64::new(latest_ts.into_inner())),
lock_table: LockTable::default(),
}
}
pub fn max_read_ts(&self) -> TimeStamp {
TimeStamp::new(self.max_read_ts.load(Ordering::SeqCst))
}
/// Updates max_read_ts with the given read_ts. It has no effect if
/// max_read_ts >= read_ts or read_ts is TimeStamp::max().
pub fn update_max_read_ts(&self, read_ts: TimeStamp) {
if read_ts != TimeStamp::max() {
self.max_read_ts
.fetch_max(read_ts.into_inner(), Ordering::SeqCst);
}
}
/// Acquires a mutex of the key and returns an RAII guard. When the guard goes
/// out of scope, the mutex will be unlocked.
///
/// The guard can be used to store Lock in the table. The stored lock
/// is visible to `read_key_check` and `read_range_check`.
pub async fn lock_key(&self, key: &Key) -> KeyHandleGuard {
self.lock_table.lock_key(key).await
}
/// Acquires mutexes of the keys and returns the RAII guards. The order of the
/// guards is the same with the given keys.
///
/// The guards can be used to store Lock in the table. The stored lock
/// is visible to `read_key_check` and `read_range_check`.
pub async fn lock_keys(&self, keys: impl Iterator<Item = &Key>) -> Vec<KeyHandleGuard> {
let mut keys_with_index: Vec<_> = keys.enumerate().collect();
// To prevent deadlock, we sort the keys and lock them one by one.
keys_with_index.sort_by_key(|(_, key)| *key);
let mut result: Vec<MaybeUninit<KeyHandleGuard>> = Vec::new();
result.resize_with(keys_with_index.len(), || MaybeUninit::uninit());
for (index, key) in keys_with_index {
result[index] = MaybeUninit::new(self.lock_table.lock_key(key).await);
}
#[allow(clippy::unsound_collection_transmute)]
unsafe {
mem::transmute(result)
}
}
    /// Checks if there is a memory lock of the key which blocks the read.
    /// The given `check_fn` should return an error iff the lock passed in
    /// blocks the read.
pub fn read_key_check<E>(
&self,
key: &Key,
check_fn: impl FnOnce(&Lock) -> Result<(), E>,
) -> Result<(), E> {
self.lock_table.check_key(key, check_fn)
}
    /// Checks if there is a memory lock in the range which blocks the read.
    /// The given `check_fn` should return an error iff a lock passed in
    /// blocks the read.
pub fn read_range_check<E>(
&self,
start_key: Option<&Key>,
end_key: Option<&Key>,
check_fn: impl FnMut(&Key, &Lock) -> Result<(), E>,
) -> Result<(), E> {
self.lock_table.check_range(start_key, end_key, check_fn)
}
}
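// A minimal usage sketch of the API above (the stored lock value and the
// check function are placeholders, not part of the original source):
//
//     let cm = ConcurrencyManager::new(1.into());
//     let guard = cm.lock_key(&Key::from_raw(b"k")).await;
//     // ... store a `Lock` through `guard` so readers can observe it ...
//     cm.read_key_check(&Key::from_raw(b"k"), |lock| check(lock))?;
//     drop(guard); // releases the in-memory mutex for the key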
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn test_lock_keys_order() {
let concurrency_manager = ConcurrencyManager::new(1.into());
let keys: Vec<_> = [b"c", b"a", b"b"]
.iter()
.map(|k| Key::from_raw(*k))
.collect();
let guards = concurrency_manager.lock_keys(keys.iter()).await;
for (key, guard) in keys.iter().zip(&guards) {
assert_eq!(key, guard.key());
}
}
#[tokio::test]
async fn test_update_max_read_ts() {
let concurrency_manager = ConcurrencyManager::new(10.into());
concurrency_manager.update_max_read_ts(20.into());
assert_eq!(concurrency_manager.max_read_ts(), 20.into());
concurrency_manager.update_max_read_ts(5.into());
assert_eq!(concurrency_manager.max_read_ts(), 20.into());
concurrency_manager.update_max_read_ts(TimeStamp::max());
assert_eq!(concurrency_manager.max_read_ts(), 20.into());
}
}
| 35.232394 | 92 | 0.637418 |
87454c3030cb6ce78dcd2037b1b81eca29abf410 | 49,249 | //! Full build support for the Skia library, SkiaBindings library and bindings.rs file.
use crate::build_support::{android, cargo, clang, ios, llvm, vs, xcode};
use bindgen::{CodegenConfig, EnumVariation};
use cc::Build;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::{env, fs};
/// The libraries to link with.
mod lib {
pub const SKIA: &str = "skia";
pub const SKIA_BINDINGS: &str = "skia-bindings";
pub const SK_SHAPER: &str = "skshaper";
pub const SK_PARAGRAPH: &str = "skparagraph";
}
/// Feature identifiers define the additional configuration parts of the binaries to download.
mod feature_id {
pub const GL: &str = "gl";
pub const VULKAN: &str = "vulkan";
pub const METAL: &str = "metal";
pub const D3D: &str = "d3d";
pub const TEXTLAYOUT: &str = "textlayout";
pub const WEBPE: &str = "webpe";
pub const WEBPD: &str = "webpd";
pub const EGL: &str = "egl";
pub const X11: &str = "x11";
pub const WAYLAND: &str = "wayland";
}
/// The defaults for the Skia build configuration.
impl Default for BuildConfiguration {
fn default() -> Self {
let skia_debug = matches!(cargo::env_var("SKIA_DEBUG"), Some(v) if v != "0");
BuildConfiguration {
on_windows: cargo::host().is_windows(),
skia_debug,
// `OPT_LEVEL` is set by Cargo itself.
opt_level: cargo::env_var("OPT_LEVEL"),
features: Features {
gl: cfg!(feature = "gl"),
egl: cfg!(feature = "egl"),
wayland: cfg!(feature = "wayland"),
x11: cfg!(feature = "x11"),
vulkan: cfg!(feature = "vulkan"),
metal: cfg!(feature = "metal"),
d3d: cfg!(feature = "d3d"),
text_layout: cfg!(feature = "textlayout"),
webp_encode: cfg!(feature = "webp-encode"),
webp_decode: cfg!(feature = "webp-decode"),
animation: false,
dng: false,
particles: false,
},
definitions: Vec::new(),
cc: cargo::env_var("CC").unwrap_or_else(|| "clang".to_string()),
cxx: cargo::env_var("CXX").unwrap_or_else(|| "clang++".to_string()),
}
}
}
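// For illustration, the defaults above can be steered through the environment
// that Cargo passes to this build script, e.g. (values are examples):
//
//     SKIA_DEBUG=1 CC=gcc CXX=g++ cargo build
//
// which yields `skia_debug == true` and the gcc toolchain instead of clang.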
/// The build configuration for Skia.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BuildConfiguration {
/// Do we build _on_ a Windows OS?
on_windows: bool,
/// Set the optimization level (0-3, s or z). Clang and GCC use the same notation
/// as Rust, so we just pass this option through from Cargo.
opt_level: Option<String>,
/// Build Skia in a debug configuration?
skia_debug: bool,
/// The Skia feature set to compile.
features: Features,
/// Additional preprocessor definitions that will override predefined ones.
definitions: Definitions,
/// C compiler to use
cc: String,
/// C++ compiler to use
cxx: String,
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Features {
/// Build with OpenGL support?
pub gl: bool,
/// Build with EGL support? If you set X11, setting this to false will use LibGL (GLX)
pub egl: bool,
/// Build with Wayland support? This requires EGL, as GLX does not work on Wayland.
pub wayland: bool,
/// Build with X11 support?
pub x11: bool,
/// Build with Vulkan support?
pub vulkan: bool,
/// Build with Metal support?
pub metal: bool,
/// Build with Direct3D support?
pub d3d: bool,
/// Features related to text layout. Modules skshaper and skparagraph.
pub text_layout: bool,
/// Support the encoding of bitmap data to the WEBP image format.
pub webp_encode: bool,
/// Support the decoding of the WEBP image format to bitmap data.
pub webp_decode: bool,
/// Build with animation support (yet unsupported, no wrappers).
pub animation: bool,
/// Support DNG file format (currently unsupported because of build errors).
pub dng: bool,
/// Build the particles module (unsupported, no wrappers).
pub particles: bool,
}
impl Features {
pub fn gpu(&self) -> bool {
self.gl || self.vulkan || self.metal || self.d3d
}
/// Feature Ids used to look up prebuilt binaries.
pub fn ids(&self) -> Vec<&str> {
let mut feature_ids = Vec::new();
if self.gl {
feature_ids.push(feature_id::GL);
}
if self.egl {
feature_ids.push(feature_id::EGL);
}
if self.x11 {
feature_ids.push(feature_id::X11);
}
if self.wayland {
feature_ids.push(feature_id::WAYLAND);
}
if self.vulkan {
feature_ids.push(feature_id::VULKAN);
}
if self.metal {
feature_ids.push(feature_id::METAL);
}
if self.d3d {
feature_ids.push(feature_id::D3D);
}
if self.text_layout {
feature_ids.push(feature_id::TEXTLAYOUT);
}
if self.webp_encode {
feature_ids.push(feature_id::WEBPE);
}
if self.webp_decode {
feature_ids.push(feature_id::WEBPD);
}
feature_ids
}
}
/// This is the final, low level build configuration.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FinalBuildConfiguration {
/// The Skia source directory.
pub skia_source_dir: PathBuf,
/// The name value pairs passed as arguments to gn.
pub gn_args: Vec<(String, String)>,
/// ninja files that need to be parsed for further definitions.
pub ninja_files: Vec<PathBuf>,
/// The additional definitions (cloned from the definitions of
/// the BuildConfiguration).
pub definitions: Definitions,
/// The binding source files to compile.
pub binding_sources: Vec<PathBuf>,
/// Whether to use system libraries or not.
pub use_system_libraries: bool,
}
impl FinalBuildConfiguration {
pub fn from_build_configuration(
build: &BuildConfiguration,
use_system_libraries: bool,
skia_source_dir: &Path,
) -> FinalBuildConfiguration {
let features = &build.features;
let gn_args = {
fn quote(s: &str) -> String {
format!("\"{}\"", s)
}
let mut args: Vec<(&str, String)> = vec![
("is_official_build", yes_if(!build.skia_debug)),
("is_debug", yes_if(build.skia_debug)),
("skia_enable_gpu", yes_if(features.gpu())),
("skia_use_gl", yes_if(features.gl)),
("skia_use_egl", yes_if(features.egl)),
("skia_use_x11", yes_if(features.x11)),
("skia_use_system_libpng", yes_if(use_system_libraries)),
("skia_use_libwebp_encode", yes_if(features.webp_encode)),
("skia_use_libwebp_decode", yes_if(features.webp_decode)),
("skia_use_system_zlib", yes_if(use_system_libraries)),
("skia_use_xps", no()),
("skia_use_dng_sdk", yes_if(features.dng)),
("cc", quote(&build.cc)),
("cxx", quote(&build.cxx)),
];
if features.vulkan {
args.push(("skia_use_vulkan", yes()));
args.push(("skia_enable_spirv_validation", no()));
}
if features.metal {
args.push(("skia_use_metal", yes()));
}
if features.d3d {
args.push(("skia_use_direct3d", yes()))
}
// further flags that limit the components of Skia debug builds.
if build.skia_debug {
args.push(("skia_enable_spirv_validation", no()));
args.push(("skia_enable_tools", no()));
args.push(("skia_enable_vulkan_debug_layers", no()));
args.push(("skia_use_libheif", no()));
args.push(("skia_use_lua", no()));
}
if features.text_layout {
args.extend(vec![
("skia_enable_skshaper", yes()),
("skia_use_icu", yes()),
("skia_use_system_icu", yes_if(use_system_libraries)),
("skia_use_harfbuzz", yes()),
("skia_pdf_subset_harfbuzz", yes()),
("skia_use_system_harfbuzz", yes_if(use_system_libraries)),
("skia_use_sfntly", no()),
("skia_enable_skparagraph", yes()),
// note: currently, tests need to be enabled, because modules/skparagraph
// is not included in the default dependency configuration.
// ("paragraph_tests_enabled", no()),
]);
} else {
args.push(("skia_use_icu", no()));
}
if features.webp_encode || features.webp_decode {
args.push(("skia_use_system_libwebp", yes_if(use_system_libraries)))
}
let mut use_expat = true;
// target specific gn args.
let target = cargo::target();
        let target_str: &str = &format!("--target={}", target);
let mut set_target = true;
let sysroot_arg;
let opt_level_arg;
let mut cflags: Vec<&str> = vec![];
let mut asmflags: Vec<&str> = vec![];
if let Some(sysroot) = cargo::env_var("SDKROOT") {
sysroot_arg = format!("--sysroot={}", sysroot);
cflags.push(&sysroot_arg);
}
let jpeg_sys_cflags: Vec<String>;
if cfg!(feature = "use-system-jpeg-turbo") {
let paths = cargo::env_var("DEP_JPEG_INCLUDE").expect("mozjpeg-sys include path");
jpeg_sys_cflags = std::env::split_paths(&paths)
.map(|arg| format!("-I{}", arg.display()))
.collect();
cflags.extend(jpeg_sys_cflags.iter().map(|x| -> &str { x.as_ref() }));
args.push(("skia_use_system_libjpeg_turbo", yes()));
} else {
args.push((
"skia_use_system_libjpeg_turbo",
yes_if(use_system_libraries),
));
}
if let Some(opt_level) = &build.opt_level {
/* LTO generates corrupt libraries on the host platforms when building with --release
if opt_level.parse::<usize>() != Ok(0) {
cflags.push("-flto");
}
*/
// When targeting windows `-O` isn't supported.
if !target.is_windows() {
opt_level_arg = format!("-O{}", opt_level);
cflags.push(&opt_level_arg);
}
}
match target.as_strs() {
(_, _, "windows", Some("msvc")) if build.on_windows => {
if let Some(win_vc) = vs::resolve_win_vc() {
args.push(("win_vc", quote(win_vc.to_str().unwrap())))
}
// Code on MSVC needs to be compiled differently (e.g. with /MT or /MD) depending on the runtime being linked.
// (See https://doc.rust-lang.org/reference/linkage.html#static-and-dynamic-c-runtimes)
// When static feature is enabled (target-feature=+crt-static) the C runtime should be statically linked
// and the compiler has to place the library name LIBCMT.lib into the .obj
// See https://docs.microsoft.com/en-us/cpp/build/reference/md-mt-ld-use-run-time-library?view=vs-2019
if cargo::target_crt_static() {
cflags.push("/MT");
} else {
// otherwise the C runtime should be linked dynamically
cflags.push("/MD");
}
// Tell Skia's build system where LLVM is supposed to be located.
if let Some(llvm_home) = llvm::win::find_llvm_home() {
args.push(("clang_win", quote(&llvm_home)));
} else {
panic!(
"Unable to locate LLVM installation. skia-bindings can not be built."
);
}
}
(arch, "linux", "android", _) | (arch, "linux", "androideabi", _) => {
args.push(("ndk", quote(&android::ndk())));
// TODO: make API-level configurable?
args.push(("ndk_api", android::API_LEVEL.into()));
args.push(("target_cpu", quote(clang::target_arch(arch))));
args.push(("skia_use_system_freetype2", yes_if(use_system_libraries)));
args.push(("skia_enable_fontmgr_android", yes()));
// Enabling fontmgr_android implicitly enables expat.
// We make this explicit to avoid relying on an expat installed
// in the system.
use_expat = true;
}
(arch, _, "ios", _) => {
args.push(("target_os", quote("ios")));
args.push(("target_cpu", quote(clang::target_arch(arch))));
ios::extra_skia_cflags(arch, &mut cflags);
}
(arch, _, os, _) => {
let skia_target_os = match os {
"darwin" => {
// Skia will take care to set a specific `-target` for the current macOS
// version. So we don't push another target `--target` that may
// conflict.
set_target = false;
"mac"
}
"windows" => "win",
_ => os,
};
args.push(("target_os", quote(skia_target_os)));
args.push(("target_cpu", quote(clang::target_arch(arch))));
}
}
if use_expat {
args.push(("skia_use_expat", yes()));
args.push(("skia_use_system_expat", yes_if(use_system_libraries)));
} else {
args.push(("skia_use_expat", no()));
}
if set_target {
cflags.push(target_str);
asmflags.push(target_str);
}
if !cflags.is_empty() {
let cflags = format!(
"[{}]",
cflags.into_iter().map(quote).collect::<Vec<_>>().join(",")
);
args.push(("extra_cflags", cflags));
}
if !asmflags.is_empty() {
let asmflags = format!(
"[{}]",
asmflags
.into_iter()
.map(quote)
.collect::<Vec<_>>()
.join(",")
);
args.push(("extra_asmflags", asmflags));
}
args.into_iter()
.map(|(key, value)| (key.to_string(), value))
.collect()
};
let ninja_files = {
let mut files = vec!["obj/skia.ninja".into()];
if features.text_layout {
files.extend(vec![
"obj/modules/skshaper/skshaper.ninja".into(),
"obj/modules/skparagraph/skparagraph.ninja".into(),
]);
}
files
};
let binding_sources = {
let mut sources: Vec<PathBuf> = vec!["src/bindings.cpp".into()];
if features.gl {
sources.push("src/gl.cpp".into());
}
if features.vulkan {
sources.push("src/vulkan.cpp".into());
}
if features.metal {
sources.push("src/metal.cpp".into());
}
if features.d3d {
sources.push("src/d3d.cpp".into());
}
if features.gpu() {
sources.push("src/gpu.cpp".into());
}
if features.text_layout {
sources.extend(vec!["src/shaper.cpp".into(), "src/paragraph.cpp".into()]);
}
sources.push("src/svg.cpp".into());
sources
};
FinalBuildConfiguration {
skia_source_dir: skia_source_dir.into(),
gn_args,
ninja_files,
definitions: build.definitions.clone(),
binding_sources,
use_system_libraries,
}
}
}
fn yes() -> String {
"true".into()
}
fn no() -> String {
"false".into()
}
fn yes_if(y: bool) -> String {
if y {
yes()
} else {
no()
}
}
/// The configuration of the resulting binaries.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BinariesConfiguration {
/// The feature identifiers we built with.
pub feature_ids: Vec<String>,
/// The output directory of the libraries we build and we need to inform cargo about.
pub output_directory: PathBuf,
/// The TARGET specific link libraries we need to inform cargo about.
pub link_libraries: Vec<String>,
/// The static Skia libraries skia-bindings provides and dependent projects need to link with.
pub built_libraries: Vec<String>,
/// Additional files relative to the output_directory
/// that are needed to build dependent projects.
pub additional_files: Vec<PathBuf>,
/// `true` if the skia libraries are built with debugging information.
pub skia_debug: bool,
}
const SKIA_OUTPUT_DIR: &str = "skia";
const ICUDTL_DAT: &str = "icudtl.dat";
impl BinariesConfiguration {
/// Build a binaries configuration based on the current environment cargo
/// supplies us with and a Skia build configuration.
pub fn from_cargo_env(build: &BuildConfiguration) -> Self {
let features = &build.features;
let target = cargo::target();
let mut built_libraries = Vec::new();
let mut additional_files = Vec::new();
let feature_ids = features.ids();
if features.text_layout {
if target.is_windows() {
additional_files.push(ICUDTL_DAT.into());
}
built_libraries.push(lib::SK_PARAGRAPH.into());
built_libraries.push(lib::SK_SHAPER.into());
}
let mut link_libraries = Vec::new();
match target.as_strs() {
(_, "unknown", "linux", _) => {
link_libraries.extend(vec!["stdc++", "fontconfig", "freetype"]);
if features.gl {
if features.egl {
link_libraries.push("EGL");
}
if features.x11 {
link_libraries.push("GL");
}
if features.wayland {
link_libraries.push("wayland-egl");
link_libraries.push("GLESv2");
}
}
}
(_, "apple", "darwin", _) => {
link_libraries.extend(vec!["c++", "framework=ApplicationServices"]);
if features.gl {
link_libraries.push("framework=OpenGL");
}
if features.metal {
link_libraries.push("framework=Metal");
// MetalKit was added in m87 BUILD.gn.
link_libraries.push("framework=MetalKit");
link_libraries.push("framework=Foundation");
}
}
(_, _, "windows", Some("msvc")) => {
link_libraries.extend(&["usp10", "ole32", "user32", "gdi32", "fontsub"]);
if features.gl {
link_libraries.push("opengl32");
}
if features.d3d {
link_libraries.extend(&["d3d12", "dxgi", "d3dcompiler"]);
}
}
(_, "linux", "android", _) | (_, "linux", "androideabi", _) => {
link_libraries.extend(android::link_libraries(features));
}
(_, "apple", "ios", _) => {
link_libraries.extend(ios::link_libraries(features));
}
_ => panic!("unsupported target: {:?}", cargo::target()),
};
let output_directory = cargo::output_directory()
.join(SKIA_OUTPUT_DIR)
.to_str()
.unwrap()
.into();
built_libraries.push(lib::SKIA.into());
built_libraries.push(lib::SKIA_BINDINGS.into());
BinariesConfiguration {
feature_ids: feature_ids.into_iter().map(|f| f.to_string()).collect(),
output_directory,
link_libraries: link_libraries
.into_iter()
.map(|lib| lib.to_string())
.collect(),
built_libraries,
additional_files,
skia_debug: build.skia_debug,
}
}
/// Inform cargo that the library files of the given configuration are available and
/// can be used as dependencies.
pub fn commit_to_cargo(&self) {
cargo::add_link_search(self.output_directory.to_str().unwrap());
// On Linux, the order is significant, first the static libraries we built, and then
// the system libraries.
let target = cargo::target();
for lib in &self.built_libraries {
// Prefixing the libraries we built with `static=` causes linker errors on Windows.
// https://github.com/rust-skia/rust-skia/pull/354
let kind_prefix = {
if target.is_windows() {
""
} else {
"static="
}
};
cargo::add_link_lib(format!("{}{}", kind_prefix, lib));
}
cargo::add_link_libs(&self.link_libraries);
}
}
/// Orchestrates the entire build of Skia based on the arguments provided.
pub fn build(
build: &FinalBuildConfiguration,
config: &BinariesConfiguration,
ninja_command: Option<PathBuf>,
gn_command: Option<PathBuf>,
offline: bool,
) {
let python2 = &prerequisites::locate_python2_cmd();
println!("Python 2 found: {:?}", python2);
let ninja = ninja_command.unwrap_or_else(|| {
env::current_dir()
.unwrap()
.join("depot_tools")
.join(ninja::default_exe_name())
});
if !offline && !build.use_system_libraries {
println!("Synchronizing Skia dependencies");
#[cfg(feature = "binary-cache")]
crate::build_support::binary_cache::resolve_dependencies();
assert!(
Command::new(python2)
.arg("skia/tools/git-sync-deps")
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.status()
.unwrap()
.success(),
"`skia/tools/git-sync-deps` failed"
);
}
configure_skia(build, config, python2, gn_command.as_deref());
build_skia(build, config, &ninja);
}
/// Configures Skia by calling gn
pub fn configure_skia(
build: &FinalBuildConfiguration,
config: &BinariesConfiguration,
python2: &Path,
gn_command: Option<&Path>,
) {
let gn_args = build
.gn_args
.iter()
.map(|(name, value)| name.clone() + "=" + value)
.collect::<Vec<String>>()
.join(" ");
let gn_command = gn_command
.map(|p| p.to_owned())
.unwrap_or_else(|| build.skia_source_dir.join("bin").join("gn"));
println!("Skia args: {}", &gn_args);
let output = Command::new(gn_command)
.args(&[
"gen",
config.output_directory.to_str().unwrap(),
&format!("--script-executable={}", python2.to_str().unwrap()),
&format!("--args={}", gn_args),
])
.envs(env::vars())
.current_dir(&build.skia_source_dir)
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.output()
.expect("gn error");
if output.status.code() != Some(0) {
panic!("{:?}", String::from_utf8(output.stdout).unwrap());
}
}
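// For illustration, the command assembled above looks roughly like the
// following (paths and argument values are examples, not from a real run):
//
//     gn gen <output_directory> --script-executable=<python2> \
//         --args="is_official_build=true skia_use_gl=true cc=\"clang\" ..."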
/// Builds Skia.
///
/// This function assumes that all prerequisites are in place and that the output directory
/// contains a fully configured Skia source tree generated by gn.
pub fn build_skia(
build: &FinalBuildConfiguration,
config: &BinariesConfiguration,
ninja_command: &Path,
) {
// Libraries we explicitly want ninja to build.
let ninja_built_libraries = config
.built_libraries
.iter()
.filter(|x| *x != lib::SKIA_BINDINGS);
let ninja_status = Command::new(ninja_command)
.args(ninja_built_libraries)
.args(&["-C", config.output_directory.to_str().unwrap()])
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.status();
assert!(
ninja_status
.expect("failed to run `ninja`, does the directory depot_tools/ exist?")
.success(),
"`ninja` returned an error, please check the output for details."
);
generate_bindings(build, &config.output_directory)
}
fn generate_bindings(build: &FinalBuildConfiguration, output_directory: &Path) {
let builder = bindgen::Builder::default()
.generate_comments(false)
.layout_tests(true)
.default_enum_style(EnumVariation::Rust {
non_exhaustive: false,
})
.size_t_is_usize(true)
.parse_callbacks(Box::new(ParseCallbacks))
.raw_line("#![allow(clippy::all)]")
// https://github.com/rust-lang/rust-bindgen/issues/1651
.raw_line("#![allow(unknown_lints)]")
.raw_line("#![allow(deref_nullptr)]")
// GrVkBackendContext contains u128 fields on macOS
.raw_line("#![allow(improper_ctypes)]")
.allowlist_function("C_.*")
.constified_enum(".*Mask")
.constified_enum(".*Flags")
.constified_enum(".*Bits")
.constified_enum("SkCanvas_SaveLayerFlagsSet")
.constified_enum("GrVkAlloc_Flag")
.constified_enum("GrGLBackendState")
// not used:
.blocklist_type("SkPathRef_Editor")
.blocklist_function("SkPathRef_Editor_Editor")
// private types that pull in inline functions that cannot be linked:
// https://github.com/rust-skia/rust-skia/issues/318
.raw_line("pub enum GrContext_Base {}")
.blocklist_type("GrContext_Base")
.blocklist_function("GrContext_Base_.*")
.raw_line("pub enum GrImageContext {}")
.blocklist_type("GrImageContext")
.raw_line("pub enum GrImageContextPriv {}")
.blocklist_type("GrImageContextPriv")
.raw_line("pub enum GrContextThreadSafeProxy {}")
.blocklist_type("GrContextThreadSafeProxy")
.blocklist_type("GrContextThreadSafeProxyPriv")
.raw_line("pub enum GrContextThreadSafeProxyPriv {}")
.blocklist_type("GrRecordingContextPriv")
.raw_line("pub enum GrRecordingContextPriv {}")
.blocklist_function("GrRecordingContext_priv.*")
.blocklist_function("GrDirectContext_priv.*")
.blocklist_type("GrContextPriv")
.raw_line("pub enum GrContextPriv {}")
.blocklist_function("GrContext_priv.*")
.blocklist_function("SkDeferredDisplayList_priv.*")
.raw_line("pub enum SkVerticesPriv {}")
.blocklist_type("SkVerticesPriv")
.blocklist_function("SkVertices_priv.*")
.blocklist_function("std::bitset_flip.*")
// Vulkan reexports that got swallowed by making them opaque.
// (these can not be allowlisted by a extern "C" function)
.allowlist_type("VkPhysicalDeviceFeatures")
.allowlist_type("VkPhysicalDeviceFeatures2").
// m91: These functions are not actually implemented.
blocklist_function("SkCustomTypefaceBuilder_setGlyph[123].*")
// misc
.allowlist_var("SK_Color.*")
.allowlist_var("kAll_GrBackendState")
.use_core()
.clang_arg("-std=c++17")
.clang_args(&["-x", "c++"])
.clang_arg("-v");
// don't generate destructors on Windows: https://github.com/rust-skia/rust-skia/issues/318
let mut builder = if cfg!(target_os = "windows") {
builder.with_codegen_config({
let mut config = CodegenConfig::default();
config.remove(CodegenConfig::DESTRUCTORS);
config
})
} else {
builder
};
for function in ALLOWLISTED_FUNCTIONS {
builder = builder.allowlist_function(function)
}
for opaque_type in OPAQUE_TYPES {
builder = builder.opaque_type(opaque_type)
}
for t in BLOCKLISTED_TYPES {
builder = builder.blocklist_type(t);
}
let mut cc_build = Build::new();
for source in &build.binding_sources {
cc_build.file(source);
let source = source.to_str().unwrap();
cargo::rerun_if_changed(source);
builder = builder.header(source);
}
let include_path = &build.skia_source_dir;
cargo::rerun_if_changed(include_path.join("include"));
builder = builder.clang_arg(format!("-I{}", include_path.display()));
cc_build.include(include_path);
let definitions = {
let mut definitions = Vec::new();
for ninja_file in &build.ninja_files {
let ninja_file = output_directory.join(ninja_file);
let contents = fs::read_to_string(ninja_file).unwrap();
definitions = definitions::combine(definitions, definitions::from_ninja(contents))
}
definitions::combine(definitions, build.definitions.clone())
};
// Whether GIF decoding is supported,
// is decided by BUILD.gn based on the existence of the libgifcodec directory:
if !definitions.iter().any(|(v, _)| v == "SK_USE_LIBGIFCODEC") {
cargo::warning("GIF decoding support may be missing, does the directory skia/third_party/externals/libgifcodec/ exist?")
}
for (name, value) in &definitions {
match value {
Some(value) => {
cc_build.define(name, value.as_str());
builder = builder.clang_arg(format!("-D{}={}", name, value));
}
None => {
cc_build.define(name, "");
builder = builder.clang_arg(format!("-D{}", name));
}
}
}
cc_build.cpp(true).out_dir(output_directory);
if !cfg!(windows) {
cc_build.flag("-std=c++17");
}
let target = cargo::target();
let target_str = &target.to_string();
cc_build.target(target_str);
let sdk;
let sysroot = cargo::env_var("SDKROOT");
let mut sysroot: Option<&str> = sysroot.as_ref().map(AsRef::as_ref);
let mut sysroot_flag = "--sysroot=";
match target.as_strs() {
(_, "apple", "darwin", _) => {
// macOS uses `-isysroot/path/to/sysroot`, but this doesn't appear
// to work for other targets. `--sysroot=` works for all targets,
// to my knowledge, but doesn't seem to be idiomatic for macOS
// compilation. To capture this, we allow manually setting sysroot
// on any platform, but we use `-isysroot` for OSX builds and `--sysroot`
// elsewhere. If you don't manually set the sysroot, we can automatically
// detect it, but this is only possible for macOS.
sysroot_flag = "-isysroot";
if sysroot.is_none() {
if let Some(macos_sdk) = xcode::get_sdk_path("macosx") {
sdk = macos_sdk;
sysroot = Some(
sdk.to_str()
.expect("macOS SDK path could not be converted to string"),
);
} else {
cargo::warning("failed to get macosx SDK path")
}
}
}
(arch, "linux", "android", _) | (arch, "linux", "androideabi", _) => {
for arg in android::additional_clang_args(target_str, arch) {
builder = builder.clang_arg(arg);
}
}
(arch, "apple", "ios", _) => {
for arg in ios::additional_clang_args(arch) {
builder = builder.clang_arg(arg);
}
}
_ => {}
}
if let Some(sysroot) = sysroot {
let sysroot = format!("{}{}", sysroot_flag, sysroot);
builder = builder.clang_arg(&sysroot);
cc_build.flag(&sysroot);
}
println!("COMPILING BINDINGS: {:?}", build.binding_sources);
// we add skia-bindings later on.
cc_build.cargo_metadata(false);
cc_build.compile(lib::SKIA_BINDINGS);
println!("GENERATING BINDINGS");
let bindings = builder.generate().expect("Unable to generate bindings");
let out_path = PathBuf::from("src");
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}
const ALLOWLISTED_FUNCTIONS: &[&str] = &[
"SkAnnotateRectWithURL",
"SkAnnotateNamedDestination",
"SkAnnotateLinkToDestination",
"SkColorTypeBytesPerPixel",
"SkColorTypeIsAlwaysOpaque",
"SkColorTypeValidateAlphaType",
"SkRGBToHSV",
// this function does not allowlist (probably because of inlining):
"SkColorToHSV",
"SkHSVToColor",
"SkPreMultiplyARGB",
"SkPreMultiplyColor",
"SkBlendMode_AsCoeff",
"SkBlendMode_Name",
"SkSwapRB",
// functions for which the doc generation fails
"SkColorFilter_asComponentTable",
// pathops/
"Op",
"Simplify",
"TightBounds",
"AsWinding",
// utils/
"Sk3LookAt",
"Sk3Perspective",
"Sk3MapPts",
"SkUnitCubicInterp",
];
const OPAQUE_TYPES: &[&str] = &[
// Types for which the binding generator pulls in stuff that can not be compiled.
"SkDeferredDisplayList",
"SkDeferredDisplayList_PendingPathsMap",
// Types for which a bindgen layout is wrong causing types that contain
// fields of them to fail their layout test.
// Windows:
"std::atomic",
"std::function",
"std::unique_ptr",
"SkAutoTMalloc",
"SkTHashMap",
// Ubuntu 18 LLVM 6: all types derived from SkWeakRefCnt
"SkWeakRefCnt",
"GrContext",
"GrGLInterface",
"GrSurfaceProxy",
"Sk2DPathEffect",
"SkCornerPathEffect",
"SkDataTable",
"SkDiscretePathEffect",
"SkDrawable",
"SkLine2DPathEffect",
"SkPath2DPathEffect",
"SkPathRef_GenIDChangeListener",
"SkPicture",
"SkPixelRef",
"SkSurface",
// Types not needed (for now):
"SkDeque",
"SkDeque_Iter",
"GrGLInterface_Functions",
// SkShaper (m77) Trivial*Iterator classes create two vtable pointers.
"SkShaper_TrivialBiDiRunIterator",
"SkShaper_TrivialFontRunIterator",
"SkShaper_TrivialLanguageRunIterator",
"SkShaper_TrivialScriptRunIterator",
// skparagraph
"std::vector",
"std::u16string",
// skparagraph (m78), (layout fails on macOS and Linux, not sure why, looks like an obscure alignment problem)
"skia::textlayout::FontCollection",
// skparagraph (m79), std::map is used in LineMetrics
"std::map",
// Vulkan reexports with the wrong field naming conventions.
"VkPhysicalDeviceFeatures",
"VkPhysicalDeviceFeatures2",
// Since Rust 1.39 beta (TODO: investigate why, and re-test when 1.39 goes stable).
"GrContextOptions_PersistentCache",
"GrContextOptions_ShaderErrorHandler",
"Sk1DPathEffect",
"SkBBoxHierarchy", // vtable
"SkBBHFactory",
"SkBitmap_Allocator",
"SkBitmap_HeapAllocator",
"SkColorFilter",
"SkDeque_F2BIter",
"SkDrawLooper",
"SkDrawLooper_Context",
"SkDrawable_GpuDrawHandler",
"SkFlattenable",
"SkFontMgr",
"SkFontStyleSet",
"SkMaskFilter",
"SkPathEffect",
"SkPicture_AbortCallback",
"SkPixelRef_GenIDChangeListener",
"SkRasterHandleAllocator",
"SkRefCnt",
"SkShader",
"SkStream",
"SkStreamAsset",
"SkStreamMemory",
"SkStreamRewindable",
"SkStreamSeekable",
"SkTypeface_LocalizedStrings",
"SkWStream",
"GrVkMemoryAllocator",
"SkShaper",
"SkShaper_BiDiRunIterator",
"SkShaper_FontRunIterator",
"SkShaper_LanguageRunIterator",
"SkShaper_RunHandler",
"SkShaper_RunIterator",
"SkShaper_ScriptRunIterator",
"SkContourMeasure",
"SkDocument",
// m81: tuples:
"SkRuntimeEffect_EffectResult",
"SkRuntimeEffect_ByteCodeResult",
"SkRuntimeEffect_SpecializeResult",
// m81: derives from std::string
"SkSL::String",
"std::basic_string",
"std::basic_string_value_type",
// m81: wrong size on macOS and Linux
"SkRuntimeEffect",
"GrShaderCaps",
// more stuff we don't need that was tracked down fixing:
// https://github.com/rust-skia/rust-skia/issues/318
// referred from SkPath, but not used:
"SkPathRef",
"SkMutex",
// m82: private
"SkIDChangeListener",
// m86:
"GrRecordingContext",
"GrDirectContext",
// m87:
"GrD3DAlloc",
"GrD3DMemoryAllocator",
// m87, yuva_pixmaps
"std::tuple",
];
const BLOCKLISTED_TYPES: &[&str] = &[
// modules/skparagraph
// pulls in a std::map<>, which we treat as opaque, but bindgen creates wrong bindings for
// std::_Tree* types
"std::_Tree.*",
"std::map.*",
// debug builds:
"SkLRUCache",
"SkLRUCache_Entry",
// not used at all:
"std::vector.*",
// too much template magic:
"SkRuntimeEffect_ConstIterable.*",
// Linux LLVM9 c++17
"std::_Rb_tree.*",
// Linux LLVM9 c++17 with SKIA_DEBUG=1
"std::__cxx.*",
"std::array.*",
];
#[derive(Debug)]
struct ParseCallbacks;
impl bindgen::callbacks::ParseCallbacks for ParseCallbacks {
/// Allows to rename an enum variant, replacing `_original_variant_name`.
fn enum_variant_name(
&self,
enum_name: Option<&str>,
original_variant_name: &str,
_variant_value: bindgen::callbacks::EnumVariantValue,
) -> Option<String> {
enum_name.and_then(|enum_name| {
ENUM_TABLE
.iter()
.find(|n| n.0 == enum_name)
.map(|(_, replacer)| replacer(enum_name, original_variant_name))
})
}
}
type EnumEntry = (&'static str, fn(&str, &str) -> String);
const ENUM_TABLE: &[EnumEntry] = &[
//
// codec/
//
("DocumentStructureType", rewrite::k_xxx),
("ZeroInitialized", rewrite::k_xxx_name),
("SelectionPolicy", rewrite::k_xxx),
//
// core/ effects/
//
("SkApplyPerspectiveClip", rewrite::k_xxx),
("SkBlendMode", rewrite::k_xxx),
("SkBlendModeCoeff", rewrite::k_xxx),
("SkBlurStyle", rewrite::k_xxx_name),
("SkClipOp", rewrite::k_xxx),
("SkColorChannel", rewrite::k_xxx),
("SkCoverageMode", rewrite::k_xxx),
("SkEncodedImageFormat", rewrite::k_xxx),
("SkEncodedOrigin", rewrite::k_xxx_name),
("SkFilterQuality", rewrite::k_xxx_name),
("SkFontHinting", rewrite::k_xxx),
("SkAlphaType", rewrite::k_xxx_name),
("SkYUVColorSpace", rewrite::k_xxx_name),
("SkPathFillType", rewrite::k_xxx),
("SkPathConvexityType", rewrite::k_xxx),
("SkPathDirection", rewrite::k_xxx),
("SkPathVerb", rewrite::k_xxx),
("SkPathOp", rewrite::k_xxx_name),
("SkTileMode", rewrite::k_xxx),
// SkPaint_Style
// SkStrokeRec_Style
// SkPath1DPathEffect_Style
("Style", rewrite::k_xxx_name_opt),
// SkPaint_Cap
("Cap", rewrite::k_xxx_name),
// SkPaint_Join
("Join", rewrite::k_xxx_name),
// SkStrokeRec_InitStyle
("InitStyle", rewrite::k_xxx_name),
// SkBlurImageFilter_TileMode
// SkMatrixConvolutionImageFilter_TileMode
("TileMode", rewrite::k_xxx_name),
// SkCanvas_*
("PointMode", rewrite::k_xxx_name),
("SrcRectConstraint", rewrite::k_xxx_name),
// SkCanvas_Lattice_RectType
("RectType", rewrite::k_xxx),
// SkDisplacementMapEffect_ChannelSelectorType
("ChannelSelectorType", rewrite::k_xxx_name),
// SkDropShadowImageFilter_ShadowMode
("ShadowMode", rewrite::k_xxx_name),
// SkFont_Edging
("Edging", rewrite::k_xxx),
// SkFont_Slant
("Slant", rewrite::k_xxx_name),
// SkHighContrastConfig_InvertStyle
("InvertStyle", rewrite::k_xxx),
// SkImage_*
("BitDepth", rewrite::k_xxx),
("CachingHint", rewrite::k_xxx_name),
("CompressionType", rewrite::k_xxx),
// SkImageFilter_MapDirection
("MapDirection", rewrite::k_xxx_name),
// SkCodec_Result
// SkInterpolatorBase_Result
("Result", rewrite::k_xxx),
// SkMatrix_ScaleToFit
("ScaleToFit", rewrite::k_xxx_name),
// SkPath_*
("ArcSize", rewrite::k_xxx_name),
("AddPathMode", rewrite::k_xxx_name),
// SkRegion_Op
// TODO: remove kLastOp?
("Op", rewrite::k_xxx_name_opt),
// SkRRect_*
// TODO: remove kLastType?
// SkRuntimeEffect_Variable_Type
("Type", rewrite::k_xxx_name_opt),
("Corner", rewrite::k_xxx_name),
// SkShader_GradientType
("GradientType", rewrite::k_xxx_name),
// SkSurface_*
("ContentChangeMode", rewrite::k_xxx_name),
("BackendHandleAccess", rewrite::k_xxx_name),
// SkTextUtils_Align
("Align", rewrite::k_xxx_name),
// SkTrimPathEffect_Mode
("Mode", rewrite::k_xxx),
// SkTypeface_SerializeBehavior
("SerializeBehavior", rewrite::k_xxx),
// SkVertices_VertexMode
("VertexMode", rewrite::k_xxx_name),
// SkYUVAIndex_Index
("Index", rewrite::k_xxx_name),
// SkRuntimeEffect_Variable_Qualifier
("Qualifier", rewrite::k_xxx),
// private type that leaks through SkRuntimeEffect_Variable
("GrSLType", rewrite::k_xxx_name),
//
// gpu/
//
("GrGLStandard", rewrite::k_xxx_name),
("GrGLFormat", rewrite::k_xxx),
("GrSurfaceOrigin", rewrite::k_xxx_name),
("GrBackendApi", rewrite::k_xxx),
("GrMipmapped", rewrite::k_xxx),
("GrRenderable", rewrite::k_xxx),
("GrProtected", rewrite::k_xxx),
//
// DartTypes.h
//
("Affinity", rewrite::k_xxx),
("RectHeightStyle", rewrite::k_xxx),
("RectWidthStyle", rewrite::k_xxx),
("TextAlign", rewrite::k_xxx),
("TextDirection", rewrite::k_xxx_uppercase),
("TextBaseline", rewrite::k_xxx),
("TextHeightBehavior", rewrite::k_xxx),
("DrawOptions", rewrite::k_xxx),
//
// TextStyle.h
//
("TextDecorationStyle", rewrite::k_xxx),
("TextDecorationMode", rewrite::k_xxx),
("StyleType", rewrite::k_xxx),
("PlaceholderAlignment", rewrite::k_xxx),
//
// Vk*
//
("VkChromaLocation", rewrite::vk),
("VkFilter", rewrite::vk),
("VkFormat", rewrite::vk),
("VkImageLayout", rewrite::vk),
("VkImageTiling", rewrite::vk),
("VkSamplerYcbcrModelConversion", rewrite::vk),
("VkSamplerYcbcrRange", rewrite::vk),
("VkStructureType", rewrite::vk),
// m84: SkPath::Verb
("Verb", rewrite::k_xxx_name),
// m84: SkVertices::Attribute::Usage
("Usage", rewrite::k_xxx),
("GrSemaphoresSubmitted", rewrite::k_xxx),
("BackendSurfaceAccess", rewrite::k_xxx),
// m85
("VkSharingMode", rewrite::vk),
// m86:
("SkFilterMode", rewrite::k_xxx),
("SkMipmapMode", rewrite::k_xxx),
("Enable", rewrite::k_xxx),
("ShaderCacheStrategy", rewrite::k_xxx),
// m87:
// SkYUVAInfo_PlanarConfig
("PlanarConfig", rewrite::k_xxx),
("Siting", rewrite::k_xxx),
// SkYUVAPixmapInfo
("DataType", rewrite::k_xxx),
// m88:
// SkYUVAInfo_*
("PlaneConfig", rewrite::k_xxx),
// m89, SkImageFilters::Dither
("Dither", rewrite::k_xxx),
("SkScanlineOrder", rewrite::k_xxx_name),
];
pub(crate) mod rewrite {
use heck::ShoutySnakeCase;
use regex::Regex;
pub fn k_xxx_uppercase(name: &str, variant: &str) -> String {
k_xxx(name, variant).to_uppercase()
}
pub fn k_xxx(name: &str, variant: &str) -> String {
if let Some(stripped) = variant.strip_prefix('k') {
stripped.into()
} else {
panic!(
"Variant name '{}' of enum type '{}' is expected to start with a 'k'",
variant, name
);
}
}
pub fn _k_xxx_enum(name: &str, variant: &str) -> String {
capture(name, variant, &format!("k(.*)_{}", name))
}
pub fn k_xxx_name_opt(name: &str, variant: &str) -> String {
let suffix = &format!("_{}", name);
if variant.ends_with(suffix) {
capture(name, variant, &format!("k(.*){}", suffix))
} else {
capture(name, variant, "k(.*)")
}
}
pub fn k_xxx_name(name: &str, variant: &str) -> String {
capture(name, variant, &format!("k(.*)_{}", name))
}
pub fn vk(name: &str, variant: &str) -> String {
let prefix = name.to_shouty_snake_case();
capture(name, variant, &format!("{}_(.*)", prefix))
}
fn capture(name: &str, variant: &str, pattern: &str) -> String {
let re = Regex::new(pattern).unwrap();
re.captures(variant).unwrap_or_else(|| {
panic!(
"failed to match '{}' on enum variant '{}' of enum '{}'",
pattern, variant, name
)
})[1]
.into()
}
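// A small self-check of the rewrite rules above. The variant names here are
// illustrative (not an exhaustive list from the Skia headers); the expected
// outputs follow directly from the regex patterns.
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn rewrite_examples() {
// plain `k` prefix strip: kClamp -> Clamp
assert_eq!(k_xxx("SkTileMode", "kClamp"), "Clamp");
// strip `k` prefix and `_Name` suffix: kMove_Verb -> Move
assert_eq!(k_xxx_name("Verb", "kMove_Verb"), "Move");
// the `_Name` suffix is optional for k_xxx_name_opt
assert_eq!(k_xxx_name_opt("Op", "kDifference_Op"), "Difference");
assert_eq!(k_xxx_name_opt("Op", "kDifference"), "Difference");
// Vulkan variants carry the SHOUTY_SNAKE_CASE type name as a prefix
assert_eq!(vk("VkFilter", "VK_FILTER_NEAREST"), "NEAREST");
}
}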
}
mod prerequisites {
use std::path::PathBuf;
use std::process::Command;
/// Resolves the command to invoke for Python 2 by probing the candidates below.
pub fn locate_python2_cmd() -> PathBuf {
const PYTHON_CMDS: [&str; 2] = ["python", "python2"];
for python in PYTHON_CMDS.as_ref() {
println!("Probing '{}'", python);
if let Some(true) = is_python_version_2(python) {
return python.into();
}
}
panic!(">>>>> Probing for Python 2 failed, please make sure that it's available in PATH, probed executables are: {:?} <<<<<", PYTHON_CMDS);
}
/// Returns `Some(true)` if the given executable is Python version 2,
/// `Some(false)` for any other version, or `None` if the executable
/// could not be run.
fn is_python_version_2(exe: impl AsRef<str>) -> Option<bool> {
Command::new(exe.as_ref())
.arg("--version")
.output()
.map(|output| {
let mut str = String::from_utf8(output.stdout).unwrap();
if str.is_empty() {
// Python2 seems to push the version to stderr.
str = String::from_utf8(output.stderr).unwrap()
}
// Don't parse version output, for example output
// might be "Python 2.7.15+"
str.starts_with("Python 2.")
})
.ok()
}
}
pub use definitions::{Definition, Definitions};
pub(crate) mod definitions {
use std::collections::HashSet;
/// A preprocessor definition.
pub type Definition = (String, Option<String>);
/// A container for a number of preprocessor definitions.
pub type Definitions = Vec<Definition>;
/// Parse a `defines =` line from a ninja build file.
pub fn from_ninja(ninja_file: impl AsRef<str>) -> Definitions {
let lines = ninja_file.as_ref().lines();
let defines = {
let prefix = "defines = ";
let defines = lines
.into_iter()
.find(|s| s.starts_with(prefix))
.expect("missing a line with the prefix 'defines =' in a .ninja file");
&defines[prefix.len()..]
};
let defines: Vec<&str> = {
let prefix = "-D";
defines
.split_whitespace()
.map(|d| {
if let Some(stripped) = d.strip_prefix(prefix) {
stripped
} else {
panic!("missing '-D' prefix from a definition")
}
})
.collect()
};
defines
.into_iter()
.map(|d| {
let items: Vec<&str> = d.splitn(2, '=').collect();
match items.len() {
1 => (items[0].to_string(), None),
2 => (items[0].to_string(), Some(items[1].to_string())),
_ => panic!("internal error"),
}
})
.collect()
}
pub fn combine(a: Definitions, b: Definitions) -> Definitions {
remove_duplicates(a.into_iter().chain(b.into_iter()).collect())
}
pub fn remove_duplicates(mut definitions: Definitions) -> Definitions {
let mut uniques = HashSet::new();
definitions.retain(|e| uniques.insert(e.0.clone()));
definitions
}
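// A small self-check of `from_ninja`; the ninja fragment is hand-written for
// illustration, not taken from a real Skia build.
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parses_flag_and_key_value_defines() {
let ninja = "rule cc\ndefines = -DNDEBUG -DSK_RELEASE=1\n";
let defs = from_ninja(ninja);
assert_eq!(defs[0], ("NDEBUG".to_string(), None));
assert_eq!(defs[1], ("SK_RELEASE".to_string(), Some("1".to_string())));
}
}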
}
mod ninja {
use std::path::PathBuf;
pub fn default_exe_name() -> PathBuf {
if cfg!(windows) { "ninja.exe" } else { "ninja" }.into()
}
}
| 34.272095 | 147 | 0.562306 |
fbcc7fdaefe474a59bc9c6790d034332bb2be371 | 1,909 | use std::fs::File;
use std::io::{BufWriter, Write};
use crate::writer::writer::Writer;
use rbf::record::{AsciiMode, Record, UTF8Mode};
pub struct TextWriter {
last_recname: String,
buffer: BufWriter<File>,
}
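// The writer emits a pipe-delimited text dump: whenever the record type
// changes, a blank line plus a header of field names is written, followed by
// one line per record. Illustrative output (names and widths are made up):
//
//   NAME       |AGE |CITY
//   John       |42  |Paris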
impl Writer for TextWriter {
fn new(input_file: &str) -> TextWriter {
// build output file name
let output_file = input_file.to_owned() + ".txt";
// create the output file for writing
let file = match File::create(&output_file) {
// on success, keep the handle so it can be wrapped in a BufWriter below
Ok(f) => f,
// `io::Error` implements `Display`, so the error can be formatted
// directly (the deprecated `description()` method is not needed)
Err(why) => panic!("couldn't open {}: {}", output_file, why),
};
let buffer = BufWriter::new(file);
TextWriter {
last_recname: String::new(),
buffer,
}
}
fn close(&self) {}
#[allow(unused_must_use)]
fn write(&mut self, rec: &Record<AsciiMode>) {
// build header from field names only if not the same record than before
if self.last_recname != rec.name {
self.buffer.write(b"\n");
let header: Vec<_> = rec
.flist
.iter()
.map(|f| format!("{:length$} ", f.name, length = f.cell_size))
.collect();
self.buffer.write(header.join("|").as_bytes());
self.buffer.write(b"\n");
// last rec name is now current
self.last_recname = rec.name.clone();
}
// now data
let data: Vec<_> = rec
.flist
.iter()
.map(|f| format!("{:length$} ", f.value(), length = f.cell_size))
.collect();
self.buffer.write(data.join("|").as_bytes());
self.buffer.write(b"\n");
}
}
| 30.301587 | 87 | 0.527501 |
14c8a1a37a8fb31467e0b5b86a2a4c573b1b0f2d | 722 | use surf::post;
use crate::utils::*;
#[async_std::test]
async fn should_template_request_text_body() {
let srv = given("resp/template/body/text");
post(&srv.url()).body("Lorem ipsum").await.unwrap()
.assert_ok()
.assert_body_text("Lorem ipsum")
.assert_content_type_text();
post(&srv.url()).body("Ipsum lorem").await.unwrap()
.assert_ok()
.assert_body_text("Ipsum lorem")
.assert_content_type_text();
}
#[async_std::test]
async fn should_not_template_request_text_body_when_missing() {
let srv = given("resp/template/body/text");
post(&srv.url()).await.unwrap()
.assert_ok()
.assert_body_text("")
.assert_content_type_text();
} | 28.88 | 63 | 0.646814 |
11177700c17000e808bc3f6c23bb3c5463e0dd69 | 17,165 | // Copyright 2017 TiKV Project Authors. Licensed under Apache-2.0.
use super::{Error, EvalContext, Expression, Result, ScalarFunc};
use crate::codec::mysql::json::{parse_json_path_expr, ModifyType, PathExpression};
use crate::codec::mysql::Json;
use crate::codec::Datum;
use std::borrow::Cow;
use std::collections::BTreeMap;
impl ScalarFunc {
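// JSON_DEPTH follows the MySQL convention (exercised by the tests below):
// scalars, `{}` and `[]` have depth 1; `[10, 20]` has depth 2;
// `[10, {"a": 20}]` has depth 3.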
#[inline]
pub fn json_depth<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<i64>> {
let j = try_opt!(self.children[0].eval_json(ctx, row));
Ok(Some(j.depth()))
}
#[inline]
pub fn json_type<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, [u8]>>> {
let j = try_opt!(self.children[0].eval_json(ctx, row));
Ok(Some(Cow::Borrowed(j.json_type())))
}
#[inline]
pub fn json_unquote<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, [u8]>>> {
let j = try_opt!(self.children[0].eval_json(ctx, row));
j.unquote()
.map_err(Error::from)
.map(|s| Some(Cow::Owned(s.into_bytes())))
}
pub fn json_array<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, Json>>> {
let parser = JsonFuncArgsParser::new(row);
let elems = try_opt!(self
.children
.iter()
.map(|e| parser.get_json(ctx, e))
.collect());
Ok(Some(Cow::Owned(Json::Array(elems))))
}
pub fn json_object<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, Json>>> {
let mut pairs = BTreeMap::new();
let parser = JsonFuncArgsParser::new(row);
for chunk in self.children.chunks(2) {
let key = try_opt!(chunk[0].eval_string_and_decode(ctx, row)).into_owned();
let val = try_opt!(parser.get_json(ctx, &chunk[1]));
pairs.insert(key, val);
}
Ok(Some(Cow::Owned(Json::Object(pairs))))
}
pub fn json_extract<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, Json>>> {
// TODO: We can cache the PathExpressions if children are Constant.
let j = try_opt!(self.children[0].eval_json(ctx, row));
let parser = JsonFuncArgsParser::new(row);
let path_exprs: Vec<_> = try_opt!(parser.get_path_exprs(ctx, &self.children[1..]));
Ok(j.extract(&path_exprs).map(Cow::Owned))
}
#[inline]
pub fn json_set<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, Json>>> {
self.json_modify(ctx, row, ModifyType::Set)
}
#[inline]
pub fn json_insert<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, Json>>> {
self.json_modify(ctx, row, ModifyType::Insert)
}
#[inline]
pub fn json_replace<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, Json>>> {
self.json_modify(ctx, row, ModifyType::Replace)
}
pub fn json_remove<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, Json>>> {
let mut j = try_opt!(self.children[0].eval_json(ctx, row)).into_owned();
let parser = JsonFuncArgsParser::new(row);
let path_exprs: Vec<_> = try_opt!(parser.get_path_exprs(ctx, &self.children[1..]));
j.remove(&path_exprs)
.map(|_| Some(Cow::Owned(j)))
.map_err(Error::from)
}
pub fn json_merge<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
) -> Result<Option<Cow<'a, Json>>> {
let parser = JsonFuncArgsParser::new(row);
let mut head = try_opt!(self.children[0].eval_json(ctx, row)).into_owned();
for e in &self.children[1..] {
let suffix = try_opt!(parser.get_json_not_none(ctx, e));
head = head.merge(suffix);
}
Ok(Some(Cow::Owned(head)))
}
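// Shared implementation of JSON_SET / JSON_INSERT / JSON_REPLACE. `children`
// is laid out as [json, path_1, value_1, path_2, value_2, ...], so e.g.
// JSON_SET(j, '$.a', 2, '$.b', 3) arrives as five children.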
fn json_modify<'a, 'b: 'a>(
&'b self,
ctx: &mut EvalContext,
row: &'a [Datum],
mt: ModifyType,
) -> Result<Option<Cow<'a, Json>>> {
let mut j = try_opt!(self.children[0].eval_json(ctx, row)).into_owned();
let parser = JsonFuncArgsParser::new(row);
let mut path_exprs = Vec::with_capacity(self.children.len() / 2);
let mut values = Vec::with_capacity(self.children.len() / 2);
for chunk in self.children[1..].chunks(2) {
path_exprs.push(try_opt!(parser.get_path_expr(ctx, &chunk[0])));
values.push(try_opt!(parser.get_json(ctx, &chunk[1])));
}
j.modify(&path_exprs, values, mt)
.map(|_| Some(Cow::Owned(j)))
.map_err(Error::from)
}
}
struct JsonFuncArgsParser<'a> {
row: &'a [Datum],
}
impl<'a> JsonFuncArgsParser<'a> {
#[inline]
fn new(row: &'a [Datum]) -> Self {
JsonFuncArgsParser { row }
}
fn get_path_expr(
&self,
ctx: &mut EvalContext,
e: &Expression,
) -> Result<Option<PathExpression>> {
let s = try_opt!(e.eval_string_and_decode(ctx, self.row));
let expr = parse_json_path_expr(&s)?;
Ok(Some(expr))
}
fn get_path_exprs(
&self,
ctx: &mut EvalContext,
es: &[Expression],
) -> Result<Option<Vec<PathExpression>>> {
es.iter().map(|e| self.get_path_expr(ctx, e)).collect()
}
fn get_json(&self, ctx: &mut EvalContext, e: &Expression) -> Result<Option<Json>> {
let j = e
.eval_json(ctx, self.row)?
.map_or(Json::None, Cow::into_owned);
Ok(Some(j))
}
fn get_json_not_none(&self, ctx: &mut EvalContext, e: &Expression) -> Result<Option<Json>> {
let j = try_opt!(e.eval_json(ctx, self.row)).into_owned();
Ok(Some(j))
}
}
#[cfg(test)]
mod tests {
use crate::codec::mysql::Json;
use crate::codec::Datum;
use crate::expr::tests::{datum_expr, make_null_datums, scalar_func_expr};
use crate::expr::{EvalContext, Expression};
use tipb::ScalarFuncSig;
#[test]
fn test_json_depth() {
let cases = vec![
(None, None),
(Some("null"), Some(1)),
(Some("[true, 2017]"), Some(2)),
(Some(r#"{"a": {"a1": [3]}, "b": {"b1": {"c": {"d": [5]}}}}"#), Some(6)),
(Some("{}"), Some(1)),
(Some("[]"), Some(1)),
(Some("true"), Some(1)),
(Some("1"), Some(1)),
(Some("-1"), Some(1)),
(Some(r#""a""#), Some(1)),
(Some(r#"[10, 20]"#), Some(2)),
(Some(r#"[[], {}]"#),Some(2) ),
(Some(r#"[10, {"a": 20}]"#), Some(3)),
(Some(r#"[[2], 3, [[[4]]]]"#), Some(5)),
(Some(r#"{"Name": "Homer"}"#), Some(2)),
(Some(r#"[10, {"a": 20}]"#), Some(3)),
(Some(r#"{"Person": {"Name": "Homer", "Age": 39, "Hobbies": ["Eating", "Sleeping"]} }"#), Some(4)),
(Some(r#"{"a":1}"#), Some(2)),
(Some(r#"{"a":[1]}"#), Some(3)),
(Some(r#"{"b":2, "c":3}"#), Some(2)),
(Some(r#"[1]"#), Some(2)),
(Some(r#"[1,2]"#), Some(2)),
(Some(r#"[1,2,[1,3]]"#), Some(3)),
(Some(r#"[1,2,[1,[5,[3]]]]"#), Some(5)),
(Some(r#"[1,2,[1,[5,{"a":[2,3]}]]]"#), Some(6)),
(Some(r#"[{"a":1}]"#), Some(3)),
(Some(r#"[{"a":1,"b":2}]"#), Some(3)),
(Some(r#"[{"a":{"a":1},"b":2}]"#), Some(4)),
];
let mut ctx = EvalContext::default();
for (input, exp) in cases {
let input = match input {
None => Datum::Null,
Some(s) => Datum::Json(s.parse().unwrap()),
};
let exp = match exp {
None => Datum::Null,
Some(s) => Datum::I64(s.to_owned()),
};
let arg = datum_expr(input);
let op = scalar_func_expr(ScalarFuncSig::JsonDepthSig, &[arg]);
let op = Expression::build(&mut ctx, op).unwrap();
let got = op.eval(&mut ctx, &[]).unwrap();
assert_eq!(got, exp);
}
}
#[test]
fn test_json_type() {
let cases = vec![
(None, None),
(Some(r#"true"#), Some("BOOLEAN")),
(Some(r#"null"#), Some("NULL")),
(Some(r#"-3"#), Some("INTEGER")),
(Some(r#"3"#), Some("INTEGER")),
(Some(r#"3.14"#), Some("DOUBLE")),
(Some(r#"9223372036854775808"#), Some("DOUBLE")),
(Some(r#"[1, 2, 3]"#), Some("ARRAY")),
(Some(r#"{"name": 123}"#), Some("OBJECT")),
];
let mut ctx = EvalContext::default();
for (input, exp) in cases {
let input = match input {
None => Datum::Null,
Some(s) => Datum::Json(s.parse().unwrap()),
};
let exp = match exp {
None => Datum::Null,
Some(s) => Datum::Bytes(s.to_owned().into_bytes()),
};
let arg = datum_expr(input);
let op = scalar_func_expr(ScalarFuncSig::JsonTypeSig, &[arg]);
let op = Expression::build(&mut ctx, op).unwrap();
let got = op.eval(&mut ctx, &[]).unwrap();
assert_eq!(got, exp);
}
}
#[test]
fn test_json_unquote() {
let cases = vec![
(None, false, None),
(Some(r"a"), false, Some("a")),
(Some(r#""3""#), false, Some(r#""3""#)),
(Some(r#""3""#), true, Some(r#"3"#)),
(Some(r#"{"a": "b"}"#), false, Some(r#"{"a": "b"}"#)),
(Some(r#"{"a": "b"}"#), true, Some(r#"{"a":"b"}"#)),
(
Some(r#"hello,\"quoted string\",world"#),
false,
Some(r#"hello,"quoted string",world"#),
),
];
let mut ctx = EvalContext::default();
for (input, parse, exp) in cases {
let input = match input {
None => Datum::Null,
Some(s) => {
if parse {
Datum::Json(s.parse().unwrap())
} else {
Datum::Json(Json::String(s.to_owned()))
}
}
};
let exp = match exp {
None => Datum::Null,
Some(s) => Datum::Bytes(s.to_owned().into_bytes()),
};
let arg = datum_expr(input);
let op = scalar_func_expr(ScalarFuncSig::JsonUnquoteSig, &[arg]);
let op = Expression::build(&mut ctx, op).unwrap();
let got = op.eval(&mut ctx, &[]).unwrap();
assert_eq!(got, exp);
}
}
#[test]
fn test_json_object() {
let cases = vec![
(vec![], Datum::Json(r#"{}"#.parse().unwrap())),
(
vec![Datum::Bytes(b"1".to_vec()), Datum::Null],
Datum::Json(r#"{"1":null}"#.parse().unwrap()),
),
(
vec![
Datum::Bytes(b"1".to_vec()),
Datum::Null,
Datum::Bytes(b"2".to_vec()),
Datum::Json(Json::String("sdf".to_owned())),
Datum::Bytes(b"k1".to_vec()),
Datum::Json(Json::String("v1".to_owned())),
],
Datum::Json(r#"{"1":null,"2":"sdf","k1":"v1"}"#.parse().unwrap()),
),
];
let mut ctx = EvalContext::default();
for (inputs, exp) in cases {
let args = inputs.into_iter().map(datum_expr).collect::<Vec<_>>();
let op = scalar_func_expr(ScalarFuncSig::JsonObjectSig, &args);
let op = Expression::build(&mut ctx, op).unwrap();
let got = op.eval(&mut ctx, &[]).unwrap();
assert_eq!(got, exp);
}
}
#[test]
fn test_json_array() {
let cases = vec![
(vec![], Datum::Json(r#"[]"#.parse().unwrap())),
(
vec![Datum::Json("1".parse().unwrap()), Datum::Null],
Datum::Json(r#"[1, null]"#.parse().unwrap()),
),
(
vec![
Datum::Json("1".parse().unwrap()),
Datum::Null,
Datum::Json("2".parse().unwrap()),
Datum::Json(Json::String("sdf".to_owned())),
Datum::Json(Json::String("k1".to_owned())),
Datum::Json(Json::String("v1".to_owned())),
],
Datum::Json(r#"[1, null, 2, "sdf", "k1", "v1"]"#.parse().unwrap()),
),
];
let mut ctx = EvalContext::default();
for (inputs, exp) in cases {
let args = inputs.into_iter().map(datum_expr).collect::<Vec<_>>();
let op = scalar_func_expr(ScalarFuncSig::JsonArraySig, &args);
let op = Expression::build(&mut ctx, op).unwrap();
let got = op.eval(&mut ctx, &[]).unwrap();
assert_eq!(got, exp);
}
}
#[test]
fn test_json_modify() {
let cases = vec![
(
ScalarFuncSig::JsonSetSig,
vec![Datum::Null, Datum::Null, Datum::Null],
Datum::Null,
),
(
ScalarFuncSig::JsonSetSig,
vec![
Datum::Json(Json::I64(9)),
Datum::Bytes(b"$[1]".to_vec()),
Datum::Json(Json::U64(3)),
],
Datum::Json(r#"[9,3]"#.parse().unwrap()),
),
(
ScalarFuncSig::JsonInsertSig,
vec![
Datum::Json(Json::I64(9)),
Datum::Bytes(b"$[1]".to_vec()),
Datum::Json(Json::U64(3)),
],
Datum::Json(r#"[9,3]"#.parse().unwrap()),
),
(
ScalarFuncSig::JsonReplaceSig,
vec![
Datum::Json(Json::I64(9)),
Datum::Bytes(b"$[1]".to_vec()),
Datum::Json(Json::U64(3)),
],
Datum::Json(r#"9"#.parse().unwrap()),
),
(
ScalarFuncSig::JsonSetSig,
vec![
Datum::Json(r#"{"a":"x"}"#.parse().unwrap()),
Datum::Bytes(b"$.a".to_vec()),
Datum::Null,
],
Datum::Json(r#"{"a":null}"#.parse().unwrap()),
),
];
let mut ctx = EvalContext::default();
for (sig, inputs, exp) in cases {
let args: Vec<_> = inputs.into_iter().map(datum_expr).collect();
let op = scalar_func_expr(sig, &args);
let op = Expression::build(&mut ctx, op).unwrap();
let got = op.eval(&mut ctx, &[]).unwrap();
assert_eq!(got, exp);
}
}
#[test]
fn test_json_merge() {
let cases = vec![
(vec![Datum::Null, Datum::Null], Datum::Null),
(
vec![
Datum::Json("{}".parse().unwrap()),
Datum::Json("[]".parse().unwrap()),
],
Datum::Json("[{}]".parse().unwrap()),
),
(
vec![
Datum::Json("{}".parse().unwrap()),
Datum::Json("[]".parse().unwrap()),
Datum::Json("3".parse().unwrap()),
Datum::Json(r#""4""#.parse().unwrap()),
],
Datum::Json(r#"[{}, 3, "4"]"#.parse().unwrap()),
),
];
let mut ctx = EvalContext::default();
for (inputs, exp) in cases {
let args: Vec<_> = inputs.into_iter().map(datum_expr).collect();
let op = scalar_func_expr(ScalarFuncSig::JsonMergeSig, &args);
let op = Expression::build(&mut ctx, op).unwrap();
let got = op.eval(&mut ctx, &[]).unwrap();
assert_eq!(got, exp);
}
}
#[test]
fn test_json_invalid_arguments() {
let cases = vec![
(ScalarFuncSig::JsonObjectSig, make_null_datums(3)),
(ScalarFuncSig::JsonSetSig, make_null_datums(4)),
(ScalarFuncSig::JsonInsertSig, make_null_datums(6)),
(ScalarFuncSig::JsonReplaceSig, make_null_datums(8)),
];
let mut ctx = EvalContext::default();
for (sig, args) in cases {
let args: Vec<_> = args.into_iter().map(datum_expr).collect();
let op = Expression::build(&mut ctx, scalar_func_expr(sig, &args));
assert!(op.is_err());
}
}
}
| 34.959267 | 111 | 0.454937 |
21c76bc2036a8474d1de849fb4d2a2bddd591757 | 19,937 | use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
use slotmap::{DefaultKey, DenseSlotMap};
use crate::content::{Page, Section};
use crate::sorting::{
find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight,
};
use config::Config;
use front_matter::{PageFrontMatter, SortBy};
// Like vec! but for HashSet
macro_rules! set {
( $( $x:expr ),* ) => {
{
let mut s = HashSet::new();
$(
s.insert($x);
)*
s
}
};
}
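// e.g. `set![1, 2, 2]` yields a HashSet containing {1, 2}.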
/// Houses everything about pages and sections
/// Think of it as a database where each page and section has an id (Key here)
/// that can be used to find the actual value
/// Sections and pages can then refer to other elements by those keys, which are very cheap to
/// copy.
/// We can assume the keys always exist, since removing a page/section deletes all references
/// to that key.
#[derive(Debug)]
pub struct Library {
/// All the pages of the site
pages: DenseSlotMap<DefaultKey, Page>,
/// All the sections of the site
sections: DenseSlotMap<DefaultKey, Section>,
/// A mapping path -> key for pages so we can easily get their key
pub paths_to_pages: HashMap<PathBuf, DefaultKey>,
/// A mapping path -> key for sections so we can easily get their key
pub paths_to_sections: HashMap<PathBuf, DefaultKey>,
/// Whether we need to look for translations
is_multilingual: bool,
// aliases -> files,
// so we can easily check for conflicts
pub reverse_aliases: HashMap<String, HashSet<String>>,
pub translations: HashMap<PathBuf, HashSet<DefaultKey>>,
}
impl Library {
pub fn new(cap_pages: usize, cap_sections: usize, is_multilingual: bool) -> Self {
Library {
pages: DenseSlotMap::with_capacity(cap_pages),
sections: DenseSlotMap::with_capacity(cap_sections),
paths_to_pages: HashMap::with_capacity(cap_pages),
paths_to_sections: HashMap::with_capacity(cap_sections),
is_multilingual,
reverse_aliases: HashMap::new(),
translations: HashMap::new(),
}
}
fn insert_reverse_aliases(&mut self, entries: Vec<String>, file_rel_path: &str) {
for entry in entries {
self.reverse_aliases
.entry(entry)
.and_modify(|s| {
s.insert(file_rel_path.to_owned());
})
.or_insert_with(|| {
let mut s = HashSet::new();
s.insert(file_rel_path.to_owned());
s
});
}
}
/// Add a section and return its Key
pub fn insert_section(&mut self, section: Section) -> DefaultKey {
let file_path = section.file.path.clone();
let rel_path = section.path.clone();
let mut entries = vec![rel_path];
entries.extend(section.meta.aliases.to_vec());
self.insert_reverse_aliases(entries, §ion.file.relative);
let key = self.sections.insert(section);
self.paths_to_sections.insert(file_path, key);
key
}
/// Add a page and return its Key
pub fn insert_page(&mut self, page: Page) -> DefaultKey {
let file_path = page.file.path.clone();
let rel_path = page.path.clone();
let mut entries = vec![rel_path];
entries.extend(page.meta.aliases.to_vec());
self.insert_reverse_aliases(entries, &page.file.relative);
let key = self.pages.insert(page);
self.paths_to_pages.insert(file_path, key);
key
}
pub fn pages(&self) -> &DenseSlotMap<DefaultKey, Page> {
&self.pages
}
pub fn pages_mut(&mut self) -> &mut DenseSlotMap<DefaultKey, Page> {
&mut self.pages
}
pub fn pages_values(&self) -> Vec<&Page> {
self.pages.values().collect::<Vec<_>>()
}
pub fn sections(&self) -> &DenseSlotMap<DefaultKey, Section> {
&self.sections
}
pub fn sections_mut(&mut self) -> &mut DenseSlotMap<DefaultKey, Section> {
&mut self.sections
}
pub fn sections_values(&self) -> Vec<&Section> {
self.sections.values().collect::<Vec<_>>()
}
/// Find out the direct subsections of each subsection if there are some
/// as well as the pages for each section
pub fn populate_sections(&mut self, config: &Config) {
let root_path =
self.sections.values().find(|s| s.is_index()).map(|s| s.file.parent.clone()).unwrap();
// We are going to get both the ancestors and grandparents for each section in one go
let mut ancestors: HashMap<PathBuf, Vec<_>> = HashMap::new();
let mut subsections: HashMap<PathBuf, Vec<_>> = HashMap::new();
let mut includes: HashMap<DefaultKey, Vec<_>> = HashMap::new();
for (key, section) in self.sections.iter_mut() {
// Make sure the pages of a section are empty since we can call that many times on `serve`
section.pages = vec![];
section.ignored_pages = vec![];
if let Some(ref grand_parent) = section.file.grand_parent {
subsections
// Using the original filename to work for multi-lingual sections
.entry(grand_parent.join(§ion.file.filename))
.or_insert_with(Vec::new)
.push(section.file.path.clone());
}
includes.insert(key, section.meta.include.clone());
// populate translations if necessary
if self.is_multilingual {
self.translations
.entry(section.file.canonical.clone())
.and_modify(|trans| {
trans.insert(key);
})
.or_insert(set![key]);
};
// Index has no ancestors, no need to go through it
if section.is_index() {
ancestors.insert(section.file.path.clone(), vec![]);
continue;
}
let mut path = root_path.clone();
let root_key = self.paths_to_sections[&root_path.join(§ion.file.filename)];
// Index section is the first ancestor of every single section
let mut parents = vec![root_key];
for component in §ion.file.components {
path = path.join(component);
// Skip itself
if path == section.file.parent {
continue;
}
if let Some(section_key) =
self.paths_to_sections.get(&path.join(§ion.file.filename))
{
parents.push(*section_key);
}
}
ancestors.insert(section.file.path.clone(), parents);
}
for (key, page) in &mut self.pages {
let parent_filename = if page.lang != config.default_language {
format!("_index.{}.md", page.lang)
} else {
"_index.md".to_string()
};
let mut parent_section_path = page.file.parent.join(&parent_filename);
while let Some(section_key) = self.paths_to_sections.get(&parent_section_path) {
let parent_is_transparent;
// We need to get a reference to a section later so keep the scope of borrowing small
{
let section = self.sections.get_mut(*section_key).unwrap();
section.pages.push(key);
parent_is_transparent = section.meta.transparent;
}
page.ancestors =
ancestors.get(&parent_section_path).cloned().unwrap_or_else(Vec::new);
// Don't forget to push the actual parent
page.ancestors.push(*section_key);
// Find the page template if one of a parent has page_template set
// Stops after the first one found, keep in mind page.ancestors
// is [index, ..., parent] so we need to reverse it first
if page.meta.template.is_none() {
for ancestor in page.ancestors.iter().rev() {
let s = self.sections.get(*ancestor).unwrap();
if s.meta.page_template.is_some() {
page.meta.template = s.meta.page_template.clone();
break;
}
}
}
if !parent_is_transparent {
break;
}
// We've appended `_index(.{LANG})?.md` to the path, so if we get here we need to go up two levels
match parent_section_path.clone().parent().unwrap().parent() {
Some(parent) => parent_section_path = parent.join(&parent_filename),
None => break,
}
}
// populate translations if necessary
if self.is_multilingual {
self.translations
.entry(page.file.canonical.clone())
.and_modify(|trans| {
trans.insert(key);
})
.or_insert(set![key]);
};
}
for (key, inc_paths) in includes.into_iter() {
let mut added_pages = Vec::new();
for path in inc_paths {
if let Some(inc_section_key) =
self.paths_to_sections.get(&root_path.join(&path).join("_index.md"))
{
let inc_section = self.sections.get_mut(*inc_section_key).unwrap();
added_pages.extend_from_slice(&inc_section.pages);
inc_section.includers.push(key.clone());
}
}
let section = self.sections.get_mut(key).unwrap();
section.pages.extend_from_slice(&added_pages);
}
self.sort_sections_pages();
let sections = self.paths_to_sections.clone();
let mut sections_weight = HashMap::new();
for (key, section) in &self.sections {
sections_weight.insert(key, section.meta.weight);
}
for section in self.sections.values_mut() {
if let Some(children) = subsections.get(§ion.file.path) {
let mut children: Vec<_> = children.iter().map(|p| sections[p]).collect();
children.sort_by(|a, b| sections_weight[a].cmp(§ions_weight[b]));
section.subsections = children;
}
section.ancestors = ancestors.get(§ion.file.path).cloned().unwrap_or_else(Vec::new);
}
}
/// Sort all sections pages according to sorting method given
/// Pages that cannot be sorted are set to the section.ignored_pages instead
pub fn sort_sections_pages(&mut self) {
fn get_data<'a, T>(
section: &'a Section,
pages: &'a DenseSlotMap<DefaultKey, Page>,
field: impl Fn(&'a PageFrontMatter) -> Option<T>,
) -> Vec<(&'a DefaultKey, Option<T>, &'a str)> {
section
.pages
.iter()
.map(|k| {
if let Some(page) = pages.get(*k) {
(k, field(&page.meta), page.permalink.as_ref())
} else {
unreachable!("Sorting got an unknown page")
}
})
.collect()
}
let mut updates = HashMap::new();
for (key, section) in &self.sections {
let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by {
SortBy::None => continue,
SortBy::Date => {
let data = get_data(section, &self.pages, |meta| meta.datetime);
sort_pages_by_date(data)
}
SortBy::UpdateDate => {
let data = get_data(section, &self.pages, |meta| {
std::cmp::max(meta.datetime, meta.updated_datetime)
});
sort_pages_by_date(data)
}
SortBy::Title => {
let data = get_data(section, &self.pages, |meta| meta.title.as_deref());
sort_pages_by_title(data)
}
SortBy::Weight => {
let data = get_data(section, &self.pages, |meta| meta.weight);
sort_pages_by_weight(data)
}
};
updates.insert(key, (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by));
}
for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
let section_is_transparent = if let Some(section) = self.sections.get(key) {
section.meta.transparent
} else {
false
};
if !section_is_transparent {
// Find sibling between sorted pages first
let with_siblings = find_siblings(&sorted);
for (k2, val1, val2) in with_siblings {
if let Some(page) = self.pages.get_mut(k2) {
match sort_by {
SortBy::Date => {
page.earlier = val2;
page.later = val1;
}
SortBy::UpdateDate => {
page.earlier_updated = val2;
page.later_updated = val1;
}
SortBy::Title => {
page.title_prev = val1;
page.title_next = val2;
}
SortBy::Weight => {
page.lighter = val1;
page.heavier = val2;
}
SortBy::None => {
unreachable!("Impossible to find siblings in SortBy::None")
}
}
} else {
unreachable!("Sorting got an unknown page")
}
}
}
if let Some(s) = self.sections.get_mut(key) {
s.pages = sorted;
s.ignored_pages = cannot_be_sorted;
}
}
}
/// Find all the orphan pages: pages that are in a folder without an `_index.md`
pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
let pages_in_sections =
self.sections.values().flat_map(|s| &s.pages).collect::<HashSet<_>>();
self.pages
.iter()
.filter(|(key, _)| !pages_in_sections.contains(&key))
.map(|(_, page)| page)
.collect()
}
/// Used in integration tests
pub fn get_section_key<P: AsRef<Path>>(&self, path: P) -> Option<&DefaultKey> {
self.paths_to_sections.get(path.as_ref())
}
pub fn get_section<P: AsRef<Path>>(&self, path: P) -> Option<&Section> {
self.sections.get(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default())
}
/// Used in integration tests
pub fn get_section_mut<P: AsRef<Path>>(&mut self, path: P) -> Option<&mut Section> {
self.sections
.get_mut(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default())
}
pub fn get_section_by_key(&self, key: DefaultKey) -> &Section {
self.sections.get(key).unwrap()
}
pub fn get_section_path_by_key(&self, key: DefaultKey) -> &str {
&self.get_section_by_key(key).file.relative
}
pub fn get_page<P: AsRef<Path>>(&self, path: P) -> Option<&Page> {
self.pages.get(self.paths_to_pages.get(path.as_ref()).cloned().unwrap_or_default())
}
pub fn get_page_by_key(&self, key: DefaultKey) -> &Page {
self.pages.get(key).unwrap()
}
pub fn remove_section<P: AsRef<Path>>(&mut self, path: P) -> Option<Section> {
if let Some(k) = self.paths_to_sections.remove(path.as_ref()) {
self.sections.remove(k)
} else {
None
}
}
pub fn remove_page<P: AsRef<Path>>(&mut self, path: P) -> Option<Page> {
if let Some(k) = self.paths_to_pages.remove(path.as_ref()) {
self.pages.remove(k)
} else {
None
}
}
pub fn contains_section<P: AsRef<Path>>(&self, path: P) -> bool {
self.paths_to_sections.contains_key(path.as_ref())
}
/// This will check every section/page paths + the aliases and ensure none of them
/// are colliding.
/// Returns (path colliding, [list of files causing that collision])
pub fn check_for_path_collisions(&self) -> Vec<(String, Vec<String>)> {
self.reverse_aliases
.iter()
.filter_map(|(alias, files)| {
if files.len() > 1 {
Some((alias.clone(), files.clone().into_iter().collect::<Vec<_>>()))
} else {
None
}
})
.collect()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn can_find_no_collisions() {
let mut library = Library::new(10, 10, false);
let page = Page { path: "hello".to_string(), ..Default::default() };
let page2 = Page { path: "hello-world".to_string(), ..Default::default() };
let section = Section { path: "blog".to_string(), ..Default::default() };
library.insert_page(page);
library.insert_page(page2);
library.insert_section(section);
let collisions = library.check_for_path_collisions();
assert_eq!(collisions.len(), 0);
}
#[test]
fn can_find_collisions_between_pages() {
let mut library = Library::new(10, 10, false);
let mut page = Page { path: "hello".to_string(), ..Default::default() };
page.file.relative = "hello".to_string();
let mut page2 = Page { path: "hello".to_string(), ..Default::default() };
page2.file.relative = "hello-world".to_string();
let mut section = Section { path: "blog".to_string(), ..Default::default() };
section.file.relative = "hello-world".to_string();
library.insert_page(page.clone());
library.insert_page(page2.clone());
library.insert_section(section);
let collisions = library.check_for_path_collisions();
assert_eq!(collisions.len(), 1);
assert_eq!(collisions[0].0, page.path);
assert!(collisions[0].1.contains(&page.file.relative));
assert!(collisions[0].1.contains(&page2.file.relative));
}
#[test]
fn can_find_collisions_with_an_alias() {
let mut library = Library::new(10, 10, false);
let mut page = Page { path: "hello".to_string(), ..Default::default() };
page.file.relative = "hello".to_string();
let mut page2 = Page { path: "hello".to_string(), ..Default::default() };
page2.file.relative = "hello-world".to_string();
page2.meta.aliases = vec!["hello".to_string()];
let mut section = Section { path: "blog".to_string(), ..Default::default() };
section.file.relative = "hello-world".to_string();
library.insert_page(page.clone());
library.insert_page(page2.clone());
library.insert_section(section);
let collisions = library.check_for_path_collisions();
assert_eq!(collisions.len(), 1);
assert_eq!(collisions[0].0, page.path);
assert!(collisions[0].1.contains(&page.file.relative));
assert!(collisions[0].1.contains(&page2.file.relative));
}
}
| 38.414258 | 102 | 0.537945 |
4b0296616b4f68012c727a52ad8b30dba812be90 | 6,619 | // Copyright 2019 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Window building and app lifecycle.
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use crate::kurbo::Size;
use crate::shell::window::WindowHandle;
use crate::shell::{init, runloop, Error as PlatformError, WindowBuilder};
use crate::win_handler::AppState;
use crate::window::{Window, WindowId};
use crate::{theme, AppDelegate, Data, DruidHandler, Env, LocalizedString, MenuDesc, Widget};
/// A function that modifies the initial environment.
type EnvSetupFn = dyn FnOnce(&mut Env);
/// Handles initial setup of an application, and starts the runloop.
pub struct AppLauncher<T> {
windows: Vec<WindowDesc<T>>,
env_setup: Option<Box<EnvSetupFn>>,
delegate: Option<Box<dyn AppDelegate<T>>>,
}
/// A function that can create a widget.
type WidgetBuilderFn<T> = dyn Fn() -> Box<dyn Widget<T>> + 'static;
/// A description of a window to be instantiated.
///
/// This includes a function that can build the root widget, as well as other
/// window properties such as the title.
pub struct WindowDesc<T> {
pub(crate) root_builder: Arc<WidgetBuilderFn<T>>,
pub(crate) title: Option<LocalizedString<T>>,
pub(crate) size: Option<Size>,
pub(crate) menu: Option<MenuDesc<T>>,
/// The `WindowId` that will be assigned to this window.
///
/// This can be used to track a window from when it is launched and when
/// it actually connects.
pub id: WindowId,
}
impl<T: Data + 'static> AppLauncher<T> {
/// Create a new `AppLauncher` with the provided window.
pub fn with_window(window: WindowDesc<T>) -> Self {
AppLauncher {
windows: vec![window],
env_setup: None,
delegate: None,
}
}
/// Provide an optional closure that will be given mutable access to
/// the environment before launch.
///
/// This can be used to set or override theme values.
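///
/// A minimal sketch (the key and value are illustrative):
/// ```ignore
/// AppLauncher::with_window(main_window)
///     .configure_env(|env| env.set(theme::FONT_NAME, "monospace"));
/// ```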
pub fn configure_env(mut self, f: impl Fn(&mut Env) + 'static) -> Self {
self.env_setup = Some(Box::new(f));
self
}
/// Set the [`AppDelegate`].
///
/// [`AppDelegate`]: struct.AppDelegate.html
pub fn delegate(mut self, delegate: impl AppDelegate<T> + 'static) -> Self {
self.delegate = Some(Box::new(delegate));
self
}
/// Initialize a minimal logger for printing logs out to stderr.
///
/// Meant for use during development only.
pub fn use_simple_logger(self) -> Self {
simple_logger::init().ok();
self
}
/// Build the windows and start the runloop.
///
/// Returns an error if a window cannot be instantiated. This is usually
/// a fatal error.
pub fn launch(mut self, data: T) -> Result<(), PlatformError> {
init();
let mut main_loop = runloop::RunLoop::new();
let mut env = theme::init();
if let Some(f) = self.env_setup.take() {
f(&mut env);
}
let state = AppState::new(data, env, self.delegate.take());
for desc in self.windows {
let window = desc.build_native(&state)?;
window.show();
}
main_loop.run();
Ok(())
}
}
impl<T: Data + 'static> WindowDesc<T> {
/// Create a new `WindowDesc`, taking a function that will generate the root
/// [`Widget`] for this window.
///
/// It is possible that a `WindowDesc` can be reused to launch multiple windows.
///
/// [`Widget`]: trait.Widget.html
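///
/// A minimal sketch (the widget and strings are illustrative):
/// ```ignore
/// let main_window = WindowDesc::new(|| Label::new("hello"))
///     .title(LocalizedString::new("app-title"))
///     .window_size((400.0, 300.0));
/// ```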
pub fn new<W, F>(root: F) -> WindowDesc<T>
where
W: Widget<T> + 'static,
F: Fn() -> W + 'static,
{
// wrap this closure in another closure that dyns the result
// this just makes our API slightly cleaner; callers don't need to explicitly box.
let root_builder: Arc<WidgetBuilderFn<T>> = Arc::new(move || Box::new(root()));
WindowDesc {
root_builder,
title: None,
size: None,
menu: MenuDesc::platform_default(),
id: WindowId::next(),
}
}
/// Set the title for this window. This is a [`LocalizedString`] that will
/// be kept up to date as the application's state changes.
///
/// [`LocalizedString`]: struct.LocalizedString.html
pub fn title(mut self, title: LocalizedString<T>) -> Self {
self.title = Some(title);
self
}
/// Set the window size at creation
///
/// You can pass in a tuple `(width, height)` or a `kurbo::Size`, e.g. to create a window 1000px wide and 500px high:
/// ```ignore
/// window.window_size((1000.0, 500.0));
/// ```
pub fn window_size(mut self, size: impl Into<Size>) -> Self {
self.size = Some(size.into());
self
}
/// Attempt to create a platform window from this `WindowDesc`.
pub(crate) fn build_native(
&self,
state: &Rc<RefCell<AppState<T>>>,
) -> Result<WindowHandle, PlatformError> {
let mut title = self
.title
.clone()
.unwrap_or_else(|| LocalizedString::new("app-name"));
title.resolve(&state.borrow().data, &state.borrow().env);
let mut menu = self.menu.to_owned();
let platform_menu = menu
.as_mut()
.map(|m| m.build_window_menu(&state.borrow().data, &state.borrow().env));
let handler = DruidHandler::new_shared(state.clone(), self.id);
let mut builder = WindowBuilder::new();
builder.set_handler(Box::new(handler));
if let Some(size) = self.size {
builder.set_size(size);
}
builder.set_title(title.localized_str());
if let Some(menu) = platform_menu {
builder.set_menu(menu);
}
let root = (self.root_builder)();
state
.borrow_mut()
.add_window(self.id, Window::new(root, title, menu));
builder.build()
}
/// Set the menu for this window.
pub fn menu(mut self, menu: MenuDesc<T>) -> Self {
self.menu = Some(menu);
self
}
}
| 32.930348 | 117 | 0.609155 |
0ed8d37b6796bde3e8727397a146d9b04c5da338 | 11,119 | use cosmwasm_std::{
generic_err, log, unauthorized, Api, BankMsg, Binary, CanonicalAddr, Coin, CosmosMsg, Env,
Extern, HandleResponse, HandleResult, InitResponse, InitResult, MigrateResponse, Querier,
StdResult, Storage,
};
use crate::msg::{HandleMsg, InitMsg, MigrateMsg, QueryMsg};
use crate::state::{config, config_read, State};
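// Escrow flow (a summary of the handlers below): until the escrow expires,
// the arbiter may `Approve` a partial or full release to the recipient; once
// it has expired, anyone may `Refund` the remaining balance to the source.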
pub fn init<S: Storage, A: Api, Q: Querier>(
deps: &mut Extern<S, A, Q>,
env: Env,
msg: InitMsg,
) -> InitResult {
let state = State {
arbiter: deps.api.canonical_address(&msg.arbiter)?,
recipient: deps.api.canonical_address(&msg.recipient)?,
source: env.message.sender.clone(),
end_height: msg.end_height,
end_time: msg.end_time,
};
if state.is_expired(&env) {
Err(generic_err("creating expired escrow"))
} else {
config(&mut deps.storage).save(&state)?;
Ok(InitResponse::default())
}
}
pub fn handle<S: Storage, A: Api, Q: Querier>(
deps: &mut Extern<S, A, Q>,
env: Env,
msg: HandleMsg,
) -> HandleResult {
let state = config_read(&deps.storage).load()?;
match msg {
HandleMsg::Approve { quantity } => try_approve(deps, env, state, quantity),
HandleMsg::Refund {} => try_refund(deps, env, state),
}
}
fn try_approve<S: Storage, A: Api, Q: Querier>(
deps: &mut Extern<S, A, Q>,
env: Env,
state: State,
quantity: Option<Vec<Coin>>,
) -> HandleResult {
if env.message.sender != state.arbiter {
Err(unauthorized())
} else if state.is_expired(&env) {
Err(generic_err("escrow expired"))
} else {
let amount = if let Some(quantity) = quantity {
quantity
} else {
// release everything
let contract_address_human = deps.api.human_address(&env.contract.address)?;
// The querier is guaranteed to return up-to-date data, including funds sent in this handle message
// https://github.com/CosmWasm/wasmd/blob/master/x/wasm/internal/keeper/keeper.go#L185-L192
deps.querier.query_all_balances(contract_address_human)?
};
send_tokens(
&deps.api,
&env.contract.address,
&state.recipient,
amount,
"approve",
)
}
}
fn try_refund<S: Storage, A: Api, Q: Querier>(
deps: &mut Extern<S, A, Q>,
env: Env,
state: State,
) -> HandleResult {
// anyone can try to refund, as long as the contract is expired
if !state.is_expired(&env) {
Err(generic_err("escrow not yet expired"))
} else {
let contract_address_human = deps.api.human_address(&env.contract.address)?;
// The querier is guaranteed to return up-to-date data, including funds sent in this handle message
// https://github.com/CosmWasm/wasmd/blob/master/x/wasm/internal/keeper/keeper.go#L185-L192
let balance = deps.querier.query_all_balances(contract_address_human)?;
send_tokens(
&deps.api,
&env.contract.address,
&state.source,
balance,
"refund",
)
}
}
// this is a helper to move the tokens, so the business logic is easy to read
fn send_tokens<A: Api>(
api: &A,
from_address: &CanonicalAddr,
to_address: &CanonicalAddr,
amount: Vec<Coin>,
action: &str,
) -> HandleResult {
let from_human = api.human_address(from_address)?;
let to_human = api.human_address(to_address)?;
let log = vec![log("action", action), log("to", to_human.as_str())];
let r = HandleResponse {
messages: vec![CosmosMsg::Bank(BankMsg::Send {
from_address: from_human,
to_address: to_human,
amount,
})],
log,
data: None,
};
Ok(r)
}
pub fn query<S: Storage, A: Api, Q: Querier>(
_deps: &Extern<S, A, Q>,
msg: QueryMsg,
) -> StdResult<Binary> {
// this always returns error
match msg {}
}
#[cfg(test)]
mod tests {
use super::*;
use cosmwasm_std::testing::{mock_dependencies, mock_env};
use cosmwasm_std::{coins, Api, HumanAddr, StdError};
fn init_msg_expire_by_height(height: u64) -> InitMsg {
InitMsg {
arbiter: HumanAddr::from("verifies"),
recipient: HumanAddr::from("benefits"),
end_height: Some(height),
end_time: None,
}
}
fn mock_env_height<A: Api>(
api: &A,
signer: &str,
sent: &[Coin],
height: u64,
time: u64,
) -> Env {
let mut env = mock_env(api, signer, sent);
env.block.height = height;
env.block.time = time;
env
}
#[test]
fn proper_initialization() {
let mut deps = mock_dependencies(20, &[]);
let msg = init_msg_expire_by_height(1000);
let env = mock_env_height(&deps.api, "creator", &coins(1000, "earth"), 876, 0);
let res = init(&mut deps, env, msg).unwrap();
assert_eq!(0, res.messages.len());
// it worked, let's query the state
let state = config_read(&mut deps.storage).load().unwrap();
assert_eq!(
state,
State {
arbiter: deps
.api
.canonical_address(&HumanAddr::from("verifies"))
.unwrap(),
recipient: deps
.api
.canonical_address(&HumanAddr::from("benefits"))
.unwrap(),
source: deps
.api
.canonical_address(&HumanAddr::from("creator"))
.unwrap(),
end_height: Some(1000),
end_time: None,
}
);
}
#[test]
fn cannot_initialize_expired() {
let mut deps = mock_dependencies(20, &[]);
let msg = init_msg_expire_by_height(1000);
let env = mock_env_height(&deps.api, "creator", &coins(1000, "earth"), 1001, 0);
let res = init(&mut deps, env, msg);
match res.unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "creating expired escrow"),
e => panic!("unexpected error: {:?}", e),
}
}
#[test]
fn handle_approve() {
let mut deps = mock_dependencies(20, &[]);
// initialize the store
let init_amount = coins(1000, "earth");
let init_env = mock_env_height(&deps.api, "creator", &init_amount, 876, 0);
let contract_addr = deps.api.human_address(&init_env.contract.address).unwrap();
let msg = init_msg_expire_by_height(1000);
let init_res = init(&mut deps, init_env, msg).unwrap();
assert_eq!(0, init_res.messages.len());
// balance changed in init
deps.querier.update_balance(&contract_addr, init_amount);
// beneficiary cannot release it
let msg = HandleMsg::Approve { quantity: None };
let env = mock_env_height(&deps.api, "beneficiary", &[], 900, 0);
let handle_res = handle(&mut deps, env, msg.clone());
match handle_res.unwrap_err() {
StdError::Unauthorized { .. } => {}
e => panic!("unexpected error: {:?}", e),
}
// verifier cannot release it when expired
let env = mock_env_height(&deps.api, "verifies", &[], 1100, 0);
let handle_res = handle(&mut deps, env, msg.clone());
match handle_res.unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "escrow expired"),
e => panic!("unexpected error: {:?}", e),
}
// complete release by verifier, before expiration
let env = mock_env_height(&deps.api, "verifies", &[], 999, 0);
let handle_res = handle(&mut deps, env, msg.clone()).unwrap();
assert_eq!(1, handle_res.messages.len());
let msg = handle_res.messages.get(0).expect("no message");
assert_eq!(
msg,
&CosmosMsg::Bank(BankMsg::Send {
from_address: HumanAddr::from("cosmos2contract"),
to_address: HumanAddr::from("benefits"),
amount: coins(1000, "earth"),
})
);
// partial release by verifier, before expiration
let partial_msg = HandleMsg::Approve {
quantity: Some(coins(500, "earth")),
};
let env = mock_env_height(&deps.api, "verifies", &[], 999, 0);
let handle_res = handle(&mut deps, env, partial_msg).unwrap();
assert_eq!(1, handle_res.messages.len());
let msg = handle_res.messages.get(0).expect("no message");
assert_eq!(
msg,
&CosmosMsg::Bank(BankMsg::Send {
from_address: HumanAddr::from("cosmos2contract"),
to_address: HumanAddr::from("benefits"),
amount: coins(500, "earth"),
})
);
}
#[test]
fn handle_refund() {
let mut deps = mock_dependencies(20, &[]);
// initialize the store
let init_amount = coins(1000, "earth");
let init_env = mock_env_height(&deps.api, "creator", &init_amount, 876, 0);
let contract_addr = deps.api.human_address(&init_env.contract.address).unwrap();
let msg = init_msg_expire_by_height(1000);
let init_res = init(&mut deps, init_env, msg).unwrap();
assert_eq!(0, init_res.messages.len());
// balance changed in init
deps.querier.update_balance(&contract_addr, init_amount);
// cannot release when unexpired (height < end_height)
let msg = HandleMsg::Refund {};
let env = mock_env_height(&deps.api, "anybody", &[], 800, 0);
let handle_res = handle(&mut deps, env, msg.clone());
match handle_res.unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "escrow not yet expired"),
e => panic!("unexpected error: {:?}", e),
}
// cannot release when unexpired (height == end_height)
let msg = HandleMsg::Refund {};
let env = mock_env_height(&deps.api, "anybody", &[], 1000, 0);
let handle_res = handle(&mut deps, env, msg.clone());
match handle_res.unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "escrow not yet expired"),
e => panic!("unexpected error: {:?}", e),
}
// anyone can release after expiration
let env = mock_env_height(&deps.api, "anybody", &[], 1001, 0);
let handle_res = handle(&mut deps, env, msg.clone()).unwrap();
assert_eq!(1, handle_res.messages.len());
let msg = handle_res.messages.get(0).expect("no message");
assert_eq!(
msg,
&CosmosMsg::Bank(BankMsg::Send {
from_address: HumanAddr::from("cosmos2contract"),
to_address: HumanAddr::from("creator"),
amount: coins(1000, "earth"),
})
);
}
}
pub fn migrate<S: Storage, A: Api, Q: Querier>(
_deps: &mut Extern<S, A, Q>,
_env: Env,
_msg: MigrateMsg,
) -> StdResult<MigrateResponse> {
Ok(MigrateResponse::default())
}
| 34.531056 | 105 | 0.567767 |
161f7b79e053f7adbfb77c41d6928413045e5bf2 | 1,939 | /*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use fs_utils::to_abs_path;
use libp2p::PeerId;
use std::path::PathBuf;
use std::time::Duration;
#[derive(Debug, Clone)]
pub struct VmPoolConfig {
/// Number of VMs to create
pub pool_size: usize,
/// Timeout of a particle execution
pub execution_timeout: Duration,
}
#[derive(Debug, Clone)]
pub struct VmConfig {
pub current_peer_id: PeerId,
/// Path to AIR interpreter .wasm file (aquamarine.wasm)
pub air_interpreter: PathBuf,
/// Dir for the interpreter to persist particle data
/// to merge it between particles of the same particle_id
pub particles_dir: PathBuf,
/// Dir to store directories shared between services
/// in the span of a single particle execution
pub particles_vault_dir: PathBuf,
}
impl VmPoolConfig {
pub fn new(pool_size: usize, execution_timeout: Duration) -> Self {
Self {
pool_size,
execution_timeout,
}
}
}
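// e.g. `VmPoolConfig::new(4, Duration::from_secs(20))` describes a pool of
// four VMs with a 20-second per-particle execution budget (values are
// illustrative).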
impl VmConfig {
pub fn new(current_peer_id: PeerId, base_dir: PathBuf, air_interpreter: PathBuf) -> Self {
let base_dir = to_abs_path(base_dir);
Self {
current_peer_id,
particles_dir: config_utils::particles_dir(&base_dir),
particles_vault_dir: config_utils::particles_vault_dir(&base_dir),
air_interpreter,
}
}
}
| 30.296875 | 94 | 0.687468 |
5deaf4fcdac8d6dcc420226fe872b53a140cff34 | 1,801 | #[test]
fn margin_bottom() {
let mut stretch = stretch::Stretch::new();
let node0 = stretch
.new_node(
stretch::style::Style {
size: stretch::geometry::Size {
height: stretch::style::Dimension::Points(10f32),
..Default::default()
},
margin: stretch::geometry::Rect {
bottom: stretch::style::Dimension::Points(10f32),
..Default::default()
},
..Default::default()
},
vec![],
)
.unwrap();
let node = stretch
.new_node(
stretch::style::Style {
flex_direction: stretch::style::FlexDirection::Column,
justify_content: stretch::style::JustifyContent::FlexEnd,
size: stretch::geometry::Size {
width: stretch::style::Dimension::Points(100f32),
height: stretch::style::Dimension::Points(100f32),
..Default::default()
},
..Default::default()
},
vec![node0],
)
.unwrap();
stretch.compute_layout(node, stretch::geometry::Size::undefined()).unwrap();
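// Expected layout: in a 100x100 column container justified to flex-end, the
// 10px-tall child plus its 10px bottom margin occupies the bottom 20px, so
// the child lands at y = 100 - 10 - 10 = 80.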
assert_eq!(stretch.layout(node).unwrap().size.width, 100f32);
assert_eq!(stretch.layout(node).unwrap().size.height, 100f32);
assert_eq!(stretch.layout(node).unwrap().location.x, 0f32);
assert_eq!(stretch.layout(node).unwrap().location.y, 0f32);
assert_eq!(stretch.layout(node0).unwrap().size.width, 100f32);
assert_eq!(stretch.layout(node0).unwrap().size.height, 10f32);
assert_eq!(stretch.layout(node0).unwrap().location.x, 0f32);
assert_eq!(stretch.layout(node0).unwrap().location.y, 80f32);
}
| 40.022222 | 80 | 0.539145 |
09852403d934ab4397ffa68189ed8ad1e8269095 | 135 | struct Foo {
field: i32
}
impl Foo {
fn foo<'a>(&self, x: &i32) -> &i32 {
x //~ ERROR lifetime mismatch
}
}
fn main() { }
| 10.384615 | 38 | 0.511111 |
0aafb5d964dd9bf8000b810613275e6c847970f9 | 31,867 | use std::marker::PhantomData;
use std::path::PathBuf;
use anyhow::{ensure, Context};
use generic_array::typenum;
use merkletree::store::{ReplicaConfig, StoreConfig};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use storage_proofs_core::{
drgraph::Graph,
error::Result,
fr32::bytes_into_fr_repr_safe,
hasher::{Domain, HashFunction, Hasher, PoseidonArity},
merkle::{
create_base_lcmerkle_tree, create_base_merkle_tree, BinaryLCMerkleTree, BinaryMerkleTree,
LCMerkleTree, MerkleProof, MerkleProofTrait, MerkleTreeTrait,
},
parameter_cache::ParameterSetMetadata,
proof::{NoRequirements, ProofScheme},
util::{data_at_node, data_at_node_offset, NODE_SIZE},
Data,
};
use crate::{encode, PoRep};
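// `Tau` pairs the two public commitments of a replica: `comm_d` is the root
// of the original data tree and `comm_r` the root of the encoded replica tree.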
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct Tau<T> {
pub comm_r: T,
pub comm_d: T,
}
impl<T: Domain> Tau<T> {
pub fn new(comm_d: T, comm_r: T) -> Self {
Tau { comm_d, comm_r }
}
}
#[derive(Debug)]
pub struct ProverAux<H: Hasher> {
pub tree_d: BinaryMerkleTree<H>,
pub tree_r: BinaryLCMerkleTree<H>,
}
impl<H: Hasher> ProverAux<H> {
pub fn new(tree_d: BinaryMerkleTree<H>, tree_r: BinaryLCMerkleTree<H>) -> Self {
ProverAux { tree_d, tree_r }
}
}
#[derive(Debug, Clone)]
pub struct PublicInputs<T: Domain> {
pub replica_id: Option<T>,
pub challenges: Vec<usize>,
pub tau: Option<Tau<T>>,
}
#[derive(Debug)]
pub struct PrivateInputs<'a, H: Hasher> {
pub tree_d: &'a BinaryMerkleTree<H>,
pub tree_r: &'a BinaryLCMerkleTree<H>,
pub tree_r_config_rows_to_discard: usize,
}
#[derive(Clone, Debug)]
pub struct SetupParams {
pub drg: DrgParams,
pub private: bool,
pub challenges_count: usize,
}
#[derive(Debug, Clone)]
pub struct DrgParams {
// Number of nodes
pub nodes: usize,
// Base degree of DRG
pub degree: usize,
pub expansion_degree: usize,
pub porep_id: [u8; 32],
}
#[derive(Debug, Clone)]
pub struct PublicParams<H, G>
where
H: Hasher,
G: Graph<H> + ParameterSetMetadata,
{
pub graph: G,
pub private: bool,
pub challenges_count: usize,
_h: PhantomData<H>,
}
impl<H, G> PublicParams<H, G>
where
H: Hasher,
G: Graph<H> + ParameterSetMetadata,
{
pub fn new(graph: G, private: bool, challenges_count: usize) -> Self {
PublicParams {
graph,
private,
challenges_count,
_h: PhantomData,
}
}
}
impl<H, G> ParameterSetMetadata for PublicParams<H, G>
where
H: Hasher,
G: Graph<H> + ParameterSetMetadata,
{
fn identifier(&self) -> String {
format!(
"drgporep::PublicParams{{graph: {}}}",
self.graph.identifier(),
)
}
fn sector_size(&self) -> u64 {
self.graph.sector_size()
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DataProof<H: Hasher, U: PoseidonArity> {
#[serde(bound(
serialize = "MerkleProof<H, U>: Serialize",
deserialize = "MerkleProof<H, U>: Deserialize<'de>"
))]
pub proof: MerkleProof<H, U>,
pub data: H::Domain,
}
impl<H: Hasher, U: 'static + PoseidonArity> DataProof<H, U> {
pub fn new(n: usize) -> Self {
DataProof {
proof: MerkleProof::new(n),
data: Default::default(),
}
}
/// Returns true if `self.proof` corresponds to `challenge`.
/// This is useful for verifying that a supplied proof is actually relevant to a given challenge.
pub fn proves_challenge(&self, challenge: usize) -> bool {
self.proof.proves_challenge(challenge)
}
}
pub type ReplicaParents<H> = Vec<(u32, DataProof<H, typenum::U2>)>;
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct Proof<H: Hasher> {
#[serde(bound(
serialize = "H::Domain: Serialize",
deserialize = "H::Domain: Deserialize<'de>"
))]
pub data_root: H::Domain,
#[serde(bound(
serialize = "H::Domain: Serialize",
deserialize = "H::Domain: Deserialize<'de>"
))]
pub replica_root: H::Domain,
#[serde(bound(
serialize = "DataProof<H, typenum::U2>: Serialize",
deserialize = "DataProof<H, typenum::U2>: Deserialize<'de>"
))]
pub replica_nodes: Vec<DataProof<H, typenum::U2>>,
#[serde(bound(
serialize = "H::Domain: Serialize",
deserialize = "H::Domain: Deserialize<'de>"
))]
pub replica_parents: Vec<ReplicaParents<H>>,
#[serde(bound(
serialize = "H::Domain: Serialize",
deserialize = "H::Domain: Deserialize<'de>"
))]
pub nodes: Vec<DataProof<H, typenum::U2>>,
}
impl<H: Hasher> Proof<H> {
pub fn new_empty(height: usize, degree: usize, challenges: usize) -> Proof<H> {
Proof {
data_root: Default::default(),
replica_root: Default::default(),
replica_nodes: vec![DataProof::new(height); challenges],
replica_parents: vec![vec![(0, DataProof::new(height)); degree]; challenges],
nodes: vec![DataProof::new(height); challenges],
}
}
pub fn new(
replica_nodes: Vec<DataProof<H, typenum::U2>>,
replica_parents: Vec<ReplicaParents<H>>,
nodes: Vec<DataProof<H, typenum::U2>>,
) -> Proof<H> {
Proof {
data_root: nodes[0].proof.root(),
replica_root: replica_nodes[0].proof.root(),
replica_nodes,
replica_parents,
nodes,
}
}
}
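// Rebuilding from a reference recomputes the roots from the first data/replica
// node proofs, matching `Proof::new`; otherwise this is a field-by-field clone.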
impl<'a, H: Hasher> From<&'a Proof<H>> for Proof<H> {
fn from(p: &Proof<H>) -> Proof<H> {
Proof {
data_root: p.nodes[0].proof.root(),
replica_root: p.replica_nodes[0].proof.root(),
replica_nodes: p.replica_nodes.clone(),
replica_parents: p.replica_parents.clone(),
nodes: p.nodes.clone(),
}
}
}
#[derive(Default)]
pub struct DrgPoRep<'a, H, G>
where
H: Hasher,
G: 'a + Graph<H>,
{
_h: PhantomData<&'a H>,
_g: PhantomData<G>,
}
impl<'a, H, G> ProofScheme<'a> for DrgPoRep<'a, H, G>
where
H: 'static + Hasher,
G: 'a + Graph<H> + ParameterSetMetadata,
{
type PublicParams = PublicParams<H, G>;
type SetupParams = SetupParams;
type PublicInputs = PublicInputs<<H as Hasher>::Domain>;
type PrivateInputs = PrivateInputs<'a, H>;
type Proof = Proof<H>;
type Requirements = NoRequirements;
fn setup(sp: &Self::SetupParams) -> Result<Self::PublicParams> {
let graph = G::new(
sp.drg.nodes,
sp.drg.degree,
sp.drg.expansion_degree,
sp.drg.porep_id,
)?;
Ok(PublicParams::new(graph, sp.private, sp.challenges_count))
}
fn prove<'b>(
pub_params: &'b Self::PublicParams,
pub_inputs: &'b Self::PublicInputs,
priv_inputs: &'b Self::PrivateInputs,
) -> Result<Self::Proof> {
let len = pub_inputs.challenges.len();
ensure!(
len <= pub_params.challenges_count,
"too many challenges {} > {}",
len,
pub_params.challenges_count
);
let mut replica_nodes = Vec::with_capacity(len);
let mut replica_parents = Vec::with_capacity(len);
let mut data_nodes: Vec<DataProof<H, typenum::U2>> = Vec::with_capacity(len);
for i in 0..len {
let challenge = pub_inputs.challenges[i] % pub_params.graph.size();
ensure!(challenge != 0, "cannot prove the first node");
let tree_d = &priv_inputs.tree_d;
let tree_r = &priv_inputs.tree_r;
let tree_r_config_rows_to_discard = priv_inputs.tree_r_config_rows_to_discard;
let data = tree_r.read_at(challenge)?;
let tree_proof =
tree_r.gen_cached_proof(challenge, Some(tree_r_config_rows_to_discard))?;
replica_nodes.push(DataProof {
proof: tree_proof,
data,
});
let mut parents = vec![0; pub_params.graph.degree()];
pub_params.graph.parents(challenge, &mut parents)?;
let mut replica_parentsi = Vec::with_capacity(parents.len());
for p in &parents {
replica_parentsi.push((*p, {
let proof = tree_r
.gen_cached_proof(*p as usize, Some(tree_r_config_rows_to_discard))?;
DataProof {
proof,
data: tree_r.read_at(*p as usize)?,
}
}));
}
replica_parents.push(replica_parentsi);
let node_proof = tree_d.gen_proof(challenge)?;
{
            // TODO: use this again; the lifetimes don't work out here at the moment,
            // and it is unclear why.
// let extracted = Self::extract(
// pub_params,
// &pub_inputs.replica_id.into_bytes(),
// &replica,
// challenge,
// )?;
let extracted = decode_domain_block::<H>(
&pub_inputs.replica_id.context("missing replica_id")?,
tree_r,
challenge,
tree_r.read_at(challenge)?,
&parents,
)?;
data_nodes.push(DataProof {
data: extracted,
proof: node_proof,
});
}
}
let proof = Proof::new(replica_nodes, replica_parents, data_nodes);
Ok(proof)
}
fn verify(
pub_params: &Self::PublicParams,
pub_inputs: &Self::PublicInputs,
proof: &Self::Proof,
) -> Result<bool> {
let mut hasher = Sha256::new();
for i in 0..pub_inputs.challenges.len() {
{
// This was verify_proof_meta.
if pub_inputs.challenges[i] >= pub_params.graph.size() {
return Ok(false);
}
if !(proof.nodes[i].proves_challenge(pub_inputs.challenges[i])) {
return Ok(false);
}
if !(proof.replica_nodes[i].proves_challenge(pub_inputs.challenges[i])) {
return Ok(false);
}
let mut expected_parents = vec![0; pub_params.graph.degree()];
pub_params
.graph
.parents(pub_inputs.challenges[i], &mut expected_parents)?;
if proof.replica_parents[i].len() != expected_parents.len() {
println!(
"proof parents were not the same length as in public parameters: {} != {}",
proof.replica_parents[i].len(),
expected_parents.len()
);
return Ok(false);
}
let parents_as_expected = proof.replica_parents[i]
.iter()
.zip(&expected_parents)
.all(|(actual, expected)| actual.0 == *expected);
if !parents_as_expected {
println!("proof parents were not those provided in public parameters");
return Ok(false);
}
}
let challenge = pub_inputs.challenges[i] % pub_params.graph.size();
ensure!(challenge != 0, "cannot prove the first node");
if !proof.replica_nodes[i].proof.validate(challenge) {
return Ok(false);
}
for (parent_node, p) in &proof.replica_parents[i] {
if !p.proof.validate(*parent_node as usize) {
return Ok(false);
}
}
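            // Recompute the encoding key exactly as during replication:
            // Sha256(replica_id || parent_1.data || ... || parent_k.data).
            // Decoding the replica node with this key must yield the data node below.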
let key = {
let prover_bytes = pub_inputs.replica_id.context("missing replica_id")?;
hasher.update(AsRef::<[u8]>::as_ref(&prover_bytes));
for p in proof.replica_parents[i].iter() {
hasher.update(AsRef::<[u8]>::as_ref(&p.1.data));
}
let hash = hasher.finalize_reset();
bytes_into_fr_repr_safe(hash.as_ref()).into()
};
let unsealed = encode::decode(key, proof.replica_nodes[i].data);
if unsealed != proof.nodes[i].data {
return Ok(false);
}
if !proof.nodes[i].proof.validate_data(unsealed) {
println!("invalid data for merkle path {:?}", unsealed);
return Ok(false);
}
}
Ok(true)
}
}
impl<'a, H, G> PoRep<'a, H, H> for DrgPoRep<'a, H, G>
where
H: 'static + Hasher,
G::Key: AsRef<<H as Hasher>::Domain>,
G: 'a + Graph<H> + ParameterSetMetadata + Sync + Send,
{
type Tau = Tau<<H as Hasher>::Domain>;
type ProverAux = ProverAux<H>;
fn replicate(
pp: &Self::PublicParams,
replica_id: &<H as Hasher>::Domain,
mut data: Data<'a>,
data_tree: Option<BinaryMerkleTree<H>>,
config: StoreConfig,
replica_path: PathBuf,
) -> Result<(Self::Tau, Self::ProverAux)> {
use storage_proofs_core::cache_key::CacheKey;
let tree_d = match data_tree {
Some(tree) => tree,
None => create_base_merkle_tree::<BinaryMerkleTree<H>>(
Some(config.clone()),
pp.graph.size(),
data.as_ref(),
)?,
};
let graph = &pp.graph;
// encode(&pp.graph, replica_id, data, None)?;
// Because a node always follows all of its parents in the data,
// the nodes are by definition already topologically sorted.
// Therefore, if we simply traverse the data in order, encoding each node in place,
// we can always get each parent's encodings with a simple lookup --
// since we will already have encoded the parent earlier in the traversal.
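        // Sketch of the per-node rule implemented by the loop below: for node `v`
        // with (already encoded) parents p_1..p_k,
        //   key    = graph.create_key(replica_id, v, [p_1..p_k], data)
        //   enc(v) = sloth_encode(key, data[v])
        // so a single in-order pass over `data` encodes everything in place.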
let mut parents = vec![0; graph.degree()];
for node in 0..graph.size() {
graph.parents(node, &mut parents)?;
let key = graph.create_key(replica_id, node, &parents, data.as_ref(), None)?;
let start = data_at_node_offset(node);
let end = start + NODE_SIZE;
let node_data = <H as Hasher>::Domain::try_from_bytes(&data.as_ref()[start..end])?;
let encoded = H::sloth_encode(key.as_ref(), &node_data)?;
encoded.write_bytes(&mut data.as_mut()[start..end])?;
}
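        // The replica tree (tree_r) is then built over the encoded data, backed by
        // the on-disk replica file described by `ReplicaConfig`.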
let replica_config = ReplicaConfig {
path: replica_path,
offsets: vec![0],
};
let tree_r_last_config =
StoreConfig::from_config(&config, CacheKey::CommRLastTree.to_string(), None);
let tree_r =
create_base_lcmerkle_tree::<H, <BinaryLCMerkleTree<H> as MerkleTreeTrait>::Arity>(
tree_r_last_config,
pp.graph.size(),
&data.as_ref(),
&replica_config,
)?;
let comm_d = tree_d.root();
let comm_r = tree_r.root();
Ok((Tau::new(comm_d, comm_r), ProverAux::new(tree_d, tree_r)))
}
fn extract_all<'b>(
pp: &'b Self::PublicParams,
replica_id: &'b <H as Hasher>::Domain,
data: &'b [u8],
_config: Option<StoreConfig>,
) -> Result<Vec<u8>> {
decode(&pp.graph, replica_id, data, None)
}
fn extract(
pp: &Self::PublicParams,
replica_id: &<H as Hasher>::Domain,
data: &[u8],
node: usize,
_config: Option<StoreConfig>,
) -> Result<Vec<u8>> {
Ok(decode_block(&pp.graph, replica_id, data, None, node)?.into_bytes())
}
}
pub fn decode<'a, H, G>(
graph: &'a G,
replica_id: &'a <H as Hasher>::Domain,
data: &'a [u8],
exp_parents_data: Option<&'a [u8]>,
) -> Result<Vec<u8>>
where
H: Hasher,
G::Key: AsRef<H::Domain>,
G: Graph<H> + Sync,
{
// TODO: proper error handling
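    // `flat_map` over a rayon parallel iterator cannot propagate `Result` with `?`,
    // so per-block failures currently abort via `expect` (see the TODO above).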
let result = (0..graph.size())
.into_par_iter()
.flat_map(|i| {
decode_block::<H, G>(graph, replica_id, data, exp_parents_data, i)
.expect("decode block failure")
.into_bytes()
})
.collect();
Ok(result)
}
pub fn decode_block<'a, H, G>(
graph: &'a G,
replica_id: &'a <H as Hasher>::Domain,
data: &'a [u8],
exp_parents_data: Option<&'a [u8]>,
v: usize,
) -> Result<<H as Hasher>::Domain>
where
H: Hasher,
G::Key: AsRef<H::Domain>,
G: Graph<H>,
{
let mut parents = vec![0; graph.degree()];
graph.parents(v, &mut parents)?;
let key = graph.create_key(replica_id, v, &parents, &data, exp_parents_data)?;
let node_data = <H as Hasher>::Domain::try_from_bytes(&data_at_node(data, v)?)?;
Ok(encode::decode(*key.as_ref(), node_data))
}
pub fn decode_domain_block<H: Hasher>(
replica_id: &H::Domain,
tree: &BinaryLCMerkleTree<H>,
node: usize,
node_data: H::Domain,
parents: &[u32],
) -> Result<H::Domain> {
let key = create_key_from_tree::<H, _>(replica_id, node, parents, tree)?;
Ok(encode::decode(key, node_data))
}
/// Creates the encoding key from a `MerkleTree`.
/// The algorithm for that is `Sha256(id | encodedParentNode1 | encodedParentNode2 | ...)`.
/// It is only public so that it can be used for benchmarking.
pub fn create_key_from_tree<H: Hasher, U: 'static + PoseidonArity>(
id: &H::Domain,
node: usize,
parents: &[u32],
tree: &LCMerkleTree<H, U>,
) -> Result<H::Domain> {
let mut hasher = Sha256::new();
hasher.update(AsRef::<[u8]>::as_ref(&id));
    // The key is hashed over the parents, so skip the parent data when the node
    // has no parents (such a node lists itself as its first parent).
if node != parents[0] as usize {
let mut scratch: [u8; NODE_SIZE] = [0; NODE_SIZE];
for parent in parents.iter() {
tree.read_into(*parent as usize, &mut scratch)?;
hasher.update(&scratch);
}
}
let hash = hasher.finalize();
Ok(bytes_into_fr_repr_safe(hash.as_ref()).into())
}
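/// Derives the replica id as the leaf hash of `prover_id || sector_id`
/// (two 32-byte values concatenated into a 64-byte buffer).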
pub fn replica_id<H: Hasher>(prover_id: [u8; 32], sector_id: [u8; 32]) -> H::Domain {
let mut to_hash = [0; 64];
to_hash[..32].copy_from_slice(&prover_id);
    to_hash[32..].copy_from_slice(&sector_id);
H::Function::hash_leaf(&to_hash)
}
#[cfg(test)]
mod tests {
use super::*;
use bellperson::bls::Fr;
use ff::Field;
use rand::SeedableRng;
use rand_xorshift::XorShiftRng;
use storage_proofs_core::{
cache_key::CacheKey,
drgraph::{BucketGraph, BASE_DEGREE},
fr32::fr_into_bytes,
hasher::{Blake2sHasher, Sha256Hasher},
merkle::{BinaryMerkleTree, MerkleTreeTrait},
table_tests,
test_helper::setup_replica,
util::{data_at_node, default_rows_to_discard},
};
use crate::stacked::BINARY_ARITY;
fn test_extract_all<Tree: MerkleTreeTrait>() {
let rng = &mut XorShiftRng::from_seed(crate::TEST_SEED);
let replica_id: <Tree::Hasher as Hasher>::Domain =
<Tree::Hasher as Hasher>::Domain::random(rng);
let nodes = 4;
let data = vec![2u8; 32 * nodes];
// MT for original data is always named tree-d, and it will be
// referenced later in the process as such.
let cache_dir = tempfile::tempdir().expect("tempdir failure");
let config = StoreConfig::new(
cache_dir.path(),
CacheKey::CommDTree.to_string(),
default_rows_to_discard(nodes, BINARY_ARITY),
);
// Generate a replica path.
let replica_path = cache_dir.path().join("replica-path");
let mut mmapped_data = setup_replica(&data, &replica_path);
let sp = SetupParams {
drg: DrgParams {
nodes,
degree: BASE_DEGREE,
expansion_degree: 0,
porep_id: [32; 32],
},
private: false,
challenges_count: 1,
};
let pp: PublicParams<Tree::Hasher, BucketGraph<Tree::Hasher>> =
DrgPoRep::setup(&sp).expect("setup failed");
DrgPoRep::replicate(
&pp,
&replica_id,
(mmapped_data.as_mut()).into(),
None,
config.clone(),
replica_path,
)
.expect("replication failed");
let mut copied = vec![0; data.len()];
copied.copy_from_slice(&mmapped_data);
assert_ne!(data, copied, "replication did not change data");
let decoded_data = DrgPoRep::<Tree::Hasher, _>::extract_all(
&pp,
&replica_id,
mmapped_data.as_mut(),
Some(config),
)
.unwrap_or_else(|e| {
panic!("Failed to extract data from `DrgPoRep`: {}", e);
});
assert_eq!(data, decoded_data.as_slice(), "failed to extract data");
cache_dir.close().expect("Failed to remove cache dir");
}
#[test]
fn extract_all_sha256() {
test_extract_all::<BinaryMerkleTree<Sha256Hasher>>();
}
#[test]
fn extract_all_blake2s() {
test_extract_all::<BinaryMerkleTree<Blake2sHasher>>();
}
fn test_extract<Tree: MerkleTreeTrait>() {
let rng = &mut XorShiftRng::from_seed(crate::TEST_SEED);
let replica_id: <Tree::Hasher as Hasher>::Domain =
<Tree::Hasher as Hasher>::Domain::random(rng);
let nodes = 4;
let data = vec![2u8; 32 * nodes];
// MT for original data is always named tree-d, and it will be
// referenced later in the process as such.
let cache_dir = tempfile::tempdir().expect("tempdir failure");
let config = StoreConfig::new(
cache_dir.path(),
CacheKey::CommDTree.to_string(),
default_rows_to_discard(nodes, BINARY_ARITY),
);
// Generate a replica path.
let replica_path = cache_dir.path().join("replica-path");
let mut mmapped_data = setup_replica(&data, &replica_path);
let sp = SetupParams {
drg: DrgParams {
nodes: data.len() / 32,
degree: BASE_DEGREE,
expansion_degree: 0,
porep_id: [32; 32],
},
private: false,
challenges_count: 1,
};
let pp =
DrgPoRep::<Tree::Hasher, BucketGraph<Tree::Hasher>>::setup(&sp).expect("setup failed");
DrgPoRep::replicate(
&pp,
&replica_id,
(mmapped_data.as_mut()).into(),
None,
config.clone(),
replica_path,
)
.expect("replication failed");
let mut copied = vec![0; data.len()];
copied.copy_from_slice(&mmapped_data);
assert_ne!(data, copied, "replication did not change data");
for i in 0..nodes {
let decoded_data =
DrgPoRep::extract(&pp, &replica_id, &mmapped_data, i, Some(config.clone()))
.expect("failed to extract node data from PoRep");
let original_data = data_at_node(&data, i).expect("data_at_node failure");
assert_eq!(
original_data,
decoded_data.as_slice(),
"failed to extract data"
);
}
}
#[test]
fn extract_sha256() {
test_extract::<BinaryMerkleTree<Sha256Hasher>>();
}
#[test]
fn extract_blake2s() {
test_extract::<BinaryMerkleTree<Blake2sHasher>>();
}
fn prove_verify_aux<Tree: MerkleTreeTrait>(
nodes: usize,
i: usize,
use_wrong_challenge: bool,
use_wrong_parents: bool,
) {
assert!(i < nodes);
// The loop is here in case we need to retry because of an edge case in the test design.
loop {
let rng = &mut XorShiftRng::from_seed(crate::TEST_SEED);
let degree = BASE_DEGREE;
let expansion_degree = 0;
let replica_id: <Tree::Hasher as Hasher>::Domain =
<Tree::Hasher as Hasher>::Domain::random(rng);
let data: Vec<u8> = (0..nodes)
.flat_map(|_| fr_into_bytes(&Fr::random(rng)))
.collect();
// MT for original data is always named tree-d, and it will be
// referenced later in the process as such.
let cache_dir = tempfile::tempdir().expect("tempdir failure");
let config = StoreConfig::new(
cache_dir.path(),
CacheKey::CommDTree.to_string(),
default_rows_to_discard(nodes, BINARY_ARITY),
);
// Generate a replica path.
let replica_path = cache_dir.path().join("replica-path");
let mut mmapped_data = setup_replica(&data, &replica_path);
let challenge = i;
let sp = SetupParams {
drg: DrgParams {
nodes,
degree,
expansion_degree,
porep_id: [32; 32],
},
private: false,
challenges_count: 2,
};
let pp = DrgPoRep::<Tree::Hasher, BucketGraph<_>>::setup(&sp).expect("setup failed");
let (tau, aux) = DrgPoRep::<Tree::Hasher, _>::replicate(
&pp,
&replica_id,
(mmapped_data.as_mut()).into(),
None,
config,
replica_path.clone(),
)
.expect("replication failed");
let mut copied = vec![0; data.len()];
copied.copy_from_slice(&mmapped_data);
assert_ne!(data, copied, "replication did not change data");
let pub_inputs = PublicInputs::<<Tree::Hasher as Hasher>::Domain> {
replica_id: Some(replica_id),
challenges: vec![challenge, challenge],
tau: Some(tau),
};
let priv_inputs = PrivateInputs::<Tree::Hasher> {
tree_d: &aux.tree_d,
tree_r: &aux.tree_r,
tree_r_config_rows_to_discard: default_rows_to_discard(nodes, BINARY_ARITY),
};
let real_proof = DrgPoRep::<Tree::Hasher, _>::prove(&pp, &pub_inputs, &priv_inputs)
.expect("proving failed");
if use_wrong_parents {
// Only one 'wrong' option will be tested at a time.
assert!(!use_wrong_challenge);
let real_parents = real_proof.replica_parents;
// Parent vector claiming the wrong parents.
let fake_parents = vec![real_parents[0]
.iter()
// Incrementing each parent node will give us a different parent set.
// It's fine to be out of range, since this only needs to fail.
.map(|(i, data_proof)| (i + 1, data_proof.clone()))
.collect::<Vec<_>>()];
let proof = Proof::new(
real_proof.replica_nodes.clone(),
fake_parents,
real_proof.nodes.clone(),
);
let is_valid =
DrgPoRep::verify(&pp, &pub_inputs, &proof).expect("verification failed");
assert!(!is_valid, "verified in error -- with wrong parents");
let mut all_same = true;
for (p, _) in &real_parents[0] {
if *p != real_parents[0][0].0 {
all_same = false;
}
}
if all_same {
println!("invalid test data can't scramble proofs with all same parents.");
// If for some reason, we hit this condition because of the data passed in,
// try again.
continue;
}
// Parent vector claiming the right parents but providing valid proofs for different
// parents.
let fake_proof_parents = vec![real_parents[0]
.iter()
.enumerate()
.map(|(i, (p, _))| {
// Rotate the real parent proofs.
let x = (i + 1) % real_parents[0].len();
let j = real_parents[0][x].0;
(*p, real_parents[0][j as usize].1.clone())
})
.collect::<Vec<_>>()];
let proof2 = Proof::new(
real_proof.replica_nodes,
fake_proof_parents,
real_proof.nodes,
);
assert!(
!DrgPoRep::<Tree::Hasher, _>::verify(&pp, &pub_inputs, &proof2).unwrap_or_else(
|e| {
panic!("Verification failed: {}", e);
}
),
"verified in error -- with wrong parent proofs"
);
return;
}
let proof = real_proof;
if use_wrong_challenge {
let pub_inputs_with_wrong_challenge_for_proof =
PublicInputs::<<Tree::Hasher as Hasher>::Domain> {
replica_id: Some(replica_id),
challenges: vec![if challenge == 1 { 2 } else { 1 }],
tau: Some(tau),
};
let verified = DrgPoRep::<Tree::Hasher, _>::verify(
&pp,
&pub_inputs_with_wrong_challenge_for_proof,
&proof,
)
.expect("Verification failed");
assert!(
!verified,
"wrongly verified proof which does not match challenge in public input"
);
} else {
assert!(
DrgPoRep::<Tree::Hasher, _>::verify(&pp, &pub_inputs, &proof)
.expect("verification failed"),
"failed to verify"
);
}
cache_dir.close().expect("Failed to remove cache dir");
// Normally, just run once.
break;
}
}
fn prove_verify(n: usize, i: usize) {
prove_verify_aux::<BinaryMerkleTree<Sha256Hasher>>(n, i, false, false);
prove_verify_aux::<BinaryMerkleTree<Blake2sHasher>>(n, i, false, false);
}
fn prove_verify_wrong_challenge(n: usize, i: usize) {
prove_verify_aux::<BinaryMerkleTree<Sha256Hasher>>(n, i, true, false);
prove_verify_aux::<BinaryMerkleTree<Blake2sHasher>>(n, i, true, false);
}
fn prove_verify_wrong_parents(n: usize, i: usize) {
prove_verify_aux::<BinaryMerkleTree<Sha256Hasher>>(n, i, false, true);
prove_verify_aux::<BinaryMerkleTree<Blake2sHasher>>(n, i, false, true);
}
table_tests! {
prove_verify {
prove_verify_32_16_1(16, 1);
prove_verify_32_64_1(64, 1);
prove_verify_32_64_2(64, 2);
prove_verify_32_256_1(256, 1);
prove_verify_32_256_2(256, 2);
prove_verify_32_256_3(256, 3);
prove_verify_32_256_4(256, 4);
prove_verify_32_256_5(256, 5);
}
}
#[test]
fn test_drgporep_verifies_using_challenge() {
prove_verify_wrong_challenge(8, 1);
}
#[test]
fn test_drgporep_verifies_parents() {
        // Challenge a node (5) that doesn't have all the same parents.
prove_verify_wrong_parents(8, 5);
}
}
| 31.962889 | 101 | 0.540371 |
64d25247753b8ab984bb346e1387e7ceeb25e203 | 87,802 | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/cli/main.rs.mako'
// DO NOT EDIT !
#![allow(unused_variables, unused_imports, dead_code, unused_mut)]
#[macro_use]
extern crate tokio;
#[macro_use]
extern crate clap;
extern crate yup_oauth2 as oauth2;
extern crate yup_hyper_mock as mock;
extern crate hyper_rustls;
extern crate serde;
extern crate serde_json;
extern crate hyper;
extern crate mime;
extern crate strsim;
extern crate google_tpu1_alpha1;
use std::env;
use std::io::{self, Write};
use clap::{App, SubCommand, Arg};
use google_tpu1_alpha1::{api, Error};
mod client;
use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg,
input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol,
calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo};
use std::default::Default;
use std::str::FromStr;
use serde_json as json;
use clap::ArgMatches;
enum DoitError {
IoError(String, io::Error),
ApiError(Error),
}
struct Engine<'n> {
opt: ArgMatches<'n>,
hub: api::TPU<hyper::Client<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>, hyper::body::Body>
>,
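    // `gp` holds the global query parameters accepted by every call (CLI spelling);
    // `gpm` maps the CLI spelling to the API's wire name where the two differ.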
gp: Vec<&'static str>,
gpm: Vec<(&'static str, &'static str)>,
}
impl<'n> Engine<'n> {
async fn _projects_locations_accelerator_types_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_accelerator_types_get(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_accelerator_types_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_accelerator_types_list(opt.value_of("parent").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"page-size" => {
call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
},
"order-by" => {
call = call.order_by(value.unwrap_or(""));
},
"filter" => {
call = call.filter(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["page-token", "filter", "order-by", "page-size"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"page-size" => {
call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
},
"filter" => {
call = call.filter(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["page-token", "filter", "page-size"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_nodes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"accelerator-type" => Some(("acceleratorType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"cidr-block" => Some(("cidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"health" => Some(("health", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"health-description" => Some(("healthDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"ip-address" => Some(("ipAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"port" => Some(("port", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"scheduling-config.preemptible" => Some(("schedulingConfig.preemptible", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"scheduling-config.reserved" => Some(("schedulingConfig.reserved", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"tensorflow-version" => Some(("tensorflowVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["accelerator-type", "cidr-block", "create-time", "description", "health", "health-description", "ip-address", "labels", "name", "network", "port", "preemptible", "reserved", "scheduling-config", "service-account", "state", "tensorflow-version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::Node = json::value::from_value(object).unwrap();
let mut call = self.hub.projects().locations_nodes_create(request, opt.value_of("parent").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"node-id" => {
call = call.node_id(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["node-id"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_nodes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_nodes_delete(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_nodes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_nodes_get(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_nodes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_nodes_list(opt.value_of("parent").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"page-size" => {
call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["page-token", "page-size"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_nodes_reimage(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"tensorflow-version" => Some(("tensorflowVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["tensorflow-version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::ReimageNodeRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.projects().locations_nodes_reimage(request, opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_nodes_start(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec![]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::StartNodeRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.projects().locations_nodes_start(request, opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_nodes_stop(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec![]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::StopNodeRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.projects().locations_nodes_stop(request, opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_operations_cancel(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_operations_delete(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_operations_get(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_operations_list(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"page-size" => {
call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
},
"filter" => {
call = call.filter(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["page-token", "filter", "page-size"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_tensorflow_versions_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_tensorflow_versions_get(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_locations_tensorflow_versions_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().locations_tensorflow_versions_list(opt.value_of("parent").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"page-size" => {
call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
},
"order-by" => {
call = call.order_by(value.unwrap_or(""));
},
"filter" => {
call = call.filter(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["page-token", "filter", "order-by", "page-size"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> {
let mut err = InvalidOptionsError::new();
let mut call_result: Result<(), DoitError> = Ok(());
let mut err_opt: Option<InvalidOptionsError> = None;
match self.opt.subcommand() {
("projects", Some(opt)) => {
match opt.subcommand() {
("locations-accelerator-types-get", Some(opt)) => {
call_result = self._projects_locations_accelerator_types_get(opt, dry_run, &mut err).await;
},
("locations-accelerator-types-list", Some(opt)) => {
call_result = self._projects_locations_accelerator_types_list(opt, dry_run, &mut err).await;
},
("locations-get", Some(opt)) => {
call_result = self._projects_locations_get(opt, dry_run, &mut err).await;
},
("locations-list", Some(opt)) => {
call_result = self._projects_locations_list(opt, dry_run, &mut err).await;
},
("locations-nodes-create", Some(opt)) => {
call_result = self._projects_locations_nodes_create(opt, dry_run, &mut err).await;
},
("locations-nodes-delete", Some(opt)) => {
call_result = self._projects_locations_nodes_delete(opt, dry_run, &mut err).await;
},
("locations-nodes-get", Some(opt)) => {
call_result = self._projects_locations_nodes_get(opt, dry_run, &mut err).await;
},
("locations-nodes-list", Some(opt)) => {
call_result = self._projects_locations_nodes_list(opt, dry_run, &mut err).await;
},
("locations-nodes-reimage", Some(opt)) => {
call_result = self._projects_locations_nodes_reimage(opt, dry_run, &mut err).await;
},
("locations-nodes-start", Some(opt)) => {
call_result = self._projects_locations_nodes_start(opt, dry_run, &mut err).await;
},
("locations-nodes-stop", Some(opt)) => {
call_result = self._projects_locations_nodes_stop(opt, dry_run, &mut err).await;
},
("locations-operations-cancel", Some(opt)) => {
call_result = self._projects_locations_operations_cancel(opt, dry_run, &mut err).await;
},
("locations-operations-delete", Some(opt)) => {
call_result = self._projects_locations_operations_delete(opt, dry_run, &mut err).await;
},
("locations-operations-get", Some(opt)) => {
call_result = self._projects_locations_operations_get(opt, dry_run, &mut err).await;
},
("locations-operations-list", Some(opt)) => {
call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await;
},
("locations-tensorflow-versions-get", Some(opt)) => {
call_result = self._projects_locations_tensorflow_versions_get(opt, dry_run, &mut err).await;
},
("locations-tensorflow-versions-list", Some(opt)) => {
call_result = self._projects_locations_tensorflow_versions_list(opt, dry_run, &mut err).await;
},
_ => {
err.issues.push(CLIError::MissingMethodError("projects".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
_ => {
err.issues.push(CLIError::MissingCommandError);
writeln!(io::stderr(), "{}\n", self.opt.usage()).ok();
}
}
if dry_run {
if err.issues.len() > 0 {
err_opt = Some(err);
}
Err(err_opt)
} else {
Ok(call_result)
}
}
// Please note that this call will fail if any part of the opt can't be handled
async fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> {
let (config_dir, secret) = {
let config_dir = match client::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) {
Err(e) => return Err(InvalidOptionsError::single(e, 3)),
Ok(p) => p,
};
match client::application_secret_from_directory(&config_dir, "tpu1-alpha1-secret.json",
"{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") {
Ok(secret) => (config_dir, secret),
Err(e) => return Err(InvalidOptionsError::single(e, 4))
}
};
let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
secret,
yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
).persist_tokens_to_disk(format!("{}/tpu1-alpha1", config_dir)).build().await.unwrap();
let client =
if opt.is_present("debug") {
hyper::Client::with_connector(mock::TeeConnector {
connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())
})
} else {
hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()))
};
let engine = Engine {
opt: opt,
hub: api::TPU::new(client, auth),
gp: vec!["$-xgafv", "access-token", "alt", "callback", "fields", "key", "oauth-token", "pretty-print", "quota-user", "upload-type", "upload-protocol"],
gpm: vec![
("$-xgafv", "$.xgafv"),
("access-token", "access_token"),
("oauth-token", "oauth_token"),
("pretty-print", "prettyPrint"),
("quota-user", "quotaUser"),
("upload-type", "uploadType"),
("upload-protocol", "upload_protocol"),
]
};
match engine._doit(true).await {
Err(Some(err)) => Err(err),
Err(None) => Ok(engine),
Ok(_) => unreachable!(),
}
}
async fn doit(&self) -> Result<(), DoitError> {
match self._doit(false).await {
Ok(res) => res,
Err(_) => unreachable!(),
}
}
}
#[tokio::main]
async fn main() {
let mut exit_status = 0i32;
let arg_data = [
("projects", "methods: 'locations-accelerator-types-get', 'locations-accelerator-types-list', 'locations-get', 'locations-list', 'locations-nodes-create', 'locations-nodes-delete', 'locations-nodes-get', 'locations-nodes-list', 'locations-nodes-reimage', 'locations-nodes-start', 'locations-nodes-stop', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-tensorflow-versions-get' and 'locations-tensorflow-versions-list'", vec![
("locations-accelerator-types-get",
Some(r##"Gets AcceleratorType."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-accelerator-types-get",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-accelerator-types-list",
Some(r##"Lists accelerator types supported by this API."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-accelerator-types-list",
vec![
(Some(r##"parent"##),
None,
Some(r##"The parent resource name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-get",
Some(r##"Gets information about a location."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-get",
vec![
(Some(r##"name"##),
None,
Some(r##"Resource name for the location."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-list",
Some(r##"Lists information about the supported locations for this service."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-list",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource that owns the locations collection, if applicable."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-nodes-create",
Some(r##"Creates a node."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-nodes-create",
vec![
(Some(r##"parent"##),
None,
Some(r##"The parent resource name."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-nodes-delete",
Some(r##"Deletes a node."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-nodes-delete",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-nodes-get",
Some(r##"Gets the details of a node."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-nodes-get",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-nodes-list",
Some(r##"Lists nodes."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-nodes-list",
vec![
(Some(r##"parent"##),
None,
Some(r##"The parent resource name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-nodes-reimage",
Some(r##"Reimages a node's OS."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-nodes-reimage",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource name."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-nodes-start",
Some(r##"Starts a node."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-nodes-start",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource name."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-nodes-stop",
Some(r##"Stops a node."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-nodes-stop",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource name."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-operations-cancel",
Some(r##"Starts asynchronous cancellation on a long-running operation. The server
makes a best effort to cancel the operation, but success is not
guaranteed. If the server doesn't support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`. Clients can use
Operations.GetOperation or
other methods to check whether the cancellation succeeded or whether the
operation completed despite cancellation. On successful cancellation,
the operation is not deleted; instead, it becomes an operation with
an Operation.error value with a google.rpc.Status.code of 1,
corresponding to `Code.CANCELLED`."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-operations-cancel",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource to be cancelled."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-operations-delete",
Some(r##"Deletes a long-running operation. This method indicates that the client is
no longer interested in the operation result. It does not cancel the
operation. If the server doesn't support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-operations-delete",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource to be deleted."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-operations-get",
Some(r##"Gets the latest state of a long-running operation. Clients can use this
method to poll the operation result at intervals as recommended by the API
service."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-operations-get",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-operations-list",
Some(r##"Lists operations that match the specified filter in the request. If the
server doesn't support this method, it returns `UNIMPLEMENTED`.
NOTE: the `name` binding allows API services to override the binding
to use different resource name schemes, such as `users/*/operations`. To
override the binding, API services can add a binding such as
`"/v1/{name=users/*}/operations"` to their service configuration.
For backwards compatibility, the default name includes the operations
collection id, however overriding users must ensure the name binding
is the parent resource, without the operations collection id."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-operations-list",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation's parent resource."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-tensorflow-versions-get",
Some(r##"Gets TensorFlow Version."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-tensorflow-versions-get",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-tensorflow-versions-list",
Some(r##"Lists TensorFlow versions supported by this API."##),
"Details at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli/projects_locations-tensorflow-versions-list",
vec![
(Some(r##"parent"##),
None,
Some(r##"The parent resource name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
];
let mut app = App::new("tpu1-alpha1")
.author("Sebastian Thiel <[email protected]>")
.version("1.0.14+20200707")
.about("TPU API provides customers with access to Google TPU technology.")
.after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli")
.arg(Arg::with_name("url")
.long("scope")
.help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli")
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Output all server communication to standard error. `tx` and `rx` are placed into the same stream.")
.multiple(false)
.takes_value(false))
.arg(Arg::with_name("debug-auth")
.long("debug-auth")
.help("Output all communication related to authentication to standard error. `tx` and `rx` are placed into the same stream.")
.multiple(false)
.takes_value(false));
for &(main_command_name, about, ref subcommands) in arg_data.iter() {
let mut mcmd = SubCommand::with_name(main_command_name).about(about);
for &(sub_command_name, ref desc, url_info, ref args) in subcommands {
let mut scmd = SubCommand::with_name(sub_command_name);
if let &Some(desc) = desc {
scmd = scmd.about(desc);
}
scmd = scmd.after_help(url_info);
for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args {
let arg_name_str =
match (arg_name, flag) {
(&Some(an), _ ) => an,
(_ , &Some(f)) => f,
_ => unreachable!(),
};
let mut arg = Arg::with_name(arg_name_str)
.empty_values(false);
if let &Some(short_flag) = flag {
arg = arg.short(short_flag);
}
if let &Some(desc) = desc {
arg = arg.help(desc);
}
if arg_name.is_some() && flag.is_some() {
arg = arg.takes_value(true);
}
if let &Some(required) = required {
arg = arg.required(required);
}
if let &Some(multi) = multi {
arg = arg.multiple(multi);
}
scmd = scmd.arg(arg);
}
mcmd = mcmd.subcommand(scmd);
}
app = app.subcommand(mcmd);
}
let matches = app.get_matches();
let debug = matches.is_present("debug");
match Engine::new(matches) {
Err(err) => {
exit_status = err.exit_code;
writeln!(io::stderr(), "{}", err).ok();
},
Ok(engine) => {
if let Err(doit_err) = engine.doit().await {
exit_status = 1;
match doit_err {
DoitError::IoError(path, err) => {
writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok();
},
DoitError::ApiError(err) => {
if debug {
writeln!(io::stderr(), "{:#?}", err).ok();
} else {
writeln!(io::stderr(), "{}", err).ok();
}
}
}
}
}
}
std::process::exit(exit_status);
}
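// Example invocation of the generated binary, sketched from the subcommands
// declared in `arg_data` above (optional parameters follow the `-p key=value`
// convention, and `-o FILE` selects the output file):
//
//   tpu1-alpha1 projects locations-nodes-list <parent> -o nodes.json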
| 48.670732 | 526 | 0.436505 |
237027b13b91c08e79eee68d5f234eb6a92ca698 | 1,790 | use crate::deserialize::{self, FromSql};
use crate::mysql::{Mysql, MysqlValue};
use crate::serialize::{self, IsNull, Output, ToSql};
use crate::sql_types;
use std::io::prelude::*;
impl FromSql<sql_types::Json, Mysql> for serde_json::Value {
fn from_sql(value: Option<MysqlValue<'_>>) -> deserialize::Result<Self> {
let value = not_none!(value);
serde_json::from_slice(value.as_bytes()).map_err(|_| "Invalid Json".into())
}
}
impl ToSql<sql_types::Json, Mysql> for serde_json::Value {
fn to_sql<W: Write>(&self, out: &mut Output<W, Mysql>) -> serialize::Result {
serde_json::to_writer(out, self)
.map(|_| IsNull::No)
.map_err(Into::into)
}
}
#[test]
fn json_to_sql() {
let mut bytes = Output::test();
let test_json = serde_json::Value::Bool(true);
ToSql::<sql_types::Json, Mysql>::to_sql(&test_json, &mut bytes).unwrap();
assert_eq!(bytes, b"true");
}
#[test]
fn some_json_from_sql() {
use crate::mysql::MysqlType;
let input_json = b"true";
let output_json: serde_json::Value = FromSql::<sql_types::Json, Mysql>::from_sql(Some(
MysqlValue::new(input_json, MysqlType::String),
))
.unwrap();
assert_eq!(output_json, serde_json::Value::Bool(true));
}
#[test]
fn bad_json_from_sql() {
use crate::mysql::MysqlType;
    let json: Result<serde_json::Value, _> = FromSql::<sql_types::Json, Mysql>::from_sql(Some(
        MysqlValue::new(b"boom", MysqlType::String),
    ));
    assert_eq!(json.unwrap_err().to_string(), "Invalid Json");
}
#[test]
fn no_json_from_sql() {
    let json: Result<serde_json::Value, _> = FromSql::<sql_types::Json, Mysql>::from_sql(None);
    assert_eq!(
        json.unwrap_err().to_string(),
"Unexpected null for non-null column"
);
}
| 30.862069 | 95 | 0.643575 |
21c521aebc590642ee893c6b3f48e452d6e8a133 | 1,494 | // This file is part of olympus-xmp. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/raphaelcohn/olympus-xmp/master/COPYRIGHT. No part of olympus-xmp, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2022 The developers of olympus-xmp. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/raphaelcohn/olympus-xmp/master/COPYRIGHT.
/// A parse error.
#[derive(Debug)]
pub enum StringLiteralToDomainTypeParseError<StrParseError: error::Error, TryFromError: error::Error>
{
#[allow(missing_docs)]
StrParse(StrParseError),
#[allow(missing_docs)]
TryFrom(TryFromError)
}
impl<StrParseError: error::Error, TryFromError: error::Error> Display for StringLiteralToDomainTypeParseError<StrParseError, TryFromError>
{
#[inline(always)]
fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result
{
Debug::fmt(self, formatter)
}
}
impl<StrParseError: 'static + error::Error, TryFromError: 'static + error::Error> error::Error for StringLiteralToDomainTypeParseError<StrParseError, TryFromError>
{
#[inline(always)]
fn source(&self) -> Option<&(dyn error::Error + 'static)>
{
use StringLiteralToDomainTypeParseError::*;
match self
{
StrParse(cause) => Some(cause),
TryFrom(cause) => Some(cause),
}
}
}
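// A usage sketch with hypothetical `Raw` and `Domain` types (not part of this
// crate): parse the string literal first, then narrow it into the domain type,
// mapping each failure into the matching variant.
//
// fn parse_domain(literal: &str) -> Result<Domain, StringLiteralToDomainTypeParseError<RawError, DomainError>>
// {
// 	use StringLiteralToDomainTypeParseError::*;
// 	let raw: Raw = literal.parse().map_err(StrParse)?;
// 	Domain::try_from(raw).map_err(TryFrom)
// }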
| 37.35 | 390 | 0.755689 |
0e7af7baf47127281d58147846e9c882e48a3661 | 39 | fn main() {
risc0_build::link();
}
| 9.75 | 24 | 0.538462 |
ac9ec3369a10ab9ed1681e35cdb7be0dbda86e83 | 821 | use crate::store::{ReadStore, Row, WriteStore};
use crate::util::Bytes;
pub struct FakeStore;
impl ReadStore for FakeStore {
fn get(&self, _key: &[u8]) -> Option<Bytes> {
None
}
fn scan(&self, _prefix: &[u8]) -> Vec<Row> {
vec![]
}
}
impl WriteStore for FakeStore {
fn write(&self, _rows: Vec<Row>) {}
fn flush(&self) {}
}
#[cfg(test)]
mod tests {
#[test]
fn test_fakestore() {
use crate::fake;
use crate::store::{ReadStore, Row, WriteStore};
let store = fake::FakeStore {};
store.write(vec![Row {
key: b"k".to_vec(),
value: b"v".to_vec(),
}]);
store.flush();
// nothing was actually written
assert!(store.get(b"").is_none());
assert!(store.scan(b"").is_empty());
}
}
| 21.605263 | 55 | 0.53106 |
8a36e26bc657b3c9173756ec79d9a3dcb43c30ab | 481 | macro_rules! timeit {
($func:expr) => ({
let t1 = std::time::Instant::now();
println!("{:?}", $func);
let t2 = std::time::Instant::now().duration_since(t1);
println!("{}", t2.as_secs() as f64 + t2.subsec_nanos() as f64 / 1000000000.00);
})
}
fn main() {
fn mult_3_5(n: usize) -> usize {
(0..n)
.filter(|x| x % 3 == 0 || x % 5 == 0)
.fold(0, |acc, item| acc + item)
}
timeit!(mult_3_5(1000));
}
| 24.05 | 87 | 0.471933 |
69579201579543e1ed43166130761c2a094edaf0 | 1,200 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use move_core_types::language_storage::ModuleId;
use std::{cell::RefCell, collections::hash_map::HashMap, hash::Hash, rc::Rc};
use vm::CompiledModule;
pub struct ModuleCacheImpl<K, V> {
id_map: RefCell<HashMap<K, usize>>,
modules: RefCell<Vec<Rc<V>>>,
}
impl<K, V> ModuleCacheImpl<K, V>
where
K: Eq + Hash,
{
pub fn new() -> Self {
Self {
id_map: RefCell::new(HashMap::new()),
modules: RefCell::new(vec![]),
}
}
pub fn insert(&self, key: K, module: V) -> Rc<V> {
self.modules.borrow_mut().push(Rc::new(module));
let idx = self.modules.borrow().len() - 1;
self.id_map.borrow_mut().insert(key, idx);
self.modules
.borrow()
.last()
.expect("ModuleCache: last() after push() impossible failure")
.clone()
}
pub fn get(&self, key: &K) -> Option<Rc<V>> {
self.id_map
.borrow()
.get(&key)
.and_then(|idx| self.modules.borrow().get(*idx).cloned())
}
}
pub type ModuleCache = ModuleCacheImpl<ModuleId, CompiledModule>;
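#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sketch exercising the generic cache with plain key/value
    // types instead of a real `CompiledModule`; it checks that `get` hands
    // back the same `Rc` that `insert` produced.
    #[test]
    fn insert_then_get_returns_same_rc() {
        let cache: ModuleCacheImpl<&'static str, u64> = ModuleCacheImpl::new();
        let inserted = cache.insert("module", 42);
        let fetched = cache.get(&"module").expect("entry was just inserted");
        assert!(Rc::ptr_eq(&inserted, &fetched));
    }
}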
| 27.272727 | 77 | 0.5725 |
3a096e9c6a8b86c5d116f84dbabc956b0ca820c5 | 2,769 | //! This crate provides Serde's two derive macros.
//!
//! ```edition2018
//! # use serde_derive::{Serialize, Deserialize};
//! #
//! #[derive(Serialize, Deserialize)]
//! # struct S;
//! #
//! # fn main() {}
//! ```
//!
//! Please refer to [https://serde.rs/derive.html] for how to set this up.
//!
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html
#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.123")]
#![allow(unknown_lints, bare_trait_objects)]
#![deny(clippy::all, clippy::pedantic)]
// Ignored clippy lints
#![allow(
clippy::cognitive_complexity,
clippy::enum_variant_names,
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/6797
clippy::manual_map,
clippy::match_like_matches_macro,
clippy::needless_pass_by_value,
clippy::too_many_arguments,
clippy::trivially_copy_pass_by_ref,
clippy::used_underscore_binding,
clippy::wildcard_in_or_patterns,
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/5704
clippy::unnested_or_patterns,
)]
// Ignored clippy_pedantic lints
#![allow(
clippy::cast_possible_truncation,
clippy::checked_conversions,
clippy::doc_markdown,
clippy::enum_glob_use,
clippy::filter_map,
clippy::indexing_slicing,
clippy::items_after_statements,
clippy::let_underscore_drop,
clippy::map_err_ignore,
clippy::match_same_arms,
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::option_if_let_else,
clippy::similar_names,
clippy::single_match_else,
clippy::struct_excessive_bools,
clippy::too_many_lines,
clippy::unseparated_literal_suffix,
clippy::unused_self,
clippy::use_self,
clippy::wildcard_imports
)]
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
extern crate proc_macro;
extern crate proc_macro2;
mod internals;
use proc_macro::TokenStream;
use syn::DeriveInput;
#[macro_use]
mod bound;
#[macro_use]
mod fragment;
mod de;
mod dummy;
mod pretend;
mod ser;
mod try;
#[proc_macro_derive(Serialize, attributes(serde))]
pub fn derive_serialize(input: TokenStream) -> TokenStream {
let mut input = parse_macro_input!(input as DeriveInput);
ser::expand_derive_serialize(&mut input)
.unwrap_or_else(to_compile_errors)
.into()
}
#[proc_macro_derive(Deserialize, attributes(serde))]
pub fn derive_deserialize(input: TokenStream) -> TokenStream {
let mut input = parse_macro_input!(input as DeriveInput);
de::expand_derive_deserialize(&mut input)
.unwrap_or_else(to_compile_errors)
.into()
}
fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {
let compile_errors = errors.iter().map(syn::Error::to_compile_error);
quote!(#(#compile_errors)*)
}
| 26.883495 | 75 | 0.716143 |
f86ccc66589a2242a6d7829cecf0a6216e6473e7 | 5,604 | // Copyright 2017 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Generate X509 certificate and associated RSA public/private keypair
use openssl::x509::{X509, X509Name};
use openssl::rsa::Rsa;
use openssl::pkey::PKey;
use openssl::hash::MessageDigest;
use openssl::asn1::Asn1Time;
use openssl::bn;
use openssl::conf::{Conf, ConfMethod};
use openssl::x509::extension;
use errors::*;
pub struct Config {
rsa_bits: u32,
expire_in_days: u32,
}
impl Config {
pub fn new(rsa_bits: u32, expire_in_days: u32) -> Self {
Config {
rsa_bits: rsa_bits,
expire_in_days: expire_in_days,
}
}
}
pub fn generate_cert(config: &Config) -> Result<(X509, PKey)> {
// generate an rsa public/private keypair
let rsa = Rsa::generate(config.rsa_bits)
.chain_err(|| "failed to generate rsa keypair")?;
// put it into the pkey struct
let pkey = PKey::from_rsa(rsa)
.chain_err(|| "failed to create pkey struct from rsa keypair")?;
// make a new x509 certificate with the pkey we generated
let mut x509builder = X509::builder()
.chain_err(|| "failed to make x509 builder")?;
x509builder.set_version(2)
.chain_err(|| "failed to set x509 version")?;
// set the serial number to some big random positive integer
let mut serial = bn::BigNum::new()
.chain_err(|| "failed to make new bignum")?;
serial.rand(32, bn::MSB_ONE, false)
.chain_err(|| "failed to generate random bignum")?;
let serial = serial.to_asn1_integer()
.chain_err(|| "failed to get asn1 integer from bignum")?;
x509builder.set_serial_number(&serial)
.chain_err(|| "failed to set x509 serial number")?;
// call fails without expiration dates
// I guess they are important anyway, but still
x509builder.set_not_before(&Asn1Time::days_from_now(0).unwrap())
.chain_err(|| "failed to set x509 start date")?;
x509builder.set_not_after(&Asn1Time::days_from_now(config.expire_in_days).unwrap())
.chain_err(|| "failed to set x509 expiration date")?;
// add the issuer and subject name
// it's set to "/CN=LinuxTransport"
// if we want we can make that configurable later
let mut x509namebuilder = X509Name::builder()
.chain_err(|| "failed to get x509name builder")?;
x509namebuilder.append_entry_by_text("CN", "LinuxTransport")
.chain_err(|| "failed to append /CN=LinuxTransport to x509name builder")?;
let x509name = x509namebuilder.build();
x509builder.set_issuer_name(&x509name)
.chain_err(|| "failed to set x509 issuer name")?;
x509builder.set_subject_name(&x509name)
.chain_err(|| "failed to set x509 subject name")?;
// set the public key
x509builder.set_pubkey(&pkey)
.chain_err(|| "failed to set x509 pubkey")?;
// it also needs several extensions
// in the openssl configuration file, these are set when generating certs
// basicConstraints=CA:true
// subjectKeyIdentifier=hash
// authorityKeyIdentifier=keyid:always,issuer
// that means these extensions get added to certs generated using the
// command line tool automatically. but since we are constructing it, we
// need to add them manually.
// we need to do them one at a time, and they need to be in this order
let conf = Conf::new(ConfMethod::default())
.chain_err(|| "failed to make new conf struct")?;
// it seems like everything depends on the basic constraints, so let's do
// that first.
let bc = extension::BasicConstraints::new()
.ca()
.build()
.chain_err(|| "failed to build BasicConstraints extension")?;
x509builder.append_extension(bc)
.chain_err(|| "failed to append BasicConstraints extension")?;
// the akid depends on the skid. I guess it copies the skid when the cert is
// self-signed or something, I'm not really sure.
let skid = {
// we need to wrap these in a block because the builder gets borrowed away
// from us
let ext_con = x509builder.x509v3_context(None, Some(&conf));
extension::SubjectKeyIdentifier::new()
.build(&ext_con)
.chain_err(|| "failed to build SubjectKeyIdentifier extention")?
};
x509builder.append_extension(skid)
.chain_err(|| "failed to append SubjectKeyIdentifier extention")?;
// now that the skid is added we can add the akid
let akid = {
let ext_con = x509builder.x509v3_context(None, Some(&conf));
extension::AuthorityKeyIdentifier::new()
.keyid(true)
.issuer(false)
.build(&ext_con)
.chain_err(|| "failed to build AuthorityKeyIdentifier extention")?
};
x509builder.append_extension(akid)
.chain_err(|| "failed to append AuthorityKeyIdentifier extention")?;
// self-sign the certificate
x509builder.sign(&pkey, MessageDigest::sha256())
.chain_err(|| "failed to self-sign x509 cert")?;
let x509 = x509builder.build();
Ok((x509, pkey))
}
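// Usage sketch (the PEM export method names are assumptions about the openssl
// crate version this module targets and may differ slightly):
//
// let config = Config::new(2048, 365);
// let (cert, pkey) = generate_cert(&config)?;
// let cert_pem = cert.to_pem()?;             // self-signed certificate, PEM-encoded
// let key_pem = pkey.private_key_to_pem()?;  // matching RSA private key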
| 39.188811 | 87 | 0.668451 |
d7f68e5f0f46aa79ff20fc92c7d6b611d32ee209 | 4,258 | // Copyright 2018-2021 Parity Technologies (UK) Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::GenerateCode;
use derive_more::From;
use proc_macro2::TokenStream as TokenStream2;
use quote::{
quote,
quote_spanned,
};
use syn::spanned::Spanned as _;
/// Generator to create the ink! storage struct and important trait implementations.
#[derive(From)]
pub struct Storage<'a> {
contract: &'a ir::Contract,
}
impl_as_ref_for_generator!(Storage);
impl GenerateCode for Storage<'_> {
fn generate_code(&self) -> TokenStream2 {
let storage_span = self.contract.module().storage().span();
let access_env_impls = self.generate_access_env_trait_impls();
let storage_struct = self.generate_storage_struct();
let use_emit_event =
self.contract.module().events().next().is_some().then(|| {
// Required to allow for `self.env().emit_event(..)` in messages and constructors.
quote! { use ::ink_lang::codegen::EmitEvent as _; }
});
quote_spanned!(storage_span =>
#storage_struct
#access_env_impls
const _: () = {
// Used to make `self.env()` and `Self::env()` available in message code.
#[allow(unused_imports)]
use ::ink_lang::codegen::{
Env as _,
StaticEnv as _,
};
#use_emit_event
};
)
}
}
impl Storage<'_> {
fn generate_access_env_trait_impls(&self) -> TokenStream2 {
let storage_ident = &self.contract.module().storage().ident();
quote! {
const _: () = {
impl<'a> ::ink_lang::codegen::Env for &'a #storage_ident {
type EnvAccess = ::ink_lang::EnvAccess<
'a, <#storage_ident as ::ink_lang::reflect::ContractEnv>::Env>;
fn env(self) -> Self::EnvAccess {
<<Self as ::ink_lang::codegen::Env>::EnvAccess
as ::core::default::Default>::default()
}
}
impl<'a> ::ink_lang::codegen::StaticEnv for #storage_ident {
type EnvAccess = ::ink_lang::EnvAccess<
'static, <#storage_ident as ::ink_lang::reflect::ContractEnv>::Env>;
fn env() -> Self::EnvAccess {
<<Self as ::ink_lang::codegen::StaticEnv>::EnvAccess
as ::core::default::Default>::default()
}
}
};
}
}
/// Generates the storage struct definition.
fn generate_storage_struct(&self) -> TokenStream2 {
let storage = self.contract.module().storage();
let span = storage.span();
let ident = storage.ident();
let attrs = storage.attrs();
let fields = storage.fields();
quote_spanned!( span =>
#(#attrs)*
#[cfg_attr(
feature = "std",
derive(::ink_storage::traits::StorageLayout)
)]
#[derive(::ink_storage::traits::SpreadLayout)]
#[cfg_attr(test, derive(::core::fmt::Debug))]
pub struct #ident {
#( #fields ),*
}
const _: () = {
impl ::ink_lang::reflect::ContractName for #ident {
const NAME: &'static str = ::core::stringify!(#ident);
}
impl ::ink_lang::codegen::ContractRootKey for #ident {
const ROOT_KEY: ::ink_primitives::Key = ::ink_primitives::Key::new([0x00; 32]);
}
};
)
}
}
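// For orientation: a user-facing storage definition such as the following
// (ordinary ink! contract code, not part of this crate) is the input that
// `generate_storage_struct` expands with the derives and impls above.
//
// #[ink(storage)]
// pub struct Flipper {
//     value: bool,
// }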
| 36.393162 | 99 | 0.543213 |
e67580142ffef3d22248bc4eb525fd5c332cd264 | 139 | use crate::ir::*;
#[derive(Debug, Clone)]
pub struct IrDartAnnotation {
pub content: String,
pub library: Option<IrDartImport>,
}
| 17.375 | 38 | 0.683453 |
7102b1d24a271373c3347fc6801c9485255f7e4b | 1,935 | // Copyright 2018 by Nedim Sabic (RabbitStack)
// http://rabbitstack.github.io
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
use pnetlink::packet::netlink::{NetlinkConnection, NetlinkRequestBuilder, NetlinkReader};
use pnetlink::packet::route::link::{IfInfoPacketBuilder, IFLA_IFNAME, IFLA_LINKINFO, IFLA_INFO_KIND, RTM_NEWLINK};
use pnetlink::packet::route::RtAttrPacket;
use pnetlink::packet::route::route::WithPayload;
use pnetlink::packet::netlink::NetlinkMsgFlags;
use pnet_macros_support::packet::Packet;
use std::io;
use std::io::Write;
pub trait Bridge {
/// Creates a new bridge kernel device.
fn new_bridge(&mut self, name: &str) -> io::Result<()>;
}
impl Bridge for NetlinkConnection {
/// Creates a new bridge kernel device.
fn new_bridge(&mut self, name: &str) -> io::Result<()> {
let ifi = {
IfInfoPacketBuilder::new().
append(RtAttrPacket::create_with_payload(IFLA_IFNAME, name)).
append(RtAttrPacket::create_with_payload(
IFLA_LINKINFO, RtAttrPacket::create_with_payload(IFLA_INFO_KIND, "bridge"))).build()
};
let req = NetlinkRequestBuilder::new(RTM_NEWLINK, NetlinkMsgFlags::NLM_F_CREATE | NetlinkMsgFlags::NLM_F_EXCL | NetlinkMsgFlags::NLM_F_ACK)
.append(ifi).build();
self.write(req.packet())?;
let reader = NetlinkReader::new(self);
reader.read_to_end()
}
}
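// Usage sketch (the connection constructor is an assumption about this
// pnetlink version; error handling elided):
//
// let mut conn = NetlinkConnection::new();
// conn.new_bridge("br0")?; // issues RTM_NEWLINK with IFLA_INFO_KIND = "bridge"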
| 41.170213 | 147 | 0.703359 |
fe6a6ec6d138c2b4588dea84f8f36f0a58f68202 | 1,629 |
pub struct IconRiceBowl {
props: crate::Props,
}
impl yew::Component for IconRiceBowl {
type Properties = crate::Props;
type Message = ();
fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
{
Self { props }
}
fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
{
true
}
fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
{
false
}
fn view(&self) -> yew::prelude::Html
{
yew::prelude::html! {
<svg
class=self.props.class.unwrap_or("")
width=self.props.size.unwrap_or(24).to_string()
height=self.props.size.unwrap_or(24).to_string()
viewBox="0 0 24 24"
fill=self.props.fill.unwrap_or("none")
stroke=self.props.color.unwrap_or("currentColor")
stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
>
<svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><rect fill="none" height="24" width="24"/><path d="M22,12L22,12c0-5.52-4.48-10-10-10S2,6.48,2,12c0,3.69,2.47,6.86,6,8.25V22h8v-1.75C19.53,18.86,22,15.69,22,12z M20,12h-4 V5.08C18.39,6.47,20,9.05,20,12z M14,4.26V12h-4V4.26C10.64,4.1,11.31,4,12,4S13.36,4.1,14,4.26z M4,12c0-2.95,1.61-5.53,4-6.92V12 H4z"/></svg>
</svg>
}
}
}
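// Rendering sketch (assumes the shared `crate::Props` used across the icon
// set exposes these optional fields; exact prop names may differ):
//
// html! { <IconRiceBowl size=32 color="seagreen" /> }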
| 35.413043 | 438 | 0.585635 |
e52ef45c2ecf953e5a9ee6fe1b723fd3b250e29b | 1,006 | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-cloudabi no std::fs support
#![feature(try_trait)]
use std::ops::Try;
fn main() {
// error for a `Try` type on a non-`Try` fn
std::fs::File::open("foo")?; //~ ERROR the `?` operator can only
// a non-`Try` type on a non-`Try` fn
()?; //~ ERROR the `?` operator can only
// an unrelated use of `Try`
try_trait_generic::<()>(); //~ ERROR the trait bound
}
fn try_trait_generic<T: Try>() -> T {
// and a non-`Try` object on a `Try` fn.
()?; //~ ERROR the `?` operator can only
loop {}
}
| 27.944444 | 68 | 0.646123 |
e4dcbfef8c9e8109e28a17472807a7d14228589c | 10,220 |
use clap::*;
pub fn build_cli() -> App<'static, 'static> {
let format = Arg::with_name("format")
.help("Output format")
.short("f")
.long("format")
.takes_value(true);
app_from_crate!()
.arg(Arg::with_name("database-name")
.help("Database name")
.short("n")
.long("name")
.takes_value(true))
.arg(Arg::with_name("database-path")
.help("Path to *.sqlite")
.short("p")
.long("path")
.takes_value(true))
.arg(Arg::with_name("alias-file")
.help("Path to *.yaml")
.short("a")
.long("alias")
.takes_value(true))
.arg(Arg::with_name("max-retry")
.help("Maximum retry")
.long("max-retry")
.takes_value(true))
.subcommand(SubCommand::with_name("alias")
.alias("a")
.about("Define expression alias")
.arg(Arg::with_name("local")
.help("Database local alias")
.short("l")
.long("local")
.takes_value(false))
.arg(Arg::with_name("recursive")
.help("Recursive")
.short("r")
.long("recursive")
.takes_value(false))
.arg(Arg::with_name("name"))
.arg(Arg::with_name("expression")
.min_values(0)))
.subcommand(SubCommand::with_name("completions")
.about("Generates completion scripts for your shell")
.arg(Arg::with_name("shell")
.required(true)
.possible_values(&["bash", "fish", "zsh"])
.help("The shell to generate the script for")))
.subcommand(SubCommand::with_name("compute")
.about("Compute hashes")
.arg(format.clone())
.arg(Arg::with_name("where")
.help("SQL Where clause")
.required(true)
.min_values(1))
.arg(Arg::with_name("chunk")
.help("Chunk size")
.long("chunk")
.takes_value(true)))
.subcommand(SubCommand::with_name("expand")
.about("Show alias expanded expression")
.arg(Arg::with_name("full")
.help("Full")
.short("f")
.long("full")
.takes_value(false))
.arg(Arg::with_name("expression")
.required(true)))
.subcommand(SubCommand::with_name("get")
.about("Get image information")
.arg(format.clone())
.arg(Arg::with_name("path")
.required(true)))
.subcommand(SubCommand::with_name("history")
.about("Search expression history"))
.subcommand(
load_args(
SubCommand::with_name("load")
.alias("l")
.about("Load directory or file")
.arg(Arg::with_name("update")
.help("Update exising files")
.short("u")
.long("update")
.takes_value(false))
.arg(Arg::with_name("path")
.required(true)
.min_values(1))))
.subcommand(
load_args(
SubCommand::with_name("load-list")
.alias("l")
.about("Load from list file")
.arg(Arg::with_name("update")
.help("Update exising files")
.short("u")
.long("update")
.takes_value(false))
.arg(Arg::with_name("list-file")
.required(true)
.min_values(0))))
.subcommand(SubCommand::with_name("meta")
.about("Compute metaformation")
.arg(Arg::with_name("path")
.required(true))
.arg(format.clone()))
.subcommand(SubCommand::with_name("path")
.about("Show database path"))
.subcommand(SubCommand::with_name("reset")
.about("Clear all data"))
.subcommand(SubCommand::with_name("search")
.alias("s")
.alias("select")
.about("Search images")
.arg(format)
.arg(Arg::with_name("vacuum")
.help("Remove entries that do not exist")
.short("v")
.long("vacuum")
.takes_value(false))
.arg(Arg::with_name("where")
.help("SQL Where clause")
.required(true)
.min_values(1)))
.subcommand(SubCommand::with_name("server")
.about("Web App")
.arg(Arg::with_name("download-to")
.help("Download to this directory")
.short("d")
.long("download-to")
.takes_value(true))
.arg(Arg::with_name("port")
.help("Server port")
.short("p")
.long("port")
.takes_value(true))
.arg(Arg::with_name("root")
.help("Static file root")
.short("r")
.long("root")
.takes_value(true)))
.subcommand(SubCommand::with_name("tag")
.alias("t")
.about("Manage tags")
.subcommand(SubCommand::with_name("add")
.alias("a")
.about("Add tags")
.arg(Arg::with_name("path")
.required(true))
.arg(Arg::with_name("source")
.required(true))
.arg(Arg::with_name("tag")
.required(true)
.min_values(1)))
.subcommand(SubCommand::with_name("clear")
.alias("c")
.about("Clear tags")
.arg(Arg::with_name("path")
.required(true))
.arg(Arg::with_name("source")
.required(true)))
.subcommand(SubCommand::with_name("remove")
.alias("r")
.about("Remove tags")
.arg(Arg::with_name("path")
.required(true))
.arg(Arg::with_name("source")
.required(true))
.arg(Arg::with_name("tag")
.required(true)
.min_values(1)))
.subcommand(SubCommand::with_name("set")
.alias("s")
.about("Set tags")
.arg(Arg::with_name("path")
.required(true))
.arg(Arg::with_name("source")
.required(true))
.arg(Arg::with_name("tag")
.min_values(0)))
.subcommand(SubCommand::with_name("show")
.alias("S")
.about("Show tags")
.arg(Arg::with_name("path")
.required(false))))
.subcommand(SubCommand::with_name("unalias")
.alias("s")
.about("Unalias")
.arg(Arg::with_name("local")
.help("Database local alias")
.short("l")
.long("local")
.takes_value(false))
.arg(Arg::with_name("name")
.required(true)))
.subcommand(SubCommand::with_name("vacuum")
.about("Remove deleted files")
.arg(Arg::with_name("prefix")
.help("Path prefix")
.short("p")
.long("prefix")
.takes_value(true)))
}
fn load_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> {
app.arg(Arg::with_name("tag-script")
.help("Tag generator script")
.short("t")
.long("tag-script")
.takes_value(true))
.arg(Arg::with_name("tag-source")
.help("Tag source")
.long("tag-source")
.takes_value(true))
.arg(Arg::with_name("check-extension")
.help("Check file extension before load")
.short("c")
.long("check-extension"))
.arg(Arg::with_name("dhash")
.help("Compute dhash")
.short("d")
.long("dhash"))
.arg(Arg::with_name("dry-run")
.help("Dry run")
.long("dry-run")
.takes_value(false))
.arg(Arg::with_name("skip-errors")
.help("Skip errors")
.short("s")
.long("skip-errors")
.takes_value(false))
}
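// A minimal wiring sketch for a binary entry point (clap 2.x style; the
// dispatch body is left out):
//
// fn main() {
//     let matches = build_cli().get_matches();
//     match matches.subcommand() {
//         ("search", Some(sub)) => { /* run search with sub.values_of("where") */ },
//         _ => {},
//     }
// }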
| 42.761506 | 73 | 0.378474 |
f85d7e10ecf6d381369efaed58a31c5b5654da5b | 1,827 | #[cfg(not(windows))]
pub(crate) mod syscalls;
#[cfg(not(target_os = "redox"))]
mod dir;
#[cfg(not(any(
target_os = "dragonfly",
target_os = "illumos",
target_os = "ios",
target_os = "freebsd",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "redox",
target_os = "wasi"
)))]
mod makedev;
mod types;
#[cfg(not(target_os = "redox"))]
pub use dir::{Dir, DirEntry};
#[cfg(not(any(
target_os = "dragonfly",
target_os = "illumos",
target_os = "ios",
target_os = "freebsd",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "redox",
target_os = "wasi"
)))]
pub use makedev::{major, makedev, minor};
#[cfg(not(any(
target_os = "dragonfly",
target_os = "illumos",
target_os = "ios",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "redox"
)))]
pub use types::Advice;
#[cfg(not(any(
target_os = "illumos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "redox"
)))]
pub use types::FallocateFlags;
#[cfg(not(target_os = "wasi"))]
pub use types::FlockOperation;
#[cfg(not(any(
target_os = "illumos",
target_os = "netbsd",
target_os = "redox",
target_os = "wasi"
)))]
pub use types::StatFs;
#[cfg(any(target_os = "ios", target_os = "macos"))]
pub use types::{copyfile_state_t, CloneFlags, CopyfileFlags};
pub use types::{Access, Dev, FdFlags, FileType, Mode, OFlags, RawMode, Stat};
#[cfg(not(target_os = "redox"))]
pub use types::{AtFlags, UTIME_NOW, UTIME_OMIT};
#[cfg(any(target_os = "android", target_os = "linux"))]
pub use types::{FsWord, MemfdFlags, RenameFlags, ResolveFlags, NFS_SUPER_MAGIC, PROC_SUPER_MAGIC};
#[cfg(all(target_os = "linux", target_env = "gnu"))]
pub use types::{Statx, StatxFlags};
| 26.478261 | 98 | 0.633279 |
ab5bcc3b8b9d35283f61544e988ef2981c9f2c5d | 41,196 | //! A module containing code related to SpanTree generation.
use crate::prelude::*;
use enso_text::unit::*;
use crate::generate::context::CalledMethodInfo;
use crate::node;
use crate::node::InsertionPointType;
use crate::node::Payload;
use crate::ArgumentInfo;
use crate::Node;
use crate::SpanTree;
use ast::assoc::Assoc;
use ast::crumbs::Located;
use ast::opr::GeneralizedInfix;
use ast::Ast;
use ast::HasRepr;
use ast::MacroAmbiguousSegment;
use ast::MacroMatchSegment;
// ==============
// === Export ===
// ==============
pub mod context;
pub mod macros;
pub use context::Context;
// =============
// === Trait ===
// =============
/// A trait for all types from which we can generate the corresponding SpanTree. Meant to be
/// implemented for all AST-like structures.
pub trait SpanTreeGenerator<T> {
    /// Generate a node together with its whole subtree.
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>>;
/// Generate tree for this AST treated as root for the whole expression.
fn generate_tree(&self, context: &impl Context) -> FallibleResult<SpanTree<T>> {
let root = self.generate_node(node::Kind::Root, context)?;
Ok(SpanTree { root })
}
}
// ==============
// === String ===
// ==============
impl<T: Payload> SpanTreeGenerator<T> for &str {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
_: &impl Context,
) -> FallibleResult<Node<T>> {
Ok(Node::<T>::new().with_kind(kind).with_size(self.chars().count().into()))
}
}
impl<T: Payload> SpanTreeGenerator<T> for String {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>> {
self.as_str().generate_node(kind, context)
}
}
// =================
// === Utilities ===
// =================
// === Child Generator ===
/// A utility to generate children with increasing offsets.
#[derive(Debug, Default)]
struct ChildGenerator<T> {
current_offset: Bytes,
children: Vec<node::Child<T>>,
}
impl<T: Payload> ChildGenerator<T> {
    /// Add spacing to the current generator state. It will be taken into account for the next
    /// generated children's offsets.
fn spacing(&mut self, size: usize) {
self.current_offset += Bytes::from(size);
}
fn generate_ast_node(
&mut self,
child_ast: Located<Ast>,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<&mut node::Child<T>> {
let kind = kind.into();
let node = child_ast.item.generate_node(kind, context)?;
Ok(self.add_node(child_ast.crumbs, node))
}
fn add_node(&mut self, ast_crumbs: ast::Crumbs, node: Node<T>) -> &mut node::Child<T> {
let offset = self.current_offset;
let child = node::Child { node, offset, ast_crumbs };
self.current_offset += child.node.size;
self.children.push(child);
self.children.last_mut().unwrap()
}
fn generate_empty_node(&mut self, insert_type: InsertionPointType) -> &mut node::Child<T> {
let child = node::Child {
node: Node::<T>::new().with_kind(insert_type),
offset: self.current_offset,
ast_crumbs: vec![],
};
self.children.push(child);
self.children.last_mut().unwrap()
}
fn reverse_children(&mut self) {
self.children.reverse();
for child in &mut self.children {
child.offset = self.current_offset - child.offset - child.node.size;
}
}
}
// =============================
// === Trait Implementations ===
// =============================
/// Helper structure constructed from the Ast that forms the base of a prefix application.
///
/// It recognizes whether the base uses method-style notation (`this.method` instead of
/// `method this`) and determines the name of the invoked function.
#[derive(Clone, Debug)]
struct ApplicationBase<'a> {
/// The name of invoked function.
function_name: Option<&'a str>,
/// True when Ast uses method notation to pass this as an invocation target.
has_target: bool,
}
impl<'a> ApplicationBase<'a> {
fn new(ast: &'a Ast) -> Self {
if let Some(chain) = ast::opr::as_access_chain(ast) {
let get_name = || -> Option<&'a str> {
let crumbs = chain.enumerate_operands().last()??.crumbs;
let ast = ast.get_traversing(&crumbs).ok()?;
ast::identifier::name(ast)
};
ApplicationBase { function_name: get_name(), has_target: true }
} else {
ApplicationBase { function_name: ast::identifier::name(ast), has_target: false }
}
}
fn prefix_params(
&self,
invocation_info: Option<CalledMethodInfo>,
) -> impl ExactSizeIterator<Item = ArgumentInfo> {
let mut ret = invocation_info.map(|info| info.parameters).unwrap_or_default().into_iter();
if self.has_target {
ret.next();
}
ret
}
}
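// Example: for an application like `foo.bar baz`, the base `foo.bar` is an
// access chain, so `new` yields `function_name: Some("bar")` with
// `has_target: true`; for plain `bar baz` the base is the identifier `bar`,
// giving `function_name: Some("bar")` with `has_target: false`.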
// === AST ===
impl<T: Payload> SpanTreeGenerator<T> for Ast {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>> {
generate_node_for_ast(self, kind.into(), context)
}
}
fn generate_node_for_ast<T: Payload>(
ast: &Ast,
kind: node::Kind,
context: &impl Context,
) -> FallibleResult<Node<T>> {
// Code like `ast.func` or `a+b+c`.
if let Some(infix) = GeneralizedInfix::try_new(ast) {
let chain = infix.flatten();
let app_base = ApplicationBase::new(ast);
let invocation = || -> Option<CalledMethodInfo> {
context.call_info(ast.id?, Some(app_base.function_name?))
}();
// All prefix params are missing arguments, since there is no prefix application.
let missing_args = app_base.prefix_params(invocation);
let arity = missing_args.len();
let base_node_kind = if arity == 0 { kind.clone() } else { node::Kind::Operation };
let node = chain.generate_node(base_node_kind, context)?;
let provided_prefix_arg_count = 0;
Ok(generate_expected_arguments(node, kind, provided_prefix_arg_count, missing_args))
} else {
match ast.shape() {
ast::Shape::Prefix(_) =>
ast::prefix::Chain::from_ast(ast).unwrap().generate_node(kind, context),
            // Lambdas should fall into the `_` case, because we don't want to create
            // subports for them.
ast::Shape::Match(_) if ast::macros::as_lambda_match(ast).is_none() =>
ast::known::Match::try_new(ast.clone_ref()).unwrap().generate_node(kind, context),
ast::Shape::Ambiguous(_) => ast::known::Ambiguous::try_new(ast.clone_ref())
.unwrap()
.generate_node(kind, context),
_ => {
let size = ast.len();
let ast_id = ast.id;
let children = default();
let name = ast::identifier::name(ast);
let payload = default();
if let Some(info) = ast.id.and_then(|id| context.call_info(id, name)) {
let node = {
let kind = node::Kind::Operation;
Node { kind, size, children, ast_id, payload }
};
// Note that in this place it is impossible that Ast is in form of
// `this.method` -- it is covered by the former if arm. As such, we don't
// need to use `ApplicationBase` here as we do elsewhere.
let provided_prefix_arg_count = 0;
let params = info.parameters.into_iter();
Ok(generate_expected_arguments(node, kind, provided_prefix_arg_count, params))
} else {
Ok(Node { kind, size, children, ast_id, payload })
}
}
}
}
}
// === Operators (Sections and Infixes) ===
impl<T: Payload> SpanTreeGenerator<T> for ast::opr::Chain {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>> {
generate_node_for_opr_chain(self, kind.into(), context)
}
}
fn generate_node_for_opr_chain<T: Payload>(
this: &ast::opr::Chain,
kind: node::Kind,
context: &impl Context,
) -> FallibleResult<Node<T>> {
    // Removing operands is possible only when the chain has at least 3 of them
    // (the target and two arguments).
let removable = this.args.len() >= 2;
let node_and_offset: FallibleResult<(Node<T>, usize)> = match &this.target {
Some(target) => {
let kind = node::Kind::this().with_removable(removable);
let node = target.arg.generate_node(kind, context)?;
Ok((node, target.offset))
}
None => Ok((Node::<T>::new().with_kind(InsertionPointType::BeforeTarget), 0)),
};
    // In this fold we pass the last generated node and the offset after it, wrapped in a Result.
let (node, _) = this.args.iter().enumerate().fold(node_and_offset, |result, (i, elem)| {
        // Here we generate children as if the operator were left-associative. Then, if it is
        // actually right-associative, we just reverse the generated children and their offsets.
let (node, off) = result?;
let is_first = i == 0;
let is_last = i + 1 == this.args.len();
let has_left = !node.is_insertion_point();
        // The target is the first element of the chain in this context.
let has_target = is_first && has_left;
let opr_crumbs = elem.crumb_to_operator(has_left);
let opr_ast = Located::new(opr_crumbs, elem.operator.ast().clone_ref());
let left_crumbs = if has_left { vec![elem.crumb_to_previous()] } else { vec![] };
let mut gen = ChildGenerator::default();
if has_target {
gen.generate_empty_node(InsertionPointType::BeforeTarget);
}
gen.add_node(left_crumbs, node);
if has_target {
gen.generate_empty_node(InsertionPointType::AfterTarget);
}
gen.spacing(off);
gen.generate_ast_node(opr_ast, node::Kind::Operation, context)?;
if let Some(operand) = &elem.operand {
let arg_crumbs = elem.crumb_to_operand(has_left);
let arg_ast = Located::new(arg_crumbs, operand.arg.clone_ref());
gen.spacing(operand.offset);
gen.generate_ast_node(
arg_ast,
node::Kind::argument().with_removable(removable),
context,
)?;
}
gen.generate_empty_node(InsertionPointType::Append);
if ast::opr::assoc(&this.operator) == Assoc::Right {
gen.reverse_children();
}
Ok((
Node {
kind: if is_last { kind.clone() } else { node::Kind::Chained },
size: gen.current_offset,
children: gen.children,
ast_id: elem.infix_id,
payload: default(),
},
elem.offset,
))
})?;
Ok(node)
}
// === Application ===
impl<T: Payload> SpanTreeGenerator<T> for ast::prefix::Chain {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>> {
generate_node_for_prefix_chain(self, kind.into(), context)
}
}
fn generate_node_for_prefix_chain<T: Payload>(
this: &ast::prefix::Chain,
kind: node::Kind,
context: &impl Context,
) -> FallibleResult<Node<T>> {
let base = ApplicationBase::new(&this.func);
let invocation_info = this.id().and_then(|id| context.call_info(id, base.function_name));
let known_args = invocation_info.is_some();
let mut known_params = base.prefix_params(invocation_info);
let prefix_arity = this.args.len().max(known_params.len());
use ast::crumbs::PrefixCrumb::*;
    // Removing arguments is possible only if there are at least two of them.
let removable = this.args.len() >= 2;
let node = this.func.generate_node(node::Kind::Operation, context);
let ret = this.args.iter().enumerate().fold(node, |node, (i, arg)| {
let node = node?;
let is_first = i == 0;
let is_last = i + 1 == prefix_arity;
let arg_kind = if is_first && !base.has_target {
node::Kind::from(node::Kind::this().with_removable(removable))
} else {
node::Kind::from(node::Kind::argument().with_removable(removable))
};
let mut gen = ChildGenerator::default();
gen.add_node(vec![Func.into()], node);
gen.spacing(arg.sast.off);
if !known_args && matches!(arg_kind, node::Kind::This { .. }) {
gen.generate_empty_node(InsertionPointType::BeforeTarget);
}
let arg_ast = arg.sast.wrapped.clone_ref();
let arg_child: &mut node::Child<T> =
gen.generate_ast_node(Located::new(Arg, arg_ast), arg_kind, context)?;
if let Some(info) = known_params.next() {
arg_child.node.set_argument_info(info)
}
if !known_args {
gen.generate_empty_node(InsertionPointType::Append);
}
Ok(Node {
kind: if is_last { kind.clone() } else { node::Kind::Chained },
size: gen.current_offset,
children: gen.children,
ast_id: arg.prefix_id,
payload: default(),
})
})?;
Ok(generate_expected_arguments(ret, kind, this.args.len(), known_params))
}
// === Match ===
impl<T: Payload> SpanTreeGenerator<T> for ast::known::Match {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>> {
generate_node_for_known_match(self, kind.into(), context)
}
}
fn generate_node_for_known_match<T: Payload>(
this: &ast::known::Match,
kind: node::Kind,
context: &impl Context,
) -> FallibleResult<Node<T>> {
let removable = false;
let children_kind = node::Kind::argument().with_removable(removable);
let mut gen = ChildGenerator::default();
if let Some(pat) = &this.pfx {
for macros::AstInPattern { ast, crumbs } in macros::all_ast_nodes_in_pattern(pat) {
let ast_crumb = ast::crumbs::MatchCrumb::Pfx { val: crumbs };
let located_ast = Located::new(ast_crumb, ast.wrapped);
gen.generate_ast_node(located_ast, children_kind.clone(), context)?;
gen.spacing(ast.off);
}
}
let first_segment_index = 0;
generate_children_from_segment(&mut gen, first_segment_index, &this.segs.head, context)?;
for (index, segment) in this.segs.tail.iter().enumerate() {
gen.spacing(segment.off);
generate_children_from_segment(&mut gen, index + 1, &segment.wrapped, context)?;
}
Ok(Node {
kind,
size: gen.current_offset,
children: gen.children,
ast_id: this.id(),
payload: default(),
})
}
fn generate_children_from_segment<T: Payload>(
gen: &mut ChildGenerator<T>,
index: usize,
segment: &MacroMatchSegment<Ast>,
context: &impl Context,
) -> FallibleResult {
// generate child for head
let ast = segment.head.clone_ref();
let segment_crumb = ast::crumbs::SegmentMatchCrumb::Head;
let ast_crumb = ast::crumbs::MatchCrumb::Segs { val: segment_crumb, index };
let located_ast = Located::new(ast_crumb, ast);
gen.generate_ast_node(located_ast, node::Kind::Token, context)?;
for macros::AstInPattern { ast, crumbs } in macros::all_ast_nodes_in_pattern(&segment.body) {
let child_kind = match crumbs.last() {
Some(ast::crumbs::PatternMatchCrumb::Tok) => node::Kind::Token,
_ => node::Kind::argument().into(),
};
gen.spacing(ast.off);
let segment_crumb = ast::crumbs::SegmentMatchCrumb::Body { val: crumbs };
let ast_crumb = ast::crumbs::MatchCrumb::Segs { val: segment_crumb, index };
let located_ast = Located::new(ast_crumb, ast.wrapped);
gen.generate_ast_node(located_ast, child_kind, context)?;
}
Ok(())
}
// === Ambiguous ===
impl<T: Payload> SpanTreeGenerator<T> for ast::known::Ambiguous {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>> {
generate_node_for_known_ambiguous(self, kind.into(), context)
}
}
fn generate_node_for_known_ambiguous<T: Payload>(
this: &ast::known::Ambiguous,
kind: node::Kind,
context: &impl Context,
) -> FallibleResult<Node<T>> {
let mut gen = ChildGenerator::default();
let first_segment_index = 0;
generate_children_from_ambiguous(&mut gen, first_segment_index, &this.segs.head, context)?;
for (index, segment) in this.segs.tail.iter().enumerate() {
gen.spacing(segment.off);
generate_children_from_ambiguous(&mut gen, index + 1, &segment.wrapped, context)?;
}
Ok(Node {
kind,
size: gen.current_offset,
children: gen.children,
ast_id: this.id(),
payload: default(),
})
}
fn generate_children_from_ambiguous<T: Payload>(
gen: &mut ChildGenerator<T>,
index: usize,
segment: &MacroAmbiguousSegment<Ast>,
context: &impl Context,
) -> FallibleResult {
let children_kind = node::Kind::argument();
// generate child for head
let ast = segment.head.clone_ref();
let segment_crumb = ast::crumbs::AmbiguousSegmentCrumb::Head;
let ast_crumb = ast::crumbs::AmbiguousCrumb { field: segment_crumb, index };
let located_ast = Located::new(ast_crumb, ast);
gen.generate_ast_node(located_ast, node::Kind::Token, context)?;
if let Some(sast) = &segment.body {
gen.spacing(sast.off);
let field = ast::crumbs::AmbiguousSegmentCrumb::Body;
let located_ast =
Located::new(ast::crumbs::AmbiguousCrumb { index, field }, sast.clone_ref());
gen.generate_ast_node(located_ast, children_kind, context)?;
}
Ok(())
}
// === Common Utility ===
/// Build a prefix application-like span tree structure where the prefix argument has not been
/// provided, but its information is known from the method's `ArgumentInfo`.
///
/// `index` is the argument's position in the prefix chain, which may differ from the parameter's
/// index in the method's parameter list.
fn generate_expected_argument<T: Payload>(
node: Node<T>,
kind: node::Kind,
index: usize,
is_last: bool,
argument_info: ArgumentInfo,
) -> Node<T> {
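    // Shape sketch: the existing `node` becomes the first child, followed by an
    // empty `ExpectedArgument(index)` insertion point carrying the expected
    // parameter's `ArgumentInfo`.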
let mut gen = ChildGenerator::default();
gen.add_node(ast::Crumbs::new(), node);
let arg_node = gen.generate_empty_node(InsertionPointType::ExpectedArgument(index));
arg_node.node.set_argument_info(argument_info);
Node {
kind: if is_last { kind } else { node::Kind::Chained },
size: gen.current_offset,
children: gen.children,
ast_id: None,
payload: default(),
}
}
fn generate_expected_arguments<T: Payload>(
node: Node<T>,
kind: node::Kind,
supplied_prefix_arg_count: usize,
expected_args: impl ExactSizeIterator<Item = ArgumentInfo>,
) -> Node<T> {
let arity = supplied_prefix_arg_count + expected_args.len();
(supplied_prefix_arg_count..).zip(expected_args).fold(node, |node, (index, parameter)| {
let is_last = index + 1 == arity;
generate_expected_argument(node, kind.clone(), index, is_last, parameter)
})
}
// ===================
// === MockContext ===
// ===================
use ast::Id;
/// Mock version of `Context`. Useful for debugging and testing.
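///
/// # Example (sketch)
///
/// ```ignore
/// // `id` and `info` are assumed to be provided by the surrounding test:
/// let ctx = MockContext::new_single(id, info);
/// assert!(ctx.call_info(id, None).is_some());
/// ```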
#[derive(Clone, Debug, Default)]
pub struct MockContext {
map: HashMap<Id, CalledMethodInfo>,
}
impl MockContext {
/// Constructor.
pub fn new_single(id: Id, info: CalledMethodInfo) -> Self {
let mut ret = Self::default();
ret.map.insert(id, info);
ret
}
}
impl Context for MockContext {
fn call_info(&self, id: Id, _name: Option<&str>) -> Option<CalledMethodInfo> {
self.map.get(&id).cloned()
}
}
// ============
// === Test ===
// ============
#[cfg(test)]
mod test {
use super::*;
use crate::builder::TreeBuilder;
use crate::generate::context::CalledMethodInfo;
use crate::node;
use crate::node::InsertionPointType::*;
use crate::node::Payload;
use crate::ArgumentInfo;
use ast::crumbs::AmbiguousCrumb;
use ast::crumbs::AmbiguousSegmentCrumb;
use ast::crumbs::InfixCrumb;
use ast::crumbs::PatternMatchCrumb;
use ast::crumbs::PrefixCrumb;
use ast::crumbs::SectionLeftCrumb;
use ast::crumbs::SectionRightCrumb;
use ast::crumbs::SectionSidesCrumb;
use ast::Crumbs;
use ast::IdMap;
use parser::Parser;
use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_test::wasm_bindgen_test_configure;
wasm_bindgen_test_configure!(run_in_browser);
    /// A helper function which removes information about expression ids from the tree rooted at
    /// `node`.
    ///
    /// It is used in tests. Because the parser can assign ids as it pleases, the expression ids
    /// are removed before comparing trees, to keep the tests cleaner.
fn clear_expression_ids<T>(node: &mut Node<T>) {
node.ast_id = None;
for child in &mut node.children {
clear_expression_ids(&mut child.node);
}
}
/// A helper function which removes parameter information from nodes.
///
/// It is used in tests. Because constructing trees with set parameter infos is troublesome,
/// it is often more convenient to test them separately and then erase infos and test for shape.
fn clear_parameter_infos<T: Payload>(node: &mut Node<T>) {
node.set_argument_info(default());
for child in &mut node.children {
clear_parameter_infos(&mut child.node);
}
}
#[wasm_bindgen_test]
fn generating_span_tree() {
let parser = Parser::new_or_panic();
let mut id_map = IdMap::default();
id_map.generate(0..15);
id_map.generate(0..11);
id_map.generate(12..13);
id_map.generate(14..15);
id_map.generate(4..11);
let ast = parser.parse_line_ast_with_id_map("2 + foo bar - 3", id_map.clone()).unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
// Check the expression ids we defined:
for id_map_entry in id_map.vec {
let (span, id) = id_map_entry;
let node = tree.root_ref().find_by_span(&span);
assert!(node.is_some(), "Node with span {} not found", span);
assert_eq!(node.unwrap().node.ast_id, Some(id));
}
// Check the other fields:
clear_expression_ids(&mut tree.root);
let expected = TreeBuilder::new(15)
.add_empty_child(0, BeforeTarget)
.add_child(0, 11, node::Kind::this(), InfixCrumb::LeftOperand)
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 1, node::Kind::this(), InfixCrumb::LeftOperand)
.add_empty_child(1, AfterTarget)
.add_leaf(2, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_child(4, 7, node::Kind::argument(), InfixCrumb::RightOperand)
.add_leaf(0, 3, node::Kind::Operation, PrefixCrumb::Func)
.add_empty_child(4, BeforeTarget)
.add_leaf(4, 3, node::Kind::this(), PrefixCrumb::Arg)
.add_empty_child(7, Append)
.done()
.add_empty_child(11, Append)
.done()
.add_empty_child(11, AfterTarget)
.add_leaf(12, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_leaf(14, 1, node::Kind::argument(), InfixCrumb::RightOperand)
.add_empty_child(15, Append)
.build();
assert_eq!(expected, tree)
}
#[wasm_bindgen_test]
fn generate_span_tree_with_chains() {
let parser = Parser::new_or_panic();
let ast = parser.parse_line_ast("2 + 3 + foo bar baz 13 + 5").unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
clear_expression_ids(&mut tree.root);
let expected = TreeBuilder::new(26)
.add_child(0, 22, node::Kind::Chained, InfixCrumb::LeftOperand)
.add_child(0, 5, node::Kind::Chained, InfixCrumb::LeftOperand)
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 1, node::Kind::this().removable(), InfixCrumb::LeftOperand)
.add_empty_child(1, AfterTarget)
.add_leaf(2, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_leaf(4, 1, node::Kind::argument().removable(), InfixCrumb::RightOperand)
.add_empty_child(5, Append)
.done()
.add_leaf(6, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_child(8, 14, node::Kind::argument().removable(), InfixCrumb::RightOperand)
.add_child(0, 11, node::Kind::Chained, PrefixCrumb::Func)
.add_child(0, 7, node::Kind::Chained, PrefixCrumb::Func)
.add_leaf(0, 3, node::Kind::Operation, PrefixCrumb::Func)
.add_empty_child(4, BeforeTarget)
.add_leaf(4, 3, node::Kind::this().removable(), PrefixCrumb::Arg)
.add_empty_child(7, Append)
.done()
.add_leaf(8, 3, node::Kind::argument().removable(), PrefixCrumb::Arg)
.add_empty_child(11, Append)
.done()
.add_leaf(12, 2, node::Kind::argument().removable(), PrefixCrumb::Arg)
.add_empty_child(14, Append)
.done()
.add_empty_child(22, Append)
.done()
.add_leaf(23, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_leaf(25, 1, node::Kind::argument().removable(), InfixCrumb::RightOperand)
.add_empty_child(26, Append)
.build();
assert_eq!(expected, tree);
}
#[wasm_bindgen_test]
fn generating_span_tree_from_right_assoc_operator() {
let parser = Parser::new_or_panic();
let ast = parser.parse_line_ast("1,2,3").unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
clear_expression_ids(&mut tree.root);
let expected = TreeBuilder::new(5)
.add_empty_child(0, Append)
.add_leaf(0, 1, node::Kind::argument().removable(), InfixCrumb::LeftOperand)
.add_leaf(1, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_child(2, 3, node::Kind::Chained, InfixCrumb::RightOperand)
.add_empty_child(0, Append)
.add_leaf(0, 1, node::Kind::argument().removable(), InfixCrumb::LeftOperand)
.add_leaf(1, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_empty_child(2, AfterTarget)
.add_leaf(2, 1, node::Kind::this().removable(), InfixCrumb::RightOperand)
.add_empty_child(3, BeforeTarget)
.done()
.build();
assert_eq!(expected, tree)
}
#[wasm_bindgen_test]
fn generating_span_tree_from_section() {
let parser = Parser::new_or_panic();
        // The star makes the `SectionSides` ast one of the parameters of the + chain. The first
        // + makes a SectionRight, and the last + makes a SectionLeft.
let ast = parser.parse_line_ast("+ * + + 2 +").unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
clear_expression_ids(&mut tree.root);
let expected = TreeBuilder::new(11)
.add_child(0, 9, node::Kind::Chained, SectionLeftCrumb::Arg)
.add_child(0, 5, node::Kind::Chained, InfixCrumb::LeftOperand)
.add_child(0, 3, node::Kind::Chained, SectionLeftCrumb::Arg)
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 1, node::Kind::Operation, SectionRightCrumb::Opr)
.add_child(2, 1, node::Kind::argument().removable(), SectionRightCrumb::Arg)
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 1, node::Kind::Operation, SectionSidesCrumb)
.add_empty_child(1, Append)
.done()
.add_empty_child(3, Append)
.done()
.add_leaf(4, 1, node::Kind::Operation, SectionLeftCrumb::Opr)
.add_empty_child(5, Append)
.done()
.add_leaf(6, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_leaf(8, 1, node::Kind::argument().removable(), InfixCrumb::RightOperand)
.add_empty_child(9, Append)
.done()
.add_leaf(10, 1, node::Kind::Operation, SectionLeftCrumb::Opr)
.add_empty_child(11, Append)
.build();
assert_eq!(expected, tree);
}
#[wasm_bindgen_test]
fn generating_span_tree_from_right_assoc_section() {
let parser = Parser::new_or_panic();
let ast = parser.parse_line_ast(",2,").unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
clear_expression_ids(&mut tree.root);
let expected = TreeBuilder::new(3)
.add_empty_child(0, Append)
.add_leaf(0, 1, node::Kind::Operation, SectionRightCrumb::Opr)
.add_child(1, 2, node::Kind::Chained, SectionRightCrumb::Arg)
.add_empty_child(0, Append)
.add_leaf(0, 1, node::Kind::argument().removable(), SectionLeftCrumb::Arg)
.add_leaf(1, 1, node::Kind::Operation, SectionLeftCrumb::Opr)
.add_empty_child(2, BeforeTarget)
.done()
.build();
assert_eq!(expected, tree);
}
#[wasm_bindgen_test]
fn generating_span_tree_from_matched_macros() {
use PatternMatchCrumb::*;
let parser = Parser::new_or_panic();
let mut id_map = IdMap::default();
id_map.generate(0..29);
let expression = "if foo then (a + b) x else ()";
let ast = parser.parse_line_ast_with_id_map(expression, id_map.clone()).unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
// Check if expression id is set
let (_, expected_id) = id_map.vec.first().unwrap();
assert_eq!(tree.root_ref().ast_id, Some(*expected_id));
// Check the other fields
clear_expression_ids(&mut tree.root);
let seq = Seq { right: false };
let if_then_else_cr = vec![seq, Or, Build];
let parens_cr = vec![seq, Or, Or, Build];
let expected = TreeBuilder::new(29)
.add_leaf(0, 2, node::Kind::Token, segment_head_crumbs(0))
.add_leaf(3, 3, node::Kind::argument(), segment_body_crumbs(0, &if_then_else_cr))
.add_leaf(7, 4, node::Kind::Token, segment_head_crumbs(1))
.add_child(12, 9, node::Kind::argument(), segment_body_crumbs(1, &if_then_else_cr))
.add_child(0, 7, node::Kind::Operation, PrefixCrumb::Func)
.add_leaf(0, 1, node::Kind::Token, segment_head_crumbs(0))
.add_child(1, 5, node::Kind::argument(), segment_body_crumbs(0, &parens_cr))
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 1, node::Kind::this(), InfixCrumb::LeftOperand)
.add_empty_child(1, AfterTarget)
.add_leaf(2, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_leaf(4, 1, node::Kind::argument(), InfixCrumb::RightOperand)
.add_empty_child(5, Append)
.done()
.add_leaf(6, 1, node::Kind::Token, segment_head_crumbs(1))
.done()
.add_empty_child(8, BeforeTarget)
.add_leaf(8, 1, node::Kind::this(), PrefixCrumb::Arg)
.add_empty_child(9, Append)
.done()
.add_leaf(22, 4, node::Kind::Token, segment_head_crumbs(2))
.add_child(27, 2, node::Kind::argument(), segment_body_crumbs(2, &if_then_else_cr))
.add_leaf(0, 1, node::Kind::Token, segment_head_crumbs(0))
.add_leaf(1, 1, node::Kind::Token, segment_head_crumbs(1))
.done()
.build();
assert_eq!(expected, tree);
}
#[wasm_bindgen_test]
fn generating_span_tree_from_matched_list_macro() {
use PatternMatchCrumb::*;
let parser = Parser::new_or_panic();
let expression = "[a,b]";
let ast = parser.parse_line_ast(expression).unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
// Check the other fields
clear_expression_ids(&mut tree.root);
let left_seq = Seq { right: false };
let right_seq = Seq { right: true };
let many = Many { index: 0 };
let first_element_cr = vec![left_seq, Or, Or, left_seq, Build];
let second_element_cr = vec![left_seq, Or, Or, right_seq, many, right_seq, Build];
let comma_cr = vec![left_seq, Or, Or, right_seq, many, left_seq, Tok];
let expected = TreeBuilder::new(5)
.add_leaf(0, 1, node::Kind::Token, segment_head_crumbs(0))
.add_leaf(1, 1, node::Kind::argument(), segment_body_crumbs(0, &first_element_cr))
.add_leaf(2, 1, node::Kind::Token, segment_body_crumbs(0, &comma_cr))
.add_leaf(3, 1, node::Kind::argument(), segment_body_crumbs(0, &second_element_cr))
.add_leaf(4, 1, node::Kind::Token, segment_head_crumbs(1))
.build();
assert_eq!(expected, tree);
}
#[wasm_bindgen_test]
fn generating_span_tree_from_ambiguous_macros() {
let parser = Parser::new_or_panic();
let mut id_map = IdMap::default();
id_map.generate(0..2);
let ast = parser.parse_line_ast_with_id_map("(4", id_map.clone()).unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
// Check the expression id:
let (_, expected_id) = id_map.vec.first().unwrap();
assert_eq!(tree.root_ref().ast_id, Some(*expected_id));
// Check the other fields:
clear_expression_ids(&mut tree.root);
let head_crumb = AmbiguousCrumb { index: 0, field: AmbiguousSegmentCrumb::Head };
let body_crumb = AmbiguousCrumb { index: 0, field: AmbiguousSegmentCrumb::Body };
let expected = TreeBuilder::new(2)
.add_leaf(0, 1, node::Kind::Token, head_crumb)
.add_leaf(1, 1, node::Kind::argument(), body_crumb)
.build();
assert_eq!(expected, tree);
}
#[wasm_bindgen_test]
fn generating_span_tree_for_lambda() {
let parser = Parser::new_or_panic();
let ast = parser.parse_line_ast("foo a-> b + c").unwrap();
let mut tree = ast.generate_tree(&context::Empty).unwrap(): SpanTree;
clear_expression_ids(&mut tree.root);
let expected = TreeBuilder::new(13)
.add_leaf(0, 3, node::Kind::Operation, PrefixCrumb::Func)
.add_empty_child(4, BeforeTarget)
.add_leaf(4, 9, node::Kind::this(), PrefixCrumb::Arg)
.add_empty_child(13, Append)
.build();
assert_eq!(expected, tree);
}
#[wasm_bindgen_test]
fn generating_span_tree_for_unfinished_call() {
let parser = Parser::new_or_panic();
let this_param =
ArgumentInfo { name: Some("this".to_owned()), tp: Some("Any".to_owned()) };
let param1 =
ArgumentInfo { name: Some("arg1".to_owned()), tp: Some("Number".to_owned()) };
let param2 = ArgumentInfo { name: Some("arg2".to_owned()), tp: None };
// === Single function name ===
let ast = parser.parse_line_ast("foo").unwrap();
let invocation_info = CalledMethodInfo { parameters: vec![this_param.clone()] };
let ctx = MockContext::new_single(ast.id.unwrap(), invocation_info);
let mut tree = SpanTree::new(&ast, &ctx).unwrap(): SpanTree;
match tree.root_ref().leaf_iter().collect_vec().as_slice() {
[_func, arg0] => assert_eq!(arg0.argument_info().as_ref(), Some(&this_param)),
sth_else => panic!("There should be 2 leaves, found: {}", sth_else.len()),
}
let expected = TreeBuilder::new(3)
.add_leaf(0, 3, node::Kind::Operation, Crumbs::default())
.add_empty_child(3, ExpectedArgument(0))
.build();
clear_expression_ids(&mut tree.root);
clear_parameter_infos(&mut tree.root);
assert_eq!(tree, expected);
// === Complete application chain ===
let ast = parser.parse_line_ast("foo here").unwrap();
let invocation_info = CalledMethodInfo { parameters: vec![this_param.clone()] };
let ctx = MockContext::new_single(ast.id.unwrap(), invocation_info);
let mut tree = SpanTree::new(&ast, &ctx).unwrap(): SpanTree;
match tree.root_ref().leaf_iter().collect_vec().as_slice() {
[_func, arg0] => assert_eq!(arg0.argument_info().as_ref(), Some(&this_param)),
sth_else => panic!("There should be 2 leaves, found: {}", sth_else.len()),
}
let expected = TreeBuilder::new(8)
.add_leaf(0, 3, node::Kind::Operation, PrefixCrumb::Func)
.add_leaf(4, 4, node::Kind::this(), PrefixCrumb::Arg)
.build();
clear_expression_ids(&mut tree.root);
clear_parameter_infos(&mut tree.root);
assert_eq!(tree, expected);
// === Partial application chain ===
let ast = parser.parse_line_ast("foo here").unwrap();
let invocation_info = CalledMethodInfo {
parameters: vec![this_param.clone(), param1.clone(), param2.clone()],
};
let ctx = MockContext::new_single(ast.id.unwrap(), invocation_info);
let mut tree = SpanTree::new(&ast, &ctx).unwrap(): SpanTree;
match tree.root_ref().leaf_iter().collect_vec().as_slice() {
[_func, arg0, arg1, arg2] => {
assert_eq!(arg0.argument_info().as_ref(), Some(&this_param));
assert_eq!(arg1.argument_info().as_ref(), Some(¶m1));
assert_eq!(arg2.argument_info().as_ref(), Some(¶m2));
}
sth_else => panic!("There should be 4 leaves, found: {}", sth_else.len()),
}
let expected = TreeBuilder::new(8)
.add_child(0, 8, node::Kind::Chained, Crumbs::default())
.add_child(0, 8, node::Kind::Chained, Crumbs::default())
.add_leaf(0, 3, node::Kind::Operation, PrefixCrumb::Func)
.add_leaf(4, 4, node::Kind::this(), PrefixCrumb::Arg)
.done()
.add_empty_child(8, ExpectedArgument(1))
.done()
.add_empty_child(8, ExpectedArgument(2))
.build();
clear_expression_ids(&mut tree.root);
clear_parameter_infos(&mut tree.root);
assert_eq!(tree, expected);
// === Partial application chain - this argument ===
let ast = parser.parse_line_ast("here.foo").unwrap();
let invocation_info =
CalledMethodInfo { parameters: vec![this_param, param1.clone(), param2.clone()] };
let ctx = MockContext::new_single(ast.id.unwrap(), invocation_info);
let mut tree = SpanTree::new(&ast, &ctx).unwrap(): SpanTree;
match tree.root_ref().leaf_iter().collect_vec().as_slice() {
[_, _this, _, _, _func, _, arg1, arg2] => {
assert_eq!(arg1.argument_info().as_ref(), Some(¶m1));
assert_eq!(arg2.argument_info().as_ref(), Some(¶m2));
}
sth_else => panic!("There should be 8 leaves, found: {}", sth_else.len()),
}
let expected = TreeBuilder::new(8)
.add_child(0, 8, node::Kind::Chained, Crumbs::default())
.add_child(0, 8, node::Kind::Operation, Crumbs::default())
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 4, node::Kind::this(), InfixCrumb::LeftOperand)
.add_empty_child(4, AfterTarget)
.add_leaf(4, 1, node::Kind::Operation, InfixCrumb::Operator)
.add_leaf(5, 3, node::Kind::argument(), InfixCrumb::RightOperand)
.add_empty_child(8, Append)
.done()
.add_empty_child(8, ExpectedArgument(0))
.done()
.add_empty_child(8, ExpectedArgument(1))
.build();
clear_expression_ids(&mut tree.root);
clear_parameter_infos(&mut tree.root);
assert_eq!(tree, expected);
}
fn segment_body_crumbs(
index: usize,
pattern_crumb: &[PatternMatchCrumb],
) -> ast::crumbs::MatchCrumb {
let val = ast::crumbs::SegmentMatchCrumb::Body { val: pattern_crumb.to_vec() };
ast::crumbs::MatchCrumb::Segs { val, index }
}
fn segment_head_crumbs(index: usize) -> ast::crumbs::MatchCrumb {
let val = ast::crumbs::SegmentMatchCrumb::Head;
ast::crumbs::MatchCrumb::Segs { val, index }
}
}
| 38.32186 | 100 | 0.600689 |
643906f691377b994f94e8993b634760d8428fae | 478 | // ANCHOR: here
pub trait Summary {
fn summarize_author(&self) -> String;
fn summarize(&self) -> String {
format!("(Read more from {}...)", self.summarize_author())
}
}
// ANCHOR_END: here
pub struct Tweet {
pub username: String,
pub content: String,
pub reply: bool,
pub retweet: bool,
}
// ANCHOR: impl
impl Summary for Tweet {
fn summarize_author(&self) -> String {
format!("@{}", self.username)
}
}
// ANCHOR_END: impl
| 19.12 | 66 | 0.60251 |
f51c84cd167e0772e076353555f08c454da96ad0 | 16,735 | use std::{
ffi::CString,
fmt,
os::unix::{net::UnixStream, prelude::RawFd},
sync::Arc,
};
use crate::protocol::{Interface, Message, ObjectInfo};
pub use crate::types::server::{Credentials, DisconnectReason, GlobalInfo, InitError, InvalidId};
use super::server_impl;
/// A trait representing your data associated to an object
///
/// You will only be given access to it as a `&` reference, so you
/// need to handle interior mutability by yourself.
///
/// The methods of this trait will be invoked internally every time a
/// new object is created to initialize its data.
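///
/// # Example (sketch)
///
/// A minimal implementation that counts requests through interior mutability;
/// the `RequestCounter` type and its atomic counter are illustrative only:
///
/// ```ignore
/// use std::sync::atomic::{AtomicUsize, Ordering};
///
/// struct RequestCounter(AtomicUsize);
///
/// impl<D> ObjectData<D> for RequestCounter {
///     fn request(
///         self: Arc<Self>,
///         _handle: &mut Handle<D>,
///         _data: &mut D,
///         _client_id: ClientId,
///         _msg: Message<ObjectId>,
///     ) -> Option<Arc<dyn ObjectData<D>>> {
///         self.0.fetch_add(1, Ordering::Relaxed);
///         None // this sketch assumes the request carries no NewId argument
///     }
///     fn destroyed(&self, _: &mut D, _: ClientId, _: ObjectId) {}
/// }
/// ```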
pub trait ObjectData<D>: downcast_rs::DowncastSync {
/// Dispatch a request for the associated object
///
/// If the request has a NewId argument, the callback must return the object data
/// for the newly created object
fn request(
self: Arc<Self>,
handle: &mut Handle<D>,
data: &mut D,
client_id: ClientId,
msg: Message<ObjectId>,
) -> Option<Arc<dyn ObjectData<D>>>;
/// Notification that the object has been destroyed and is no longer active
fn destroyed(&self, data: &mut D, client_id: ClientId, object_id: ObjectId);
/// Helper for forwarding a Debug implementation of your `ObjectData` type
///
/// By default will just print `ObjectData { ... }`
#[cfg_attr(coverage, no_coverage)]
fn debug(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ObjectData").finish_non_exhaustive()
}
}
downcast_rs::impl_downcast!(sync ObjectData<D>);
impl<D: 'static> std::fmt::Debug for dyn ObjectData<D> {
#[cfg_attr(coverage, no_coverage)]
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.debug(f)
}
}
/// A trait representing the handling of new bound globals
pub trait GlobalHandler<D>: downcast_rs::DowncastSync {
/// Check if given client is allowed to interact with given global
///
/// If this function returns false, the client will not be notified of the existence
/// of this global, and any attempt to bind it will result in a protocol error as if
/// the global did not exist.
///
    /// The default implementation always returns true.
fn can_view(
&self,
_client_id: ClientId,
_client_data: &Arc<dyn ClientData<D>>,
_global_id: GlobalId,
) -> bool {
true
}
/// A global has been bound
///
/// Given client bound given global, creating given object.
///
/// The method must return the object data for the newly created object.
fn bind(
self: Arc<Self>,
handle: &mut Handle<D>,
data: &mut D,
client_id: ClientId,
global_id: GlobalId,
object_id: ObjectId,
) -> Arc<dyn ObjectData<D>>;
/// Helper for forwarding a Debug implementation of your `GlobalHandler` type
///
/// By default will just print `GlobalHandler { ... }`
#[cfg_attr(coverage, no_coverage)]
fn debug(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("GlobalHandler").finish_non_exhaustive()
}
}
impl<D: 'static> std::fmt::Debug for dyn GlobalHandler<D> {
#[cfg_attr(coverage, no_coverage)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.debug(f)
}
}
downcast_rs::impl_downcast!(sync GlobalHandler<D>);
/// A trait representing your data associated to a client
pub trait ClientData<D>: downcast_rs::DowncastSync {
/// Notification that a client was initialized
fn initialized(&self, client_id: ClientId);
/// Notification that a client is disconnected
fn disconnected(&self, client_id: ClientId, reason: DisconnectReason);
/// Helper for forwarding a Debug implementation of your `ClientData` type
///
    /// By default will just print `ClientData { ... }`
#[cfg_attr(coverage, no_coverage)]
fn debug(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ClientData").finish_non_exhaustive()
}
}
impl<D: 'static> std::fmt::Debug for dyn ClientData<D> {
#[cfg_attr(coverage, no_coverage)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.debug(f)
}
}
downcast_rs::impl_downcast!(sync ClientData<D>);
/// An id of an object on a wayland server.
#[derive(Clone, PartialEq, Eq)]
pub struct ObjectId {
pub(crate) id: server_impl::InnerObjectId,
}
impl ObjectId {
/// Returns whether this object is a null object.
pub fn is_null(&self) -> bool {
self.id.is_null()
}
/// Returns the interface of this object.
pub fn interface(&self) -> &'static Interface {
self.id.interface()
}
/// Check if two object IDs are associated with the same client
///
/// *Note:* This may spuriously return `false` if one (or both) of the objects to compare
/// is no longer valid.
pub fn same_client_as(&self, other: &ObjectId) -> bool {
self.id.same_client_as(&other.id)
}
/// Return the protocol-level numerical ID of this object
///
/// Protocol IDs are reused after object destruction, so this should not be used as a
    /// unique identifier.
pub fn protocol_id(&self) -> u32 {
self.id.protocol_id()
}
}
impl fmt::Display for ObjectId {
#[cfg_attr(coverage, no_coverage)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.id.fmt(f)
}
}
impl fmt::Debug for ObjectId {
#[cfg_attr(coverage, no_coverage)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.id.fmt(f)
}
}
/// An id of a client connected to the server.
#[derive(Clone, PartialEq, Eq)]
pub struct ClientId {
pub(crate) id: server_impl::InnerClientId,
}
impl fmt::Debug for ClientId {
#[cfg_attr(coverage, no_coverage)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.id.fmt(f)
}
}
/// The ID of a global
#[derive(Clone, PartialEq, Eq)]
pub struct GlobalId {
pub(crate) id: server_impl::InnerGlobalId,
}
impl fmt::Debug for GlobalId {
#[cfg_attr(coverage, no_coverage)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.id.fmt(f)
}
}
/// Main handle of a backend to the Wayland protocol
///
/// This type hosts most of the protocol-related functionality of the backend, and is the
/// main entry point for manipulating Wayland objects. It can be retrieved from the backend via
/// [`Backend::handle()`](super::Backend::handle), and is also given to you as an argument in
/// most event callbacks.
#[derive(Debug)]
pub struct Handle<D: 'static> {
pub(crate) handle: server_impl::InnerHandle<D>,
}
impl<D> Handle<D> {
/// Returns information about some object.
#[inline]
pub fn object_info(&self, id: ObjectId) -> Result<ObjectInfo, InvalidId> {
self.handle.object_info(id.id)
}
/// Returns the id of the client which owns the object.
#[inline]
pub fn get_client(&self, id: ObjectId) -> Result<ClientId, InvalidId> {
self.handle.get_client(id.id)
}
/// Returns the data associated with a client.
#[inline]
pub fn get_client_data(&self, id: ClientId) -> Result<Arc<dyn ClientData<D>>, InvalidId> {
self.handle.get_client_data(id.id)
}
    /// Retrieve the [`Credentials`] of a client
#[inline]
pub fn get_client_credentials(&self, id: ClientId) -> Result<Credentials, InvalidId> {
self.handle.get_client_credentials(id.id)
}
/// Returns an iterator over all clients connected to the server.
#[inline]
pub fn all_clients<'b>(&'b self) -> Box<dyn Iterator<Item = ClientId> + 'b> {
self.handle.all_clients()
}
/// Returns an iterator over all objects owned by a client.
#[inline]
pub fn all_objects_for<'b>(
&'b self,
client_id: ClientId,
) -> Result<Box<dyn Iterator<Item = ObjectId> + 'b>, InvalidId> {
self.handle.all_objects_for(client_id.id)
}
/// Retrieve the `ObjectId` for a wayland object given its protocol numerical ID
#[inline]
pub fn object_for_protocol_id(
&self,
client_id: ClientId,
interface: &'static Interface,
protocol_id: u32,
) -> Result<ObjectId, InvalidId> {
self.handle.object_for_protocol_id(client_id.id, interface, protocol_id)
}
/// Create a new object for given client
///
/// To ensure state coherence of the protocol, the created object should be immediately
/// sent as a "New ID" argument in an event to the client.
#[inline]
pub fn create_object(
&mut self,
client_id: ClientId,
interface: &'static Interface,
version: u32,
data: Arc<dyn ObjectData<D>>,
) -> Result<ObjectId, InvalidId> {
self.handle.create_object(client_id.id, interface, version, data)
}
/// Returns an object id that represents a null object.
#[inline]
pub fn null_id(&mut self) -> ObjectId {
self.handle.null_id()
}
/// Send an event to the client
///
/// Returns an error if the sender ID of the provided message is no longer valid.
///
/// **Panic:**
///
/// Checks against the protocol specification are done, and this method will panic if they do
/// not pass:
///
/// - the message opcode must be valid for the sender interface
/// - the argument list must match the prototype for the message associated with this opcode
#[inline]
pub fn send_event(&mut self, msg: Message<ObjectId>) -> Result<(), InvalidId> {
self.handle.send_event(msg)
}
/// Returns the data associated with an object.
#[inline]
pub fn get_object_data(&self, id: ObjectId) -> Result<Arc<dyn ObjectData<D>>, InvalidId> {
self.handle.get_object_data(id.id)
}
/// Sets the data associated with some object.
#[inline]
pub fn set_object_data(
&mut self,
id: ObjectId,
data: Arc<dyn ObjectData<D>>,
) -> Result<(), InvalidId> {
self.handle.set_object_data(id.id, data)
}
/// Posts an error on an object. This will also disconnect the client which created the object.
#[inline]
pub fn post_error(&mut self, object_id: ObjectId, error_code: u32, message: CString) {
self.handle.post_error(object_id.id, error_code, message)
}
/// Kills the connection to a client.
///
/// The disconnection reason determines the error message that is sent to the client (if any).
#[inline]
pub fn kill_client(&mut self, client_id: ClientId, reason: DisconnectReason) {
self.handle.kill_client(client_id.id, reason)
}
/// Creates a global of the specified interface and version and then advertises it to clients.
///
    /// The clients to which the global is advertised are determined by the implementation of the [`GlobalHandler`].
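    ///
    /// # Example (sketch)
    ///
    /// Assuming `MY_INTERFACE` is some static [`Interface`] and `MyHandler`
    /// implements [`GlobalHandler`]:
    ///
    /// ```ignore
    /// let global = handle.create_global(&MY_INTERFACE, 3, Arc::new(MyHandler));
    /// ```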
#[inline]
pub fn create_global(
&mut self,
interface: &'static Interface,
version: u32,
handler: Arc<dyn GlobalHandler<D>>,
) -> GlobalId {
GlobalId { id: self.handle.create_global(interface, version, handler) }
}
/// Disables a global object that is currently active.
///
/// The global removal will be signaled to all currently connected clients. New clients will not know of the global,
/// but the associated state and callbacks will not be freed. As such, clients that still try to bind the global
/// afterwards (because they have not yet realized it was removed) will succeed.
#[inline]
pub fn disable_global(&mut self, id: GlobalId) {
self.handle.disable_global(id.id)
}
    /// Removes a global object and frees its resources.
///
/// The global object will no longer be considered valid by the server, clients trying to bind it will be killed,
/// and the global ID is freed for re-use.
///
/// It is advised to first disable a global and wait some amount of time before removing it, to ensure all clients
/// are correctly aware of its removal. Note that clients will generally not expect globals that represent a capability
/// of the server to be removed, as opposed to globals representing peripherals (like `wl_output` or `wl_seat`).
#[inline]
pub fn remove_global(&mut self, id: GlobalId) {
self.handle.remove_global(id.id)
}
/// Returns information about a global.
#[inline]
pub fn global_info(&self, id: GlobalId) -> Result<GlobalInfo, InvalidId> {
self.handle.global_info(id.id)
}
/// Returns the handler which manages the visibility and notifies when a client has bound the global.
#[inline]
pub fn get_global_handler(&self, id: GlobalId) -> Result<Arc<dyn GlobalHandler<D>>, InvalidId> {
self.handle.get_global_handler(id.id)
}
}
/// A backend object that represents the state of a wayland server.
///
/// A backend is used to drive a wayland server by receiving requests, dispatching messages to the appropriate
/// handlers, and flushing outgoing messages back to clients.
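///
/// # Example (sketch)
///
/// A minimal dispatch loop; `MyState` and `poll_readable` (an event-loop
/// readiness wait) are assumptions, not part of this crate:
///
/// ```ignore
/// let mut state = MyState::default();
/// let mut backend = Backend::<MyState>::new()?;
/// let fd = backend.poll_fd();
/// loop {
///     poll_readable(fd)?;
///     backend.dispatch_all_clients(&mut state)?;
///     backend.flush(None)?;
/// }
/// ```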
#[derive(Debug)]
pub struct Backend<D: 'static> {
pub(crate) backend: server_impl::InnerBackend<D>,
}
impl<D> Backend<D> {
/// Initialize a new Wayland backend
#[inline]
pub fn new() -> Result<Self, InitError> {
Ok(Backend { backend: server_impl::InnerBackend::new()? })
}
/// Initializes a connection to a client.
///
/// The `data` parameter contains data that will be associated with the client.
#[inline]
pub fn insert_client(
&mut self,
stream: UnixStream,
data: Arc<dyn ClientData<D>>,
) -> std::io::Result<ClientId> {
Ok(ClientId { id: self.backend.insert_client(stream, data)? })
}
/// Flushes pending events destined for a client.
///
/// If no client is specified, all pending events are flushed to all clients.
#[inline]
pub fn flush(&mut self, client: Option<ClientId>) -> std::io::Result<()> {
self.backend.flush(client)
}
/// Returns a handle which represents the server side state of the backend.
///
/// The handle provides a variety of functionality, such as querying information about wayland objects,
/// obtaining data associated with a client and it's objects, and creating globals.
#[inline]
pub fn handle(&mut self) -> &mut Handle<D> {
self.backend.handle()
}
/// Returns the underlying file descriptor.
///
/// The file descriptor may be monitored for activity with a polling mechanism such as epoll or kqueue.
/// When it becomes readable, this means there are pending messages that would be dispatched if you call
/// [`Backend::dispatch_all_clients`].
///
/// The file descriptor should not be used for any other purpose than monitoring it.
#[inline]
pub fn poll_fd(&self) -> RawFd {
self.backend.poll_fd()
}
/// Dispatches all pending messages from the specified client.
///
/// This method will not block if there are no pending messages.
///
/// The provided `data` will be provided to the handler of messages received from the client.
///
/// For performance reasons, use of this function should be integrated with an event loop, monitoring the
/// file descriptor associated with the client and only calling this method when messages are available.
#[inline]
pub fn dispatch_client(&mut self, data: &mut D, client_id: ClientId) -> std::io::Result<usize> {
self.backend.dispatch_client(data, client_id.id)
}
/// Dispatches all pending messages from all clients.
///
/// This method will not block if there are no pending messages.
///
/// The provided `data` will be provided to the handler of messages received from the clients.
///
/// For performance reasons, use of this function should be integrated with an event loop, monitoring the
/// file descriptor retrieved by [`Backend::poll_fd`] and only calling this method when messages are
/// available.
#[inline]
pub fn dispatch_all_clients(&mut self, data: &mut D) -> std::io::Result<usize> {
self.backend.dispatch_all_clients(data)
}
}
pub(crate) struct DumbObjectData;
impl<D> ObjectData<D> for DumbObjectData {
#[cfg_attr(coverage, no_coverage)]
fn request(
self: Arc<Self>,
_handle: &mut Handle<D>,
_data: &mut D,
_client_id: ClientId,
_msg: Message<ObjectId>,
) -> Option<Arc<dyn ObjectData<D>>> {
unreachable!()
}
#[cfg_attr(coverage, no_coverage)]
fn destroyed(&self, _: &mut D, _client_id: ClientId, _object_id: ObjectId) {}
}
| 34.93737 | 123 | 0.65127 |
75573d38b22d76c265bde4f07372dfefccccfb99 | 9,067 | use std::fmt::{self, Debug, Formatter};
use url::Url;
use core::source::{Source, SourceId};
use core::GitReference;
use core::{Dependency, Package, PackageId, Registry, Summary};
use util::Config;
use util::errors::CargoResult;
use util::hex::short_hash;
use sources::PathSource;
use sources::git::utils::{GitRemote, GitRevision};
/* TODO: Refactor GitSource to delegate to a PathSource
*/
pub struct GitSource<'cfg> {
remote: GitRemote,
reference: GitReference,
source_id: SourceId,
path_source: Option<PathSource<'cfg>>,
rev: Option<GitRevision>,
ident: String,
config: &'cfg Config,
}
impl<'cfg> GitSource<'cfg> {
pub fn new(source_id: &SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
assert!(source_id.is_git(), "id is not git, id={}", source_id);
let remote = GitRemote::new(source_id.url());
let ident = ident(source_id.url())?;
let reference = match source_id.precise() {
Some(s) => GitReference::Rev(s.to_string()),
None => source_id.git_reference().unwrap().clone(),
};
let source = GitSource {
remote,
reference,
source_id: source_id.clone(),
path_source: None,
rev: None,
ident,
config,
};
Ok(source)
}
pub fn url(&self) -> &Url {
self.remote.url()
}
pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
if self.path_source.is_none() {
self.update()?;
}
self.path_source.as_mut().unwrap().read_packages()
}
}
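// Produces idents like `cargo-<hash>` for e.g. `https://github.com/carlhuda/cargo`;
// URLs without a final path segment get `_empty-<hash>` (see the tests below).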
fn ident(url: &Url) -> CargoResult<String> {
let url = canonicalize_url(url)?;
let ident = url.path_segments()
.and_then(|mut s| s.next_back())
.unwrap_or("");
let ident = if ident == "" { "_empty" } else { ident };
Ok(format!("{}-{}", ident, short_hash(&url)))
}
// Some hacks and heuristics for making equivalent URLs hash the same
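// e.g. `https://github.com/FooBar/Baz.git/` and `git://github.com/foobar/baz` both
// canonicalize to `https://github.com/foobar/baz` (see the tests at the bottom).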
pub fn canonicalize_url(url: &Url) -> CargoResult<Url> {
let mut url = url.clone();
// cannot-be-a-base-urls are not supported
// eg. github.com:rust-lang-nursery/rustfmt.git
if url.cannot_be_a_base() {
bail!(
"invalid url `{}`: cannot-be-a-base-URLs are not supported",
url
)
}
// Strip a trailing slash
if url.path().ends_with('/') {
url.path_segments_mut().unwrap().pop_if_empty();
}
// HACKHACK: For github URL's specifically just lowercase
// everything. GitHub treats both the same, but they hash
// differently, and we're gonna be hashing them. This wants a more
// general solution, and also we're almost certainly not using the
// same case conversion rules that GitHub does. (#84)
if url.host_str() == Some("github.com") {
url.set_scheme("https").unwrap();
let path = url.path().to_lowercase();
url.set_path(&path);
}
// Repos generally can be accessed with or w/o '.git'
let needs_chopping = url.path().ends_with(".git");
if needs_chopping {
let last = {
let last = url.path_segments().unwrap().next_back().unwrap();
last[..last.len() - 4].to_owned()
};
url.path_segments_mut().unwrap().pop().push(&last);
}
Ok(url)
}
impl<'cfg> Debug for GitSource<'cfg> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "git repo at {}", self.remote.url())?;
match self.reference.pretty_ref() {
Some(s) => write!(f, " ({})", s),
None => Ok(()),
}
}
}
impl<'cfg> Registry for GitSource<'cfg> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let src = self.path_source
.as_mut()
.expect("BUG: update() must be called before query()");
src.query(dep, f)
}
fn supports_checksums(&self) -> bool {
false
}
fn requires_precise(&self) -> bool {
true
}
}
impl<'cfg> Source for GitSource<'cfg> {
fn source_id(&self) -> &SourceId {
&self.source_id
}
fn update(&mut self) -> CargoResult<()> {
let lock =
self.config
.git_path()
.open_rw(".cargo-lock-git", self.config, "the git checkouts")?;
let db_path = lock.parent().join("db").join(&self.ident);
if self.config.cli_unstable().offline && !db_path.exists() {
bail!(
"can't checkout from '{}': you are in the offline mode (-Z offline)",
self.remote.url()
);
}
// Resolve our reference to an actual revision, and check if the
// database already has that revision. If it does, we just load a
// database pinned at that revision, and if we don't we issue an update
// to try to find the revision.
let actual_rev = self.remote.rev_for(&db_path, &self.reference);
let should_update = actual_rev.is_err() || self.source_id.precise().is_none();
let (db, actual_rev) = if should_update && !self.config.cli_unstable().offline {
self.config.shell().status(
"Updating",
format!("git repository `{}`", self.remote.url()),
)?;
trace!("updating git source `{:?}`", self.remote);
self.remote
.checkout(&db_path, &self.reference, self.config)?
} else {
(self.remote.db_at(&db_path)?, actual_rev.unwrap())
};
// Don’t use the full hash,
// to contribute less to reaching the path length limit on Windows:
// https://github.com/servo/servo/pull/14397
let short_id = db.to_short_id(actual_rev.clone()).unwrap();
let checkout_path = lock.parent()
.join("checkouts")
.join(&self.ident)
.join(short_id.as_str());
// Copy the database to the checkout location. After this we could drop
// the lock on the database as we no longer needed it, but we leave it
// in scope so the destructors here won't tamper with too much.
// Checkout is immutable, so we don't need to protect it with a lock once
// it is created.
db.copy_to(actual_rev.clone(), &checkout_path, self.config)?;
let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
let path_source = PathSource::new_recursive(&checkout_path, &source_id, self.config);
self.path_source = Some(path_source);
self.rev = Some(actual_rev);
self.path_source.as_mut().unwrap().update()
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
trace!(
"getting packages for package id `{}` from `{:?}`",
id,
self.remote
);
self.path_source
.as_mut()
.expect("BUG: update() must be called before get()")
.download(id)
}
fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
Ok(self.rev.as_ref().unwrap().to_string())
}
}
#[cfg(test)]
mod test {
use url::Url;
use super::ident;
use util::ToUrl;
#[test]
pub fn test_url_to_path_ident_with_path() {
let ident = ident(&url("https://github.com/carlhuda/cargo")).unwrap();
assert!(ident.starts_with("cargo-"));
}
#[test]
pub fn test_url_to_path_ident_without_path() {
let ident = ident(&url("https://github.com")).unwrap();
assert!(ident.starts_with("_empty-"));
}
#[test]
fn test_canonicalize_idents_by_stripping_trailing_url_slash() {
let ident1 = ident(&url("https://github.com/PistonDevelopers/piston/")).unwrap();
let ident2 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
assert_eq!(ident1, ident2);
}
#[test]
fn test_canonicalize_idents_by_lowercasing_github_urls() {
let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
let ident2 = ident(&url("https://github.com/pistondevelopers/piston")).unwrap();
assert_eq!(ident1, ident2);
}
#[test]
fn test_canonicalize_idents_by_stripping_dot_git() {
let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
let ident2 = ident(&url("https://github.com/PistonDevelopers/piston.git")).unwrap();
assert_eq!(ident1, ident2);
}
#[test]
fn test_canonicalize_idents_different_protocols() {
let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
let ident2 = ident(&url("git://github.com/PistonDevelopers/piston")).unwrap();
assert_eq!(ident1, ident2);
}
#[test]
fn test_canonicalize_cannot_be_a_base_urls() {
assert!(ident(&url("github.com:PistonDevelopers/piston")).is_err());
assert!(ident(&url("google.com:PistonDevelopers/piston")).is_err());
}
fn url(s: &str) -> Url {
s.to_url().unwrap()
}
}
| 32.038869 | 93 | 0.587846 |
4ba1cbe07994a4f933611156746d6e910c3a53c4 | 100 | use dade::model;
#[model]
struct TestModel {
#[field(gt = 2.0)]
value: u128,
}
fn main() {}
| 12.5 | 22 | 0.56 |
67726690b2f0fc3d5a37fc2014e8109ea41cd9a9 | 3,668 | // SPDX-License-Identifier: (MIT OR Apache-2.0)
use std::cmp::min;
use std::fmt;
use std::io::{self, Read, Seek, SeekFrom, Write};
use std::str::FromStr;
use time::Tm;
use super::DirectoryEntryHeader;
use crate::{FileRef, ISO9660Reader, ISOError, Result};
#[derive(Clone)]
pub struct ISOFile<T: ISO9660Reader> {
pub(crate) header: DirectoryEntryHeader,
pub identifier: String,
// File version; ranges from 1 to 32767
pub version: u16,
file: FileRef<T>,
}
impl<T: ISO9660Reader> fmt::Debug for ISOFile<T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("ISOFile")
.field("header", &self.header)
.field("identifier", &self.identifier)
.field("version", &self.version)
.finish()
}
}
impl<T: ISO9660Reader> ISOFile<T> {
pub(crate) fn new(
header: DirectoryEntryHeader,
mut identifier: String,
file: FileRef<T>,
) -> Result<ISOFile<T>> {
// Files (not directories) in ISO 9660 have a version number, which is
        // provided at the end of the identifier, separated by ';'.
// If not, assume 1.
let version = match identifier.rfind(';') {
Some(idx) => {
let version = u16::from_str(&identifier[idx + 1..])?;
identifier.truncate(idx);
version
},
None => 1
};
// Files without an extension have a '.' at the end
if identifier.ends_with('.') {
identifier.pop();
}
Ok(ISOFile {
header,
identifier,
version,
file,
})
}
pub fn size(&self) -> u32 {
self.header.extent_length
}
pub fn time(&self) -> Tm {
self.header.time
}
pub fn read(&self) -> ISOFileReader<T> {
ISOFileReader {
            // Zeroed sector cache; it is always (re)filled before any byte is read out.
            buf: [0; 2048],
buf_lba: None,
seek: 0,
start_lba: self.header.extent_loc,
size: self.size() as usize,
file: self.file.clone(),
}
}
}
pub struct ISOFileReader<T: ISO9660Reader> {
buf: [u8; 2048],
buf_lba: Option<u64>,
seek: usize,
start_lba: u32,
size: usize,
file: FileRef<T>,
}
impl<T: ISO9660Reader> Read for ISOFileReader<T> {
#[cfg(feature = "nightly")]
unsafe fn initializer(&self) -> std::io::Initializer {
std::io::Initializer::nop()
}
fn read(&mut self, mut buf: &mut [u8]) -> io::Result<usize> {
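        // Sector-cached copy loop: (re)load the 2048-byte logical block that
        // contains `seek` only when it differs from the cached one, then copy
        // the in-sector slice into `buf` until `buf` or the file is exhausted.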
let mut seek = self.seek;
while !buf.is_empty() && seek < self.size {
let lba = self.start_lba as u64 + (seek as u64 / 2048);
if self.buf_lba != Some(lba) {
self.file.read_at(&mut self.buf, lba)?;
self.buf_lba = Some(lba);
}
let start = seek % 2048;
let end = min(self.size - (seek / 2048) * 2048, 2048);
seek += buf.write(&self.buf[start..end]).unwrap();
}
let bytes = seek - self.seek;
self.seek = seek;
Ok(bytes)
}
}
impl<T: ISO9660Reader> Seek for ISOFileReader<T> {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
let seek = match pos {
SeekFrom::Start(pos) => pos as i64,
SeekFrom::End(pos) => self.size as i64 + pos,
SeekFrom::Current(pos) => self.seek as i64 + pos,
};
if seek < 0 {
Err(io::Error::new(io::ErrorKind::InvalidInput, "Invalid seek"))
} else {
self.seek = seek as usize;
Ok(seek as u64)
}
}
}
| 27.17037 | 78 | 0.530262 |
1e04637a91007ffa265d0bb035471f35bf0df43a | 5,034 | use crate::*;
use sqlparser::ast::{BinaryOperator, Expr};
fn op_name(op: &BinaryOperator) -> Result<String, ErrorDesc> {
let op_name = match op {
BinaryOperator::Plus => "+=",
BinaryOperator::Minus => "-=",
BinaryOperator::Eq => "===",
BinaryOperator::GtEq => ">==",
BinaryOperator::Gt => ">==",
_ => "unimplemented",
};
if op_name == "unimplemented" {
Err(format!("Operation {} not implemented", op_name))?
} else {
Ok(op_name.to_string())
}
}
fn op_name_init(op: &BinaryOperator) -> Result<String, ErrorDesc> {
let op_name = match op {
BinaryOperator::Plus => "+",
BinaryOperator::Minus => "-",
BinaryOperator::Eq => "==",
BinaryOperator::GtEq => ">=",
BinaryOperator::Gt => ">",
_ => "unimplemented",
};
if op_name == "unimplemented" {
Err(format!("Operation {} not implemented", op_name))?
} else {
Ok(op_name.to_string())
}
}
fn column_ref<'a>(
name: &str,
input: &'a Vec<&ColumnWrapper>,
) -> Result<(&'a ColumnWrapper<'a>, usize), ErrorDesc> {
let pos = input.iter().position(|c| c.name().as_deref() == Some(name));
if let Some(pos) = pos {
Ok((&input[pos], pos))
} else {
Err(format!(
"Column {} not found in the list of input columns",
name
))?
}
}
pub fn parseexpr_rec<'a>(
expr: &Expr,
input: &'a Vec<&ColumnWrapper>,
dict: &Dictionary,
) -> Result<Expression, ErrorDesc> {
match expr {
Expr::BinaryOp { left, op, right } => match (&(**left), &(**right)) {
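            // Three operand shapes are handled: column-op-column, column-op-expr,
            // and expr-op-column. Note that the last case looks up the assigning
            // variant of the operator name ("+=" instead of "+") via `op_name`,
            // while the other two use the plain variant via `op_name_init`.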
(Expr::Identifier(lhs), Expr::Identifier(rhs)) => {
let (lhs_col, lhs_pos) = column_ref(&lhs.value, input)?;
let (rhs_col, rhs_pos) = column_ref(&rhs.value, input)?;
let op_name = op_name_init(op)?;
let signature = Signature::new(
&op_name,
vec![lhs_col.typeid(), rhs_col.typeid()],
vec![lhs_col.typename().clone(), rhs_col.typename().clone()],
);
Ok(Expression::new(
signature,
Binding::OwnedColumn,
vec![Binding::RefColumn(lhs_pos), Binding::RefColumn(rhs_pos)],
))
}
(Expr::Identifier(lhs), e) => {
let expr_right = parseexpr_rec(e, input, dict)?;
let (lhs_col, lhs_pos) = column_ref(&lhs.value, input)?;
let op_name = op_name_init(op)?;
let signature = Signature::new(
&op_name,
vec![lhs_col.typeid(), expr_right.output_type(dict)?],
vec![
lhs_col.typename().clone(),
expr_right.output_typename(dict)?,
],
);
Ok(Expression::new(
signature,
Binding::OwnedColumn,
vec![
Binding::RefColumn(lhs_pos),
Binding::Expr(Box::new(expr_right)),
],
))
}
(e, Expr::Identifier(rhs)) => {
let (rhs_col, rhs_pos) = column_ref(&rhs.value, input)?;
let expr_left = parseexpr_rec(e, input, dict)?;
let op_name = op_name(op)?;
let signature = Signature::new(
&op_name,
vec![expr_left.output_type(dict)?, rhs_col.typeid()],
vec![expr_left.output_typename(dict)?, rhs_col.typename().clone()],
);
Ok(Expression::new(
signature,
Binding::Expr(Box::new(expr_left)),
vec![Binding::RefColumn(rhs_pos)],
))
}
            _ => panic!("unsupported operand shapes in binary expression: {:?}", expr),
},
Expr::Nested(e) => parseexpr_rec(&(**e), input, dict),
Expr::Function(f) => {
if f.name.0[0].value == "hash" {
if f.args.len() == 0 {
Err(format!("Function supplied has no arguments: {:?}", f))?
}
let e = f.args[0].clone();
match e {
Expr::Identifier(col) => {
let (_col, _pos) = column_ref(&col.value, input)?;
let _op_name = "hash=";
Err("Hash not yet implemented")?
}
_ => Err("Only hash(col) supported")?,
}
} else {
Err("Only the function hash is implemented")?
}
}
_ => Err(format!("Expression not implemented: {:?}", expr))?,
}
}
pub fn parseexpr(
expr: &Expr,
input: &Vec<&ColumnWrapper>,
dict: &Dictionary,
) -> Result<Expression, ErrorDesc> {
    parseexpr_rec(expr, input, dict)
}
| 32.901961 | 87 | 0.461661 |
f8ae0155501a62d5a45cde87e63b489f664e27d9 | 15,141 | //! # Module to work with `HD Wallets`
//!
//! Currently supports only Ledger Nano S & Ledger Blue
//! `HD(Hierarchical Deterministic) Wallet` specified in
//! [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki)
mod error;
mod apdu;
mod keystore;
mod comm;
use self::apdu::ApduBuilder;
use self::comm::sendrecv;
pub use self::error::Error;
pub use self::keystore::HdwalletCrypto;
use super::{Address, ECDSA_SIGNATURE_BYTES, Signature, to_arr, to_bytes};
use hidapi::{HidApi, HidDevice, HidDeviceInfo};
use regex::Regex;
use std::{thread, time};
use std::str::{FromStr, from_utf8};
const GET_ETH_ADDRESS: u8 = 0x02;
const SIGN_ETH_TRANSACTION: u8 = 0x04;
const CHUNK_SIZE: usize = 255;
const LEDGER_VID: u16 = 0x2c97;
const LEDGER_PID: u16 = 0x0001; // for Nano S model
const DERIVATION_INDEX_SIZE: usize = 4;
#[allow(dead_code)]
pub const ETC_DERIVATION_PATH: [u8; 21] = [
5,
0x80,
0,
0,
44,
0x80,
0,
0,
60,
0x80,
0x02,
0x73,
0xd0,
0x80,
0,
0,
0,
0,
0,
0,
0,
]; // 44'/60'/160720'/0'/0
/// Type used for device listing,
/// String corresponds to file descriptor of the device
pub type DevicesList = Vec<(Address, String)>;
///
#[derive(Debug)]
struct Device {
///
fd: String,
///
address: Address,
///
hid_info: HidDeviceInfo,
}
impl PartialEq for Device {
fn eq(&self, other: &Device) -> bool {
self.fd == other.fd
}
}
impl From<HidDeviceInfo> for Device {
fn from(hid_info: HidDeviceInfo) -> Self {
let info = hid_info.clone();
Device {
fd: hid_info.path,
address: Address::default(),
hid_info: info,
}
}
}
/// Parse HD path into byte array
pub fn path_to_arr(hd_str: &str) -> Result<Vec<u8>, Error> {
lazy_static! {
static ref INVALID_PATH_RE: Regex = Regex::new(r#"[^0-9'/]"#).unwrap();
}
if INVALID_PATH_RE.is_match(hd_str) {
return Err(Error::HDWalletError(
format!("Invalid `hd_path` format: {}", hd_str),
));
}
let mut buf = Vec::new();
{
let parse = |s: &str| {
let mut str = s.to_string();
let mut v: u64 = 0;
if str.ends_with("'") {
v += 0x80000000;
                str.pop();
}
match str.parse::<u64>() {
Ok(d) => v += d,
Err(_) => {
return Err(Error::HDWalletError(
format!("Invalid `hd_path` format: {}", hd_str),
))
}
}
buf.extend(to_bytes(v, 4));
Ok(())
};
hd_str.split("/").map(parse).collect::<Vec<_>>();
}
Ok(buf)
}
/// Parse HD path into byte array
/// prefixed with count of derivation indexes
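///
/// # Example (sketch)
///
/// ```ignore
/// // Matches the layout of the `ETC_DERIVATION_PATH` constant above
/// // (a count byte followed by five big-endian derivation indexes):
/// let path = to_prefixed_path("44'/60'/160720'/0'/0").unwrap();
/// assert_eq!(path, ETC_DERIVATION_PATH.to_vec());
/// ```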
pub fn to_prefixed_path(hd_str: &str) -> Result<Vec<u8>, Error> {
let v = path_to_arr(hd_str)?;
let count = (v.len() / DERIVATION_INDEX_SIZE) as u8;
let mut buf = Vec::with_capacity(v.len() + 1);
buf.push(count);
buf.extend(v);
Ok(buf)
}
/// `Wallet Manager` to handle all interaction with HD wallet
pub struct WManager {
/// HID point used for communication
hid: HidApi,
/// List of available wallets
devices: Vec<Device>,
/// Derivation path
hd_path: Option<Vec<u8>>,
}
impl WManager {
    /// Creates a new `Wallet Manager` with a specified
/// derivation path
pub fn new(hd_path: Option<Vec<u8>>) -> Result<WManager, Error> {
Ok(Self {
hid: HidApi::new()?,
devices: Vec::new(),
hd_path: hd_path,
})
}
/// Decides what HD path to use
fn pick_hd_path(&self, h: Option<Vec<u8>>) -> Result<Vec<u8>, Error> {
if self.hd_path.is_none() && h.is_none() {
return Err(Error::HDWalletError("HD path is not specified".to_string()));
}
Ok(h.or(self.hd_path.clone()).unwrap())
}
/// Get address
///
/// # Arguments:
/// fd - file descriptor to corresponding HID device
/// hd_path - optional HD path, prefixed with count of derivation indexes
///
pub fn get_address(&self, fd: &str, hd_path: Option<Vec<u8>>) -> Result<Address, Error> {
let hd_path = self.pick_hd_path(hd_path)?;
let apdu = ApduBuilder::new(GET_ETH_ADDRESS)
.with_data(&hd_path)
.build();
debug!("DEBUG get address: {:?}", &fd);
let handle = self.open(fd)?;
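        // The device is expected to answer with 107 bytes; bytes 67..107 hold
        // the address as 40 ASCII hex characters, which the chain below
        // validates and parses.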
let addr = sendrecv(&handle, &apdu)
.and_then(|res| match res.len() {
107 => Ok(res),
_ => Err(Error::HDWalletError(
"Address read returned invalid data length".to_string(),
)),
})
.and_then(|res: Vec<u8>| {
from_utf8(&res[67..107])
.map(|ptr| ptr.to_string())
.map_err(|e| {
Error::HDWalletError(format!("Can't parse address: {}", e.to_string()))
})
})
.and_then(|s| {
Address::from_str(&s).map_err(|e| {
Error::HDWalletError(format!("Can't parse address: {}", e.to_string()))
})
})?;
Ok(addr)
}
/// Sign transaction
///
/// # Arguments:
/// fd - file descriptor to corresponding HID device
/// tr - RLP packed transaction
/// hd_path - optional HD path, prefixed with count of derivation indexes
///
pub fn sign_transaction(
&self,
fd: &str,
tr: &[u8],
hd_path: Option<Vec<u8>>,
    ) -> Result<Signature, Error> {
let hd_path = self.pick_hd_path(hd_path)?;
let _mock = Vec::new();
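        // First APDU (P1 = 0x00) carries the derivation path plus as much of
        // the RLP payload as fits; any remainder is streamed in CHUNK_SIZE
        // continuation APDUs with P1 = 0x80 in the loop below.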
let (init, cont) = match tr.len() {
0...CHUNK_SIZE => (tr, _mock.as_slice()),
_ => tr.split_at(CHUNK_SIZE - hd_path.len()),
};
let init_apdu = ApduBuilder::new(SIGN_ETH_TRANSACTION)
.with_p1(0x00)
.with_data(&hd_path)
.with_data(init)
.build();
let handle = self.open(fd)?;
let mut res = sendrecv(&handle, &init_apdu)?;
for chunk in cont.chunks(CHUNK_SIZE) {
let apdu_cont = ApduBuilder::new(SIGN_ETH_TRANSACTION)
.with_p1(0x80)
.with_data(chunk)
.build();
res = sendrecv(&handle, &apdu_cont)?;
}
debug!("Received signature: {:?}", res);
match res.len() {
ECDSA_SIGNATURE_BYTES => {
let mut val: [u8; ECDSA_SIGNATURE_BYTES] = [0; ECDSA_SIGNATURE_BYTES];
val.copy_from_slice(&res);
Ok(Signature::from(val))
}
v => Err(Error::HDWalletError(format!(
"Invalid signature length. Expected: {}, received: {}",
ECDSA_SIGNATURE_BYTES,
v
))),
}
}
/// List all available devices
pub fn devices(&self) -> DevicesList {
self.devices
.iter()
.map(|d| (d.address.clone(), d.fd.clone()))
.collect()
}
/// Update device list
pub fn update(&mut self, hd_path: Option<Vec<u8>>) -> Result<(), Error> {
let hd_path = self.pick_hd_path(hd_path)?;
self.hid.refresh_devices();
let mut new_devices = Vec::new();
debug!("Start searching for devices: {:?}", self.hid.devices());
for hid_info in self.hid.devices() {
if hid_info.product_id != LEDGER_PID || hid_info.vendor_id != LEDGER_VID {
continue;
}
let mut d = Device::from(hid_info);
d.address = self.get_address(&d.fd, Some(hd_path.clone()))?;
new_devices.push(d);
}
self.devices = new_devices;
debug!("Devices found {:?}", self.devices);
Ok(())
}
fn open(&self, path: &str) -> Result<HidDevice, Error> {
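        // Retry up to five times with a one-second pause, since the device can
        // be briefly busy right after enumeration. Note that `path` is used
        // only in the error message; the device is opened by VID/PID.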
for _ in 0..5 {
match self.hid.open(LEDGER_VID, LEDGER_PID) {
Ok(h) => return Ok(h),
Err(_) => (),
}
thread::sleep(time::Duration::from_millis(1000));
}
Err(Error::HDWalletError(format!("Can't open path: {}", path)))
}
}
#[cfg(test)]
mod tests {
use super::*;
use core::Transaction;
use rustc_serialize::hex::ToHex;
use tests::*;
#[test]
#[ignore]
pub fn should_sign_with_ledger() {
let mut manager = WManager::new(Some(ETC_DERIVATION_PATH.to_vec())).unwrap();
manager.update(None).unwrap();
if manager.devices().is_empty() {
// No device connected, skip test
return;
}
let tx = Transaction {
nonce: 0x00,
gas_price: /* 21000000000 */
to_32bytes("0000000000000000000000000000000\
0000000000000000000000004e3b29200"),
gas_limit: 0x5208,
to: Some("78296F1058dD49C5D6500855F59094F0a2876397"
.parse::<Address>()
.unwrap()),
value: /* 1 ETC */
to_32bytes("00000000000000000000000000000000\
00000000000000000de0b6b3a7640000"),
data: Vec::new(),
};
let chain: u8 = 61;
let rlp = tx.to_rlp(Some(chain));
let fd = &manager.devices()[0].1;
let sign = manager.sign_transaction(&fd, &rlp, None).unwrap();
assert_eq!(tx.raw_from_sig(chain, sign).to_hex(),
"f86d80\
85\
04e3b29200\
82\
5208\
94\
78296f1058dd49c5d6500855f59094f0a2876397\
88\
0de0b6b3a7640000\
80\
81\
9d\
a0\
5cba84eb9aac6854c8ff6aa21b3e0c6c2036e07ebdee44bcf7ace95bab569d8f\
a0\
6eab3be528ef7565c887e147a2d53340c6c9fab5d6f56694681c90b518b64183");
}
#[test]
#[ignore]
pub fn should_sign_with_ledger_big_data() {
let mut manager = WManager::new(Some(ETC_DERIVATION_PATH.to_vec())).unwrap();
manager.update(None).unwrap();
if manager.devices().is_empty() {
// No device connected, skip test
return;
}
let mut data = Vec::new();
// create 512 bytes of data,
// fill with `11cccccccccccc11` 8-byte hex fragment
for _ in 0..64 {
data.push(0x11);
data.extend_from_slice(&[0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc]);
data.push(0x11);
}
let tx = Transaction {
nonce: 0x01,
gas_price: /* 21000000000 */
to_32bytes("0000000000000000000000000000000\
0000000000000000000000004e3b29200"),
gas_limit: 0x5208,
to: Some("c0de379b51d582e1600c76dd1efee8ed024b844a"
.parse::<Address>()
.unwrap()),
            value: /* 1110986970341924 wei (~0.0011 ETC) */
to_32bytes("00000000000000000000000000000000\
00000000000000000003f26fcfb7a224"),
data: data,
};
let rlp = tx.to_rlp(None);
let fd = &manager.devices()[0].1;
/*
f9\
022a01\
\
85\
04e3b29200\
\
82\
5208\
\
94\
c0de379b51d582e1600c76dd1efee8ed024b844a\
\
87\
03f26fcfb7a224\
\
b9\
0200\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11\
11cccccccccccc1111cccccccccccc1111cccccccccccc1111cccccccccccc11
*/
println!(">> RLP: {:?}", &rlp.to_hex());
let sign = manager.sign_transaction(&fd, &rlp, None);
assert!(sign.is_ok());
debug!("Signature: {:?}", &sign.unwrap());
}
#[test]
#[ignore]
pub fn should_get_address_with_ledger() {
let mut manager = WManager::new(Some(ETC_DERIVATION_PATH.to_vec())).unwrap();
manager.update(None).unwrap();
if manager.devices().is_empty() {
// No device connected, skip test
return;
}
let fd = &manager.devices()[0].1;
let addr = manager.get_address(fd, None).unwrap();
assert_eq!("78296f1058dd49c5d6500855f59094f0a2876397", addr.to_hex());
}
#[test]
#[ignore]
pub fn should_pick_hd_path() {
let buf1 = vec![0];
let buf2 = vec![1];
let mut manager = WManager::new(None).unwrap();
assert_eq!(manager.pick_hd_path(Some(buf1.clone())).unwrap(), buf1);
manager.hd_path = Some(buf2.clone());
assert_eq!(manager.pick_hd_path(Some(buf2.clone())).unwrap(), buf2);
manager.hd_path = Some(buf1.clone());
assert_eq!(manager.pick_hd_path(None).unwrap(), buf1);
}
#[test]
pub fn should_parse_hd_path() {
let path_str = "44'/60'/160720'/0'/0";
assert_eq!(
ETC_DERIVATION_PATH[1..].to_vec(),
path_to_arr(&path_str).unwrap()
);
}
#[test]
pub fn should_fail_parse_hd_path() {
let mut path_str = "44'/60'/160A+_0'/0'/0";
assert!(path_to_arr(&path_str).is_err());
path_str = "44'/60'/16011_11111111111111111zz1111111111111111111111111111111'/0'/0";
assert!(path_to_arr(&path_str).is_err());
}
#[test]
pub fn should_parse_hd_path_into_prefixed() {
let path_str = "44'/60'/160720'/0'/0";
assert_eq!(
ETC_DERIVATION_PATH.to_vec(),
to_prefixed_path(&path_str).unwrap()
);
debug!("prefixed: {:?}", to_prefixed_path(&path_str).unwrap());
}
}
| 30.101392 | 95 | 0.545473 |
e862200d16a2d1d0c54cf77cf22ee333d3e52a10 | 2,933 | use solana_ledger::blockstore::Blockstore;
use solana_runtime::commitment::BlockCommitmentCache;
use std::{
sync::atomic::{AtomicBool, Ordering},
sync::{Arc, RwLock},
thread::{self, Builder, JoinHandle},
};
use tokio::runtime::Runtime;
// Delay uploading the largest confirmed root for this many slots. This is done in an attempt to
// ensure that the `CacheBlockTimeService` has had enough time to add the block time for the root
// before it's uploaded to BigTable.
//
// A more direct connection between CacheBlockTimeService and BigTableUploadService would be
// preferable...
const LARGEST_CONFIRMED_ROOT_UPLOAD_DELAY: usize = 100;
pub struct BigTableUploadService {
thread: JoinHandle<()>,
}
impl BigTableUploadService {
pub fn new(
runtime: Arc<Runtime>,
bigtable_ledger_storage: solana_storage_bigtable::LedgerStorage,
blockstore: Arc<Blockstore>,
block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
exit: Arc<AtomicBool>,
) -> Self {
info!("Starting BigTable upload service");
let thread = Builder::new()
.name("bigtable-upload".to_string())
.spawn(move || {
Self::run(
runtime,
bigtable_ledger_storage,
blockstore,
block_commitment_cache,
exit,
)
})
.unwrap();
Self { thread }
}
fn run(
runtime: Arc<Runtime>,
bigtable_ledger_storage: solana_storage_bigtable::LedgerStorage,
blockstore: Arc<Blockstore>,
block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
exit: Arc<AtomicBool>,
) {
let mut start_slot = 0;
loop {
if exit.load(Ordering::Relaxed) {
break;
}
let end_slot = block_commitment_cache
.read()
.unwrap()
.highest_confirmed_root()
.saturating_sub(LARGEST_CONFIRMED_ROOT_UPLOAD_DELAY as u64);
if end_slot <= start_slot {
std::thread::sleep(std::time::Duration::from_secs(1));
continue;
}
let result = runtime.block_on(solana_ledger::bigtable_upload::upload_confirmed_blocks(
blockstore.clone(),
bigtable_ledger_storage.clone(),
start_slot,
Some(end_slot),
true,
exit.clone(),
));
match result {
Ok(()) => start_slot = end_slot,
Err(err) => {
warn!("bigtable: upload_confirmed_blocks: {}", err);
std::thread::sleep(std::time::Duration::from_secs(2));
}
}
}
}
pub fn join(self) -> thread::Result<()> {
self.thread.join()
}
}
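// Illustrative wiring sketch (hypothetical surrounding code; the validator
// that owns these handles would normally construct the service):
//
// let service = BigTableUploadService::new(
//     runtime.clone(),
//     bigtable_ledger_storage,
//     blockstore.clone(),
//     block_commitment_cache.clone(),
//     exit.clone(),
// );
// // ... later, on shutdown:
// exit.store(true, Ordering::Relaxed);
// service.join().unwrap();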
| 31.202128 | 98 | 0.558814 |
23a77226062d527082d21062c127ee8440b61dda | 85,320 | /**
* Copyright (c) 2016, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
 * LICENSE file in the "hack" directory of this source tree. An additional
 * grant of patent rights can be found in the PATENTS file in the same
 * directory.
*
**
*
* THIS FILE IS @generated; DO NOT EDIT IT
* To regenerate this file, run
*
* buck run //hphp/hack/src:generate_full_fidelity
*
**
*
*/
use super::{serialize::WithContext, syntax::Syntax, syntax_variant_generated::*};
use serde::{ser::SerializeStruct, Serialize, Serializer};
impl<'a, T, V> Serialize for WithContext<'a, Syntax<'a, T, V>>
where
T: 'a,
WithContext<'a, T>: Serialize,
{
fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
match self.1.children {
SyntaxVariant::Missing => {
let mut ss = s.serialize_struct("", 1)?;
ss.serialize_field("kind", "missing")?;
ss.end()
}
SyntaxVariant::Token(ref t) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "token")?;
ss.serialize_field("token", &self.with(t))?;
ss.end()
}
SyntaxVariant::SyntaxList(l) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "list")?;
ss.serialize_field("elements", &self.with(l))?;
ss.end()
}
SyntaxVariant::EndOfFile (EndOfFileChildren{token} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "end_of_file")?;
ss.serialize_field("end_of_file_token", &self.with(token))?;
ss.end()
}
SyntaxVariant::Script (ScriptChildren{declarations} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "script")?;
ss.serialize_field("script_declarations", &self.with(declarations))?;
ss.end()
}
SyntaxVariant::QualifiedName (QualifiedNameChildren{parts} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "qualified_name")?;
ss.serialize_field("qualified_name_parts", &self.with(parts))?;
ss.end()
}
SyntaxVariant::SimpleTypeSpecifier (SimpleTypeSpecifierChildren{specifier} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "simple_type_specifier")?;
ss.serialize_field("simple_type_specifier", &self.with(specifier))?;
ss.end()
}
SyntaxVariant::LiteralExpression (LiteralExpressionChildren{expression} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "literal")?;
ss.serialize_field("literal_expression", &self.with(expression))?;
ss.end()
}
SyntaxVariant::PrefixedStringExpression (PrefixedStringExpressionChildren{name,str} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "prefixed_string")?;
ss.serialize_field("prefixed_string_name", &self.with(name))?;
ss.serialize_field("prefixed_string_str", &self.with(str))?;
ss.end()
}
SyntaxVariant::PrefixedCodeExpression (PrefixedCodeExpressionChildren{prefix,left_backtick,expression,right_backtick} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "prefixed_code")?;
ss.serialize_field("prefixed_code_prefix", &self.with(prefix))?;
ss.serialize_field("prefixed_code_left_backtick", &self.with(left_backtick))?;
ss.serialize_field("prefixed_code_expression", &self.with(expression))?;
ss.serialize_field("prefixed_code_right_backtick", &self.with(right_backtick))?;
ss.end()
}
SyntaxVariant::VariableExpression (VariableExpressionChildren{expression} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "variable")?;
ss.serialize_field("variable_expression", &self.with(expression))?;
ss.end()
}
SyntaxVariant::PipeVariableExpression (PipeVariableExpressionChildren{expression} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "pipe_variable")?;
ss.serialize_field("pipe_variable_expression", &self.with(expression))?;
ss.end()
}
SyntaxVariant::FileAttributeSpecification (FileAttributeSpecificationChildren{left_double_angle,keyword,colon,attributes,right_double_angle} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "file_attribute_specification")?;
ss.serialize_field("file_attribute_specification_left_double_angle", &self.with(left_double_angle))?;
ss.serialize_field("file_attribute_specification_keyword", &self.with(keyword))?;
ss.serialize_field("file_attribute_specification_colon", &self.with(colon))?;
ss.serialize_field("file_attribute_specification_attributes", &self.with(attributes))?;
ss.serialize_field("file_attribute_specification_right_double_angle", &self.with(right_double_angle))?;
ss.end()
}
SyntaxVariant::EnumDeclaration (EnumDeclarationChildren{attribute_spec,modifiers,keyword,name,colon,base,type_,left_brace,use_clauses,enumerators,right_brace} ) => {
let mut ss = s.serialize_struct("", 12)?;
ss.serialize_field("kind", "enum_declaration")?;
ss.serialize_field("enum_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("enum_modifiers", &self.with(modifiers))?;
ss.serialize_field("enum_keyword", &self.with(keyword))?;
ss.serialize_field("enum_name", &self.with(name))?;
ss.serialize_field("enum_colon", &self.with(colon))?;
ss.serialize_field("enum_base", &self.with(base))?;
ss.serialize_field("enum_type", &self.with(type_))?;
ss.serialize_field("enum_left_brace", &self.with(left_brace))?;
ss.serialize_field("enum_use_clauses", &self.with(use_clauses))?;
ss.serialize_field("enum_enumerators", &self.with(enumerators))?;
ss.serialize_field("enum_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::EnumUse (EnumUseChildren{keyword,names,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "enum_use")?;
ss.serialize_field("enum_use_keyword", &self.with(keyword))?;
ss.serialize_field("enum_use_names", &self.with(names))?;
ss.serialize_field("enum_use_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::Enumerator (EnumeratorChildren{name,equal,value,semicolon} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "enumerator")?;
ss.serialize_field("enumerator_name", &self.with(name))?;
ss.serialize_field("enumerator_equal", &self.with(equal))?;
ss.serialize_field("enumerator_value", &self.with(value))?;
ss.serialize_field("enumerator_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::EnumClassDeclaration (EnumClassDeclarationChildren{attribute_spec,modifiers,enum_keyword,class_keyword,name,colon,base,extends,extends_list,left_brace,elements,right_brace} ) => {
let mut ss = s.serialize_struct("", 13)?;
ss.serialize_field("kind", "enum_class_declaration")?;
ss.serialize_field("enum_class_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("enum_class_modifiers", &self.with(modifiers))?;
ss.serialize_field("enum_class_enum_keyword", &self.with(enum_keyword))?;
ss.serialize_field("enum_class_class_keyword", &self.with(class_keyword))?;
ss.serialize_field("enum_class_name", &self.with(name))?;
ss.serialize_field("enum_class_colon", &self.with(colon))?;
ss.serialize_field("enum_class_base", &self.with(base))?;
ss.serialize_field("enum_class_extends", &self.with(extends))?;
ss.serialize_field("enum_class_extends_list", &self.with(extends_list))?;
ss.serialize_field("enum_class_left_brace", &self.with(left_brace))?;
ss.serialize_field("enum_class_elements", &self.with(elements))?;
ss.serialize_field("enum_class_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::EnumClassEnumerator (EnumClassEnumeratorChildren{modifiers,type_,name,initializer,semicolon} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "enum_class_enumerator")?;
ss.serialize_field("enum_class_enumerator_modifiers", &self.with(modifiers))?;
ss.serialize_field("enum_class_enumerator_type", &self.with(type_))?;
ss.serialize_field("enum_class_enumerator_name", &self.with(name))?;
ss.serialize_field("enum_class_enumerator_initializer", &self.with(initializer))?;
ss.serialize_field("enum_class_enumerator_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::AliasDeclaration (AliasDeclarationChildren{attribute_spec,modifiers,keyword,name,generic_parameter,constraint,equal,type_,semicolon} ) => {
let mut ss = s.serialize_struct("", 10)?;
ss.serialize_field("kind", "alias_declaration")?;
ss.serialize_field("alias_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("alias_modifiers", &self.with(modifiers))?;
ss.serialize_field("alias_keyword", &self.with(keyword))?;
ss.serialize_field("alias_name", &self.with(name))?;
ss.serialize_field("alias_generic_parameter", &self.with(generic_parameter))?;
ss.serialize_field("alias_constraint", &self.with(constraint))?;
ss.serialize_field("alias_equal", &self.with(equal))?;
ss.serialize_field("alias_type", &self.with(type_))?;
ss.serialize_field("alias_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ContextAliasDeclaration (ContextAliasDeclarationChildren{attribute_spec,keyword,name,generic_parameter,as_constraint,equal,context,semicolon} ) => {
let mut ss = s.serialize_struct("", 9)?;
ss.serialize_field("kind", "context_alias_declaration")?;
ss.serialize_field("ctx_alias_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("ctx_alias_keyword", &self.with(keyword))?;
ss.serialize_field("ctx_alias_name", &self.with(name))?;
ss.serialize_field("ctx_alias_generic_parameter", &self.with(generic_parameter))?;
ss.serialize_field("ctx_alias_as_constraint", &self.with(as_constraint))?;
ss.serialize_field("ctx_alias_equal", &self.with(equal))?;
ss.serialize_field("ctx_alias_context", &self.with(context))?;
ss.serialize_field("ctx_alias_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::PropertyDeclaration (PropertyDeclarationChildren{attribute_spec,modifiers,type_,declarators,semicolon} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "property_declaration")?;
ss.serialize_field("property_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("property_modifiers", &self.with(modifiers))?;
ss.serialize_field("property_type", &self.with(type_))?;
ss.serialize_field("property_declarators", &self.with(declarators))?;
ss.serialize_field("property_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::PropertyDeclarator (PropertyDeclaratorChildren{name,initializer} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "property_declarator")?;
ss.serialize_field("property_name", &self.with(name))?;
ss.serialize_field("property_initializer", &self.with(initializer))?;
ss.end()
}
SyntaxVariant::NamespaceDeclaration (NamespaceDeclarationChildren{header,body} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "namespace_declaration")?;
ss.serialize_field("namespace_header", &self.with(header))?;
ss.serialize_field("namespace_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::NamespaceDeclarationHeader (NamespaceDeclarationHeaderChildren{keyword,name} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "namespace_declaration_header")?;
ss.serialize_field("namespace_keyword", &self.with(keyword))?;
ss.serialize_field("namespace_name", &self.with(name))?;
ss.end()
}
SyntaxVariant::NamespaceBody (NamespaceBodyChildren{left_brace,declarations,right_brace} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "namespace_body")?;
ss.serialize_field("namespace_left_brace", &self.with(left_brace))?;
ss.serialize_field("namespace_declarations", &self.with(declarations))?;
ss.serialize_field("namespace_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::NamespaceEmptyBody (NamespaceEmptyBodyChildren{semicolon} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "namespace_empty_body")?;
ss.serialize_field("namespace_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::NamespaceUseDeclaration (NamespaceUseDeclarationChildren{keyword,kind,clauses,semicolon} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "namespace_use_declaration")?;
ss.serialize_field("namespace_use_keyword", &self.with(keyword))?;
ss.serialize_field("namespace_use_kind", &self.with(kind))?;
ss.serialize_field("namespace_use_clauses", &self.with(clauses))?;
ss.serialize_field("namespace_use_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::NamespaceGroupUseDeclaration (NamespaceGroupUseDeclarationChildren{keyword,kind,prefix,left_brace,clauses,right_brace,semicolon} ) => {
let mut ss = s.serialize_struct("", 8)?;
ss.serialize_field("kind", "namespace_group_use_declaration")?;
ss.serialize_field("namespace_group_use_keyword", &self.with(keyword))?;
ss.serialize_field("namespace_group_use_kind", &self.with(kind))?;
ss.serialize_field("namespace_group_use_prefix", &self.with(prefix))?;
ss.serialize_field("namespace_group_use_left_brace", &self.with(left_brace))?;
ss.serialize_field("namespace_group_use_clauses", &self.with(clauses))?;
ss.serialize_field("namespace_group_use_right_brace", &self.with(right_brace))?;
ss.serialize_field("namespace_group_use_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::NamespaceUseClause (NamespaceUseClauseChildren{clause_kind,name,as_,alias} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "namespace_use_clause")?;
ss.serialize_field("namespace_use_clause_kind", &self.with(clause_kind))?;
ss.serialize_field("namespace_use_name", &self.with(name))?;
ss.serialize_field("namespace_use_as", &self.with(as_))?;
ss.serialize_field("namespace_use_alias", &self.with(alias))?;
ss.end()
}
SyntaxVariant::FunctionDeclaration (FunctionDeclarationChildren{attribute_spec,declaration_header,body} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "function_declaration")?;
ss.serialize_field("function_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("function_declaration_header", &self.with(declaration_header))?;
ss.serialize_field("function_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::FunctionDeclarationHeader (FunctionDeclarationHeaderChildren{modifiers,keyword,name,type_parameter_list,left_paren,parameter_list,right_paren,contexts,colon,readonly_return,type_,where_clause} ) => {
let mut ss = s.serialize_struct("", 13)?;
ss.serialize_field("kind", "function_declaration_header")?;
ss.serialize_field("function_modifiers", &self.with(modifiers))?;
ss.serialize_field("function_keyword", &self.with(keyword))?;
ss.serialize_field("function_name", &self.with(name))?;
ss.serialize_field("function_type_parameter_list", &self.with(type_parameter_list))?;
ss.serialize_field("function_left_paren", &self.with(left_paren))?;
ss.serialize_field("function_parameter_list", &self.with(parameter_list))?;
ss.serialize_field("function_right_paren", &self.with(right_paren))?;
ss.serialize_field("function_contexts", &self.with(contexts))?;
ss.serialize_field("function_colon", &self.with(colon))?;
ss.serialize_field("function_readonly_return", &self.with(readonly_return))?;
ss.serialize_field("function_type", &self.with(type_))?;
ss.serialize_field("function_where_clause", &self.with(where_clause))?;
ss.end()
}
SyntaxVariant::Contexts (ContextsChildren{left_bracket,types,right_bracket} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "contexts")?;
ss.serialize_field("contexts_left_bracket", &self.with(left_bracket))?;
ss.serialize_field("contexts_types", &self.with(types))?;
ss.serialize_field("contexts_right_bracket", &self.with(right_bracket))?;
ss.end()
}
SyntaxVariant::WhereClause (WhereClauseChildren{keyword,constraints} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "where_clause")?;
ss.serialize_field("where_clause_keyword", &self.with(keyword))?;
ss.serialize_field("where_clause_constraints", &self.with(constraints))?;
ss.end()
}
SyntaxVariant::WhereConstraint (WhereConstraintChildren{left_type,operator,right_type} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "where_constraint")?;
ss.serialize_field("where_constraint_left_type", &self.with(left_type))?;
ss.serialize_field("where_constraint_operator", &self.with(operator))?;
ss.serialize_field("where_constraint_right_type", &self.with(right_type))?;
ss.end()
}
SyntaxVariant::MethodishDeclaration (MethodishDeclarationChildren{attribute,function_decl_header,function_body,semicolon} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "methodish_declaration")?;
ss.serialize_field("methodish_attribute", &self.with(attribute))?;
ss.serialize_field("methodish_function_decl_header", &self.with(function_decl_header))?;
ss.serialize_field("methodish_function_body", &self.with(function_body))?;
ss.serialize_field("methodish_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::MethodishTraitResolution (MethodishTraitResolutionChildren{attribute,function_decl_header,equal,name,semicolon} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "methodish_trait_resolution")?;
ss.serialize_field("methodish_trait_attribute", &self.with(attribute))?;
ss.serialize_field("methodish_trait_function_decl_header", &self.with(function_decl_header))?;
ss.serialize_field("methodish_trait_equal", &self.with(equal))?;
ss.serialize_field("methodish_trait_name", &self.with(name))?;
ss.serialize_field("methodish_trait_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ClassishDeclaration (ClassishDeclarationChildren{attribute,modifiers,xhp,keyword,name,type_parameters,extends_keyword,extends_list,implements_keyword,implements_list,where_clause,body} ) => {
let mut ss = s.serialize_struct("", 13)?;
ss.serialize_field("kind", "classish_declaration")?;
ss.serialize_field("classish_attribute", &self.with(attribute))?;
ss.serialize_field("classish_modifiers", &self.with(modifiers))?;
ss.serialize_field("classish_xhp", &self.with(xhp))?;
ss.serialize_field("classish_keyword", &self.with(keyword))?;
ss.serialize_field("classish_name", &self.with(name))?;
ss.serialize_field("classish_type_parameters", &self.with(type_parameters))?;
ss.serialize_field("classish_extends_keyword", &self.with(extends_keyword))?;
ss.serialize_field("classish_extends_list", &self.with(extends_list))?;
ss.serialize_field("classish_implements_keyword", &self.with(implements_keyword))?;
ss.serialize_field("classish_implements_list", &self.with(implements_list))?;
ss.serialize_field("classish_where_clause", &self.with(where_clause))?;
ss.serialize_field("classish_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::ClassishBody (ClassishBodyChildren{left_brace,elements,right_brace} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "classish_body")?;
ss.serialize_field("classish_body_left_brace", &self.with(left_brace))?;
ss.serialize_field("classish_body_elements", &self.with(elements))?;
ss.serialize_field("classish_body_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::TraitUse (TraitUseChildren{keyword,names,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "trait_use")?;
ss.serialize_field("trait_use_keyword", &self.with(keyword))?;
ss.serialize_field("trait_use_names", &self.with(names))?;
ss.serialize_field("trait_use_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::RequireClause (RequireClauseChildren{keyword,kind,name,semicolon} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "require_clause")?;
ss.serialize_field("require_keyword", &self.with(keyword))?;
ss.serialize_field("require_kind", &self.with(kind))?;
ss.serialize_field("require_name", &self.with(name))?;
ss.serialize_field("require_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ConstDeclaration (ConstDeclarationChildren{attribute_spec,modifiers,keyword,type_specifier,declarators,semicolon} ) => {
let mut ss = s.serialize_struct("", 7)?;
ss.serialize_field("kind", "const_declaration")?;
ss.serialize_field("const_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("const_modifiers", &self.with(modifiers))?;
ss.serialize_field("const_keyword", &self.with(keyword))?;
ss.serialize_field("const_type_specifier", &self.with(type_specifier))?;
ss.serialize_field("const_declarators", &self.with(declarators))?;
ss.serialize_field("const_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ConstantDeclarator (ConstantDeclaratorChildren{name,initializer} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "constant_declarator")?;
ss.serialize_field("constant_declarator_name", &self.with(name))?;
ss.serialize_field("constant_declarator_initializer", &self.with(initializer))?;
ss.end()
}
SyntaxVariant::TypeConstDeclaration (TypeConstDeclarationChildren{attribute_spec,modifiers,keyword,type_keyword,name,type_parameters,type_constraints,equal,type_specifier,semicolon} ) => {
let mut ss = s.serialize_struct("", 11)?;
ss.serialize_field("kind", "type_const_declaration")?;
ss.serialize_field("type_const_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("type_const_modifiers", &self.with(modifiers))?;
ss.serialize_field("type_const_keyword", &self.with(keyword))?;
ss.serialize_field("type_const_type_keyword", &self.with(type_keyword))?;
ss.serialize_field("type_const_name", &self.with(name))?;
ss.serialize_field("type_const_type_parameters", &self.with(type_parameters))?;
ss.serialize_field("type_const_type_constraints", &self.with(type_constraints))?;
ss.serialize_field("type_const_equal", &self.with(equal))?;
ss.serialize_field("type_const_type_specifier", &self.with(type_specifier))?;
ss.serialize_field("type_const_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ContextConstDeclaration (ContextConstDeclarationChildren{modifiers,const_keyword,ctx_keyword,name,type_parameters,constraint,equal,ctx_list,semicolon} ) => {
let mut ss = s.serialize_struct("", 10)?;
ss.serialize_field("kind", "context_const_declaration")?;
ss.serialize_field("context_const_modifiers", &self.with(modifiers))?;
ss.serialize_field("context_const_const_keyword", &self.with(const_keyword))?;
ss.serialize_field("context_const_ctx_keyword", &self.with(ctx_keyword))?;
ss.serialize_field("context_const_name", &self.with(name))?;
ss.serialize_field("context_const_type_parameters", &self.with(type_parameters))?;
ss.serialize_field("context_const_constraint", &self.with(constraint))?;
ss.serialize_field("context_const_equal", &self.with(equal))?;
ss.serialize_field("context_const_ctx_list", &self.with(ctx_list))?;
ss.serialize_field("context_const_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::DecoratedExpression (DecoratedExpressionChildren{decorator,expression} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "decorated_expression")?;
ss.serialize_field("decorated_expression_decorator", &self.with(decorator))?;
ss.serialize_field("decorated_expression_expression", &self.with(expression))?;
ss.end()
}
SyntaxVariant::ParameterDeclaration (ParameterDeclarationChildren{attribute,visibility,call_convention,readonly,type_,name,default_value} ) => {
let mut ss = s.serialize_struct("", 8)?;
ss.serialize_field("kind", "parameter_declaration")?;
ss.serialize_field("parameter_attribute", &self.with(attribute))?;
ss.serialize_field("parameter_visibility", &self.with(visibility))?;
ss.serialize_field("parameter_call_convention", &self.with(call_convention))?;
ss.serialize_field("parameter_readonly", &self.with(readonly))?;
ss.serialize_field("parameter_type", &self.with(type_))?;
ss.serialize_field("parameter_name", &self.with(name))?;
ss.serialize_field("parameter_default_value", &self.with(default_value))?;
ss.end()
}
SyntaxVariant::VariadicParameter (VariadicParameterChildren{call_convention,type_,ellipsis} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "variadic_parameter")?;
ss.serialize_field("variadic_parameter_call_convention", &self.with(call_convention))?;
ss.serialize_field("variadic_parameter_type", &self.with(type_))?;
ss.serialize_field("variadic_parameter_ellipsis", &self.with(ellipsis))?;
ss.end()
}
SyntaxVariant::OldAttributeSpecification (OldAttributeSpecificationChildren{left_double_angle,attributes,right_double_angle} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "old_attribute_specification")?;
ss.serialize_field("old_attribute_specification_left_double_angle", &self.with(left_double_angle))?;
ss.serialize_field("old_attribute_specification_attributes", &self.with(attributes))?;
ss.serialize_field("old_attribute_specification_right_double_angle", &self.with(right_double_angle))?;
ss.end()
}
SyntaxVariant::AttributeSpecification (AttributeSpecificationChildren{attributes} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "attribute_specification")?;
ss.serialize_field("attribute_specification_attributes", &self.with(attributes))?;
ss.end()
}
SyntaxVariant::Attribute (AttributeChildren{at,attribute_name} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "attribute")?;
ss.serialize_field("attribute_at", &self.with(at))?;
ss.serialize_field("attribute_attribute_name", &self.with(attribute_name))?;
ss.end()
}
SyntaxVariant::InclusionExpression (InclusionExpressionChildren{require,filename} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "inclusion_expression")?;
ss.serialize_field("inclusion_require", &self.with(require))?;
ss.serialize_field("inclusion_filename", &self.with(filename))?;
ss.end()
}
SyntaxVariant::InclusionDirective (InclusionDirectiveChildren{expression,semicolon} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "inclusion_directive")?;
ss.serialize_field("inclusion_expression", &self.with(expression))?;
ss.serialize_field("inclusion_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::CompoundStatement (CompoundStatementChildren{left_brace,statements,right_brace} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "compound_statement")?;
ss.serialize_field("compound_left_brace", &self.with(left_brace))?;
ss.serialize_field("compound_statements", &self.with(statements))?;
ss.serialize_field("compound_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::ExpressionStatement (ExpressionStatementChildren{expression,semicolon} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "expression_statement")?;
ss.serialize_field("expression_statement_expression", &self.with(expression))?;
ss.serialize_field("expression_statement_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::MarkupSection (MarkupSectionChildren{hashbang,suffix} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "markup_section")?;
ss.serialize_field("markup_hashbang", &self.with(hashbang))?;
ss.serialize_field("markup_suffix", &self.with(suffix))?;
ss.end()
}
SyntaxVariant::MarkupSuffix (MarkupSuffixChildren{less_than_question,name} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "markup_suffix")?;
ss.serialize_field("markup_suffix_less_than_question", &self.with(less_than_question))?;
ss.serialize_field("markup_suffix_name", &self.with(name))?;
ss.end()
}
SyntaxVariant::UnsetStatement (UnsetStatementChildren{keyword,left_paren,variables,right_paren,semicolon} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "unset_statement")?;
ss.serialize_field("unset_keyword", &self.with(keyword))?;
ss.serialize_field("unset_left_paren", &self.with(left_paren))?;
ss.serialize_field("unset_variables", &self.with(variables))?;
ss.serialize_field("unset_right_paren", &self.with(right_paren))?;
ss.serialize_field("unset_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::UsingStatementBlockScoped (UsingStatementBlockScopedChildren{await_keyword,using_keyword,left_paren,expressions,right_paren,body} ) => {
let mut ss = s.serialize_struct("", 7)?;
ss.serialize_field("kind", "using_statement_block_scoped")?;
ss.serialize_field("using_block_await_keyword", &self.with(await_keyword))?;
ss.serialize_field("using_block_using_keyword", &self.with(using_keyword))?;
ss.serialize_field("using_block_left_paren", &self.with(left_paren))?;
ss.serialize_field("using_block_expressions", &self.with(expressions))?;
ss.serialize_field("using_block_right_paren", &self.with(right_paren))?;
ss.serialize_field("using_block_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::UsingStatementFunctionScoped (UsingStatementFunctionScopedChildren{await_keyword,using_keyword,expression,semicolon} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "using_statement_function_scoped")?;
ss.serialize_field("using_function_await_keyword", &self.with(await_keyword))?;
ss.serialize_field("using_function_using_keyword", &self.with(using_keyword))?;
ss.serialize_field("using_function_expression", &self.with(expression))?;
ss.serialize_field("using_function_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::WhileStatement (WhileStatementChildren{keyword,left_paren,condition,right_paren,body} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "while_statement")?;
ss.serialize_field("while_keyword", &self.with(keyword))?;
ss.serialize_field("while_left_paren", &self.with(left_paren))?;
ss.serialize_field("while_condition", &self.with(condition))?;
ss.serialize_field("while_right_paren", &self.with(right_paren))?;
ss.serialize_field("while_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::IfStatement (IfStatementChildren{keyword,left_paren,condition,right_paren,statement,else_clause} ) => {
let mut ss = s.serialize_struct("", 7)?;
ss.serialize_field("kind", "if_statement")?;
ss.serialize_field("if_keyword", &self.with(keyword))?;
ss.serialize_field("if_left_paren", &self.with(left_paren))?;
ss.serialize_field("if_condition", &self.with(condition))?;
ss.serialize_field("if_right_paren", &self.with(right_paren))?;
ss.serialize_field("if_statement", &self.with(statement))?;
ss.serialize_field("if_else_clause", &self.with(else_clause))?;
ss.end()
}
SyntaxVariant::ElseClause (ElseClauseChildren{keyword,statement} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "else_clause")?;
ss.serialize_field("else_keyword", &self.with(keyword))?;
ss.serialize_field("else_statement", &self.with(statement))?;
ss.end()
}
SyntaxVariant::TryStatement (TryStatementChildren{keyword,compound_statement,catch_clauses,finally_clause} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "try_statement")?;
ss.serialize_field("try_keyword", &self.with(keyword))?;
ss.serialize_field("try_compound_statement", &self.with(compound_statement))?;
ss.serialize_field("try_catch_clauses", &self.with(catch_clauses))?;
ss.serialize_field("try_finally_clause", &self.with(finally_clause))?;
ss.end()
}
SyntaxVariant::CatchClause (CatchClauseChildren{keyword,left_paren,type_,variable,right_paren,body} ) => {
let mut ss = s.serialize_struct("", 7)?;
ss.serialize_field("kind", "catch_clause")?;
ss.serialize_field("catch_keyword", &self.with(keyword))?;
ss.serialize_field("catch_left_paren", &self.with(left_paren))?;
ss.serialize_field("catch_type", &self.with(type_))?;
ss.serialize_field("catch_variable", &self.with(variable))?;
ss.serialize_field("catch_right_paren", &self.with(right_paren))?;
ss.serialize_field("catch_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::FinallyClause (FinallyClauseChildren{keyword,body} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "finally_clause")?;
ss.serialize_field("finally_keyword", &self.with(keyword))?;
ss.serialize_field("finally_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::DoStatement (DoStatementChildren{keyword,body,while_keyword,left_paren,condition,right_paren,semicolon} ) => {
let mut ss = s.serialize_struct("", 8)?;
ss.serialize_field("kind", "do_statement")?;
ss.serialize_field("do_keyword", &self.with(keyword))?;
ss.serialize_field("do_body", &self.with(body))?;
ss.serialize_field("do_while_keyword", &self.with(while_keyword))?;
ss.serialize_field("do_left_paren", &self.with(left_paren))?;
ss.serialize_field("do_condition", &self.with(condition))?;
ss.serialize_field("do_right_paren", &self.with(right_paren))?;
ss.serialize_field("do_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ForStatement (ForStatementChildren{keyword,left_paren,initializer,first_semicolon,control,second_semicolon,end_of_loop,right_paren,body} ) => {
let mut ss = s.serialize_struct("", 10)?;
ss.serialize_field("kind", "for_statement")?;
ss.serialize_field("for_keyword", &self.with(keyword))?;
ss.serialize_field("for_left_paren", &self.with(left_paren))?;
ss.serialize_field("for_initializer", &self.with(initializer))?;
ss.serialize_field("for_first_semicolon", &self.with(first_semicolon))?;
ss.serialize_field("for_control", &self.with(control))?;
ss.serialize_field("for_second_semicolon", &self.with(second_semicolon))?;
ss.serialize_field("for_end_of_loop", &self.with(end_of_loop))?;
ss.serialize_field("for_right_paren", &self.with(right_paren))?;
ss.serialize_field("for_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::ForeachStatement (ForeachStatementChildren{keyword,left_paren,collection,await_keyword,as_,key,arrow,value,right_paren,body} ) => {
let mut ss = s.serialize_struct("", 11)?;
ss.serialize_field("kind", "foreach_statement")?;
ss.serialize_field("foreach_keyword", &self.with(keyword))?;
ss.serialize_field("foreach_left_paren", &self.with(left_paren))?;
ss.serialize_field("foreach_collection", &self.with(collection))?;
ss.serialize_field("foreach_await_keyword", &self.with(await_keyword))?;
ss.serialize_field("foreach_as", &self.with(as_))?;
ss.serialize_field("foreach_key", &self.with(key))?;
ss.serialize_field("foreach_arrow", &self.with(arrow))?;
ss.serialize_field("foreach_value", &self.with(value))?;
ss.serialize_field("foreach_right_paren", &self.with(right_paren))?;
ss.serialize_field("foreach_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::SwitchStatement (SwitchStatementChildren{keyword,left_paren,expression,right_paren,left_brace,sections,right_brace} ) => {
let mut ss = s.serialize_struct("", 8)?;
ss.serialize_field("kind", "switch_statement")?;
ss.serialize_field("switch_keyword", &self.with(keyword))?;
ss.serialize_field("switch_left_paren", &self.with(left_paren))?;
ss.serialize_field("switch_expression", &self.with(expression))?;
ss.serialize_field("switch_right_paren", &self.with(right_paren))?;
ss.serialize_field("switch_left_brace", &self.with(left_brace))?;
ss.serialize_field("switch_sections", &self.with(sections))?;
ss.serialize_field("switch_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::SwitchSection (SwitchSectionChildren{labels,statements,fallthrough} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "switch_section")?;
ss.serialize_field("switch_section_labels", &self.with(labels))?;
ss.serialize_field("switch_section_statements", &self.with(statements))?;
ss.serialize_field("switch_section_fallthrough", &self.with(fallthrough))?;
ss.end()
}
SyntaxVariant::SwitchFallthrough (SwitchFallthroughChildren{keyword,semicolon} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "switch_fallthrough")?;
ss.serialize_field("fallthrough_keyword", &self.with(keyword))?;
ss.serialize_field("fallthrough_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::CaseLabel (CaseLabelChildren{keyword,expression,colon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "case_label")?;
ss.serialize_field("case_keyword", &self.with(keyword))?;
ss.serialize_field("case_expression", &self.with(expression))?;
ss.serialize_field("case_colon", &self.with(colon))?;
ss.end()
}
SyntaxVariant::DefaultLabel (DefaultLabelChildren{keyword,colon} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "default_label")?;
ss.serialize_field("default_keyword", &self.with(keyword))?;
ss.serialize_field("default_colon", &self.with(colon))?;
ss.end()
}
SyntaxVariant::ReturnStatement (ReturnStatementChildren{keyword,expression,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "return_statement")?;
ss.serialize_field("return_keyword", &self.with(keyword))?;
ss.serialize_field("return_expression", &self.with(expression))?;
ss.serialize_field("return_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::YieldBreakStatement (YieldBreakStatementChildren{keyword,break_,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "yield_break_statement")?;
ss.serialize_field("yield_break_keyword", &self.with(keyword))?;
ss.serialize_field("yield_break_break", &self.with(break_))?;
ss.serialize_field("yield_break_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ThrowStatement (ThrowStatementChildren{keyword,expression,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "throw_statement")?;
ss.serialize_field("throw_keyword", &self.with(keyword))?;
ss.serialize_field("throw_expression", &self.with(expression))?;
ss.serialize_field("throw_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::BreakStatement (BreakStatementChildren{keyword,semicolon} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "break_statement")?;
ss.serialize_field("break_keyword", &self.with(keyword))?;
ss.serialize_field("break_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ContinueStatement (ContinueStatementChildren{keyword,semicolon} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "continue_statement")?;
ss.serialize_field("continue_keyword", &self.with(keyword))?;
ss.serialize_field("continue_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::EchoStatement (EchoStatementChildren{keyword,expressions,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "echo_statement")?;
ss.serialize_field("echo_keyword", &self.with(keyword))?;
ss.serialize_field("echo_expressions", &self.with(expressions))?;
ss.serialize_field("echo_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::ConcurrentStatement (ConcurrentStatementChildren{keyword,statement} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "concurrent_statement")?;
ss.serialize_field("concurrent_keyword", &self.with(keyword))?;
ss.serialize_field("concurrent_statement", &self.with(statement))?;
ss.end()
}
SyntaxVariant::SimpleInitializer (SimpleInitializerChildren{equal,value} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "simple_initializer")?;
ss.serialize_field("simple_initializer_equal", &self.with(equal))?;
ss.serialize_field("simple_initializer_value", &self.with(value))?;
ss.end()
}
SyntaxVariant::AnonymousClass (AnonymousClassChildren{class_keyword,left_paren,argument_list,right_paren,extends_keyword,extends_list,implements_keyword,implements_list,body} ) => {
let mut ss = s.serialize_struct("", 10)?;
ss.serialize_field("kind", "anonymous_class")?;
ss.serialize_field("anonymous_class_class_keyword", &self.with(class_keyword))?;
ss.serialize_field("anonymous_class_left_paren", &self.with(left_paren))?;
ss.serialize_field("anonymous_class_argument_list", &self.with(argument_list))?;
ss.serialize_field("anonymous_class_right_paren", &self.with(right_paren))?;
ss.serialize_field("anonymous_class_extends_keyword", &self.with(extends_keyword))?;
ss.serialize_field("anonymous_class_extends_list", &self.with(extends_list))?;
ss.serialize_field("anonymous_class_implements_keyword", &self.with(implements_keyword))?;
ss.serialize_field("anonymous_class_implements_list", &self.with(implements_list))?;
ss.serialize_field("anonymous_class_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::AnonymousFunction (AnonymousFunctionChildren{attribute_spec,async_keyword,function_keyword,left_paren,parameters,right_paren,ctx_list,colon,readonly_return,type_,use_,body} ) => {
let mut ss = s.serialize_struct("", 13)?;
ss.serialize_field("kind", "anonymous_function")?;
ss.serialize_field("anonymous_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("anonymous_async_keyword", &self.with(async_keyword))?;
ss.serialize_field("anonymous_function_keyword", &self.with(function_keyword))?;
ss.serialize_field("anonymous_left_paren", &self.with(left_paren))?;
ss.serialize_field("anonymous_parameters", &self.with(parameters))?;
ss.serialize_field("anonymous_right_paren", &self.with(right_paren))?;
ss.serialize_field("anonymous_ctx_list", &self.with(ctx_list))?;
ss.serialize_field("anonymous_colon", &self.with(colon))?;
ss.serialize_field("anonymous_readonly_return", &self.with(readonly_return))?;
ss.serialize_field("anonymous_type", &self.with(type_))?;
ss.serialize_field("anonymous_use", &self.with(use_))?;
ss.serialize_field("anonymous_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::AnonymousFunctionUseClause (AnonymousFunctionUseClauseChildren{keyword,left_paren,variables,right_paren} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "anonymous_function_use_clause")?;
ss.serialize_field("anonymous_use_keyword", &self.with(keyword))?;
ss.serialize_field("anonymous_use_left_paren", &self.with(left_paren))?;
ss.serialize_field("anonymous_use_variables", &self.with(variables))?;
ss.serialize_field("anonymous_use_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::LambdaExpression (LambdaExpressionChildren{attribute_spec,async_,signature,arrow,body} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "lambda_expression")?;
ss.serialize_field("lambda_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("lambda_async", &self.with(async_))?;
ss.serialize_field("lambda_signature", &self.with(signature))?;
ss.serialize_field("lambda_arrow", &self.with(arrow))?;
ss.serialize_field("lambda_body", &self.with(body))?;
ss.end()
}
SyntaxVariant::LambdaSignature (LambdaSignatureChildren{left_paren,parameters,right_paren,contexts,colon,readonly_return,type_} ) => {
let mut ss = s.serialize_struct("", 8)?;
ss.serialize_field("kind", "lambda_signature")?;
ss.serialize_field("lambda_left_paren", &self.with(left_paren))?;
ss.serialize_field("lambda_parameters", &self.with(parameters))?;
ss.serialize_field("lambda_right_paren", &self.with(right_paren))?;
ss.serialize_field("lambda_contexts", &self.with(contexts))?;
ss.serialize_field("lambda_colon", &self.with(colon))?;
ss.serialize_field("lambda_readonly_return", &self.with(readonly_return))?;
ss.serialize_field("lambda_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::CastExpression (CastExpressionChildren{left_paren,type_,right_paren,operand} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "cast_expression")?;
ss.serialize_field("cast_left_paren", &self.with(left_paren))?;
ss.serialize_field("cast_type", &self.with(type_))?;
ss.serialize_field("cast_right_paren", &self.with(right_paren))?;
ss.serialize_field("cast_operand", &self.with(operand))?;
ss.end()
}
SyntaxVariant::ScopeResolutionExpression (ScopeResolutionExpressionChildren{qualifier,operator,name} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "scope_resolution_expression")?;
ss.serialize_field("scope_resolution_qualifier", &self.with(qualifier))?;
ss.serialize_field("scope_resolution_operator", &self.with(operator))?;
ss.serialize_field("scope_resolution_name", &self.with(name))?;
ss.end()
}
SyntaxVariant::MemberSelectionExpression (MemberSelectionExpressionChildren{object,operator,name} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "member_selection_expression")?;
ss.serialize_field("member_object", &self.with(object))?;
ss.serialize_field("member_operator", &self.with(operator))?;
ss.serialize_field("member_name", &self.with(name))?;
ss.end()
}
SyntaxVariant::SafeMemberSelectionExpression (SafeMemberSelectionExpressionChildren{object,operator,name} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "safe_member_selection_expression")?;
ss.serialize_field("safe_member_object", &self.with(object))?;
ss.serialize_field("safe_member_operator", &self.with(operator))?;
ss.serialize_field("safe_member_name", &self.with(name))?;
ss.end()
}
SyntaxVariant::EmbeddedMemberSelectionExpression (EmbeddedMemberSelectionExpressionChildren{object,operator,name} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "embedded_member_selection_expression")?;
ss.serialize_field("embedded_member_object", &self.with(object))?;
ss.serialize_field("embedded_member_operator", &self.with(operator))?;
ss.serialize_field("embedded_member_name", &self.with(name))?;
ss.end()
}
SyntaxVariant::YieldExpression (YieldExpressionChildren{keyword,operand} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "yield_expression")?;
ss.serialize_field("yield_keyword", &self.with(keyword))?;
ss.serialize_field("yield_operand", &self.with(operand))?;
ss.end()
}
SyntaxVariant::PrefixUnaryExpression (PrefixUnaryExpressionChildren{operator,operand} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "prefix_unary_expression")?;
ss.serialize_field("prefix_unary_operator", &self.with(operator))?;
ss.serialize_field("prefix_unary_operand", &self.with(operand))?;
ss.end()
}
SyntaxVariant::PostfixUnaryExpression (PostfixUnaryExpressionChildren{operand,operator} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "postfix_unary_expression")?;
ss.serialize_field("postfix_unary_operand", &self.with(operand))?;
ss.serialize_field("postfix_unary_operator", &self.with(operator))?;
ss.end()
}
SyntaxVariant::BinaryExpression (BinaryExpressionChildren{left_operand,operator,right_operand} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "binary_expression")?;
ss.serialize_field("binary_left_operand", &self.with(left_operand))?;
ss.serialize_field("binary_operator", &self.with(operator))?;
ss.serialize_field("binary_right_operand", &self.with(right_operand))?;
ss.end()
}
SyntaxVariant::IsExpression (IsExpressionChildren{left_operand,operator,right_operand} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "is_expression")?;
ss.serialize_field("is_left_operand", &self.with(left_operand))?;
ss.serialize_field("is_operator", &self.with(operator))?;
ss.serialize_field("is_right_operand", &self.with(right_operand))?;
ss.end()
}
SyntaxVariant::AsExpression (AsExpressionChildren{left_operand,operator,right_operand} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "as_expression")?;
ss.serialize_field("as_left_operand", &self.with(left_operand))?;
ss.serialize_field("as_operator", &self.with(operator))?;
ss.serialize_field("as_right_operand", &self.with(right_operand))?;
ss.end()
}
SyntaxVariant::NullableAsExpression (NullableAsExpressionChildren{left_operand,operator,right_operand} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "nullable_as_expression")?;
ss.serialize_field("nullable_as_left_operand", &self.with(left_operand))?;
ss.serialize_field("nullable_as_operator", &self.with(operator))?;
ss.serialize_field("nullable_as_right_operand", &self.with(right_operand))?;
ss.end()
}
SyntaxVariant::UpcastExpression (UpcastExpressionChildren{left_operand,operator,right_operand} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "upcast_expression")?;
ss.serialize_field("upcast_left_operand", &self.with(left_operand))?;
ss.serialize_field("upcast_operator", &self.with(operator))?;
ss.serialize_field("upcast_right_operand", &self.with(right_operand))?;
ss.end()
}
SyntaxVariant::ConditionalExpression (ConditionalExpressionChildren{test,question,consequence,colon,alternative} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "conditional_expression")?;
ss.serialize_field("conditional_test", &self.with(test))?;
ss.serialize_field("conditional_question", &self.with(question))?;
ss.serialize_field("conditional_consequence", &self.with(consequence))?;
ss.serialize_field("conditional_colon", &self.with(colon))?;
ss.serialize_field("conditional_alternative", &self.with(alternative))?;
ss.end()
}
SyntaxVariant::EvalExpression (EvalExpressionChildren{keyword,left_paren,argument,right_paren} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "eval_expression")?;
ss.serialize_field("eval_keyword", &self.with(keyword))?;
ss.serialize_field("eval_left_paren", &self.with(left_paren))?;
ss.serialize_field("eval_argument", &self.with(argument))?;
ss.serialize_field("eval_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::IssetExpression (IssetExpressionChildren{keyword,left_paren,argument_list,right_paren} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "isset_expression")?;
ss.serialize_field("isset_keyword", &self.with(keyword))?;
ss.serialize_field("isset_left_paren", &self.with(left_paren))?;
ss.serialize_field("isset_argument_list", &self.with(argument_list))?;
ss.serialize_field("isset_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::FunctionCallExpression (FunctionCallExpressionChildren{receiver,type_args,left_paren,argument_list,right_paren} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "function_call_expression")?;
ss.serialize_field("function_call_receiver", &self.with(receiver))?;
ss.serialize_field("function_call_type_args", &self.with(type_args))?;
ss.serialize_field("function_call_left_paren", &self.with(left_paren))?;
ss.serialize_field("function_call_argument_list", &self.with(argument_list))?;
ss.serialize_field("function_call_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::FunctionPointerExpression (FunctionPointerExpressionChildren{receiver,type_args} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "function_pointer_expression")?;
ss.serialize_field("function_pointer_receiver", &self.with(receiver))?;
ss.serialize_field("function_pointer_type_args", &self.with(type_args))?;
ss.end()
}
SyntaxVariant::ParenthesizedExpression (ParenthesizedExpressionChildren{left_paren,expression,right_paren} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "parenthesized_expression")?;
ss.serialize_field("parenthesized_expression_left_paren", &self.with(left_paren))?;
ss.serialize_field("parenthesized_expression_expression", &self.with(expression))?;
ss.serialize_field("parenthesized_expression_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::BracedExpression (BracedExpressionChildren{left_brace,expression,right_brace} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "braced_expression")?;
ss.serialize_field("braced_expression_left_brace", &self.with(left_brace))?;
ss.serialize_field("braced_expression_expression", &self.with(expression))?;
ss.serialize_field("braced_expression_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::ETSpliceExpression (ETSpliceExpressionChildren{dollar,left_brace,expression,right_brace} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "et_splice_expression")?;
ss.serialize_field("et_splice_expression_dollar", &self.with(dollar))?;
ss.serialize_field("et_splice_expression_left_brace", &self.with(left_brace))?;
ss.serialize_field("et_splice_expression_expression", &self.with(expression))?;
ss.serialize_field("et_splice_expression_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::EmbeddedBracedExpression (EmbeddedBracedExpressionChildren{left_brace,expression,right_brace} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "embedded_braced_expression")?;
ss.serialize_field("embedded_braced_expression_left_brace", &self.with(left_brace))?;
ss.serialize_field("embedded_braced_expression_expression", &self.with(expression))?;
ss.serialize_field("embedded_braced_expression_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::ListExpression (ListExpressionChildren{keyword,left_paren,members,right_paren} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "list_expression")?;
ss.serialize_field("list_keyword", &self.with(keyword))?;
ss.serialize_field("list_left_paren", &self.with(left_paren))?;
ss.serialize_field("list_members", &self.with(members))?;
ss.serialize_field("list_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::CollectionLiteralExpression (CollectionLiteralExpressionChildren{name,left_brace,initializers,right_brace} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "collection_literal_expression")?;
ss.serialize_field("collection_literal_name", &self.with(name))?;
ss.serialize_field("collection_literal_left_brace", &self.with(left_brace))?;
ss.serialize_field("collection_literal_initializers", &self.with(initializers))?;
ss.serialize_field("collection_literal_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::ObjectCreationExpression (ObjectCreationExpressionChildren{new_keyword,object} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "object_creation_expression")?;
ss.serialize_field("object_creation_new_keyword", &self.with(new_keyword))?;
ss.serialize_field("object_creation_object", &self.with(object))?;
ss.end()
}
SyntaxVariant::ConstructorCall (ConstructorCallChildren{type_,left_paren,argument_list,right_paren} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "constructor_call")?;
ss.serialize_field("constructor_call_type", &self.with(type_))?;
ss.serialize_field("constructor_call_left_paren", &self.with(left_paren))?;
ss.serialize_field("constructor_call_argument_list", &self.with(argument_list))?;
ss.serialize_field("constructor_call_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::DarrayIntrinsicExpression (DarrayIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "darray_intrinsic_expression")?;
ss.serialize_field("darray_intrinsic_keyword", &self.with(keyword))?;
ss.serialize_field("darray_intrinsic_explicit_type", &self.with(explicit_type))?;
ss.serialize_field("darray_intrinsic_left_bracket", &self.with(left_bracket))?;
ss.serialize_field("darray_intrinsic_members", &self.with(members))?;
ss.serialize_field("darray_intrinsic_right_bracket", &self.with(right_bracket))?;
ss.end()
}
SyntaxVariant::DictionaryIntrinsicExpression (DictionaryIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "dictionary_intrinsic_expression")?;
ss.serialize_field("dictionary_intrinsic_keyword", &self.with(keyword))?;
ss.serialize_field("dictionary_intrinsic_explicit_type", &self.with(explicit_type))?;
ss.serialize_field("dictionary_intrinsic_left_bracket", &self.with(left_bracket))?;
ss.serialize_field("dictionary_intrinsic_members", &self.with(members))?;
ss.serialize_field("dictionary_intrinsic_right_bracket", &self.with(right_bracket))?;
ss.end()
}
SyntaxVariant::KeysetIntrinsicExpression (KeysetIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "keyset_intrinsic_expression")?;
ss.serialize_field("keyset_intrinsic_keyword", &self.with(keyword))?;
ss.serialize_field("keyset_intrinsic_explicit_type", &self.with(explicit_type))?;
ss.serialize_field("keyset_intrinsic_left_bracket", &self.with(left_bracket))?;
ss.serialize_field("keyset_intrinsic_members", &self.with(members))?;
ss.serialize_field("keyset_intrinsic_right_bracket", &self.with(right_bracket))?;
ss.end()
}
SyntaxVariant::VarrayIntrinsicExpression (VarrayIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "varray_intrinsic_expression")?;
ss.serialize_field("varray_intrinsic_keyword", &self.with(keyword))?;
ss.serialize_field("varray_intrinsic_explicit_type", &self.with(explicit_type))?;
ss.serialize_field("varray_intrinsic_left_bracket", &self.with(left_bracket))?;
ss.serialize_field("varray_intrinsic_members", &self.with(members))?;
ss.serialize_field("varray_intrinsic_right_bracket", &self.with(right_bracket))?;
ss.end()
}
SyntaxVariant::VectorIntrinsicExpression (VectorIntrinsicExpressionChildren{keyword,explicit_type,left_bracket,members,right_bracket} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "vector_intrinsic_expression")?;
ss.serialize_field("vector_intrinsic_keyword", &self.with(keyword))?;
ss.serialize_field("vector_intrinsic_explicit_type", &self.with(explicit_type))?;
ss.serialize_field("vector_intrinsic_left_bracket", &self.with(left_bracket))?;
ss.serialize_field("vector_intrinsic_members", &self.with(members))?;
ss.serialize_field("vector_intrinsic_right_bracket", &self.with(right_bracket))?;
ss.end()
}
SyntaxVariant::ElementInitializer (ElementInitializerChildren{key,arrow,value} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "element_initializer")?;
ss.serialize_field("element_key", &self.with(key))?;
ss.serialize_field("element_arrow", &self.with(arrow))?;
ss.serialize_field("element_value", &self.with(value))?;
ss.end()
}
SyntaxVariant::SubscriptExpression (SubscriptExpressionChildren{receiver,left_bracket,index,right_bracket} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "subscript_expression")?;
ss.serialize_field("subscript_receiver", &self.with(receiver))?;
ss.serialize_field("subscript_left_bracket", &self.with(left_bracket))?;
ss.serialize_field("subscript_index", &self.with(index))?;
ss.serialize_field("subscript_right_bracket", &self.with(right_bracket))?;
ss.end()
}
SyntaxVariant::EmbeddedSubscriptExpression (EmbeddedSubscriptExpressionChildren{receiver,left_bracket,index,right_bracket} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "embedded_subscript_expression")?;
ss.serialize_field("embedded_subscript_receiver", &self.with(receiver))?;
ss.serialize_field("embedded_subscript_left_bracket", &self.with(left_bracket))?;
ss.serialize_field("embedded_subscript_index", &self.with(index))?;
ss.serialize_field("embedded_subscript_right_bracket", &self.with(right_bracket))?;
ss.end()
}
SyntaxVariant::AwaitableCreationExpression (AwaitableCreationExpressionChildren{attribute_spec,async_,compound_statement} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "awaitable_creation_expression")?;
ss.serialize_field("awaitable_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("awaitable_async", &self.with(async_))?;
ss.serialize_field("awaitable_compound_statement", &self.with(compound_statement))?;
ss.end()
}
SyntaxVariant::XHPChildrenDeclaration (XHPChildrenDeclarationChildren{keyword,expression,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "xhp_children_declaration")?;
ss.serialize_field("xhp_children_keyword", &self.with(keyword))?;
ss.serialize_field("xhp_children_expression", &self.with(expression))?;
ss.serialize_field("xhp_children_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::XHPChildrenParenthesizedList (XHPChildrenParenthesizedListChildren{left_paren,xhp_children,right_paren} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "xhp_children_parenthesized_list")?;
ss.serialize_field("xhp_children_list_left_paren", &self.with(left_paren))?;
ss.serialize_field("xhp_children_list_xhp_children", &self.with(xhp_children))?;
ss.serialize_field("xhp_children_list_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::XHPCategoryDeclaration (XHPCategoryDeclarationChildren{keyword,categories,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "xhp_category_declaration")?;
ss.serialize_field("xhp_category_keyword", &self.with(keyword))?;
ss.serialize_field("xhp_category_categories", &self.with(categories))?;
ss.serialize_field("xhp_category_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::XHPEnumType (XHPEnumTypeChildren{like,keyword,left_brace,values,right_brace} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "xhp_enum_type")?;
ss.serialize_field("xhp_enum_like", &self.with(like))?;
ss.serialize_field("xhp_enum_keyword", &self.with(keyword))?;
ss.serialize_field("xhp_enum_left_brace", &self.with(left_brace))?;
ss.serialize_field("xhp_enum_values", &self.with(values))?;
ss.serialize_field("xhp_enum_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::XHPLateinit (XHPLateinitChildren{at,keyword} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "xhp_lateinit")?;
ss.serialize_field("xhp_lateinit_at", &self.with(at))?;
ss.serialize_field("xhp_lateinit_keyword", &self.with(keyword))?;
ss.end()
}
SyntaxVariant::XHPRequired (XHPRequiredChildren{at,keyword} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "xhp_required")?;
ss.serialize_field("xhp_required_at", &self.with(at))?;
ss.serialize_field("xhp_required_keyword", &self.with(keyword))?;
ss.end()
}
SyntaxVariant::XHPClassAttributeDeclaration (XHPClassAttributeDeclarationChildren{keyword,attributes,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "xhp_class_attribute_declaration")?;
ss.serialize_field("xhp_attribute_keyword", &self.with(keyword))?;
ss.serialize_field("xhp_attribute_attributes", &self.with(attributes))?;
ss.serialize_field("xhp_attribute_semicolon", &self.with(semicolon))?;
ss.end()
}
SyntaxVariant::XHPClassAttribute (XHPClassAttributeChildren{type_,name,initializer,required} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "xhp_class_attribute")?;
ss.serialize_field("xhp_attribute_decl_type", &self.with(type_))?;
ss.serialize_field("xhp_attribute_decl_name", &self.with(name))?;
ss.serialize_field("xhp_attribute_decl_initializer", &self.with(initializer))?;
ss.serialize_field("xhp_attribute_decl_required", &self.with(required))?;
ss.end()
}
SyntaxVariant::XHPSimpleClassAttribute (XHPSimpleClassAttributeChildren{type_} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "xhp_simple_class_attribute")?;
ss.serialize_field("xhp_simple_class_attribute_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::XHPSimpleAttribute (XHPSimpleAttributeChildren{name,equal,expression} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "xhp_simple_attribute")?;
ss.serialize_field("xhp_simple_attribute_name", &self.with(name))?;
ss.serialize_field("xhp_simple_attribute_equal", &self.with(equal))?;
ss.serialize_field("xhp_simple_attribute_expression", &self.with(expression))?;
ss.end()
}
SyntaxVariant::XHPSpreadAttribute (XHPSpreadAttributeChildren{left_brace,spread_operator,expression,right_brace} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "xhp_spread_attribute")?;
ss.serialize_field("xhp_spread_attribute_left_brace", &self.with(left_brace))?;
ss.serialize_field("xhp_spread_attribute_spread_operator", &self.with(spread_operator))?;
ss.serialize_field("xhp_spread_attribute_expression", &self.with(expression))?;
ss.serialize_field("xhp_spread_attribute_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::XHPOpen (XHPOpenChildren{left_angle,name,attributes,right_angle} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "xhp_open")?;
ss.serialize_field("xhp_open_left_angle", &self.with(left_angle))?;
ss.serialize_field("xhp_open_name", &self.with(name))?;
ss.serialize_field("xhp_open_attributes", &self.with(attributes))?;
ss.serialize_field("xhp_open_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::XHPExpression (XHPExpressionChildren{open,body,close} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "xhp_expression")?;
ss.serialize_field("xhp_open", &self.with(open))?;
ss.serialize_field("xhp_body", &self.with(body))?;
ss.serialize_field("xhp_close", &self.with(close))?;
ss.end()
}
SyntaxVariant::XHPClose (XHPCloseChildren{left_angle,name,right_angle} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "xhp_close")?;
ss.serialize_field("xhp_close_left_angle", &self.with(left_angle))?;
ss.serialize_field("xhp_close_name", &self.with(name))?;
ss.serialize_field("xhp_close_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::TypeConstant (TypeConstantChildren{left_type,separator,right_type} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "type_constant")?;
ss.serialize_field("type_constant_left_type", &self.with(left_type))?;
ss.serialize_field("type_constant_separator", &self.with(separator))?;
ss.serialize_field("type_constant_right_type", &self.with(right_type))?;
ss.end()
}
SyntaxVariant::VectorTypeSpecifier (VectorTypeSpecifierChildren{keyword,left_angle,type_,trailing_comma,right_angle} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "vector_type_specifier")?;
ss.serialize_field("vector_type_keyword", &self.with(keyword))?;
ss.serialize_field("vector_type_left_angle", &self.with(left_angle))?;
ss.serialize_field("vector_type_type", &self.with(type_))?;
ss.serialize_field("vector_type_trailing_comma", &self.with(trailing_comma))?;
ss.serialize_field("vector_type_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::KeysetTypeSpecifier (KeysetTypeSpecifierChildren{keyword,left_angle,type_,trailing_comma,right_angle} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "keyset_type_specifier")?;
ss.serialize_field("keyset_type_keyword", &self.with(keyword))?;
ss.serialize_field("keyset_type_left_angle", &self.with(left_angle))?;
ss.serialize_field("keyset_type_type", &self.with(type_))?;
ss.serialize_field("keyset_type_trailing_comma", &self.with(trailing_comma))?;
ss.serialize_field("keyset_type_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::TupleTypeExplicitSpecifier (TupleTypeExplicitSpecifierChildren{keyword,left_angle,types,right_angle} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "tuple_type_explicit_specifier")?;
ss.serialize_field("tuple_type_keyword", &self.with(keyword))?;
ss.serialize_field("tuple_type_left_angle", &self.with(left_angle))?;
ss.serialize_field("tuple_type_types", &self.with(types))?;
ss.serialize_field("tuple_type_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::VarrayTypeSpecifier (VarrayTypeSpecifierChildren{keyword,left_angle,type_,trailing_comma,right_angle} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "varray_type_specifier")?;
ss.serialize_field("varray_keyword", &self.with(keyword))?;
ss.serialize_field("varray_left_angle", &self.with(left_angle))?;
ss.serialize_field("varray_type", &self.with(type_))?;
ss.serialize_field("varray_trailing_comma", &self.with(trailing_comma))?;
ss.serialize_field("varray_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::FunctionCtxTypeSpecifier (FunctionCtxTypeSpecifierChildren{keyword,variable} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "function_ctx_type_specifier")?;
ss.serialize_field("function_ctx_type_keyword", &self.with(keyword))?;
ss.serialize_field("function_ctx_type_variable", &self.with(variable))?;
ss.end()
}
SyntaxVariant::TypeParameter (TypeParameterChildren{attribute_spec,reified,variance,name,param_params,constraints} ) => {
let mut ss = s.serialize_struct("", 7)?;
ss.serialize_field("kind", "type_parameter")?;
ss.serialize_field("type_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("type_reified", &self.with(reified))?;
ss.serialize_field("type_variance", &self.with(variance))?;
ss.serialize_field("type_name", &self.with(name))?;
ss.serialize_field("type_param_params", &self.with(param_params))?;
ss.serialize_field("type_constraints", &self.with(constraints))?;
ss.end()
}
SyntaxVariant::TypeConstraint (TypeConstraintChildren{keyword,type_} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "type_constraint")?;
ss.serialize_field("constraint_keyword", &self.with(keyword))?;
ss.serialize_field("constraint_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::ContextConstraint (ContextConstraintChildren{keyword,ctx_list} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "context_constraint")?;
ss.serialize_field("ctx_constraint_keyword", &self.with(keyword))?;
ss.serialize_field("ctx_constraint_ctx_list", &self.with(ctx_list))?;
ss.end()
}
SyntaxVariant::DarrayTypeSpecifier (DarrayTypeSpecifierChildren{keyword,left_angle,key,comma,value,trailing_comma,right_angle} ) => {
let mut ss = s.serialize_struct("", 8)?;
ss.serialize_field("kind", "darray_type_specifier")?;
ss.serialize_field("darray_keyword", &self.with(keyword))?;
ss.serialize_field("darray_left_angle", &self.with(left_angle))?;
ss.serialize_field("darray_key", &self.with(key))?;
ss.serialize_field("darray_comma", &self.with(comma))?;
ss.serialize_field("darray_value", &self.with(value))?;
ss.serialize_field("darray_trailing_comma", &self.with(trailing_comma))?;
ss.serialize_field("darray_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::DictionaryTypeSpecifier (DictionaryTypeSpecifierChildren{keyword,left_angle,members,right_angle} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "dictionary_type_specifier")?;
ss.serialize_field("dictionary_type_keyword", &self.with(keyword))?;
ss.serialize_field("dictionary_type_left_angle", &self.with(left_angle))?;
ss.serialize_field("dictionary_type_members", &self.with(members))?;
ss.serialize_field("dictionary_type_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::ClosureTypeSpecifier (ClosureTypeSpecifierChildren{outer_left_paren,readonly_keyword,function_keyword,inner_left_paren,parameter_list,inner_right_paren,contexts,colon,readonly_return,return_type,outer_right_paren} ) => {
let mut ss = s.serialize_struct("", 12)?;
ss.serialize_field("kind", "closure_type_specifier")?;
ss.serialize_field("closure_outer_left_paren", &self.with(outer_left_paren))?;
ss.serialize_field("closure_readonly_keyword", &self.with(readonly_keyword))?;
ss.serialize_field("closure_function_keyword", &self.with(function_keyword))?;
ss.serialize_field("closure_inner_left_paren", &self.with(inner_left_paren))?;
ss.serialize_field("closure_parameter_list", &self.with(parameter_list))?;
ss.serialize_field("closure_inner_right_paren", &self.with(inner_right_paren))?;
ss.serialize_field("closure_contexts", &self.with(contexts))?;
ss.serialize_field("closure_colon", &self.with(colon))?;
ss.serialize_field("closure_readonly_return", &self.with(readonly_return))?;
ss.serialize_field("closure_return_type", &self.with(return_type))?;
ss.serialize_field("closure_outer_right_paren", &self.with(outer_right_paren))?;
ss.end()
}
SyntaxVariant::ClosureParameterTypeSpecifier (ClosureParameterTypeSpecifierChildren{call_convention,readonly,type_} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "closure_parameter_type_specifier")?;
ss.serialize_field("closure_parameter_call_convention", &self.with(call_convention))?;
ss.serialize_field("closure_parameter_readonly", &self.with(readonly))?;
ss.serialize_field("closure_parameter_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::ClassnameTypeSpecifier (ClassnameTypeSpecifierChildren{keyword,left_angle,type_,trailing_comma,right_angle} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "classname_type_specifier")?;
ss.serialize_field("classname_keyword", &self.with(keyword))?;
ss.serialize_field("classname_left_angle", &self.with(left_angle))?;
ss.serialize_field("classname_type", &self.with(type_))?;
ss.serialize_field("classname_trailing_comma", &self.with(trailing_comma))?;
ss.serialize_field("classname_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::FieldSpecifier (FieldSpecifierChildren{question,name,arrow,type_} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "field_specifier")?;
ss.serialize_field("field_question", &self.with(question))?;
ss.serialize_field("field_name", &self.with(name))?;
ss.serialize_field("field_arrow", &self.with(arrow))?;
ss.serialize_field("field_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::FieldInitializer (FieldInitializerChildren{name,arrow,value} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "field_initializer")?;
ss.serialize_field("field_initializer_name", &self.with(name))?;
ss.serialize_field("field_initializer_arrow", &self.with(arrow))?;
ss.serialize_field("field_initializer_value", &self.with(value))?;
ss.end()
}
SyntaxVariant::ShapeTypeSpecifier (ShapeTypeSpecifierChildren{keyword,left_paren,fields,ellipsis,right_paren} ) => {
let mut ss = s.serialize_struct("", 6)?;
ss.serialize_field("kind", "shape_type_specifier")?;
ss.serialize_field("shape_type_keyword", &self.with(keyword))?;
ss.serialize_field("shape_type_left_paren", &self.with(left_paren))?;
ss.serialize_field("shape_type_fields", &self.with(fields))?;
ss.serialize_field("shape_type_ellipsis", &self.with(ellipsis))?;
ss.serialize_field("shape_type_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::ShapeExpression (ShapeExpressionChildren{keyword,left_paren,fields,right_paren} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "shape_expression")?;
ss.serialize_field("shape_expression_keyword", &self.with(keyword))?;
ss.serialize_field("shape_expression_left_paren", &self.with(left_paren))?;
ss.serialize_field("shape_expression_fields", &self.with(fields))?;
ss.serialize_field("shape_expression_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::TupleExpression (TupleExpressionChildren{keyword,left_paren,items,right_paren} ) => {
let mut ss = s.serialize_struct("", 5)?;
ss.serialize_field("kind", "tuple_expression")?;
ss.serialize_field("tuple_expression_keyword", &self.with(keyword))?;
ss.serialize_field("tuple_expression_left_paren", &self.with(left_paren))?;
ss.serialize_field("tuple_expression_items", &self.with(items))?;
ss.serialize_field("tuple_expression_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::GenericTypeSpecifier (GenericTypeSpecifierChildren{class_type,argument_list} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "generic_type_specifier")?;
ss.serialize_field("generic_class_type", &self.with(class_type))?;
ss.serialize_field("generic_argument_list", &self.with(argument_list))?;
ss.end()
}
SyntaxVariant::NullableTypeSpecifier (NullableTypeSpecifierChildren{question,type_} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "nullable_type_specifier")?;
ss.serialize_field("nullable_question", &self.with(question))?;
ss.serialize_field("nullable_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::LikeTypeSpecifier (LikeTypeSpecifierChildren{tilde,type_} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "like_type_specifier")?;
ss.serialize_field("like_tilde", &self.with(tilde))?;
ss.serialize_field("like_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::SoftTypeSpecifier (SoftTypeSpecifierChildren{at,type_} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "soft_type_specifier")?;
ss.serialize_field("soft_at", &self.with(at))?;
ss.serialize_field("soft_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::AttributizedSpecifier (AttributizedSpecifierChildren{attribute_spec,type_} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "attributized_specifier")?;
ss.serialize_field("attributized_specifier_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("attributized_specifier_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::ReifiedTypeArgument (ReifiedTypeArgumentChildren{reified,type_} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "reified_type_argument")?;
ss.serialize_field("reified_type_argument_reified", &self.with(reified))?;
ss.serialize_field("reified_type_argument_type", &self.with(type_))?;
ss.end()
}
SyntaxVariant::TypeArguments (TypeArgumentsChildren{left_angle,types,right_angle} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "type_arguments")?;
ss.serialize_field("type_arguments_left_angle", &self.with(left_angle))?;
ss.serialize_field("type_arguments_types", &self.with(types))?;
ss.serialize_field("type_arguments_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::TypeParameters (TypeParametersChildren{left_angle,parameters,right_angle} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "type_parameters")?;
ss.serialize_field("type_parameters_left_angle", &self.with(left_angle))?;
ss.serialize_field("type_parameters_parameters", &self.with(parameters))?;
ss.serialize_field("type_parameters_right_angle", &self.with(right_angle))?;
ss.end()
}
SyntaxVariant::TupleTypeSpecifier (TupleTypeSpecifierChildren{left_paren,types,right_paren} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "tuple_type_specifier")?;
ss.serialize_field("tuple_left_paren", &self.with(left_paren))?;
ss.serialize_field("tuple_types", &self.with(types))?;
ss.serialize_field("tuple_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::UnionTypeSpecifier (UnionTypeSpecifierChildren{left_paren,types,right_paren} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "union_type_specifier")?;
ss.serialize_field("union_left_paren", &self.with(left_paren))?;
ss.serialize_field("union_types", &self.with(types))?;
ss.serialize_field("union_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::IntersectionTypeSpecifier (IntersectionTypeSpecifierChildren{left_paren,types,right_paren} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "intersection_type_specifier")?;
ss.serialize_field("intersection_left_paren", &self.with(left_paren))?;
ss.serialize_field("intersection_types", &self.with(types))?;
ss.serialize_field("intersection_right_paren", &self.with(right_paren))?;
ss.end()
}
SyntaxVariant::ErrorSyntax (ErrorSyntaxChildren{error} ) => {
let mut ss = s.serialize_struct("", 2)?;
ss.serialize_field("kind", "error")?;
ss.serialize_field("error_error", &self.with(error))?;
ss.end()
}
SyntaxVariant::ListItem (ListItemChildren{item,separator} ) => {
let mut ss = s.serialize_struct("", 3)?;
ss.serialize_field("kind", "list_item")?;
ss.serialize_field("list_item", &self.with(item))?;
ss.serialize_field("list_separator", &self.with(separator))?;
ss.end()
}
SyntaxVariant::EnumClassLabelExpression (EnumClassLabelExpressionChildren{qualifier,hash,expression} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "enum_class_label")?;
ss.serialize_field("enum_class_label_qualifier", &self.with(qualifier))?;
ss.serialize_field("enum_class_label_hash", &self.with(hash))?;
ss.serialize_field("enum_class_label_expression", &self.with(expression))?;
ss.end()
}
SyntaxVariant::ModuleDeclaration (ModuleDeclarationChildren{attribute_spec,new_keyword,module_keyword,name,left_brace,right_brace} ) => {
let mut ss = s.serialize_struct("", 7)?;
ss.serialize_field("kind", "module_declaration")?;
ss.serialize_field("module_declaration_attribute_spec", &self.with(attribute_spec))?;
ss.serialize_field("module_declaration_new_keyword", &self.with(new_keyword))?;
ss.serialize_field("module_declaration_module_keyword", &self.with(module_keyword))?;
ss.serialize_field("module_declaration_name", &self.with(name))?;
ss.serialize_field("module_declaration_left_brace", &self.with(left_brace))?;
ss.serialize_field("module_declaration_right_brace", &self.with(right_brace))?;
ss.end()
}
SyntaxVariant::ModuleMembershipDeclaration (ModuleMembershipDeclarationChildren{module_keyword,name,semicolon} ) => {
let mut ss = s.serialize_struct("", 4)?;
ss.serialize_field("kind", "module_membership_declaration")?;
ss.serialize_field("module_membership_declaration_module_keyword", &self.with(module_keyword))?;
ss.serialize_field("module_membership_declaration_name", &self.with(name))?;
ss.serialize_field("module_membership_declaration_semicolon", &self.with(semicolon))?;
ss.end()
}
}
}
}
| 55.801177 | 235 | 0.739756 |
48baead074ac7d5e2442af585c2c0d95e24c0c22 | 543 | // run-pass
#![allow(unused_mut)]
#![feature(box_syntax)]
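// Exercises moving a Box between bindings: `foo` is moved into `bar`, then
// either `bar` or a freshly boxed Triple is moved into `y`, whose `y` field
// is read back. `main` clones the box so it can be passed by value repeatedly.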
#[derive(Clone)]
struct Triple {
x: isize,
y: isize,
z: isize,
}
fn test(x: bool, foo: Box<Triple>) -> isize {
let bar = foo;
let mut y: Box<Triple>;
if x { y = bar; } else { y = box Triple{x: 4, y: 5, z: 6}; }
return y.y;
}
pub fn main() {
let x: Box<_> = box Triple{x: 1, y: 2, z: 3};
assert_eq!(test(true, x.clone()), 2);
assert_eq!(test(true, x.clone()), 2);
assert_eq!(test(true, x.clone()), 2);
assert_eq!(test(false, x), 5);
}
| 20.884615 | 64 | 0.537753 |
6adaf01fb52444575f4efa705ecc9d3c1bd776f4 | 106 | #![feature(lang_items)]
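// Check that tagging an extern declaration with an unrecognized lang item
// ("cake" is not a real lang item) is rejected with E0264.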
extern "C" {
#[lang = "cake"]
fn cake(); //~ ERROR E0264
}
fn main() {}
| 11.777778 | 30 | 0.509434 |
3a8c10542acc46f30b3c8e30b1be4bf07682305d | 3,528 | use nanachi::{
buffer::{Buffer, GenericBuffer},
pixel::Rgba,
path_builder::PathBuilder,
fill_color,
matrix::Matrix,
compositor,
context::{Context, FillStyle},
image::RgbaImage,
fill_rule,
draw_image::draw_image_pixel_perfect,
};
fn main() {
let (width, height) = (320, 320);
let mut img = GenericBuffer::from_pixel(width, height, rgba(250, 250, 250, 0));
    // Start one below zero so each `{i += 1; i}` below acts as a post-increment
    // yielding 0, 1, 2, ...; `usize::MAX` with a wrapping first increment keeps
    // this well-defined in debug builds too (the original `0 - 1` relied on a
    // silenced overflow lint).
    let mut i = usize::MAX;
    f(&mut img, {i = i.wrapping_add(1); i}, compositor::Clear);
f(&mut img, {i += 1; i}, compositor::Src);
f(&mut img, {i += 1; i}, compositor::Dst);
f(&mut img, {i += 1; i}, compositor::SrcOver);
f(&mut img, {i += 1; i}, compositor::SrcIn);
f(&mut img, {i += 1; i}, compositor::SrcOut);
f(&mut img, {i += 1; i}, compositor::SrcAtop);
f(&mut img, {i += 1; i}, compositor::DstOver);
f(&mut img, {i += 1; i}, compositor::DstIn);
f(&mut img, {i += 1; i}, compositor::DstOut);
f(&mut img, {i += 1; i}, compositor::DstAtop);
f(&mut img, {i += 1; i}, compositor::Xor);
f(&mut img, {i += 1; i}, compositor::Add);
f(&mut img, {i += 1; i}, compositor::Darken);
f(&mut img, {i += 1; i}, compositor::Lighten);
f(&mut img, {i += 1; i}, compositor::Multiply);
f(&mut img, {i += 1; i}, compositor::Screen);
f(&mut img, {i += 1; i}, compositor::Overlay);
f(&mut img, {i += 1; i}, compositor::HardLight);
f(&mut img, {i += 1; i}, compositor::Dodge);
f(&mut img, {i += 1; i}, compositor::Burn);
f(&mut img, {i += 1; i}, compositor::SoftLight);
f(&mut img, {i += 1; i}, compositor::Difference);
f(&mut img, {i += 1; i}, compositor::Exclusion);
let img: RgbaImage = (&img).into();
let res = img.save("./composite_test_f32.png");
println!("save: {:?}", res);
}
fn f<C: compositor::Compositor<Rgba> + 'static>(img: &mut GenericBuffer<Rgba>, i: usize, c: C) {
let mut pb = PathBuilder::new();
pb.move_to(-10.0, -20.0);
pb.line_to(10.0, -20.0);
pb.line_to(10.0, 20.0);
pb.line_to(-10.0, 20.0);
pb.close();
let path = pb.end();
let fc1 = fill_color::LinearGradient::new(
(-10.0, 0.0),
(10.0, 0.0),
vec![
(0.1, rgba(255, 0, 0, 150)),
(0.4, rgba(255, 0, 0, 255)),
(0.6, rgba(255, 0, 0, 255)),
(0.9, rgba(255, 255, 0, 255)),
]);
let fc2 = fill_color::LinearGradient::new(
(-10.0, 0.0),
(10.0, 0.0),
vec![
(0.1, rgba(0, 0, 255, 150)),
(0.4, rgba(0, 0, 255, 255)),
(0.6, rgba(0, 0, 255, 255)),
(0.9, rgba(0, 255, 255, 255)),
]);
let mut context = Context::from_pixel(60, 60, rgba(250, 250, 250, 0));
context.transformed_context(&Matrix::new().translate(20.0, 20.0))
.fill(&path, &FillStyle{
color: fc1,
compositor: compositor::SrcOver,
fill_rule: fill_rule::EvenOdd,
pixel: Default::default(),
});
context.transformed_context(&Matrix::new().rotate(90f64.to_radians()).translate(20.0, 20.0))
.fill(&path, &FillStyle{
color: fc2,
compositor: c,
fill_rule: fill_rule::EvenOdd,
pixel: Default::default(),
});
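    // Lay the 23 results out on a 5-column grid of 60x60 tiles, inset 10px
    // from the canvas origin.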
let x = (60 * (i % 5) + 10) as u32;
let y = (60 * (i / 5) + 10) as u32;
draw_image_pixel_perfect(img, &context.image, (x, y), (0, 0), (60, 60), &compositor::Src);
}
fn rgba(r: u8, g: u8, b: u8, a: u8) -> Rgba {
Rgba([r as f32 / 255.0, g as f32 / 255.0, b as f32 / 255.0, a as f32 / 255.0])
}
| 34.930693 | 96 | 0.529195 |
e5d2cc91633a520ebeb3610383d333dad9e67c10 | 327 | mod generator;
mod load;
mod scenario;
mod spawner;
pub use self::generator::{
BorderSpawnOverTime, OriginDestination, ScenarioGenerator, SpawnOverTime,
};
pub use self::load::SimFlags;
pub use self::scenario::{IndividTrip, OffMapLocation, PersonSpec, Scenario, SpawnTrip};
pub use self::spawner::{TripSpawner, TripSpec};
| 27.25 | 87 | 0.7737 |
fea2fce06f10ec424c283b6251007099d3bc6051 | 5,720 | use crate::daemon::{
database::bitcointx::{RevaultTx, TransactionType},
revaultd::VaultStatus,
};
use revault_tx::{
bitcoin::{
util::bip32::{ChildNumber, ExtendedPubKey},
Amount, OutPoint, Txid,
},
scripts::{CpfpDescriptor, DepositDescriptor, UnvaultDescriptor},
transactions::SpendTransaction,
};
pub const SCHEMA: &str = "\
CREATE TABLE version (
version INTEGER NOT NULL
);
CREATE TABLE tip (
network TEXT NOT NULL,
blockheight INTEGER NOT NULL,
blockhash BLOB NOT NULL
);
/* This stores metadata about our wallet. We only support a single wallet for
* now (and the foreseeable future). This MUST be in sync with bitcoind's
* wallet.
*/
CREATE TABLE wallets (
id INTEGER PRIMARY KEY NOT NULL,
timestamp INTEGER NOT NULL,
deposit_descriptor TEXT NOT NULL,
unvault_descriptor TEXT NOT NULL,
cpfp_descriptor TEXT NOT NULL,
our_manager_xpub TEXT,
our_stakeholder_xpub TEXT,
deposit_derivation_index INTEGER NOT NULL
);
/* This stores the vaults we heard about. The deposit may be unconfirmed,
* in which case the blockheight will be 0 (FIXME: should be NULL instead?).
* For any vault entry a deposit transaction MUST be present in bitcoind's
* wallet.
* The final_txid is stored to not harass bitcoind trying to guess the
* spending txid or the canceling txid out of a deposit outpoint.
* It MUST be NOT NULL if status is 'spending', 'spent', 'canceling'
* or 'canceled'.
 * The emer_shared field indicates whether we already shared the Emergency
* transaction for this vault with the watchtower.
*/
CREATE TABLE vaults (
id INTEGER PRIMARY KEY NOT NULL,
wallet_id INTEGER NOT NULL,
status INTEGER NOT NULL,
blockheight INTEGER NOT NULL,
deposit_txid BLOB NOT NULL,
deposit_vout INTEGER NOT NULL,
amount INTEGER NOT NULL,
derivation_index INTEGER NOT NULL,
funded_at INTEGER,
secured_at INTEGER,
delegated_at INTEGER,
moved_at INTEGER,
final_txid BLOB,
emer_shared BOOLEAN NOT NULL CHECK (emer_shared IN (0,1)),
FOREIGN KEY (wallet_id) REFERENCES wallets (id)
ON UPDATE RESTRICT
ON DELETE RESTRICT
);
/* This stores transactions we presign:
* - Emergency (only for stakeholders)
* - Unvault
* - Cancel
* - Unvault Emergency (only for stakeholders)
*/
CREATE TABLE presigned_transactions (
id INTEGER PRIMARY KEY NOT NULL,
vault_id INTEGER NOT NULL,
type INTEGER NOT NULL,
psbt BLOB UNIQUE NOT NULL,
txid BLOB UNIQUE NOT NULL,
fullysigned BOOLEAN NOT NULL CHECK (fullysigned IN (0,1)),
FOREIGN KEY (vault_id) REFERENCES vaults (id)
ON UPDATE RESTRICT
ON DELETE RESTRICT
);
/* A bridge between the Unvault transactions a Spend transaction
* may refer and the possible Spend transactions an Unvault one
* may be associated with.
*/
CREATE TABLE spend_inputs (
id INTEGER PRIMARY KEY NOT NULL,
unvault_id INTEGER NOT NULL,
spend_id INTEGER NOT NULL,
FOREIGN KEY (unvault_id) REFERENCES presigned_transactions (id)
ON UPDATE RESTRICT
ON DELETE RESTRICT,
FOREIGN KEY (spend_id) REFERENCES spend_transactions (id)
ON UPDATE RESTRICT
ON DELETE CASCADE
);
/* This stores Spend transactions we created. A txid column is there to
* ease research.
 * The 'broadcasted' column indicates whether a Spend transaction is:
* - Not elligible for broadcast (NULL)
* - Waiting to be broadcasted (0)
* - Already broadcasted (1)
 * The 'has_priority' column indicates whether a Spend would automatically
* be CPFPed if not confirmed in the first block after broadcast
*/
CREATE TABLE spend_transactions (
id INTEGER PRIMARY KEY NOT NULL,
psbt BLOB UNIQUE NOT NULL,
txid BLOB UNIQUE NOT NULL,
broadcasted BOOLEAN CHECK (broadcasted IN (NULL, 0,1)),
has_priority BOOLEAN NOT NULL CHECK (has_priority IN (0,1)) DEFAULT 0
);
CREATE INDEX vault_status ON vaults (status);
CREATE INDEX vault_transactions ON presigned_transactions (vault_id);
";
/// A row in the "wallets" table
#[derive(Clone)]
pub struct DbWallet {
pub id: u32, // FIXME: should be an i64
pub timestamp: u32,
pub deposit_descriptor: DepositDescriptor,
pub unvault_descriptor: UnvaultDescriptor,
pub cpfp_descriptor: CpfpDescriptor,
pub our_man_xpub: Option<ExtendedPubKey>,
pub our_stk_xpub: Option<ExtendedPubKey>,
pub deposit_derivation_index: ChildNumber,
}
/// A row of the "vaults" table
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct DbVault {
pub id: u32, // FIXME: should be an i64
pub wallet_id: u32,
pub status: VaultStatus,
pub blockheight: u32,
pub deposit_outpoint: OutPoint,
pub amount: Amount,
pub derivation_index: ChildNumber,
pub funded_at: Option<u32>,
pub secured_at: Option<u32>,
pub delegated_at: Option<u32>,
pub moved_at: Option<u32>,
pub final_txid: Option<Txid>,
pub emer_shared: bool,
}
// FIXME: naming it "db transaction" was ambiguous..
/// A row in the "presigned_transactions" table
#[derive(Debug, Clone)]
pub struct DbTransaction {
pub id: u32, // FIXME: should be an i64
pub vault_id: u32,
pub tx_type: TransactionType,
pub psbt: RevaultTx,
pub is_fully_signed: bool,
}
/// A row in the "spend_inputs" table
#[derive(Debug)]
pub struct DbSpendInput {
pub id: i64,
pub unvault_id: u32,
pub spend_id: u32,
}
/// A row in the "spend_transactions" table
#[derive(Debug, PartialEq)]
pub struct DbSpendTransaction {
pub id: i64,
pub psbt: SpendTransaction,
pub broadcasted: Option<bool>,
pub has_priority: bool,
// txid is intentionally not there as it's already part of the psbt
}
| 30.918919 | 76 | 0.712413 |
8774b48fb3e403665dae7b75ce9d3b96d2ff6d55 | 6,939 | use super::{AllocId, InterpResult};
use rustc_macros::HashStable;
use rustc_target::abi::{HasDataLayout, Size};
use std::convert::TryFrom;
use std::fmt;
////////////////////////////////////////////////////////////////////////////////
// Pointer arithmetic
////////////////////////////////////////////////////////////////////////////////
pub trait PointerArithmetic: HasDataLayout {
// These are not supposed to be overridden.
#[inline(always)]
fn pointer_size(&self) -> Size {
self.data_layout().pointer_size
}
#[inline]
fn machine_usize_max(&self) -> u64 {
let max_usize_plus_1 = 1u128 << self.pointer_size().bits();
u64::try_from(max_usize_plus_1 - 1).unwrap()
}
#[inline]
fn machine_isize_min(&self) -> i64 {
let max_isize_plus_1 = 1i128 << (self.pointer_size().bits() - 1);
i64::try_from(-max_isize_plus_1).unwrap()
}
#[inline]
fn machine_isize_max(&self) -> i64 {
let max_isize_plus_1 = 1u128 << (self.pointer_size().bits() - 1);
i64::try_from(max_isize_plus_1 - 1).unwrap()
}
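    // Worked example (not in the original source): on a 64-bit target
    // `pointer_size().bits()` is 64, so the helpers above yield
    // machine_usize_max() == u64::MAX, machine_isize_min() == i64::MIN and
    // machine_isize_max() == i64::MAX; on a 16-bit target they yield
    // 0xFFFF, -0x8000 and 0x7FFF respectively.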
/// Helper function: truncate given value-"overflowed flag" pair to pointer size and
/// update "overflowed flag" if there was an overflow.
/// This should be called by all the other methods before returning!
#[inline]
fn truncate_to_ptr(&self, (val, over): (u64, bool)) -> (u64, bool) {
let val = u128::from(val);
let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
(u64::try_from(val % max_ptr_plus_1).unwrap(), over || val >= max_ptr_plus_1)
}
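    // Worked example (not in the original source): with a 16-bit pointer
    // size, truncate_to_ptr((0x1_0005, false)) == (0x0005, true): the value
    // is reduced modulo 2^16 and the overflow flag is raised.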
#[inline]
fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
// We do not need to check if i fits in a machine usize. If it doesn't,
// either the wrapping_add will wrap or res will not fit in a pointer.
let res = val.overflowing_add(i);
self.truncate_to_ptr(res)
}
#[inline]
fn overflowing_signed_offset(&self, val: u64, i: i64) -> (u64, bool) {
// We need to make sure that i fits in a machine isize.
let n = i.unsigned_abs();
if i >= 0 {
let (val, over) = self.overflowing_offset(val, n);
(val, over || i > self.machine_isize_max())
} else {
let res = val.overflowing_sub(n);
let (val, over) = self.truncate_to_ptr(res);
(val, over || i < self.machine_isize_min())
}
}
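    // Example (not in the original source): with a 16-bit pointer size,
    // overflowing_signed_offset(2, -3) wraps below zero; the u64 subtraction
    // overflows, truncation yields 0xFFFF, and the overflow flag is `true`.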
#[inline]
fn offset<'tcx>(&self, val: u64, i: u64) -> InterpResult<'tcx, u64> {
let (res, over) = self.overflowing_offset(val, i);
if over { throw_ub!(PointerArithOverflow) } else { Ok(res) }
}
#[inline]
fn signed_offset<'tcx>(&self, val: u64, i: i64) -> InterpResult<'tcx, u64> {
let (res, over) = self.overflowing_signed_offset(val, i);
if over { throw_ub!(PointerArithOverflow) } else { Ok(res) }
}
}
impl<T: HasDataLayout> PointerArithmetic for T {}
/// Represents a pointer in the Miri engine.
///
/// `Pointer` is generic over the `Tag` associated with each pointer,
/// which is used to do provenance tracking during execution.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub struct Pointer<Tag = ()> {
pub alloc_id: AllocId,
pub offset: Size,
pub tag: Tag,
}
static_assert_size!(Pointer, 16);
/// Print the address of a pointer (without the tag)
fn print_ptr_addr<Tag>(ptr: &Pointer<Tag>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Forward `alternate` flag to `alloc_id` printing.
if f.alternate() {
write!(f, "{:#?}", ptr.alloc_id)?;
} else {
write!(f, "{:?}", ptr.alloc_id)?;
}
// Print offset only if it is non-zero.
if ptr.offset.bytes() > 0 {
write!(f, "+0x{:x}", ptr.offset.bytes())?;
}
Ok(())
}
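// Example output (not in the original source): with `AllocId`'s default
// `Debug` form, a pointer into allocation 42 at byte offset 8 renders as
// "alloc42+0x8", and at offset 0 as just "alloc42"; the tagged `Debug` impl
// below then appends the tag as "[<tag>]".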
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
// We have to use `Debug` output for the tag, because `()` does not implement
// `Display` so we cannot specialize that.
impl<Tag: fmt::Debug> fmt::Debug for Pointer<Tag> {
default fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
print_ptr_addr(self, f)?;
write!(f, "[{:?}]", self.tag)
}
}
// Specialization for no tag
impl fmt::Debug for Pointer<()> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
print_ptr_addr(self, f)
}
}
impl<Tag: fmt::Debug> fmt::Display for Pointer<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
/// Produces a `Pointer` that points to the beginning of the `Allocation`.
impl From<AllocId> for Pointer {
#[inline(always)]
fn from(alloc_id: AllocId) -> Self {
Pointer::new(alloc_id, Size::ZERO)
}
}
impl Pointer<()> {
#[inline(always)]
pub fn new(alloc_id: AllocId, offset: Size) -> Self {
Pointer { alloc_id, offset, tag: () }
}
#[inline(always)]
pub fn with_tag<Tag>(self, tag: Tag) -> Pointer<Tag> {
Pointer::new_with_tag(self.alloc_id, self.offset, tag)
}
}
impl<'tcx, Tag> Pointer<Tag> {
#[inline(always)]
pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
Pointer { alloc_id, offset, tag }
}
#[inline]
pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
self.tag,
))
}
#[inline]
pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}
#[inline(always)]
pub fn wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
self.overflowing_offset(i, cx).0
}
#[inline]
pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
self.tag,
))
}
#[inline]
pub fn overflowing_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}
#[inline(always)]
pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
self.overflowing_signed_offset(i, cx).0
}
#[inline(always)]
pub fn erase_tag(self) -> Pointer {
Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () }
}
}
| 33.200957 | 94 | 0.59216 |
33aa786ea1bf236a49bca33baf4b301d60e31207 | 1,467 | // Copyright 2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::ConnectorHarness;
use crate::{connectors::impls::file, errors::Result};
use async_std::path::Path;
use tremor_value::prelude::*;
#[async_std::test]
async fn file_connector() -> Result<()> {
let _ = env_logger::try_init();
let input_path = Path::new(file!())
.parent()
.unwrap()
.join("data")
.join("non_existent.txt");
let defn = literal!({
"codec": "string",
"preprocessors": ["separate"],
"config": {
"path": input_path.display().to_string(),
"mode": "read"
}
});
let harness = ConnectorHarness::new(function_name!(), &file::Builder::default(), &defn).await?;
assert!(harness.start().await.is_err());
let (out_events, err_events) = harness.stop().await?;
assert!(out_events.is_empty());
assert!(err_events.is_empty());
Ok(())
}
| 31.891304 | 99 | 0.646898 |
fe8968337426d0eb3190322219632229f13fdd48 | 61,146 | // Miniscript
// Written in 2019 by
// Sanket Kanjular and Andrew Poelstra
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
use bitcoin::hashes::{hash160, ripemd160, sha256, sha256d, Hash};
use bitcoin::{self, secp256k1};
use fmt;
use miniscript::context::Any;
use miniscript::ScriptContext;
use Descriptor;
use Terminal;
use {error, Miniscript};
use {BitcoinSig, ToPublicKey};
/// Detailed Error type for Interpreter
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Error {
/// Unexpected Stack End, caused by popping extra elements from stack
UnexpectedStackEnd,
/// Unexpected Stack Push `StackElement::Push` element when the interpreter
/// was expecting a stack boolean `StackElement::Satisfied` or
/// `StackElement::Dissatisfied`
UnexpectedStackElementPush,
/// Verify expects stack top element exactly to be `StackElement::Satisfied`.
/// This error is raised even if the stack top is `StackElement::Push`.
VerifyFailed,
    /// MultiSig missing at least one witness element out of the `k + 1` required
InsufficientSignaturesMultiSig,
/// MultiSig requires 1 extra zero element apart from the `k` signatures
MissingExtraZeroMultiSig,
/// Script abortion because of incorrect dissatisfaction for multisig.
/// Any input witness apart from sat(0 sig ...) or nsat(0 0 ..) leads to
/// this error. This is network standardness assumption and miniscript only
/// supports standard scripts
MultiSigEvaluationError,
/// Signature failed to verify
InvalidSignature(bitcoin::PublicKey),
/// General Interpreter error.
CouldNotEvaluate,
/// Script abortion because of incorrect dissatisfaction for Checksig.
/// Any input witness apart from sat(sig) or nsat(0) leads to
/// this error. This is network standardness assumption and miniscript only
/// supports standard scripts
PkEvaluationError(bitcoin::PublicKey),
/// Miniscript requires the entire top level script to be satisfied.
ScriptSatisfactionError,
/// The Public Key hash check for the given pubkey. This occurs in `PkH`
/// node when the given key does not match to Hash in script.
PkHashVerifyFail(hash160::Hash),
/// Parse Error while parsing a `StackElement::Push` as a Pubkey. Both
/// 33 byte and 65 bytes are supported.
PubkeyParseError,
/// The preimage to the hash function must be exactly 32 bytes.
HashPreimageLengthMismatch,
/// Got `StackElement::Satisfied` or `StackElement::Dissatisfied` when the
/// interpreter was expecting `StackElement::Push`
UnexpectedStackBoolean,
/// Could not satisfy, relative locktime not met
RelativeLocktimeNotMet(u32),
/// Could not satisfy, absolute locktime not met
AbsoluteLocktimeNotMet(u32),
/// Forward-secp related errors
Secp(secp256k1::Error),
}
#[doc(hidden)]
impl From<secp256k1::Error> for Error {
fn from(e: secp256k1::Error) -> Error {
Error::Secp(e)
}
}
impl error::Error for Error {
fn description(&self) -> &str {
""
}
fn cause(&self) -> Option<&error::Error> {
match *self {
Error::Secp(ref err) => Some(err),
ref x => Some(x),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::UnexpectedStackEnd => f.write_str("Unexpected Stack End"),
            Error::UnexpectedStackElementPush => f.write_str("Got Stack Push, expected Stack Boolean"),
Error::VerifyFailed => {
f.write_str("Expected Satisfied Boolean at stack top for VERIFY")
}
Error::InsufficientSignaturesMultiSig => f.write_str("Insufficient signatures for CMS"),
Error::MissingExtraZeroMultiSig => f.write_str("CMS missing extra zero"),
Error::MultiSigEvaluationError => {
f.write_str("CMS script aborted, incorrect satisfaction/dissatisfaction")
}
Error::InvalidSignature(pk) => write!(f, "bad signature with pk {}", pk),
Error::CouldNotEvaluate => f.write_str("Interpreter Error: Could not evaluate"),
Error::PkEvaluationError(ref key) => write!(f, "Incorrect Signature for pk {}", key),
Error::ScriptSatisfactionError => f.write_str("Top level script must be satisfied"),
Error::PkHashVerifyFail(ref hash) => write!(f, "Pubkey Hash check failed {}", hash),
            Error::PubkeyParseError => f.write_str("Error in parsing pubkey"),
Error::HashPreimageLengthMismatch => f.write_str("Hash preimage should be 32 bytes"),
Error::UnexpectedStackBoolean => {
f.write_str("Expected Stack Push operation, found stack bool")
}
Error::RelativeLocktimeNotMet(n) => {
write!(f, "required relative locktime CSV of {} blocks, not met", n)
}
Error::AbsoluteLocktimeNotMet(n) => write!(
f,
"required absolute locktime CLTV of {} blocks, not met",
n
),
Error::Secp(ref e) => fmt::Display::fmt(e, f),
}
}
}
/// A single element of the stack used during interpretation of Miniscript.
/// Stack elements equal to vec![] map to Dissatisfied and vec![1] to Satisfied;
/// all others are pushed directly as witness data.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub enum StackElement<'stack> {
/// Result of a satisfied Miniscript fragment
/// Translated from `vec![1]` from input stack
Satisfied,
/// Result of a dissatisfied Miniscript fragment
/// Translated from `vec![]` from input stack
Dissatisfied,
/// Input from the witness stack
Push(&'stack [u8]),
}
impl<'stack> StackElement<'stack> {
/// Convert witness stack to StackElement
pub fn from(v: &'stack [u8]) -> StackElement<'stack> {
if *v == [1] {
StackElement::Satisfied
} else if *v == [] {
StackElement::Dissatisfied
} else {
StackElement::Push(v)
}
}
}
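// Editor's illustrative test (not in the original source): a minimal check of
// how `StackElement::from` classifies raw witness bytes, per the semantics
// documented on `StackElement` above.
#[cfg(test)]
mod stack_element_from_sketch {
    use super::StackElement;
    #[test]
    fn classifies_witness_elements() {
        assert_eq!(StackElement::from(&[1u8][..]), StackElement::Satisfied);
        assert_eq!(StackElement::from(&[][..]), StackElement::Dissatisfied);
        assert_eq!(StackElement::from(&[0xab][..]), StackElement::Push(&[0xab]));
    }
}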
/// Type of hash lock used in the `SatisfiedConstraint` structure
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum HashLockType<'desc> {
///SHA 256 hashlock
Sha256(&'desc sha256::Hash),
///Hash 256 hashlock
Hash256(&'desc sha256d::Hash),
///Hash160 hashlock
Hash160(&'desc hash160::Hash),
///Ripemd160 hashlock
Ripemd160(&'desc ripemd160::Hash),
}
/// A satisfied Miniscript condition (Signature, Hashlock, Timelock).
/// `'desc` is the lifetime of the descriptor and `'stack` is the lifetime
/// of the witness.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum SatisfiedConstraint<'desc, 'stack> {
///Public key and corresponding signature
PublicKey {
key: &'desc bitcoin::PublicKey,
sig: secp256k1::Signature,
},
///PublicKeyHash, corresponding pubkey and signature
PublicKeyHash {
keyhash: &'desc hash160::Hash,
key: bitcoin::PublicKey,
sig: secp256k1::Signature,
},
///Hashlock and preimage for SHA256
HashLock {
hash: HashLockType<'desc>,
preimage: &'stack [u8],
},
///Relative Timelock for CSV.
RelativeTimeLock { time: &'desc u32 },
///Absolute Timelock for CLTV.
AbsoluteTimeLock { time: &'desc u32 },
}
///Used by the interpreter to track the evaluation state of an AstElem.
///This is required because when the same node (e.g. OrB) reappears on the
///stack, we don't know whether its left child has already been evaluated;
///based on the result at the top of the stack, we need to decide whether to
///execute the right child.
///This is also useful for wrappers and thresholds, which push a value on the
///stack depending on the evaluation of their children.
struct NodeEvaluationState<'desc> {
///The node which is being evaluated
node: &'desc Miniscript<bitcoin::PublicKey, Any>,
///number of children evaluated
n_evaluated: usize,
///number of children satisfied
n_satisfied: usize,
}
/// An iterator over all the constraints satisfied by a given
/// descriptor/scriptSig/witness stack tuple. This returns all redundant
/// satisfied constraints, even those not required for the overall
/// satisfaction. For example, and_b(Pk,false) would return the witness for
/// Pk if it was satisfied, even though the entire and_b must fail.
/// In case the script would abort on the given witness stack, or if the
/// entire script is dissatisfied, this keeps returning values _until_ an
/// Error is yielded.
pub struct SatisfiedConstraints<'desc, 'stack, F: FnMut(&bitcoin::PublicKey, BitcoinSig) -> bool> {
verify_sig: F,
public_key: Option<&'desc bitcoin::PublicKey>,
state: Vec<NodeEvaluationState<'desc>>,
stack: Stack<'stack>,
age: u32,
height: u32,
has_errored: bool,
}
/// Stack Data structure representing the stack input to Miniscript. This Stack
/// is created from the combination of ScriptSig and Witness stack.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub struct Stack<'stack>(pub Vec<StackElement<'stack>>);
///Iterator for SatisfiedConstraints
impl<'desc, 'stack, F> Iterator for SatisfiedConstraints<'desc, 'stack, F>
where
Any: ScriptContext,
F: FnMut(&bitcoin::PublicKey, BitcoinSig) -> bool,
{
type Item = Result<SatisfiedConstraint<'desc, 'stack>, Error>;
fn next(&mut self) -> Option<Self::Item> {
if self.has_errored {
// Stop yielding values after the first error
None
} else {
let res = self.iter_next();
if let Some(Err(_)) = res {
self.has_errored = true;
}
res
}
}
}
impl<'desc, 'stack, F> SatisfiedConstraints<'desc, 'stack, F>
where
F: FnMut(&bitcoin::PublicKey, BitcoinSig) -> bool,
{
    /// Creates a new iterator over all constraints satisfied for a given
/// descriptor by a given witness stack. Because this iterator is lazy,
/// it may return satisfied constraints even if these turn out to be
/// irrelevant to the final (dis)satisfaction of the descriptor.
pub fn from_descriptor(
des: &'desc Descriptor<bitcoin::PublicKey>,
stack: Stack<'stack>,
verify_sig: F,
age: u32,
height: u32,
) -> SatisfiedConstraints<'desc, 'stack, F> {
match des {
&Descriptor::Pk(ref pk) | &Descriptor::Pkh(ref pk) => SatisfiedConstraints {
verify_sig: verify_sig,
public_key: Some(pk),
state: vec![],
stack: stack,
age,
height,
has_errored: false,
},
&Descriptor::ShWpkh(ref pk) | &Descriptor::Wpkh(ref pk) => SatisfiedConstraints {
verify_sig: verify_sig,
public_key: Some(pk),
state: vec![],
stack: stack,
age,
height,
has_errored: false,
},
&Descriptor::Wsh(ref miniscript) | &Descriptor::ShWsh(ref miniscript) => {
SatisfiedConstraints {
verify_sig: verify_sig,
public_key: None,
state: vec![NodeEvaluationState {
node: Any::from_segwitv0(miniscript),
n_evaluated: 0,
n_satisfied: 0,
}],
stack: stack,
age,
height,
has_errored: false,
}
}
&Descriptor::Sh(ref miniscript) | &Descriptor::Bare(ref miniscript) => {
SatisfiedConstraints {
verify_sig: verify_sig,
public_key: None,
state: vec![NodeEvaluationState {
node: Any::from_legacy(miniscript),
n_evaluated: 0,
n_satisfied: 0,
}],
stack: stack,
age,
height,
has_errored: false,
}
}
}
}
}
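// Illustrative usage (editor's sketch; `descriptor`, `stack` and `verify_fn`
// are assumed to exist): iterate every satisfied constraint, stopping at the
// first `Err`, which matches the `has_errored` handling in `next` above.
//
//     let iter = SatisfiedConstraints::from_descriptor(
//         &descriptor, stack, verify_fn, /*age*/ 0, /*height*/ 0);
//     for constraint in iter {
//         match constraint {
//             Ok(c) => println!("satisfied: {:?}", c),
//             Err(e) => return Err(e),
//         }
//     }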
impl<'desc, 'stack, F> SatisfiedConstraints<'desc, 'stack, F>
where
Any: ScriptContext,
F: FnMut(&bitcoin::PublicKey, BitcoinSig) -> bool,
{
/// Helper function to push a NodeEvaluationState on state stack
fn push_evaluation_state(
&mut self,
node: &'desc Miniscript<bitcoin::PublicKey, Any>,
n_evaluated: usize,
n_satisfied: usize,
) -> () {
self.state.push(NodeEvaluationState {
node,
n_evaluated,
n_satisfied,
})
}
/// Helper function to step the iterator
fn iter_next(&mut self) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>> {
while let Some(node_state) = self.state.pop() {
//non-empty stack
match node_state.node.node {
Terminal::True => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
self.stack.push(StackElement::Satisfied);
}
Terminal::False => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
self.stack.push(StackElement::Dissatisfied);
}
Terminal::PkK(ref pk) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
let res = self.stack.evaluate_pk(&mut self.verify_sig, pk);
if res.is_some() {
return res;
}
}
Terminal::PkH(ref pkh) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
let res = self.stack.evaluate_pkh(&mut self.verify_sig, pkh);
if res.is_some() {
return res;
}
}
Terminal::After(ref n) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
let res = self.stack.evaluate_after(n, self.age);
if res.is_some() {
return res;
}
}
Terminal::Older(ref n) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
let res = self.stack.evaluate_older(n, self.height);
if res.is_some() {
return res;
}
}
Terminal::Sha256(ref hash) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
let res = self.stack.evaluate_sha256(hash);
if res.is_some() {
return res;
}
}
Terminal::Hash256(ref hash) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
let res = self.stack.evaluate_hash256(hash);
if res.is_some() {
return res;
}
}
Terminal::Hash160(ref hash) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
let res = self.stack.evaluate_hash160(hash);
if res.is_some() {
return res;
}
}
Terminal::Ripemd160(ref hash) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
let res = self.stack.evaluate_ripemd160(hash);
if res.is_some() {
return res;
}
}
Terminal::Alt(ref sub) | Terminal::Swap(ref sub) | Terminal::Check(ref sub) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
self.push_evaluation_state(sub, 0, 0);
}
Terminal::DupIf(ref sub) if node_state.n_evaluated == 0 => match self.stack.pop() {
Some(StackElement::Dissatisfied) => {
self.stack.push(StackElement::Dissatisfied);
}
Some(StackElement::Satisfied) => {
self.push_evaluation_state(node_state.node, 1, 1);
self.push_evaluation_state(sub, 0, 0);
}
Some(StackElement::Push(_v)) => {
return Some(Err(Error::UnexpectedStackElementPush))
}
None => return Some(Err(Error::UnexpectedStackEnd)),
},
Terminal::DupIf(ref _sub) if node_state.n_evaluated == 1 => {
self.stack.push(StackElement::Satisfied);
}
Terminal::ZeroNotEqual(ref sub) | Terminal::Verify(ref sub)
if node_state.n_evaluated == 0 =>
{
self.push_evaluation_state(node_state.node, 1, 0);
self.push_evaluation_state(sub, 0, 0);
}
Terminal::Verify(ref _sub) if node_state.n_evaluated == 1 => {
match self.stack.pop() {
Some(StackElement::Satisfied) => (),
Some(_) => return Some(Err(Error::VerifyFailed)),
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::ZeroNotEqual(ref _sub) if node_state.n_evaluated == 1 => {
match self.stack.pop() {
Some(StackElement::Dissatisfied) => {
self.stack.push(StackElement::Dissatisfied)
}
Some(_) => self.stack.push(StackElement::Satisfied),
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::NonZero(ref sub) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
match self.stack.last() {
Some(&StackElement::Dissatisfied) => (),
Some(_) => self.push_evaluation_state(sub, 0, 0),
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::AndV(ref left, ref right) => {
debug_assert_eq!(node_state.n_evaluated, 0);
debug_assert_eq!(node_state.n_satisfied, 0);
self.push_evaluation_state(right, 0, 0);
self.push_evaluation_state(left, 0, 0);
}
Terminal::OrB(ref left, ref _right) | Terminal::AndB(ref left, ref _right)
if node_state.n_evaluated == 0 =>
{
self.push_evaluation_state(node_state.node, 1, 0);
self.push_evaluation_state(left, 0, 0);
}
Terminal::OrB(ref _left, ref right) | Terminal::AndB(ref _left, ref right)
if node_state.n_evaluated == 1 =>
{
match self.stack.pop() {
Some(StackElement::Dissatisfied) => {
self.push_evaluation_state(node_state.node, 2, 0);
self.push_evaluation_state(right, 0, 0);
}
Some(StackElement::Satisfied) => {
self.push_evaluation_state(node_state.node, 2, 1);
self.push_evaluation_state(right, 0, 0);
}
Some(StackElement::Push(_v)) => {
return Some(Err(Error::UnexpectedStackElementPush))
}
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::AndB(ref _left, ref _right) if node_state.n_evaluated == 2 => {
match self.stack.pop() {
Some(StackElement::Satisfied) if node_state.n_satisfied == 1 => {
self.stack.push(StackElement::Satisfied)
}
Some(_) => self.stack.push(StackElement::Dissatisfied),
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::AndOr(ref left, ref _right, _)
| Terminal::OrC(ref left, ref _right)
| Terminal::OrD(ref left, ref _right)
if node_state.n_evaluated == 0 =>
{
self.push_evaluation_state(node_state.node, 1, 0);
self.push_evaluation_state(left, 0, 0);
}
Terminal::OrB(ref _left, ref _right) if node_state.n_evaluated == 2 => {
match self.stack.pop() {
Some(StackElement::Dissatisfied) if node_state.n_satisfied == 0 => {
self.stack.push(StackElement::Dissatisfied)
}
Some(_) => {
self.stack.push(StackElement::Satisfied);
}
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::OrC(ref _left, ref right) if node_state.n_evaluated == 1 => {
match self.stack.pop() {
Some(StackElement::Satisfied) => (),
Some(StackElement::Dissatisfied) => self.push_evaluation_state(right, 0, 0),
Some(StackElement::Push(_v)) => {
return Some(Err(Error::UnexpectedStackElementPush))
}
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::OrD(ref _left, ref right) if node_state.n_evaluated == 1 => {
match self.stack.pop() {
Some(StackElement::Satisfied) => self.stack.push(StackElement::Satisfied),
Some(StackElement::Dissatisfied) => self.push_evaluation_state(right, 0, 0),
Some(StackElement::Push(_v)) => {
return Some(Err(Error::UnexpectedStackElementPush))
}
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::AndOr(_, ref left, ref right) | Terminal::OrI(ref left, ref right) => {
match self.stack.pop() {
Some(StackElement::Satisfied) => self.push_evaluation_state(left, 0, 0),
Some(StackElement::Dissatisfied) => self.push_evaluation_state(right, 0, 0),
Some(StackElement::Push(_v)) => {
return Some(Err(Error::UnexpectedStackElementPush))
}
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::Thresh(ref _k, ref subs) if node_state.n_evaluated == 0 => {
self.push_evaluation_state(node_state.node, 1, 0);
self.push_evaluation_state(&subs[0], 0, 0);
}
Terminal::Thresh(k, ref subs) if node_state.n_evaluated == subs.len() => {
match self.stack.pop() {
Some(StackElement::Dissatisfied) if node_state.n_satisfied == k => {
self.stack.push(StackElement::Satisfied)
}
Some(StackElement::Satisfied) if node_state.n_satisfied == k - 1 => {
self.stack.push(StackElement::Satisfied)
}
Some(StackElement::Satisfied) | Some(StackElement::Dissatisfied) => {
self.stack.push(StackElement::Dissatisfied)
}
Some(StackElement::Push(_v)) => {
return Some(Err(Error::UnexpectedStackElementPush))
}
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::Thresh(ref _k, ref subs) if node_state.n_evaluated != 0 => {
match self.stack.pop() {
Some(StackElement::Dissatisfied) => {
self.push_evaluation_state(
node_state.node,
node_state.n_evaluated + 1,
node_state.n_satisfied,
);
self.push_evaluation_state(&subs[node_state.n_evaluated], 0, 0);
}
Some(StackElement::Satisfied) => {
self.push_evaluation_state(
node_state.node,
node_state.n_evaluated + 1,
node_state.n_satisfied + 1,
);
self.push_evaluation_state(&subs[node_state.n_evaluated], 0, 0);
}
Some(StackElement::Push(_v)) => {
return Some(Err(Error::UnexpectedStackElementPush))
}
None => return Some(Err(Error::UnexpectedStackEnd)),
}
}
Terminal::Multi(ref k, ref subs) if node_state.n_evaluated == 0 => {
let len = self.stack.len();
if len < k + 1 {
return Some(Err(Error::InsufficientSignaturesMultiSig));
} else {
                        //Non-sat case. If the first sig is empty, the other k
                        //elements must also be empty.
match self.stack.last() {
Some(&StackElement::Dissatisfied) => {
//Remove the extra zero from multi-sig check
let sigs = self.stack.split_off(len - (k + 1));
let nonsat = sigs
.iter()
.map(|sig| *sig == StackElement::Dissatisfied)
.filter(|empty| *empty)
.count();
if nonsat == *k {
self.stack.push(StackElement::Dissatisfied);
} else {
return Some(Err(Error::MissingExtraZeroMultiSig));
}
}
None => return Some(Err(Error::UnexpectedStackEnd)),
_ => {
match self
.stack
.evaluate_multi(&mut self.verify_sig, &subs[subs.len() - 1])
{
Some(Ok(x)) => {
self.push_evaluation_state(
node_state.node,
node_state.n_evaluated + 1,
node_state.n_satisfied + 1,
);
return Some(Ok(x));
}
None => self.push_evaluation_state(
node_state.node,
node_state.n_evaluated + 1,
node_state.n_satisfied,
),
x => return x, //forward errors as is
}
}
}
}
}
Terminal::Multi(k, ref subs) => {
if node_state.n_satisfied == k {
                        //CHECKMULTISIG bug workaround: pop the extra dummy 0
if let Some(StackElement::Dissatisfied) = self.stack.pop() {
self.stack.push(StackElement::Satisfied);
} else {
return Some(Err(Error::MissingExtraZeroMultiSig));
}
} else if node_state.n_evaluated == subs.len() {
return Some(Err(Error::MultiSigEvaluationError));
} else {
match self.stack.evaluate_multi(
&mut self.verify_sig,
&subs[subs.len() - node_state.n_evaluated - 1],
) {
Some(Ok(x)) => {
self.push_evaluation_state(
node_state.node,
node_state.n_evaluated + 1,
node_state.n_satisfied + 1,
);
return Some(Ok(x));
}
None => self.push_evaluation_state(
node_state.node,
node_state.n_evaluated + 1,
node_state.n_satisfied,
),
x => return x, //forward errors as is
}
}
}
//All other match patterns should not be reached in any valid
//type checked Miniscript
_ => return Some(Err(Error::CouldNotEvaluate)),
};
}
//state empty implies that either the execution has terminated or we have a
//Pk based descriptor
if let Some(pk) = self.public_key {
if let Some(StackElement::Push(sig)) = self.stack.pop() {
if let Ok(sig) = verify_sersig(&mut self.verify_sig, &pk, &sig) {
//Signature check successful, set public_key to None to
//terminate the next() function in the subsequent call
self.public_key = None;
self.stack.push(StackElement::Satisfied);
return Some(Ok(SatisfiedConstraint::PublicKey { key: pk, sig }));
} else {
return Some(Err(Error::PkEvaluationError(pk.clone().to_public_key())));
}
} else {
return Some(Err(Error::UnexpectedStackEnd));
}
} else {
//All the script has been executed.
//Check that the stack must contain exactly 1 satisfied element
if self.stack.pop() == Some(StackElement::Satisfied) && self.stack.is_empty() {
return None;
} else {
return Some(Err(Error::ScriptSatisfactionError));
}
}
}
}
/// Helper function to verify serialized signature
fn verify_sersig<'stack, F>(
verify_sig: F,
pk: &bitcoin::PublicKey,
sigser: &[u8],
) -> Result<secp256k1::Signature, Error>
where
F: FnOnce(&bitcoin::PublicKey, BitcoinSig) -> bool,
{
if let Some((sighash_byte, sig)) = sigser.split_last() {
let sighashtype = bitcoin::SigHashType::from_u32(*sighash_byte as u32);
let sig = secp256k1::Signature::from_der(sig)?;
if verify_sig(pk, (sig, sighashtype)) {
Ok(sig)
} else {
Err(Error::InvalidSignature(*pk))
}
} else {
Err(Error::PkEvaluationError(*pk))
}
}
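// Editor's note (illustrative): a serialized witness signature is the DER
// encoding followed by one sighash byte, e.g.
//     [0x30, ...DER bytes..., 0x01]   // trailing 0x01 = SIGHASH_ALL
// `split_last` above peels off that sighash byte before DER decoding, which
// is why an empty slice maps to `PkEvaluationError`.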
impl<'stack> Stack<'stack> {
///wrapper for self.0.is_empty()
fn is_empty(&self) -> bool {
self.0.is_empty()
}
///wrapper for self.0.len()
    fn len(&self) -> usize {
self.0.len()
}
///wrapper for self.0.pop()
fn pop(&mut self) -> Option<StackElement<'stack>> {
self.0.pop()
}
///wrapper for self.0.push()
fn push(&mut self, elem: StackElement<'stack>) -> () {
self.0.push(elem);
}
///wrapper for self.0.split_off()
fn split_off(&mut self, k: usize) -> Vec<StackElement<'stack>> {
self.0.split_off(k)
}
///wrapper for self.0.last()
fn last(&self) -> Option<&StackElement<'stack>> {
self.0.last()
}
    /// Helper function to evaluate a Pk node, which takes the
    /// top of the stack as the input signature and validates it.
    /// Sat: If the signature witness is correct, 1 is pushed.
    /// Unsat: For an empty witness, 0 is pushed.
    /// Err: All other witnesses result in errors.
    /// `pk` CHECKSIG
fn evaluate_pk<'desc, F>(
&mut self,
verify_sig: F,
pk: &'desc bitcoin::PublicKey,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>>
where
F: FnMut(&bitcoin::PublicKey, BitcoinSig) -> bool,
{
if let Some(sigser) = self.pop() {
match sigser {
StackElement::Dissatisfied => {
self.push(StackElement::Dissatisfied);
None
}
StackElement::Push(ref sigser) => {
let sig = verify_sersig(verify_sig, pk, sigser);
match sig {
Ok(sig) => {
self.push(StackElement::Satisfied);
Some(Ok(SatisfiedConstraint::PublicKey { key: pk, sig }))
}
Err(e) => return Some(Err(e)),
}
}
StackElement::Satisfied => {
return Some(Err(Error::PkEvaluationError(pk.clone().to_public_key())))
}
}
} else {
Some(Err(Error::UnexpectedStackEnd))
}
}
    /// Helper function to evaluate a Pkh node. Takes a pubkey and a sig
    /// from the top of the stack.
    /// Sat: If the pubkey hash matches and the signature witness is correct,
    /// 1 is pushed.
    /// Unsat: For an empty witness, 0 is pushed.
    /// Err: All other witnesses result in errors.
    /// `DUP HASH160 <keyhash> EQUALVERIFY CHECKSIG`
fn evaluate_pkh<'desc, F>(
&mut self,
verify_sig: F,
pkh: &'desc hash160::Hash,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>>
where
F: FnOnce(&bitcoin::PublicKey, BitcoinSig) -> bool,
{
if let Some(StackElement::Push(pk)) = self.pop() {
let pk_hash = hash160::Hash::hash(pk);
if pk_hash != *pkh {
return Some(Err(Error::PkHashVerifyFail(*pkh)));
}
match bitcoin::PublicKey::from_slice(pk) {
Ok(pk) => {
if let Some(sigser) = self.pop() {
match sigser {
StackElement::Dissatisfied => {
self.push(StackElement::Dissatisfied);
None
}
StackElement::Push(sigser) => {
let sig = verify_sersig(verify_sig, &pk, sigser);
match sig {
Ok(sig) => {
self.push(StackElement::Satisfied);
Some(Ok(SatisfiedConstraint::PublicKeyHash {
keyhash: pkh,
key: pk,
sig,
}))
}
Err(e) => return Some(Err(e)),
}
}
StackElement::Satisfied => {
return Some(Err(Error::PkEvaluationError(
pk.clone().to_public_key(),
)))
}
}
} else {
Some(Err(Error::UnexpectedStackEnd))
}
}
Err(..) => Some(Err(Error::PubkeyParseError)),
}
} else {
Some(Err(Error::UnexpectedStackEnd))
}
}
    /// Helper function to evaluate an After node. Takes no argument from
    /// the stack.
    /// `n CHECKLOCKTIMEVERIFY 0NOTEQUAL` and `n CHECKLOCKTIMEVERIFY`
    /// Ideally this should push the integer value of `n` (as
    /// `build_scriptint(n as i64)` does in Script), but we don't need to
    /// copy the Script semantics: Miniscript never evaluates integers, so
    /// it is safe to treat them as booleans.
fn evaluate_after<'desc>(
&mut self,
n: &'desc u32,
age: u32,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>> {
if age >= *n {
self.push(StackElement::Satisfied);
Some(Ok(SatisfiedConstraint::AbsoluteTimeLock { time: n }))
} else {
Some(Err(Error::AbsoluteLocktimeNotMet(*n)))
}
}
    /// Helper function to evaluate an Older node. Takes no argument from
    /// the stack.
    /// `n CHECKSEQUENCEVERIFY 0NOTEQUAL` and `n CHECKSEQUENCEVERIFY`
    /// Ideally this should push the integer value of `n` (as
    /// `build_scriptint(n as i64)` does in Script), but we don't need to
    /// copy the Script semantics: Miniscript never evaluates integers, so
    /// it is safe to treat them as booleans.
fn evaluate_older<'desc>(
&mut self,
n: &'desc u32,
height: u32,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>> {
if height >= *n {
self.push(StackElement::Satisfied);
Some(Ok(SatisfiedConstraint::RelativeTimeLock { time: n }))
} else {
Some(Err(Error::RelativeLocktimeNotMet(*n)))
}
}
/// Helper function to evaluate a Sha256 Node.
/// `SIZE 32 EQUALVERIFY SHA256 h EQUAL`
fn evaluate_sha256<'desc>(
&mut self,
hash: &'desc sha256::Hash,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>> {
if let Some(StackElement::Push(preimage)) = self.pop() {
if preimage.len() != 32 {
return Some(Err(Error::HashPreimageLengthMismatch));
}
if sha256::Hash::hash(preimage) == *hash {
self.push(StackElement::Satisfied);
Some(Ok(SatisfiedConstraint::HashLock {
hash: HashLockType::Sha256(hash),
preimage,
}))
} else {
self.push(StackElement::Dissatisfied);
None
}
} else {
Some(Err(Error::UnexpectedStackEnd))
}
}
/// Helper function to evaluate a Hash256 Node.
/// `SIZE 32 EQUALVERIFY HASH256 h EQUAL`
fn evaluate_hash256<'desc>(
&mut self,
hash: &'desc sha256d::Hash,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>> {
if let Some(StackElement::Push(preimage)) = self.pop() {
if preimage.len() != 32 {
return Some(Err(Error::HashPreimageLengthMismatch));
}
if sha256d::Hash::hash(preimage) == *hash {
self.push(StackElement::Satisfied);
Some(Ok(SatisfiedConstraint::HashLock {
hash: HashLockType::Hash256(hash),
preimage,
}))
} else {
self.push(StackElement::Dissatisfied);
None
}
} else {
Some(Err(Error::UnexpectedStackEnd))
}
}
/// Helper function to evaluate a Hash160 Node.
/// `SIZE 32 EQUALVERIFY HASH160 h EQUAL`
fn evaluate_hash160<'desc>(
&mut self,
hash: &'desc hash160::Hash,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>> {
if let Some(StackElement::Push(preimage)) = self.pop() {
if preimage.len() != 32 {
return Some(Err(Error::HashPreimageLengthMismatch));
}
if hash160::Hash::hash(preimage) == *hash {
self.push(StackElement::Satisfied);
Some(Ok(SatisfiedConstraint::HashLock {
hash: HashLockType::Hash160(hash),
preimage,
}))
} else {
self.push(StackElement::Dissatisfied);
None
}
} else {
Some(Err(Error::UnexpectedStackEnd))
}
}
    /// Helper function to evaluate a Ripemd160 node.
/// `SIZE 32 EQUALVERIFY RIPEMD160 h EQUAL`
fn evaluate_ripemd160<'desc>(
&mut self,
hash: &'desc ripemd160::Hash,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>> {
if let Some(StackElement::Push(preimage)) = self.pop() {
if preimage.len() != 32 {
return Some(Err(Error::HashPreimageLengthMismatch));
}
if ripemd160::Hash::hash(preimage) == *hash {
self.push(StackElement::Satisfied);
Some(Ok(SatisfiedConstraint::HashLock {
hash: HashLockType::Ripemd160(hash),
preimage,
}))
} else {
self.push(StackElement::Dissatisfied);
None
}
} else {
Some(Err(Error::UnexpectedStackEnd))
}
}
    /// Helper function to evaluate a checkmultisig which takes the top of
    /// the stack as input signatures and validates them against the pubkeys
    /// in order. Each pubkey is tried at most once: if the current signature
    /// does not verify against a pubkey, it is retried against the next one.
    /// `multi(2,pk1,pk2)` would be satisfied by `[0 sig1 sig2]` and Err on
    /// `[0 sig2 sig1]` (signatures in the wrong order).
fn evaluate_multi<'desc, F>(
&mut self,
verify_sig: F,
pk: &'desc bitcoin::PublicKey,
) -> Option<Result<SatisfiedConstraint<'desc, 'stack>, Error>>
where
F: FnOnce(&bitcoin::PublicKey, BitcoinSig) -> bool,
{
if let Some(witness_sig) = self.pop() {
if let StackElement::Push(sigser) = witness_sig {
let sig = verify_sersig(verify_sig, pk, sigser);
match sig {
Ok(sig) => return Some(Ok(SatisfiedConstraint::PublicKey { key: pk, sig })),
Err(..) => {
self.push(witness_sig);
return None;
}
}
} else {
Some(Err(Error::UnexpectedStackBoolean))
}
} else {
Some(Err(Error::UnexpectedStackEnd))
}
}
}
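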
#[cfg(test)]
mod tests {
use bitcoin;
use bitcoin::hashes::{hash160, ripemd160, sha256, sha256d, Hash};
use bitcoin::secp256k1::{self, Secp256k1, VerifyOnly};
use descriptor::satisfied_constraints::{
Error, HashLockType, NodeEvaluationState, SatisfiedConstraint, SatisfiedConstraints, Stack,
StackElement,
};
use miniscript::context::{Any, Legacy};
use std::str::FromStr;
use BitcoinSig;
use Miniscript;
use MiniscriptKey;
use ToPublicKey;
fn setup_keys_sigs(
n: usize,
) -> (
Vec<bitcoin::PublicKey>,
Vec<Vec<u8>>,
Vec<secp256k1::Signature>,
secp256k1::Message,
Secp256k1<VerifyOnly>,
) {
let secp_sign = secp256k1::Secp256k1::signing_only();
let secp_verify = secp256k1::Secp256k1::verification_only();
let msg = secp256k1::Message::from_slice(&b"Yoda: btc, I trust. HODL I must!"[..])
.expect("32 bytes");
let mut pks = vec![];
let mut secp_sigs = vec![];
let mut der_sigs = vec![];
let mut sk = [0; 32];
for i in 1..n + 1 {
sk[0] = i as u8;
sk[1] = (i >> 8) as u8;
sk[2] = (i >> 16) as u8;
let sk = secp256k1::SecretKey::from_slice(&sk[..]).expect("secret key");
let pk = bitcoin::PublicKey {
key: secp256k1::PublicKey::from_secret_key(&secp_sign, &sk),
compressed: true,
};
let sig = secp_sign.sign(&msg, &sk);
secp_sigs.push(sig);
let mut sigser = sig.serialize_der().to_vec();
sigser.push(0x01); // sighash_all
pks.push(pk);
der_sigs.push(sigser);
}
(pks, der_sigs, secp_sigs, msg, secp_verify)
}
#[test]
fn sat_constraints() {
let (pks, der_sigs, secp_sigs, sighash, secp) = setup_keys_sigs(10);
let vfyfn =
|pk: &bitcoin::PublicKey, (sig, _)| secp.verify(&sighash, &sig, &pk.key).is_ok();
fn from_stack<'stack, 'elem, F>(
verify_fn: F,
stack: Stack<'stack>,
ms: &'elem Miniscript<bitcoin::PublicKey, Legacy>,
) -> SatisfiedConstraints<'elem, 'stack, F>
where
F: FnMut(&bitcoin::PublicKey, BitcoinSig) -> bool,
{
SatisfiedConstraints {
verify_sig: verify_fn,
stack: stack,
public_key: None,
state: vec![NodeEvaluationState {
node: Any::from_legacy(ms),
n_evaluated: 0,
n_satisfied: 0,
}],
age: 1002,
height: 1002,
has_errored: false,
}
};
let pk = ms_str!("c:pk_k({})", pks[0]);
let pkh = ms_str!("c:pk_h({})", pks[1].to_pubkeyhash());
//Time
let after = ms_str!("after({})", 1000);
let older = ms_str!("older({})", 1000);
//Hashes
let preimage = vec![0xab as u8; 32];
let sha256_hash = sha256::Hash::hash(&preimage);
let sha256 = ms_str!("sha256({})", sha256_hash);
let sha256d_hash_rev = sha256d::Hash::hash(&preimage);
let mut sha256d_hash_bytes = sha256d_hash_rev.clone().into_inner();
sha256d_hash_bytes.reverse();
let sha256d_hash = sha256d::Hash::from_inner(sha256d_hash_bytes);
let hash256 = ms_str!("hash256({})", sha256d_hash);
let hash160_hash = hash160::Hash::hash(&preimage);
let hash160 = ms_str!("hash160({})", hash160_hash);
let ripemd160_hash = ripemd160::Hash::hash(&preimage);
let ripemd160 = ms_str!("ripemd160({})", ripemd160_hash);
let stack = Stack(vec![StackElement::Push(&der_sigs[0])]);
let constraints = from_stack(&vfyfn, stack, &pk);
let pk_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
pk_satisfied.unwrap(),
vec![SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
}]
);
        //Check Pk failure with an empty (dissatisfied) signature
let stack = Stack(vec![StackElement::Dissatisfied]);
let constraints = from_stack(&vfyfn, stack, &pk);
let pk_err: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert!(pk_err.is_err());
//Check Pkh
let pk_bytes = pks[1].to_public_key().to_bytes();
let stack = Stack(vec![
StackElement::Push(&der_sigs[1]),
StackElement::Push(&pk_bytes),
]);
let constraints = from_stack(&vfyfn, stack, &pkh);
let pkh_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
pkh_satisfied.unwrap(),
vec![SatisfiedConstraint::PublicKeyHash {
keyhash: &pks[1].to_pubkeyhash(),
key: pks[1].clone(),
sig: secp_sigs[1].clone(),
}]
);
//Check After
let stack = Stack(vec![]);
let constraints = from_stack(&vfyfn, stack, &after);
let after_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
after_satisfied.unwrap(),
vec![SatisfiedConstraint::AbsoluteTimeLock { time: &1000 }]
);
//Check Older
let stack = Stack(vec![]);
let constraints = from_stack(&vfyfn, stack, &older);
let older_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
older_satisfied.unwrap(),
vec![SatisfiedConstraint::RelativeTimeLock { time: &1000 }]
);
//Check Sha256
let stack = Stack(vec![StackElement::Push(&preimage)]);
let constraints = from_stack(&vfyfn, stack, &sha256);
        let sha256_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
        assert_eq!(
            sha256_satisfied.unwrap(),
vec![SatisfiedConstraint::HashLock {
hash: HashLockType::Sha256(&sha256_hash),
preimage: &preimage,
}]
);
        //Check Hash256 (double SHA256)
let stack = Stack(vec![StackElement::Push(&preimage)]);
let constraints = from_stack(&vfyfn, stack, &hash256);
let sha256d_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
sha256d_satisfied.unwrap(),
vec![SatisfiedConstraint::HashLock {
hash: HashLockType::Hash256(&sha256d_hash_rev),
preimage: &preimage,
}]
);
//Check hash160
let stack = Stack(vec![StackElement::Push(&preimage)]);
let constraints = from_stack(&vfyfn, stack, &hash160);
let hash160_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
hash160_satisfied.unwrap(),
vec![SatisfiedConstraint::HashLock {
hash: HashLockType::Hash160(&hash160_hash),
preimage: &preimage,
}]
);
//Check ripemd160
let stack = Stack(vec![StackElement::Push(&preimage)]);
let constraints = from_stack(&vfyfn, stack, &ripemd160);
let ripemd160_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
ripemd160_satisfied.unwrap(),
vec![SatisfiedConstraint::HashLock {
hash: HashLockType::Ripemd160(&ripemd160_hash),
preimage: &preimage
}]
);
//Check AndV
let pk_bytes = pks[1].to_public_key().to_bytes();
let stack = Stack(vec![
StackElement::Push(&der_sigs[1]),
StackElement::Push(&pk_bytes),
StackElement::Push(&der_sigs[0]),
]);
let elem = ms_str!(
"and_v(vc:pk_k({}),c:pk_h({}))",
pks[0],
pks[1].to_pubkeyhash()
);
let constraints = from_stack(&vfyfn, stack, &elem);
let and_v_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
and_v_satisfied.unwrap(),
vec![
SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
},
SatisfiedConstraint::PublicKeyHash {
keyhash: &pks[1].to_pubkeyhash(),
key: pks[1].clone(),
sig: secp_sigs[1].clone(),
}
]
);
//Check AndB
let stack = Stack(vec![
StackElement::Push(&preimage),
StackElement::Push(&der_sigs[0]),
]);
let elem = ms_str!("and_b(c:pk_k({}),sjtv:sha256({}))", pks[0], sha256_hash);
let constraints = from_stack(&vfyfn, stack, &elem);
let and_b_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
and_b_satisfied.unwrap(),
vec![
SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
},
SatisfiedConstraint::HashLock {
hash: HashLockType::Sha256(&sha256_hash),
preimage: &preimage,
}
]
);
//Check AndOr
let stack = Stack(vec![
StackElement::Push(&preimage),
StackElement::Push(&der_sigs[0]),
]);
let elem = ms_str!(
"andor(c:pk_k({}),jtv:sha256({}),c:pk_h({}))",
pks[0],
sha256_hash,
pks[1].to_pubkeyhash(),
);
let constraints = from_stack(&vfyfn, stack, &elem);
let and_or_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
and_or_satisfied.unwrap(),
vec![
SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
},
SatisfiedConstraint::HashLock {
hash: HashLockType::Sha256(&sha256_hash),
preimage: &preimage,
}
]
);
//AndOr second satisfaction path
let pk_bytes = pks[1].to_public_key().to_bytes();
let stack = Stack(vec![
StackElement::Push(&der_sigs[1]),
StackElement::Push(&pk_bytes),
StackElement::Dissatisfied,
]);
let constraints = from_stack(&vfyfn, stack, &elem);
let and_or_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
and_or_satisfied.unwrap(),
vec![SatisfiedConstraint::PublicKeyHash {
keyhash: &pks[1].to_pubkeyhash(),
key: pks[1].clone(),
sig: secp_sigs[1].clone(),
}]
);
//Check OrB
let stack = Stack(vec![
StackElement::Push(&preimage),
StackElement::Dissatisfied,
]);
let elem = ms_str!("or_b(c:pk_k({}),sjtv:sha256({}))", pks[0], sha256_hash);
let constraints = from_stack(&vfyfn, stack, &elem);
let or_b_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
or_b_satisfied.unwrap(),
vec![SatisfiedConstraint::HashLock {
hash: HashLockType::Sha256(&sha256_hash),
preimage: &preimage,
}]
);
//Check OrD
let stack = Stack(vec![StackElement::Push(&der_sigs[0])]);
let elem = ms_str!("or_d(c:pk_k({}),jtv:sha256({}))", pks[0], sha256_hash);
let constraints = from_stack(&vfyfn, stack, &elem);
let or_d_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
or_d_satisfied.unwrap(),
vec![SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
}]
);
//Check OrC
let stack = Stack(vec![
StackElement::Push(&der_sigs[0]),
StackElement::Dissatisfied,
]);
let elem = ms_str!("t:or_c(jtv:sha256({}),vc:pk_k({}))", sha256_hash, pks[0]);
let constraints = from_stack(&vfyfn, stack, &elem);
let or_c_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
or_c_satisfied.unwrap(),
vec![SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
}]
);
//Check OrI
let stack = Stack(vec![
StackElement::Push(&der_sigs[0]),
StackElement::Dissatisfied,
]);
let elem = ms_str!("or_i(jtv:sha256({}),c:pk_k({}))", sha256_hash, pks[0]);
let constraints = from_stack(&vfyfn, stack, &elem);
let or_i_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
or_i_satisfied.unwrap(),
vec![SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
}]
);
//Check Thres
let stack = Stack(vec![
StackElement::Push(&der_sigs[0]),
StackElement::Push(&der_sigs[1]),
StackElement::Push(&der_sigs[2]),
StackElement::Dissatisfied,
StackElement::Dissatisfied,
]);
let elem = ms_str!(
"thresh(3,c:pk_k({}),sc:pk_k({}),sc:pk_k({}),sc:pk_k({}),sc:pk_k({}))",
pks[4],
pks[3],
pks[2],
pks[1],
pks[0],
);
let constraints = from_stack(&vfyfn, stack, &elem);
let thresh_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
thresh_satisfied.unwrap(),
vec![
SatisfiedConstraint::PublicKey {
key: &pks[2],
sig: secp_sigs[2].clone(),
},
SatisfiedConstraint::PublicKey {
key: &pks[1],
sig: secp_sigs[1].clone(),
},
SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
}
]
);
//Check ThresM
let stack = Stack(vec![
StackElement::Dissatisfied,
StackElement::Push(&der_sigs[2]),
StackElement::Push(&der_sigs[1]),
StackElement::Push(&der_sigs[0]),
]);
let elem = ms_str!(
"multi(3,{},{},{},{},{})",
pks[4],
pks[3],
pks[2],
pks[1],
pks[0],
);
let constraints = from_stack(&vfyfn, stack, &elem);
let multi_satisfied: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert_eq!(
multi_satisfied.unwrap(),
vec![
SatisfiedConstraint::PublicKey {
key: &pks[0],
sig: secp_sigs[0].clone(),
},
SatisfiedConstraint::PublicKey {
key: &pks[1],
sig: secp_sigs[1].clone(),
},
SatisfiedConstraint::PublicKey {
key: &pks[2],
sig: secp_sigs[2].clone(),
},
]
);
//Error ThresM: Invalid order of sigs
let stack = Stack(vec![
StackElement::Dissatisfied,
StackElement::Push(&der_sigs[0]),
StackElement::Push(&der_sigs[2]),
StackElement::Push(&der_sigs[1]),
]);
let elem = ms_str!(
"multi(3,{},{},{},{},{})",
pks[4],
pks[3],
pks[2],
pks[1],
pks[0],
);
let constraints = from_stack(&vfyfn, stack, &elem);
let multi_error: Result<Vec<SatisfiedConstraint>, Error> = constraints.collect();
assert!(multi_error.is_err());
}
}
| 40.628571 | 100 | 0.506509 |
79c907142708d2e96da4094ffe77ef73acabc67c | 498 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let i = ~100;
} | 38.307692 | 68 | 0.7249 |
87e0fc1f1b631725e7bd5763698710f0272d3a0c | 4,983 | use std::ops::Range;
use crate::scanner::Scanner;
/// Scan for URLs starting from the trigger character ":", requires "://".
///
/// Based on RFC 3986.
pub struct UrlScanner {}
impl Scanner for UrlScanner {
fn scan(&self, s: &str, colon: usize) -> Option<Range<usize>> {
let after_slash_slash = colon + 3;
// Need at least one character for scheme, and one after '//'
if colon > 0 && after_slash_slash < s.len() && s[colon..].starts_with("://") {
if let Some(start) = self.find_start(&s[0..colon]) {
if let Some(end) = self.find_end(&s[after_slash_slash..]) {
let range = Range {
start,
end: after_slash_slash + end,
};
return Some(range);
}
}
}
None
}
}
impl UrlScanner {
// See "scheme" in RFC 3986
fn find_start(&self, s: &str) -> Option<usize> {
let mut first = None;
let mut digit = None;
for (i, c) in s.char_indices().rev() {
match c {
'a'..='z' | 'A'..='Z' => first = Some(i),
'0'..='9' => digit = Some(i),
// scheme special
'+' | '-' | '.' => {}
_ => {
break;
}
}
}
// We don't want to extract "abc://foo" out of "1abc://foo".
// ".abc://foo" and others are ok though, as they feel more like separators.
if let Some(first) = first {
if let Some(digit) = digit {
// Comparing the byte indices with `- 1` is ok as scheme must be ASCII
if first > 0 && first - 1 == digit {
return None;
}
}
}
first
}
fn find_end(&self, s: &str) -> Option<usize> {
let mut round = 0;
let mut square = 0;
let mut curly = 0;
let mut single_quote = false;
let mut previous_can_be_last = true;
let mut end = None;
for (i, c) in s.char_indices() {
let can_be_last = match c {
'\u{00}'..='\u{1F}' | ' ' | '\"' | '<' | '>' | '`' | '\u{7F}'..='\u{9F}' => {
                    // These can never be part of a URL, so stop now. See RFC 3986 and RFC 3987.
                    // Some characters are deliberately not in the above list, even though they
                    // are not in "unreserved" or "reserved" either:
                    // '\\', '^', '{', '|', '}'
                    // The reason for this is that other link detectors also allow them. Also see
                    // below: we require the braces to be balanced.
break;
}
'?' | '!' | '.' | ',' | ':' | ';' | '*' => {
                    // These may be part of a URL but not at its end. It's not that the spec
                    // disallows them, but they are frequently used in plain text as delimiters
                    // where they're not meant to be part of the URL.
false
}
'/' => {
                    // This may be part of a URL and at the end, but not if the previous
                    // character can't be the end of a URL
previous_can_be_last
}
'(' => {
round += 1;
false
}
')' => {
round -= 1;
if round < 0 {
// More closing than opening brackets, stop now
break;
}
true
}
'[' => {
// Allowed in IPv6 address host
square += 1;
false
}
']' => {
// Allowed in IPv6 address host
square -= 1;
if square < 0 {
// More closing than opening brackets, stop now
break;
}
true
}
'{' => {
curly += 1;
false
}
'}' => {
curly -= 1;
if curly < 0 {
// More closing than opening brackets, stop now
break;
}
true
}
'\'' => {
single_quote = !single_quote;
                    // A single quote can only be the end of a URL if there's an even number of them
!single_quote
}
_ => true,
};
if can_be_last {
end = Some(i + c.len_utf8());
}
previous_can_be_last = can_be_last;
}
end
}
}
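// Editor's illustrative test (not in the original file): the scheme-start
// rules from `find_start`: a digit immediately before the scheme rejects
// the match, while separator punctuation such as '.' does not.
#[cfg(test)]
mod url_scanner_sketch {
    use super::*;
    #[test]
    fn scheme_start_rules() {
        let scanner = UrlScanner {};
        // ':' sits at byte 4 in both inputs.
        assert!(scanner.scan("1abc://foo", 4).is_none());
        assert_eq!(scanner.scan(".abc://foo", 4), Some(1..10));
    }
}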
| 34.604167 | 99 | 0.381296 |
2f225b403863cede788243d448958de894756682 | 1,293 | extern crate sandbox_execution_environment;
use sandbox_execution_environment::{ Setup };
use sp_core::{ traits::{ CallInWasm, MissingHostFunctions }};
#[test]
fn test_grandpa_api_grandpa_pending_change() {
let mut setup = Setup::new();
let result = setup.executor.call_in_wasm(
&setup.wasm_code_array,
None,
"GrandpaApi_grandpa_pending_change",
&[],
&mut setup.ext.ext(),
MissingHostFunctions::Allow).unwrap();
println!("{:?}", result);
assert_eq!(result, [0u8; 0]);
}
#[test]
fn test_grandpa_api_grandpa_forced_change() {
let mut setup = Setup::new();
let result = setup.executor.call_in_wasm(
&setup.wasm_code_array,
None,
"GrandpaApi_grandpa_forced_change",
&[],
&mut setup.ext.ext(),
MissingHostFunctions::Allow).unwrap();
println!("{:?}", result);
assert_eq!(result, [0u8; 0]);
}
#[test]
fn test_grandpa_api_grandpa_authorities() {
let mut setup = Setup::new();
let result = setup.executor.call_in_wasm(
&setup.wasm_code_array,
None,
"GrandpaApi_grandpa_authorities",
&[],
&mut setup.ext.ext(),
MissingHostFunctions::Allow).unwrap();
println!("{:?}", result);
assert_eq!(result, [0u8; 0]);
} | 28.108696 | 61 | 0.627224 |
e20ac380e3d266822adafca6bc217c855b1b830f | 1,456 | #![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(feature = "docs", feature(external_doc))]
#![cfg_attr(feature = "docs", deny(missing_docs))]
#![cfg_attr(feature = "docs", doc(include = "../README.md"))]
#![cfg_attr(
feature = "docs",
doc(html_root_url = "https://docs.rs/bulletproofs/4.0.0")
)]
extern crate alloc;
#[macro_use]
extern crate serde_derive;
mod util;
#[cfg_attr(feature = "docs", doc(include = "../docs/notes-intro.md"))]
mod notes {
#[cfg_attr(feature = "docs", doc(include = "../docs/notes-ipp.md"))]
mod inner_product_proof {}
#[cfg_attr(feature = "docs", doc(include = "../docs/notes-rp.md"))]
mod range_proof {}
#[cfg_attr(feature = "docs", doc(include = "../docs/notes-r1cs.md"))]
mod r1cs_proof {}
}
mod errors;
mod generators;
mod inner_product_proof;
mod range_proof;
mod transcript;
// re-export crates that are used in our public API.
pub use blstrs;
pub use group;
pub use merlin;
pub use rand;
pub use crate::errors::ProofError;
pub use crate::generators::{BulletproofGens, BulletproofGensShare, PedersenGens};
pub use crate::range_proof::RangeProof;
#[cfg_attr(feature = "docs", doc(include = "../docs/aggregation-api.md"))]
pub mod range_proof_mpc {
pub use crate::errors::MPCError;
pub use crate::range_proof::dealer;
pub use crate::range_proof::messages;
pub use crate::range_proof::party;
}
#[cfg(feature = "yoloproofs")]
#[cfg(feature = "std")]
pub mod r1cs;
| 26.962963 | 81 | 0.679258 |
b9b3644121da59cee7a94725ce66eb4bb964f081 | 4,657 | //! Components for GPIO pins.
//!
//!
//! Usage
//! -----
//! ```rust
//! let gpio = components::gpio::GpioComponent::new(
//! board_kernel,
//! components::gpio_component_helper!(
//! nrf52840::gpio::GPIOPin,
//! // left side of the USB plug
//! 0 => &nrf52840::gpio::PORT[Pin::P0_13],
//! 1 => &nrf52840::gpio::PORT[Pin::P0_15],
//! 2 => &nrf52840::gpio::PORT[Pin::P0_17],
//! 3 => &nrf52840::gpio::PORT[Pin::P0_20],
//! 4 => &nrf52840::gpio::PORT[Pin::P0_22],
//! 5 => &nrf52840::gpio::PORT[Pin::P0_24],
//! 6 => &nrf52840::gpio::PORT[Pin::P1_00],
//! 7 => &nrf52840::gpio::PORT[Pin::P0_09],
//! 8 => &nrf52840::gpio::PORT[Pin::P0_10],
//! // right side of the USB plug
//! 9 => &nrf52840::gpio::PORT[Pin::P0_31],
//! 10 => &nrf52840::gpio::PORT[Pin::P0_29],
//! 11 => &nrf52840::gpio::PORT[Pin::P0_02],
//! 12 => &nrf52840::gpio::PORT[Pin::P1_15],
//! 13 => &nrf52840::gpio::PORT[Pin::P1_13],
//! 14 => &nrf52840::gpio::PORT[Pin::P1_10],
//! // Below the PCB
//! 15 => &nrf52840::gpio::PORT[Pin::P0_26],
//! 16 => &nrf52840::gpio::PORT[Pin::P0_04],
//! 17 => &nrf52840::gpio::PORT[Pin::P0_11],
//! 18 => &nrf52840::gpio::PORT[Pin::P0_14],
//! 19 => &nrf52840::gpio::PORT[Pin::P1_11],
//! 20 => &nrf52840::gpio::PORT[Pin::P1_07],
//! 21 => &nrf52840::gpio::PORT[Pin::P1_01],
//! 22 => &nrf52840::gpio::PORT[Pin::P1_04],
//! 23 => &nrf52840::gpio::PORT[Pin::P1_02]
//! ),
//! ).finalize(components::gpio_component_buf!(nrf52840::gpio::GPIOPin));
//! ```
use capsules::gpio::GPIO;
use core::mem::MaybeUninit;
use kernel::capabilities;
use kernel::component::Component;
use kernel::create_capability;
use kernel::hil::gpio;
use kernel::hil::gpio::InterruptWithValue;
use kernel::static_init_half;
#[macro_export]
macro_rules! gpio_component_helper_max_pin {
() => { 0usize };
($a:expr, $b:expr, $($tail:expr),* $(,)?) => { $crate::gpio_component_helper_max_pin! (max ($a, $b), $($tail,)*) };
($a:expr $(,)?) => { $a };
}
#[macro_export]
/// Pins are declared using the following format:
/// number => pin
///
/// Any pin numbers that are skipped will be declared as None
/// and using them from user space will return NODEVICE
macro_rules! gpio_component_helper {
(
$Pin:ty,
$($nr:literal => $pin:expr),* $(,)?
) => {{
use kernel::count_expressions;
use kernel::hil::gpio::InterruptValueWrapper;
use kernel::static_init;
const fn max (a: usize, b: usize) -> usize {
[a, b][(a < b) as usize]
}
const NUM_PINS: usize = $crate::gpio_component_helper_max_pin! ($($nr,)*) + 1;
let mut pins = static_init!(
[Option<&'static InterruptValueWrapper<'static, $Pin>>; NUM_PINS],
[None; NUM_PINS]
);
$(
pins[$nr] = Some(static_init!(InterruptValueWrapper<$Pin>, InterruptValueWrapper::new($pin)).finalize());
)*
pins
};};
}
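// Illustrative call (editor's sketch; board and pin names are assumed):
// with pins 0 and 2 declared, index 1 stays `None`, so userspace access to
// it returns NODEVICE as documented above.
//
//     let pins = components::gpio_component_helper!(
//         nrf52840::gpio::GPIOPin,
//         0 => &nrf52840::gpio::PORT[Pin::P0_13],
//         2 => &nrf52840::gpio::PORT[Pin::P0_15],
//     );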
#[macro_export]
macro_rules! gpio_component_buf {
($Pin:ty $(,)?) => {{
use capsules::gpio::GPIO;
use core::mem::MaybeUninit;
static mut BUF: MaybeUninit<GPIO<'static, $Pin>> = MaybeUninit::uninit();
&mut BUF
};};
}
pub struct GpioComponent<IP: 'static + gpio::InterruptPin<'static>> {
board_kernel: &'static kernel::Kernel,
gpio_pins: &'static [Option<&'static gpio::InterruptValueWrapper<'static, IP>>],
}
impl<IP: 'static + gpio::InterruptPin<'static>> GpioComponent<IP> {
pub fn new(
board_kernel: &'static kernel::Kernel,
gpio_pins: &'static [Option<&'static gpio::InterruptValueWrapper<'static, IP>>],
) -> Self {
Self {
board_kernel: board_kernel,
gpio_pins,
}
}
}
impl<IP: 'static + gpio::InterruptPin<'static>> Component for GpioComponent<IP> {
type StaticInput = &'static mut MaybeUninit<GPIO<'static, IP>>;
type Output = &'static GPIO<'static, IP>;
unsafe fn finalize(self, static_buffer: Self::StaticInput) -> Self::Output {
let grant_cap = create_capability!(capabilities::MemoryAllocationCapability);
let gpio = static_init_half!(
static_buffer,
GPIO<'static, IP>,
GPIO::new(self.gpio_pins, self.board_kernel.create_grant(&grant_cap))
);
for maybe_pin in self.gpio_pins.iter() {
if let Some(pin) = maybe_pin {
pin.set_client(gpio);
}
}
gpio
}
}
| 33.503597 | 119 | 0.563238 |
2fa637f87a75085406e3e4ea611f14ee115b893c | 250 | // Advent of Code 2020: December, 24
// Day 24: Lobby Layout
use crate::manage_input::get_hexa_tiles;
pub fn answers_day24() -> (u64, u64) {
    let filepath: &str = "inputs/day24_input.txt";
    // Parts 1 and 2 are unsolved stubs: the tiles are parsed but not yet
    // flipped per the puzzle rules, so the answers default to (0, 0).
    let _tiles = get_hexa_tiles(filepath);
    (0, 0)
}
| 20.833333 | 50 | 0.664 |
1d66e5f25efa66cba5363df0e268cf9d91d5b6ea | 9,880 | //! Leaf nodes in trees.
use std::borrow::{Borrow, BorrowMut};
use std::marker::PhantomData;
use std::convert::TryFrom;
use std::error;
use std::fmt;
use std::mem::{self, ManuallyDrop};
use std::ptr;
use thiserror::Error;
use hoard::primitive::Primitive;
use hoard::blob::{Blob, BlobDyn, Bytes, BytesUninit};
use hoard::load::{MaybeValid, Load, LoadRef};
use hoard::save::{Save, SavePoll, Saver};
use hoard::ptr::{AsZone, Zone, Get, GetMut, Ptr, PtrClean, PtrBlob};
use hoard::pointee::Pointee;
use hoard::owned::{IntoOwned, Take, RefOwn, Ref};
use hoard::bag::Bag;
use crate::commit::{
Commit,
HashCommit,
Digest,
sha256::Sha256Digest,
};
use super::raw;
/// Leaf node in a tree.
#[repr(transparent)]
pub struct Leaf<T, P: Ptr = (), D: Digest = Sha256Digest> {
raw: ManuallyDrop<raw::Node<T, P, D>>,
}
impl<T, P: Ptr, D: Digest> Drop for Leaf<T, P, D> {
fn drop(&mut self) {
unsafe {
self.raw.ptr.dealloc::<T>(())
}
}
}
impl<T: Commit, P: Ptr, D: Digest> Commit for Leaf<T, P, D> {
type Commitment = Leaf<T::Commitment, (), D>;
fn to_commitment(&self) -> Self::Commitment {
let digest = self.value_commit().digest();
let raw = raw::Node::new(Some(digest), ());
unsafe { Leaf::from_raw(raw) }
}
}
impl<T, P: Ptr, D: Digest> Leaf<T, P, D> {
pub fn new(value: T) -> Self
where P: Default,
{
Self::new_unchecked(None, P::alloc(value))
}
}
impl<T, P: Ptr, D: Digest> Leaf<T, P, D> {
pub fn new_unchecked(digest: Option<D>, bag: Bag<T, P>) -> Self {
let (ptr, ()) = bag.into_raw_parts();
let raw = raw::Node::new(digest, ptr);
unsafe {
Self::from_raw(raw)
}
}
pub unsafe fn from_raw(raw: raw::Node<T, P, D>) -> Self {
Self {
raw: ManuallyDrop::new(raw),
}
}
pub unsafe fn from_raw_node_ref(raw: &raw::Node<T, P, D>) -> &Self {
&*(raw as *const _ as *const _)
}
pub unsafe fn from_raw_node_mut(raw: &mut raw::Node<T, P, D>) -> &mut Self {
&mut *(raw as *mut _ as *mut _)
}
pub fn into_raw(self) -> raw::Node<T, P, D> {
let this = ManuallyDrop::new(self);
unsafe {
ptr::read(&*this.raw)
}
}
/// Returns a hash commit to the `T` value, re-hashing if necessary.
fn value_commit(&self) -> HashCommit<T::Commitment, D>
where T: Commit
{
self.try_value_commit()
.unwrap_or_else(|| self.calc_value_commit())
}
fn calc_value_commit(&self) -> HashCommit<T::Commitment, D>
where T: Commit
{
let value = self.try_get_dirty()
.ok().expect("digest missing yet leaf value clean");
let hash_commit = HashCommit::new(value);
self.raw.set_digest(hash_commit.digest());
hash_commit
}
/// Returns a hash commit to the `T` value, if available.
fn try_value_commit(&self) -> Option<HashCommit<T::Commitment, D>>
where T: Commit
{
self.raw.digest().map(HashCommit::from_digest)
}
}
impl<T, P: Ptr, D: Digest> Leaf<T, P, D>
where T: Load,
P::Zone: AsZone<T::Zone>,
{
pub fn get(&self) -> Ref<T>
where P: Get
{
unsafe {
self.raw.get::<T>(T::sized_metadata())
.trust()
}
}
pub fn get_mut(&mut self) -> &mut T
where P: GetMut
{
unsafe {
self.raw.get_mut::<T>(T::sized_metadata())
.trust()
}
}
pub fn take(self) -> T
where P: Get
{
let raw = self.into_raw();
unsafe {
raw.take::<T>(T::sized_metadata())
.trust()
}
}
}
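// Illustrative round-trip (editor's sketch; `Heap` is the pointer type used
// by the tests below, and `get` is assumed to deref to the stored value):
//
//     let leaf = Leaf::<u8, Heap>::new(42);
//     assert_eq!(*leaf.get(), 42);
//     assert_eq!(leaf.take(), 42);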
impl<T, P: Ptr, D: Digest> Leaf<T, P, D> {
pub fn try_get_dirty(&self) -> Result<&T, P::Clean> {
unsafe {
self.raw.try_get_dirty(())
.map(MaybeValid::trust)
}
}
}
impl<T, P: Ptr, D: Digest> fmt::Debug for Leaf<T, P, D>
where T: fmt::Debug, P: fmt::Debug, D: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Leaf")
.field("digest", &self.raw.digest())
.field("ptr", &self.try_get_dirty().map_err(P::from_clean))
.finish()
}
}
/*
impl<T, P: Ptr> Commit for Leaf<T, P>
where T: Commit
{
const VERBATIM_LEN: usize = Digest::<!>::LEN;
type Committed = T::Committed;
fn encode_verbatim(&self, dst: &mut impl WriteVerbatim) {
dst.write(&self.digest().as_bytes())
}
}
*/
// ---- hoard impls ------
#[derive(Debug, Error)]
#[error("FIXME")]
#[doc(hidden)]
pub struct DecodeLeafBytesError<Raw: error::Error>(Raw);
impl<T, P: PtrBlob, D: Digest> Blob for Leaf<T, P, D>
where T: Blob,
{
const SIZE: usize = <raw::Node<T, P, D> as Blob>::SIZE;
type DecodeBytesError = DecodeLeafBytesError<<raw::Node<T, P, D> as Blob>::DecodeBytesError>;
fn encode_bytes<'a>(&self, dst: BytesUninit<'a, Self>) -> Bytes<'a, Self> {
dst.write_struct()
.write_field(&*self.raw)
.done()
}
fn decode_bytes(blob: Bytes<'_, Self>) -> Result<MaybeValid<Self>, Self::DecodeBytesError> {
let mut fields = blob.struct_fields();
let raw = fields.trust_field().map_err(DecodeLeafBytesError)?;
fields.assert_done();
let this = unsafe { Self::from_raw(raw) };
Ok(this.into())
}
}
impl<T, P: Ptr, D: Digest> Load for Leaf<T, P, D>
where T: Load,
{
type Blob = Leaf<T::Blob, P::Blob, D>;
type Ptr = P;
type Zone = P::Zone;
fn load(blob: Self::Blob, zone: &Self::Zone) -> Self {
let raw = raw::Node::load(blob.into_raw(), zone);
unsafe {
Self::from_raw(raw)
}
}
}
// ----- save impls ---------
impl<Q: PtrBlob, T: Save<Q>, P: Ptr, D: Digest> Save<Q> for Leaf<T, P, D>
where T: Commit + Save<Q>,
P::Zone: AsZone<T::Zone>,
P::Clean: From<<T::Ptr as Ptr>::Clean>,
{
type DstBlob = Leaf<T::DstBlob, Q, D>;
type SavePoll = LeafSavePoll<Q, T, P, D>;
fn init_save(&self) -> Self::SavePoll {
LeafSavePoll {
marker: PhantomData,
digest: self.value_commit().digest(),
state: match self.try_get_dirty() {
Ok(dirty) => State::Dirty(dirty.init_save()),
Err(p_clean) => State::Clean(p_clean),
}
}
}
}
#[doc(hidden)]
pub struct LeafSavePoll<Q: PtrBlob, T: Save<Q>, P: Ptr, D: Digest> {
marker: PhantomData<fn(T)>,
digest: D,
state: State<Q, T, P>,
}
#[derive(Debug)]
enum State<Q: PtrBlob, T: Save<Q>, P: Ptr> {
Clean(P::Clean),
Dirty(T::SavePoll),
Done(Q),
}
impl<Q: PtrBlob, T: Save<Q>, P: Ptr, D: Digest> LeafSavePoll<Q, T, P, D> {
pub(crate) fn encode_raw_node_blob(&self) -> raw::Node<T::DstBlob, Q, D> {
match self.state {
State::Done(q_ptr) => raw::Node::new(Some(self.digest), q_ptr),
State::Dirty(_) | State::Clean(_) => panic!(),
}
}
}
impl<Q: PtrBlob, T: Save<Q>, P: Ptr, D: Digest> SavePoll for LeafSavePoll<Q, T, P, D>
where P::Zone: AsZone<T::Zone>,
P::Clean: From<<T::Ptr as Ptr>::Clean>,
{
type SrcPtr = P::Clean;
type DstPtr = Q;
type DstBlob = Leaf<T::DstBlob, Q, D>;
fn save_poll<S>(&mut self, saver: &mut S) -> Result<(), S::Error>
where S: Saver<SrcPtr = Self::SrcPtr, DstPtr = Self::DstPtr>
{
loop {
self.state = match &mut self.state {
State::Clean(p_clean) => {
match saver.save_ptr::<T>(*p_clean, ())? {
Ok(q_ptr) => State::Done(q_ptr),
Err(target_poll) => State::Dirty(target_poll),
}
},
State::Dirty(target_poll) => {
State::Done(saver.poll_ref(target_poll)?)
},
State::Done(_) => break Ok(()),
}
}
}
fn encode_blob(&self) -> Self::DstBlob {
let raw = self.encode_raw_node_blob();
unsafe { Leaf::from_raw(raw) }
}
}
#[cfg(test)]
mod tests {
use super::*;
use hoard::{
ptr::{
Heap,
key::{
Map,
offset::OffsetSaver,
},
},
};
#[test]
fn save() {
let n = 42u8;
let leaf = Leaf::<u8, Heap>::new(n);
let saver = OffsetSaver::new(&[][..]);
let (offset, buf) = saver.try_save(&leaf).unwrap();
assert_eq!(offset, 1);
assert_eq!(buf, vec![
42,
42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0
]);
}
#[test]
fn value_commit() {
let n = 42u8;
let n_hash_commit = HashCommit::new(&n);
let mut leaf_n = Leaf::<u8, Heap>::new(n);
assert!(leaf_n.try_value_commit().is_none());
assert_eq!(leaf_n.value_commit(), n_hash_commit);
assert_eq!(leaf_n.try_value_commit(), Some(n_hash_commit));
assert_eq!(leaf_n.value_commit(), n_hash_commit);
// Make sure the cached commitment is cleared on write
let _ = leaf_n.get_mut();
assert!(leaf_n.try_value_commit().is_none());
// ...and recalculated properly...
*leaf_n.get_mut() = 43;
assert!(leaf_n.try_value_commit().is_none());
assert_eq!(leaf_n.value_commit(), HashCommit::new(&43u8));
}
#[test]
fn to_commitment() {
let n = 42u8;
let n_hash_commit = HashCommit::<u8>::new(&n);
let leaf_n = Leaf::<u8, Heap>::new(n);
let leaf_hash_commit = HashCommit::<Leaf<u8>>::new(&leaf_n);
assert_eq!(n_hash_commit.digest(), leaf_hash_commit.digest())
}
}
| 26.702703 | 108 | 0.531781 |
fff7d1df907ae716b2f0e4e48ac2f3168cbe5e2a | 1,797 | use solana_client::thin_client::ThinClient;
use solana_core::validator::Validator;
use solana_core::validator::ValidatorConfig;
use solana_gossip::{cluster_info::Node, contact_info::ContactInfo};
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signature::Keypair;
use std::path::PathBuf;
use std::sync::Arc;
pub struct ValidatorInfo {
pub keypair: Arc<Keypair>,
pub voting_keypair: Arc<Keypair>,
pub ledger_path: PathBuf,
pub contact_info: ContactInfo,
}
pub struct ClusterValidatorInfo {
pub info: ValidatorInfo,
pub config: ValidatorConfig,
pub validator: Option<Validator>,
}
impl ClusterValidatorInfo {
pub fn new(
validator_info: ValidatorInfo,
config: ValidatorConfig,
validator: Validator,
) -> Self {
Self {
info: validator_info,
config,
validator: Some(validator),
}
}
}
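/// Operations shared by local test clusters: enumerating nodes, obtaining RPC
/// clients, and exiting, restarting, or re-adding validators.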
pub trait Cluster {
fn get_node_pubkeys(&self) -> Vec<Pubkey>;
fn get_validator_client(&self, pubkey: &Pubkey) -> Option<ThinClient>;
fn get_contact_info(&self, pubkey: &Pubkey) -> Option<&ContactInfo>;
fn exit_node(&mut self, pubkey: &Pubkey) -> ClusterValidatorInfo;
fn restart_node(&mut self, pubkey: &Pubkey, cluster_validator_info: ClusterValidatorInfo);
fn create_restart_context(
&mut self,
pubkey: &Pubkey,
cluster_validator_info: &mut ClusterValidatorInfo,
) -> (Node, Option<ContactInfo>);
fn restart_node_with_context(
cluster_validator_info: ClusterValidatorInfo,
restart_context: (Node, Option<ContactInfo>),
) -> ClusterValidatorInfo;
fn add_node(&mut self, pubkey: &Pubkey, cluster_validator_info: ClusterValidatorInfo);
fn exit_restart_node(&mut self, pubkey: &Pubkey, config: ValidatorConfig);
}
| 32.672727 | 94 | 0.69783 |
292e6c787c893213ef3eaf7fb51357824247e437 | 10,834 | use core::iter;
use curve25519_dalek::ristretto::{CompressedRistretto, RistrettoPoint};
use curve25519_dalek::traits::{IsIdentity, VartimeMultiscalarMul};
use curve25519_dalek::scalar::Scalar;
use merlin::Transcript;
use crate::elgamal::elgamal::{ElGamalRand, ElGamalCT, ElGamalPK, ElGamalSK};
use crate::elgamal::pedersen::{PedersenGens, PedersenOpen, PedersenComm};
use crate::transcript::TranscriptProtocol;
use crate::ProofError;
use rand_core::{CryptoRng, RngCore};
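// Sigma protocol proving that an ElGamal ciphertext and a Pedersen commitment
// hide the same message x, i.e. knowledge of (x, r_eg, r_p) such that
//   C_0 = x*G + r_eg*H_eg,   C_1 = r_eg*G,   comm = x*G + r_p*H_p.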
#[allow(non_snake_case)]
struct InCTValidityProof {
Y_eg_0: CompressedRistretto,
Y_eg_1: CompressedRistretto,
Y_p: CompressedRistretto,
z_x: Scalar,
z_eg: Scalar,
z_p: Scalar,
}
#[allow(non_snake_case)]
impl InCTValidityProof {
pub fn prove<T: RngCore + CryptoRng>(
x: u32,
eg_pk: ElGamalPK,
eg_rand: ElGamalRand,
ped_gens: PedersenGens,
ped_open: PedersenOpen,
transcript: &mut Transcript,
rng: &mut T,
) -> Self {
transcript.in_ct_validity_domain_sep();
let G = ped_gens.G;
let H_eg = eg_pk.0;
let H_p = ped_gens.H;
let x = Scalar::from(x);
let r_eg = eg_rand.0;
let r_p = ped_open.0;
let y_x = Scalar::random(rng);
let y_eg = Scalar::random(rng);
let y_p = Scalar::random(rng);
let Y_eg_0 = (y_x * G + y_eg * H_eg).compress();
let Y_eg_1 = (y_eg * G).compress();
let Y_p = (y_x * G + y_p * H_p).compress();
transcript.append_point(b"Y_eg_0", &Y_eg_0);
transcript.append_point(b"Y_eg_1", &Y_eg_1);
transcript.append_point(b"Y_p", &Y_p);
let c = transcript.challenge_scalar(b"c");
let z_x = c * x + y_x;
let z_eg = c * r_eg + y_eg;
let z_p = c * r_p + y_p;
InCTValidityProof {
Y_eg_0,
Y_eg_1,
Y_p,
z_x,
z_eg,
z_p,
}
}
pub fn verify(
eg_pk: ElGamalPK,
eg_ct: ElGamalCT,
ped_gens: PedersenGens,
ped_comm: PedersenComm,
transcript: &mut Transcript,
proof: InCTValidityProof,
) -> Result<(), ProofError> {
transcript.in_ct_validity_domain_sep();
let G = ped_gens.G;
let H_eg = eg_pk.0;
let H_p = ped_gens.H;
let InCTValidityProof {
Y_eg_0,
Y_eg_1,
Y_p,
z_x,
z_eg,
z_p,
} = proof;
transcript.validate_and_append_point(b"Y_eg_0", &Y_eg_0)?;
transcript.validate_and_append_point(b"Y_eg_1", &Y_eg_1)?;
transcript.validate_and_append_point(b"Y_p", &Y_p)?;
let c = transcript.challenge_scalar(b"c");
let w = transcript.clone().challenge_scalar(b"w"); // can otpionally be randomized
let ww = w * w;
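        // Fold the three verification equations into a single multiscalar
        // multiplication, weighting them by 1, w, and w^2.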
let mega_check = RistrettoPoint::optional_multiscalar_mul(
iter::once(z_x)
.chain(iter::once(z_eg))
.chain(iter::once(-c))
.chain(iter::once(-Scalar::one()))
.chain(iter::once(w * z_eg))
.chain(iter::once(-w * c))
.chain(iter::once(-w))
.chain(iter::once(ww * z_x))
.chain(iter::once(ww * z_p))
.chain(iter::once(-ww * c))
.chain(iter::once(-ww)),
iter::once(Some(G))
.chain(iter::once(Some(H_eg)))
.chain(iter::once(Some(eg_ct.C_0)))
.chain(iter::once(Y_eg_0.decompress()))
.chain(iter::once(Some(G)))
.chain(iter::once(Some(eg_ct.C_1)))
.chain(iter::once(Y_eg_1.decompress()))
.chain(iter::once(Some(G)))
.chain(iter::once(Some(H_p)))
.chain(iter::once(Some(ped_comm.0)))
.chain(iter::once(Y_p.decompress()))
)
.ok_or_else(|| ProofError::VerificationError)?;
if mega_check.is_identity() {
Ok(())
} else {
Err(ProofError::VerificationError)
}
}
}
#[allow(non_snake_case)]
struct OutCTValidityProof {
Y_eg_0: CompressedRistretto,
Y_eg_1: CompressedRistretto,
Y_p: CompressedRistretto,
z_sk: Scalar,
z_x: Scalar,
z_p: Scalar,
}
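// As above, but proved by the holder of the ElGamal secret key: knowledge of
// (sk, x, r_p) such that C_0 = x*G + sk*C_1, H_eg = sk*G, and
// comm = x*G + r_p*H_p.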
#[allow(non_snake_case)]
impl OutCTValidityProof {
pub fn prove<T: RngCore + CryptoRng>(
x: u64,
eg_ct: ElGamalCT,
eg_sk: ElGamalSK,
ped_gens: PedersenGens,
ped_open: PedersenOpen,
transcript: &mut Transcript,
rng: &mut T,
) -> Self {
transcript.out_ct_validity_domain_sep();
let G = ped_gens.G;
let H_p = ped_gens.H;
let C_1 = eg_ct.C_1;
let sk = eg_sk.0;
let x = Scalar::from(x);
let r_p = ped_open.0;
let y_sk = Scalar::random(rng);
let y_x = Scalar::random(rng);
let y_p = Scalar::random(rng);
let Y_eg_0 = (y_x * G + y_sk * C_1).compress();
let Y_eg_1 = (y_sk * G).compress();
let Y_p = (y_x * G + y_p * H_p).compress();
transcript.append_point(b"Y_eg_0", &Y_eg_0);
transcript.append_point(b"Y_eg_1", &Y_eg_1);
transcript.append_point(b"Y_p", &Y_p);
let c = transcript.challenge_scalar(b"c");
let z_sk = c * sk + y_sk;
let z_x = c * x + y_x;
let z_p = c * r_p + y_p;
OutCTValidityProof {
Y_eg_0,
Y_eg_1,
Y_p,
z_sk,
z_x,
z_p,
}
}
pub fn verify(
eg_pk: ElGamalPK,
eg_ct: ElGamalCT,
ped_gens: PedersenGens,
ped_comm: PedersenComm,
transcript: &mut Transcript,
proof: OutCTValidityProof,
) -> Result<(), ProofError> {
transcript.out_ct_validity_domain_sep();
let G = ped_gens.G;
let H_eg = eg_pk.0;
let H_p = ped_gens.H;
let C_0 = eg_ct.C_0;
let C_1 = eg_ct.C_1;
let OutCTValidityProof {
Y_eg_0,
Y_eg_1,
Y_p,
z_sk,
z_x,
z_p,
} = proof;
transcript.validate_and_append_point(b"Y_eg_0", &Y_eg_0)?;
transcript.validate_and_append_point(b"Y_eg_0", &Y_eg_1)?;
transcript.validate_and_append_point(b"Y_p", &Y_p)?;
let c = transcript.challenge_scalar(b"c");
let w = transcript.clone().challenge_scalar(b"w");
let ww = w * w;
let mega_check = RistrettoPoint::optional_multiscalar_mul(
iter::once(z_x)
.chain(iter::once(z_sk))
.chain(iter::once(-c))
.chain(iter::once(-Scalar::one()))
.chain(iter::once(w * z_sk))
.chain(iter::once(-w * c))
.chain(iter::once(-w))
.chain(iter::once(ww * z_x))
.chain(iter::once(ww * z_p))
.chain(iter::once(-ww * c))
.chain(iter::once(-ww)),
iter::once(Some(G))
.chain(iter::once(Some(C_1)))
.chain(iter::once(Some(C_0)))
                .chain(iter::once(Y_eg_0.decompress()))
                .chain(iter::once(Some(G)))
                .chain(iter::once(Some(H_eg)))
                .chain(iter::once(Y_eg_1.decompress()))
.chain(iter::once(Some(G)))
.chain(iter::once(Some(H_p)))
.chain(iter::once(Some(ped_comm.0)))
.chain(iter::once(Y_p.decompress()))
)
.ok_or_else(|| ProofError::VerificationError)?;
if mega_check.is_identity() {
Ok(())
} else {
Err(ProofError::VerificationError)
}
}
}
#[allow(non_snake_case)]
struct NetZeroProof {
Y_0: CompressedRistretto,
Y_1: CompressedRistretto,
z_x: Scalar,
z_0: Scalar,
z_1: Scalar,
}
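// Proves that two ElGamal ciphertexts under different public keys encrypt the
// same amount: knowledge of (x, r_0, r_1) such that C_0 = x*G + r_0*H_0 and
// C_1 = x*G + r_1*H_1.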
#[allow(non_snake_case)]
impl NetZeroProof {
pub fn prove<T: RngCore + CryptoRng>(
x: u64,
eg_pk_0: ElGamalPK,
eg_pk_1: ElGamalPK,
eg_rand_0: ElGamalRand,
eg_rand_1: ElGamalRand,
transcript: &mut Transcript,
rng: &mut T,
) -> NetZeroProof {
transcript.net_zero_domain_sep();
let G = curve25519_dalek::constants::RISTRETTO_BASEPOINT_POINT;
let H_0 = eg_pk_0.0;
let H_1 = eg_pk_1.0;
let x = Scalar::from(x);
let r_0 = eg_rand_0.0;
let r_1 = eg_rand_1.0;
let y_x = Scalar::random(rng);
let y_0 = Scalar::random(rng);
let y_1 = Scalar::random(rng);
let Y_0 = (y_x * G + y_0 * H_0).compress();
let Y_1 = (y_x * G + y_1 * H_1).compress();
transcript.append_point(b"Y_0", &Y_0);
transcript.append_point(b"Y_1", &Y_1);
let c = transcript.challenge_scalar(b"c");
let z_x = c * x + y_x;
        let z_0 = c * r_0 + y_0;
        let z_1 = c * r_1 + y_1;
NetZeroProof {
Y_0,
Y_1,
z_x,
z_0,
z_1,
}
}
pub fn verify(
eg_pk_0: ElGamalPK,
eg_pk_1: ElGamalPK,
eg_ct_0: ElGamalCT,
eg_ct_1: ElGamalCT,
transcript: &mut Transcript,
proof: NetZeroProof,
) -> Result<(), ProofError> {
transcript.net_zero_domain_sep();
let G = curve25519_dalek::constants::RISTRETTO_BASEPOINT_POINT;
let C_0 = eg_ct_0.C_0;
let C_1 = eg_ct_1.C_0;
let NetZeroProof {
Y_0,
Y_1,
z_x,
z_0,
z_1,
} = proof;
transcript.validate_and_append_point(b"Y_0", &Y_0)?;
transcript.validate_and_append_point(b"Y_1", &Y_1)?;
let c = transcript.challenge_scalar(b"c");
let w = transcript.clone().challenge_scalar(b"w");
let mega_check = RistrettoPoint::optional_multiscalar_mul(
iter::once(z_x)
.chain(iter::once(z_0))
.chain(iter::once(-c))
.chain(iter::once(-Scalar::one()))
                .chain(iter::once(w * z_x))
                .chain(iter::once(w * z_1))
.chain(iter::once(-w * c))
.chain(iter::once(-w)),
iter::once(Some(G))
.chain(iter::once(Some(eg_pk_0.0)))
.chain(iter::once(Some(C_0)))
.chain(iter::once(Y_0.decompress()))
.chain(iter::once(Some(G)))
.chain(iter::once(Some(eg_pk_1.0)))
.chain(iter::once(Some(C_1)))
.chain(iter::once(Y_1.decompress()))
)
.ok_or_else(|| ProofError::VerificationError)?;
if mega_check.is_identity() {
Ok(())
} else {
Err(ProofError::VerificationError)
}
}
}
| 28.890667 | 90 | 0.525106 |
714cb1f413bfa30f8d51f769ac2c8efce98d1013 | 2,998 | #[cfg(test)]
mod test {
mod new {
use crate::grammar::context_free_grammar::ContextFreeGrammar;
use crate::grammar::context_free_grammar_production::ContextFreeGrammarProduction;
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
enum TerminalTokenTypeTest {
Eof,
Id,
}
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
enum SyntaxTokenTest {
Epsilon,
Module,
Terminal(TerminalTokenTypeTest),
}
#[test]
fn it_creates_a_new_instance() -> () {
let grammar_productions: Vec<ContextFreeGrammarProduction<SyntaxTokenTest>> =
vec![ContextFreeGrammarProduction::new(
SyntaxTokenTest::Module,
vec![
SyntaxTokenTest::Terminal(TerminalTokenTypeTest::Id),
SyntaxTokenTest::Terminal(TerminalTokenTypeTest::Eof),
],
)];
ContextFreeGrammar::new(
SyntaxTokenTest::Epsilon,
SyntaxTokenTest::Module,
grammar_productions,
);
}
#[test]
#[should_panic]
fn it_panics_if_production_input_is_epsilon() {
let grammar_productions: Vec<ContextFreeGrammarProduction<SyntaxTokenTest>> =
vec![ContextFreeGrammarProduction::new(
SyntaxTokenTest::Epsilon,
vec![SyntaxTokenTest::Terminal(TerminalTokenTypeTest::Id)],
)];
ContextFreeGrammar::new(
SyntaxTokenTest::Epsilon,
SyntaxTokenTest::Module,
grammar_productions,
);
}
#[test]
#[should_panic]
fn it_panics_if_production_output_has_no_symbols() -> () {
let grammar_productions: Vec<ContextFreeGrammarProduction<SyntaxTokenTest>> =
vec![ContextFreeGrammarProduction::new(
SyntaxTokenTest::Module,
vec![],
)];
ContextFreeGrammar::new(
SyntaxTokenTest::Epsilon,
SyntaxTokenTest::Module,
grammar_productions,
);
}
#[test]
#[should_panic]
fn it_panics_if_production_output_has_epsilon_and_any_other_symbol() -> () {
let grammar_productions: Vec<ContextFreeGrammarProduction<SyntaxTokenTest>> =
vec![ContextFreeGrammarProduction::new(
SyntaxTokenTest::Module,
vec![
SyntaxTokenTest::Terminal(TerminalTokenTypeTest::Id),
SyntaxTokenTest::Epsilon,
],
)];
ContextFreeGrammar::new(
SyntaxTokenTest::Epsilon,
SyntaxTokenTest::Module,
grammar_productions,
);
}
}
}
| 32.945055 | 90 | 0.528686 |
bfad45a97358a8a67155522e70c5dc32dce32d03 | 38,988 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Benchmark derived from TPC-H. This is not an official TPC-H benchmark.
use std::{
fs,
iter::Iterator,
path::{Path, PathBuf},
sync::Arc,
time::Instant,
};
use ballista::context::BallistaContext;
use ballista::prelude::{BallistaConfig, BALLISTA_DEFAULT_SHUFFLE_PARTITIONS};
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::arrow::util::pretty;
use datafusion::datasource::parquet::ParquetTable;
use datafusion::datasource::{CsvFile, MemTable, TableProvider};
use datafusion::error::{DataFusionError, Result};
use datafusion::logical_plan::LogicalPlan;
use datafusion::parquet::basic::Compression;
use datafusion::parquet::file::properties::WriterProperties;
use datafusion::physical_plan::display::DisplayableExecutionPlan;
use datafusion::physical_plan::{collect, displayable};
use datafusion::prelude::*;
use structopt::StructOpt;
#[cfg(feature = "snmalloc")]
#[global_allocator]
static ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc;
#[cfg(feature = "mimalloc")]
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[derive(Debug, StructOpt, Clone)]
struct BallistaBenchmarkOpt {
/// Query number
#[structopt(short, long)]
query: usize,
/// Activate debug mode to see query results
#[structopt(short, long)]
debug: bool,
/// Number of iterations of each test run
#[structopt(short = "i", long = "iterations", default_value = "3")]
iterations: usize,
/// Batch size when reading CSV or Parquet files
#[structopt(short = "s", long = "batch-size", default_value = "8192")]
batch_size: usize,
/// Path to data files
#[structopt(parse(from_os_str), required = true, short = "p", long = "path")]
path: PathBuf,
/// File format: `csv` or `parquet`
#[structopt(short = "f", long = "format", default_value = "csv")]
file_format: String,
/// Load the data into a MemTable before executing the query
#[structopt(short = "m", long = "mem-table")]
mem_table: bool,
/// Number of partitions to process in parallel
#[structopt(short = "p", long = "partitions", default_value = "2")]
partitions: usize,
/// Ballista executor host
#[structopt(long = "host")]
host: Option<String>,
/// Ballista executor port
#[structopt(long = "port")]
port: Option<u16>,
}
#[derive(Debug, StructOpt, Clone)]
struct DataFusionBenchmarkOpt {
/// Query number
#[structopt(short, long)]
query: usize,
/// Activate debug mode to see query results
#[structopt(short, long)]
debug: bool,
/// Number of iterations of each test run
#[structopt(short = "i", long = "iterations", default_value = "3")]
iterations: usize,
/// Number of partitions to process in parallel
#[structopt(short = "p", long = "partitions", default_value = "2")]
partitions: usize,
/// Batch size when reading CSV or Parquet files
#[structopt(short = "s", long = "batch-size", default_value = "8192")]
batch_size: usize,
/// Path to data files
#[structopt(parse(from_os_str), required = true, short = "p", long = "path")]
path: PathBuf,
/// File format: `csv` or `parquet`
#[structopt(short = "f", long = "format", default_value = "csv")]
file_format: String,
/// Load the data into a MemTable before executing the query
#[structopt(short = "m", long = "mem-table")]
mem_table: bool,
}
#[derive(Debug, StructOpt)]
struct ConvertOpt {
/// Path to csv files
#[structopt(parse(from_os_str), required = true, short = "i", long = "input")]
input_path: PathBuf,
/// Output path
#[structopt(parse(from_os_str), required = true, short = "o", long = "output")]
output_path: PathBuf,
/// Output file format: `csv` or `parquet`
#[structopt(short = "f", long = "format")]
file_format: String,
/// Compression to use when writing Parquet files
#[structopt(short = "c", long = "compression", default_value = "zstd")]
compression: String,
/// Number of partitions to produce
#[structopt(short = "p", long = "partitions", default_value = "1")]
partitions: usize,
/// Batch size when reading CSV or Parquet files
#[structopt(short = "s", long = "batch-size", default_value = "8192")]
batch_size: usize,
}
#[derive(Debug, StructOpt)]
#[structopt(about = "benchmark command")]
enum BenchmarkSubCommandOpt {
#[structopt(name = "ballista")]
BallistaBenchmark(BallistaBenchmarkOpt),
#[structopt(name = "datafusion")]
DataFusionBenchmark(DataFusionBenchmarkOpt),
}
#[derive(Debug, StructOpt)]
#[structopt(name = "TPC-H", about = "TPC-H Benchmarks.")]
enum TpchOpt {
Benchmark(BenchmarkSubCommandOpt),
Convert(ConvertOpt),
}
const TABLES: &[&str] = &[
"part", "supplier", "partsupp", "customer", "orders", "lineitem", "nation", "region",
];
#[tokio::main]
async fn main() -> Result<()> {
use BenchmarkSubCommandOpt::*;
env_logger::init();
match TpchOpt::from_args() {
TpchOpt::Benchmark(BallistaBenchmark(opt)) => {
benchmark_ballista(opt).await.map(|_| ())
}
TpchOpt::Benchmark(DataFusionBenchmark(opt)) => {
benchmark_datafusion(opt).await.map(|_| ())
}
TpchOpt::Convert(opt) => convert_tbl(opt).await,
}
}
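// Example invocations (paths are illustrative):
//
//   tpch benchmark datafusion --query 1 --path /path/to/tpch-data --format tbl
//   tpch convert --input /path/to/tbl-data --output /path/to/parquet --format parquet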
async fn benchmark_datafusion(opt: DataFusionBenchmarkOpt) -> Result<Vec<RecordBatch>> {
println!("Running benchmarks with the following options: {:?}", opt);
let config = ExecutionConfig::new()
.with_target_partitions(opt.partitions)
.with_batch_size(opt.batch_size);
let mut ctx = ExecutionContext::with_config(config);
// register tables
for table in TABLES {
let table_provider =
get_table(opt.path.to_str().unwrap(), table, opt.file_format.as_str())?;
if opt.mem_table {
println!("Loading table '{}' into memory", table);
let start = Instant::now();
let memtable = MemTable::load(
table_provider,
opt.batch_size,
Some(opt.partitions),
opt.partitions,
)
.await?;
println!(
"Loaded table '{}' into memory in {} ms",
table,
start.elapsed().as_millis()
);
ctx.register_table(*table, Arc::new(memtable))?;
} else {
ctx.register_table(*table, table_provider)?;
}
}
let mut millis = vec![];
// run benchmark
let mut result: Vec<RecordBatch> = Vec::with_capacity(1);
for i in 0..opt.iterations {
let start = Instant::now();
let plan = create_logical_plan(&mut ctx, opt.query)?;
result = execute_query(&mut ctx, &plan, opt.debug).await?;
let elapsed = start.elapsed().as_secs_f64() * 1000.0;
millis.push(elapsed as f64);
println!("Query {} iteration {} took {:.1} ms", opt.query, i, elapsed);
}
let avg = millis.iter().sum::<f64>() / millis.len() as f64;
println!("Query {} avg time: {:.2} ms", opt.query, avg);
Ok(result)
}
async fn benchmark_ballista(opt: BallistaBenchmarkOpt) -> Result<()> {
println!("Running benchmarks with the following options: {:?}", opt);
let config = BallistaConfig::builder()
.set(
BALLISTA_DEFAULT_SHUFFLE_PARTITIONS,
&format!("{}", opt.partitions),
)
.build()
.map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
let ctx =
BallistaContext::remote(opt.host.unwrap().as_str(), opt.port.unwrap(), &config);
// register tables with Ballista context
let path = opt.path.to_str().unwrap();
let file_format = opt.file_format.as_str();
for table in TABLES {
match file_format {
// dbgen creates .tbl ('|' delimited) files without header
"tbl" => {
let path = format!("{}/{}.tbl", path, table);
let schema = get_schema(table);
let options = CsvReadOptions::new()
.schema(&schema)
.delimiter(b'|')
.has_header(false)
.file_extension(".tbl");
ctx.register_csv(table, &path, options)
.map_err(|e| DataFusionError::Plan(format!("{:?}", e)))?;
}
"csv" => {
let path = format!("{}/{}", path, table);
let schema = get_schema(table);
let options = CsvReadOptions::new().schema(&schema).has_header(true);
ctx.register_csv(table, &path, options)
.map_err(|e| DataFusionError::Plan(format!("{:?}", e)))?;
}
"parquet" => {
let path = format!("{}/{}", path, table);
ctx.register_parquet(table, &path)
.map_err(|e| DataFusionError::Plan(format!("{:?}", e)))?;
}
other => {
unimplemented!("Invalid file format '{}'", other);
}
}
}
let mut millis = vec![];
// run benchmark
let sql = get_query_sql(opt.query)?;
println!("Running benchmark with query {}:\n {}", opt.query, sql);
for i in 0..opt.iterations {
let start = Instant::now();
let df = ctx
.sql(&sql)
.map_err(|e| DataFusionError::Plan(format!("{:?}", e)))?;
let batches = df
.collect()
.await
.map_err(|e| DataFusionError::Plan(format!("{:?}", e)))?;
let elapsed = start.elapsed().as_secs_f64() * 1000.0;
millis.push(elapsed as f64);
println!("Query {} iteration {} took {:.1} ms", opt.query, i, elapsed);
if opt.debug {
pretty::print_batches(&batches)?;
}
}
let avg = millis.iter().sum::<f64>() / millis.len() as f64;
println!("Query {} avg time: {:.2} ms", opt.query, avg);
Ok(())
}
fn get_query_sql(query: usize) -> Result<String> {
if query > 0 && query < 23 {
let filename = format!("queries/q{}.sql", query);
Ok(fs::read_to_string(&filename).expect("failed to read query"))
} else {
Err(DataFusionError::Plan(
"invalid query. Expected value between 1 and 22".to_owned(),
))
}
}
fn create_logical_plan(ctx: &mut ExecutionContext, query: usize) -> Result<LogicalPlan> {
let sql = get_query_sql(query)?;
ctx.create_logical_plan(&sql)
}
async fn execute_query(
ctx: &mut ExecutionContext,
plan: &LogicalPlan,
debug: bool,
) -> Result<Vec<RecordBatch>> {
if debug {
println!("=== Logical plan ===\n{:?}\n", plan);
}
let plan = ctx.optimize(plan)?;
if debug {
println!("=== Optimized logical plan ===\n{:?}\n", plan);
}
let physical_plan = ctx.create_physical_plan(&plan)?;
if debug {
println!(
"=== Physical plan ===\n{}\n",
displayable(physical_plan.as_ref()).indent().to_string()
);
}
let result = collect(physical_plan.clone()).await?;
if debug {
println!(
"=== Physical plan with metrics ===\n{}\n",
DisplayableExecutionPlan::with_metrics(physical_plan.as_ref())
.indent()
.to_string()
);
pretty::print_batches(&result)?;
}
Ok(result)
}
async fn convert_tbl(opt: ConvertOpt) -> Result<()> {
let output_root_path = Path::new(&opt.output_path);
for table in TABLES {
let start = Instant::now();
let schema = get_schema(table);
let input_path = format!("{}/{}.tbl", opt.input_path.to_str().unwrap(), table);
let options = CsvReadOptions::new()
.schema(&schema)
.delimiter(b'|')
.file_extension(".tbl");
let config = ExecutionConfig::new().with_batch_size(opt.batch_size);
let mut ctx = ExecutionContext::with_config(config);
// build plan to read the TBL file
let mut csv = ctx.read_csv(&input_path, options)?;
// optionally, repartition the file
if opt.partitions > 1 {
csv = csv.repartition(Partitioning::RoundRobinBatch(opt.partitions))?
}
// create the physical plan
let csv = csv.to_logical_plan();
let csv = ctx.optimize(&csv)?;
let csv = ctx.create_physical_plan(&csv)?;
let output_path = output_root_path.join(table);
let output_path = output_path.to_str().unwrap().to_owned();
println!(
"Converting '{}' to {} files in directory '{}'",
&input_path, &opt.file_format, &output_path
);
match opt.file_format.as_str() {
"csv" => ctx.write_csv(csv, output_path).await?,
"parquet" => {
let compression = match opt.compression.as_str() {
"none" => Compression::UNCOMPRESSED,
"snappy" => Compression::SNAPPY,
"brotli" => Compression::BROTLI,
"gzip" => Compression::GZIP,
"lz4" => Compression::LZ4,
"lz0" => Compression::LZO,
"zstd" => Compression::ZSTD,
other => {
return Err(DataFusionError::NotImplemented(format!(
"Invalid compression format: {}",
other
)))
}
};
let props = WriterProperties::builder()
.set_compression(compression)
.build();
ctx.write_parquet(csv, output_path, Some(props)).await?
}
other => {
return Err(DataFusionError::NotImplemented(format!(
"Invalid output format: {}",
other
)))
}
}
println!("Conversion completed in {} ms", start.elapsed().as_millis());
}
Ok(())
}
fn get_table(
path: &str,
table: &str,
table_format: &str,
) -> Result<Arc<dyn TableProvider>> {
match table_format {
// dbgen creates .tbl ('|' delimited) files without header
"tbl" => {
let path = format!("{}/{}.tbl", path, table);
let schema = get_schema(table);
let options = CsvReadOptions::new()
.schema(&schema)
.delimiter(b'|')
.has_header(false)
.file_extension(".tbl");
Ok(Arc::new(CsvFile::try_new(&path, options)?))
}
"csv" => {
let path = format!("{}/{}", path, table);
let schema = get_schema(table);
let options = CsvReadOptions::new().schema(&schema).has_header(true);
Ok(Arc::new(CsvFile::try_new(&path, options)?))
}
"parquet" => {
let path = format!("{}/{}", path, table);
let schema = get_schema(table);
Ok(Arc::new(ParquetTable::try_new_with_schema(
&path, schema, false,
)?))
}
other => {
unimplemented!("Invalid file format '{}'", other);
}
}
}
fn get_schema(table: &str) -> Schema {
// note that the schema intentionally uses signed integers so that any generated Parquet
// files can also be used to benchmark tools that only support signed integers, such as
// Apache Spark
match table {
"part" => Schema::new(vec![
Field::new("p_partkey", DataType::Int64, false),
Field::new("p_name", DataType::Utf8, false),
Field::new("p_mfgr", DataType::Utf8, false),
Field::new("p_brand", DataType::Utf8, false),
Field::new("p_type", DataType::Utf8, false),
Field::new("p_size", DataType::Int32, false),
Field::new("p_container", DataType::Utf8, false),
Field::new("p_retailprice", DataType::Float64, false),
Field::new("p_comment", DataType::Utf8, false),
]),
"supplier" => Schema::new(vec![
Field::new("s_suppkey", DataType::Int64, false),
Field::new("s_name", DataType::Utf8, false),
Field::new("s_address", DataType::Utf8, false),
Field::new("s_nationkey", DataType::Int64, false),
Field::new("s_phone", DataType::Utf8, false),
Field::new("s_acctbal", DataType::Float64, false),
Field::new("s_comment", DataType::Utf8, false),
]),
"partsupp" => Schema::new(vec![
Field::new("ps_partkey", DataType::Int64, false),
Field::new("ps_suppkey", DataType::Int64, false),
Field::new("ps_availqty", DataType::Int32, false),
Field::new("ps_supplycost", DataType::Float64, false),
Field::new("ps_comment", DataType::Utf8, false),
]),
"customer" => Schema::new(vec![
Field::new("c_custkey", DataType::Int64, false),
Field::new("c_name", DataType::Utf8, false),
Field::new("c_address", DataType::Utf8, false),
Field::new("c_nationkey", DataType::Int64, false),
Field::new("c_phone", DataType::Utf8, false),
Field::new("c_acctbal", DataType::Float64, false),
Field::new("c_mktsegment", DataType::Utf8, false),
Field::new("c_comment", DataType::Utf8, false),
]),
"orders" => Schema::new(vec![
Field::new("o_orderkey", DataType::Int64, false),
Field::new("o_custkey", DataType::Int64, false),
Field::new("o_orderstatus", DataType::Utf8, false),
Field::new("o_totalprice", DataType::Float64, false),
Field::new("o_orderdate", DataType::Date32, false),
Field::new("o_orderpriority", DataType::Utf8, false),
Field::new("o_clerk", DataType::Utf8, false),
Field::new("o_shippriority", DataType::Int32, false),
Field::new("o_comment", DataType::Utf8, false),
]),
"lineitem" => Schema::new(vec![
Field::new("l_orderkey", DataType::Int64, false),
Field::new("l_partkey", DataType::Int64, false),
Field::new("l_suppkey", DataType::Int64, false),
Field::new("l_linenumber", DataType::Int32, false),
Field::new("l_quantity", DataType::Float64, false),
Field::new("l_extendedprice", DataType::Float64, false),
Field::new("l_discount", DataType::Float64, false),
Field::new("l_tax", DataType::Float64, false),
Field::new("l_returnflag", DataType::Utf8, false),
Field::new("l_linestatus", DataType::Utf8, false),
Field::new("l_shipdate", DataType::Date32, false),
Field::new("l_commitdate", DataType::Date32, false),
Field::new("l_receiptdate", DataType::Date32, false),
Field::new("l_shipinstruct", DataType::Utf8, false),
Field::new("l_shipmode", DataType::Utf8, false),
Field::new("l_comment", DataType::Utf8, false),
]),
"nation" => Schema::new(vec![
Field::new("n_nationkey", DataType::Int64, false),
Field::new("n_name", DataType::Utf8, false),
Field::new("n_regionkey", DataType::Int64, false),
Field::new("n_comment", DataType::Utf8, false),
]),
"region" => Schema::new(vec![
Field::new("r_regionkey", DataType::Int64, false),
Field::new("r_name", DataType::Utf8, false),
Field::new("r_comment", DataType::Utf8, false),
]),
_ => unimplemented!(),
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::env;
use std::sync::Arc;
use datafusion::arrow::array::*;
use datafusion::arrow::util::display::array_value_to_string;
use datafusion::logical_plan::Expr;
use datafusion::logical_plan::Expr::Cast;
#[tokio::test]
async fn q1() -> Result<()> {
verify_query(1).await
}
#[tokio::test]
async fn q2() -> Result<()> {
verify_query(2).await
}
#[tokio::test]
async fn q3() -> Result<()> {
verify_query(3).await
}
#[tokio::test]
async fn q4() -> Result<()> {
verify_query(4).await
}
#[tokio::test]
async fn q5() -> Result<()> {
verify_query(5).await
}
#[tokio::test]
async fn q6() -> Result<()> {
verify_query(6).await
}
#[tokio::test]
async fn q7() -> Result<()> {
verify_query(7).await
}
#[tokio::test]
async fn q8() -> Result<()> {
verify_query(8).await
}
#[tokio::test]
async fn q9() -> Result<()> {
verify_query(9).await
}
#[tokio::test]
async fn q10() -> Result<()> {
verify_query(10).await
}
#[tokio::test]
async fn q11() -> Result<()> {
verify_query(11).await
}
#[tokio::test]
async fn q12() -> Result<()> {
verify_query(12).await
}
#[tokio::test]
async fn q13() -> Result<()> {
verify_query(13).await
}
#[tokio::test]
async fn q14() -> Result<()> {
verify_query(14).await
}
#[tokio::test]
async fn q15() -> Result<()> {
verify_query(15).await
}
#[tokio::test]
async fn q16() -> Result<()> {
verify_query(16).await
}
#[tokio::test]
async fn q17() -> Result<()> {
verify_query(17).await
}
#[tokio::test]
async fn q18() -> Result<()> {
verify_query(18).await
}
#[tokio::test]
async fn q19() -> Result<()> {
verify_query(19).await
}
#[tokio::test]
async fn q20() -> Result<()> {
verify_query(20).await
}
#[tokio::test]
async fn q21() -> Result<()> {
verify_query(21).await
}
#[tokio::test]
async fn q22() -> Result<()> {
verify_query(22).await
}
#[tokio::test]
async fn run_q1() -> Result<()> {
run_query(1).await
}
#[tokio::test]
async fn run_q3() -> Result<()> {
run_query(3).await
}
#[tokio::test]
async fn run_q5() -> Result<()> {
run_query(5).await
}
#[tokio::test]
async fn run_q6() -> Result<()> {
run_query(6).await
}
#[tokio::test]
async fn run_q7() -> Result<()> {
run_query(7).await
}
#[tokio::test]
async fn run_q8() -> Result<()> {
run_query(8).await
}
#[tokio::test]
async fn run_q9() -> Result<()> {
run_query(9).await
}
#[tokio::test]
async fn run_q10() -> Result<()> {
run_query(10).await
}
#[tokio::test]
async fn run_q12() -> Result<()> {
run_query(12).await
}
#[tokio::test]
async fn run_q13() -> Result<()> {
run_query(13).await
}
#[tokio::test]
async fn run_q14() -> Result<()> {
run_query(14).await
}
#[tokio::test]
async fn run_q19() -> Result<()> {
run_query(19).await
}
/// Specialised String representation
fn col_str(column: &ArrayRef, row_index: usize) -> String {
if column.is_null(row_index) {
return "NULL".to_string();
}
        // Special-case FixedSizeListArray as there is no pretty-print support for it yet
if let DataType::FixedSizeList(_, n) = column.data_type() {
let array = column
.as_any()
.downcast_ref::<FixedSizeListArray>()
.unwrap()
.value(row_index);
let mut r = Vec::with_capacity(*n as usize);
for i in 0..*n {
r.push(col_str(&array, i as usize));
}
return format!("[{}]", r.join(","));
}
array_value_to_string(column, row_index).unwrap()
}
/// Converts the results into a 2d array of strings, `result[row][column]`
/// Special cases nulls to NULL for testing
fn result_vec(results: &[RecordBatch]) -> Vec<Vec<String>> {
let mut result = vec![];
for batch in results {
for row_index in 0..batch.num_rows() {
let row_vec = batch
.columns()
.iter()
.map(|column| col_str(column, row_index))
.collect();
result.push(row_vec);
}
}
result
}
fn get_answer_schema(n: usize) -> Schema {
match n {
1 => Schema::new(vec![
Field::new("l_returnflag", DataType::Utf8, true),
Field::new("l_linestatus", DataType::Utf8, true),
Field::new("sum_qty", DataType::Float64, true),
Field::new("sum_base_price", DataType::Float64, true),
Field::new("sum_disc_price", DataType::Float64, true),
Field::new("sum_charge", DataType::Float64, true),
Field::new("avg_qty", DataType::Float64, true),
Field::new("avg_price", DataType::Float64, true),
Field::new("avg_disc", DataType::Float64, true),
Field::new("count_order", DataType::UInt64, true),
]),
2 => Schema::new(vec![
Field::new("s_acctbal", DataType::Float64, true),
Field::new("s_name", DataType::Utf8, true),
Field::new("n_name", DataType::Utf8, true),
Field::new("p_partkey", DataType::Int32, true),
Field::new("p_mfgr", DataType::Utf8, true),
Field::new("s_address", DataType::Utf8, true),
Field::new("s_phone", DataType::Utf8, true),
Field::new("s_comment", DataType::Utf8, true),
]),
3 => Schema::new(vec![
Field::new("l_orderkey", DataType::Int32, true),
Field::new("revenue", DataType::Float64, true),
Field::new("o_orderdate", DataType::Date32, true),
Field::new("o_shippriority", DataType::Int32, true),
]),
4 => Schema::new(vec![
Field::new("o_orderpriority", DataType::Utf8, true),
Field::new("order_count", DataType::Int32, true),
]),
5 => Schema::new(vec![
Field::new("n_name", DataType::Utf8, true),
Field::new("revenue", DataType::Float64, true),
]),
6 => Schema::new(vec![Field::new("revenue", DataType::Float64, true)]),
7 => Schema::new(vec![
Field::new("supp_nation", DataType::Utf8, true),
Field::new("cust_nation", DataType::Utf8, true),
Field::new("l_year", DataType::Int32, true),
Field::new("revenue", DataType::Float64, true),
]),
8 => Schema::new(vec![
Field::new("o_year", DataType::Int32, true),
Field::new("mkt_share", DataType::Float64, true),
]),
9 => Schema::new(vec![
Field::new("nation", DataType::Utf8, true),
Field::new("o_year", DataType::Int32, true),
Field::new("sum_profit", DataType::Float64, true),
]),
10 => Schema::new(vec![
Field::new("c_custkey", DataType::Int32, true),
Field::new("c_name", DataType::Utf8, true),
Field::new("revenue", DataType::Float64, true),
Field::new("c_acctbal", DataType::Float64, true),
Field::new("n_name", DataType::Utf8, true),
Field::new("c_address", DataType::Utf8, true),
Field::new("c_phone", DataType::Utf8, true),
Field::new("c_comment", DataType::Utf8, true),
]),
11 => Schema::new(vec![
Field::new("ps_partkey", DataType::Int32, true),
Field::new("value", DataType::Float64, true),
]),
12 => Schema::new(vec![
Field::new("l_shipmode", DataType::Utf8, true),
Field::new("high_line_count", DataType::Int64, true),
Field::new("low_line_count", DataType::Int64, true),
]),
13 => Schema::new(vec![
Field::new("c_count", DataType::Int64, true),
Field::new("custdist", DataType::Int64, true),
]),
14 => Schema::new(vec![Field::new("promo_revenue", DataType::Float64, true)]),
15 => Schema::new(vec![Field::new("promo_revenue", DataType::Float64, true)]),
16 => Schema::new(vec![
Field::new("p_brand", DataType::Utf8, true),
Field::new("p_type", DataType::Utf8, true),
Field::new("c_phone", DataType::Int32, true),
Field::new("c_comment", DataType::Int32, true),
]),
17 => Schema::new(vec![Field::new("avg_yearly", DataType::Float64, true)]),
18 => Schema::new(vec![
Field::new("c_name", DataType::Utf8, true),
Field::new("c_custkey", DataType::Int32, true),
Field::new("o_orderkey", DataType::Int32, true),
Field::new("o_orderdate", DataType::Date32, true),
Field::new("o_totalprice", DataType::Float64, true),
Field::new("sum_l_quantity", DataType::Float64, true),
]),
19 => Schema::new(vec![Field::new("revenue", DataType::Float64, true)]),
20 => Schema::new(vec![
Field::new("s_name", DataType::Utf8, true),
Field::new("s_address", DataType::Utf8, true),
]),
21 => Schema::new(vec![
Field::new("s_name", DataType::Utf8, true),
Field::new("numwait", DataType::Int32, true),
]),
22 => Schema::new(vec![
Field::new("cntrycode", DataType::Int32, true),
Field::new("numcust", DataType::Int32, true),
Field::new("totacctbal", DataType::Float64, true),
]),
_ => unimplemented!(),
}
}
    // convert the expected schema to all-utf8 so columns can be read as strings
    // and parsed separately, because the csv parser cannot handle leading/trailing spaces
fn string_schema(schema: Schema) -> Schema {
Schema::new(
schema
.fields()
.iter()
.map(|field| {
Field::new(
Field::name(field),
DataType::Utf8,
Field::is_nullable(field),
)
})
.collect::<Vec<Field>>(),
)
}
    // convert the schema to an identical one with all columns nullable,
    // so schemas can be compared while ignoring nullability.
fn nullable_schema(schema: Arc<Schema>) -> Schema {
Schema::new(
schema
.fields()
.iter()
.map(|field| {
Field::new(
Field::name(field),
Field::data_type(field).to_owned(),
true,
)
})
.collect::<Vec<Field>>(),
)
}
async fn run_query(n: usize) -> Result<()> {
        // Tests that the query runs successfully against empty tables.
let config = ExecutionConfig::new()
.with_target_partitions(1)
.with_batch_size(10);
let mut ctx = ExecutionContext::with_config(config);
for &table in TABLES {
let schema = get_schema(table);
let batch = RecordBatch::new_empty(Arc::new(schema.to_owned()));
let provider = MemTable::try_new(Arc::new(schema), vec![vec![batch]])?;
ctx.register_table(table, Arc::new(provider))?;
}
let plan = create_logical_plan(&mut ctx, n)?;
execute_query(&mut ctx, &plan, false).await?;
Ok(())
}
async fn verify_query(n: usize) -> Result<()> {
if let Ok(path) = env::var("TPCH_DATA") {
// load expected answers from tpch-dbgen
            // read csv as all strings, then trim and cast to the expected type,
            // because the csv string-to-value parser does not handle leading/trailing spaces
let mut ctx = ExecutionContext::new();
let schema = string_schema(get_answer_schema(n));
let options = CsvReadOptions::new()
.schema(&schema)
.delimiter(b'|')
.file_extension(".out");
let df = ctx.read_csv(&format!("{}/answers/q{}.out", path, n), options)?;
let df = df.select(
get_answer_schema(n)
.fields()
.iter()
.map(|field| {
Expr::Alias(
Box::new(Cast {
expr: Box::new(trim(col(Field::name(field)))),
data_type: Field::data_type(field).to_owned(),
}),
Field::name(field).to_string(),
)
})
.collect::<Vec<Expr>>(),
)?;
let expected = df.collect().await?;
            // run the query to compute the actual results
let opt = DataFusionBenchmarkOpt {
query: n,
debug: false,
iterations: 1,
partitions: 2,
batch_size: 8192,
path: PathBuf::from(path.to_string()),
file_format: "tbl".to_string(),
mem_table: false,
};
let actual = benchmark_datafusion(opt).await?;
            // assert schema equality, ignoring nullability
assert_eq!(
nullable_schema(expected[0].schema()),
nullable_schema(actual[0].schema())
);
// convert both datasets to Vec<Vec<String>> for simple comparison
let expected_vec = result_vec(&expected);
let actual_vec = result_vec(&actual);
// basic result comparison
assert_eq!(expected_vec.len(), actual_vec.len());
            // compare each row; this works because all TPC-H queries have deterministically ordered results
for i in 0..actual_vec.len() {
assert_eq!(expected_vec[i], actual_vec[i]);
}
} else {
println!("TPCH_DATA environment variable not set, skipping test");
}
Ok(())
}
mod ballista_round_trip {
use super::*;
use ballista_core::serde::protobuf;
use datafusion::physical_plan::ExecutionPlan;
use std::convert::TryInto;
fn round_trip_query(n: usize) -> Result<()> {
let config = ExecutionConfig::new()
.with_target_partitions(1)
.with_batch_size(10);
let mut ctx = ExecutionContext::with_config(config);
            // fall back to a dummy path and skip the physical plan serde test
            // when TPCH_DATA is not set.
let tpch_data_path =
env::var("TPCH_DATA").unwrap_or_else(|_| "./".to_string());
for &table in TABLES {
let schema = get_schema(table);
let options = CsvReadOptions::new()
.schema(&schema)
.delimiter(b'|')
.has_header(false)
.file_extension(".tbl");
let provider = CsvFile::try_new(
&format!("{}/{}.tbl", tpch_data_path, table),
options,
)?;
ctx.register_table(table, Arc::new(provider))?;
}
// test logical plan round trip
let plan = create_logical_plan(&mut ctx, n)?;
let proto: protobuf::LogicalPlanNode = (&plan).try_into().unwrap();
let round_trip: LogicalPlan = (&proto).try_into().unwrap();
assert_eq!(
format!("{:?}", plan),
format!("{:?}", round_trip),
"logical plan round trip failed"
);
// test optimized logical plan round trip
let plan = ctx.optimize(&plan)?;
let proto: protobuf::LogicalPlanNode = (&plan).try_into().unwrap();
let round_trip: LogicalPlan = (&proto).try_into().unwrap();
assert_eq!(
format!("{:?}", plan),
format!("{:?}", round_trip),
"opitmized logical plan round trip failed"
);
// test physical plan roundtrip
if env::var("TPCH_DATA").is_ok() {
let physical_plan = ctx.create_physical_plan(&plan)?;
let proto: protobuf::PhysicalPlanNode =
(physical_plan.clone()).try_into().unwrap();
let round_trip: Arc<dyn ExecutionPlan> = (&proto).try_into().unwrap();
assert_eq!(
format!("{:?}", physical_plan),
format!("{:?}", round_trip),
"physical plan round trip failed"
);
}
Ok(())
}
macro_rules! test_round_trip {
($tn:ident, $query:expr) => {
#[test]
fn $tn() -> Result<()> {
round_trip_query($query)
}
};
}
test_round_trip!(q1, 1);
test_round_trip!(q3, 3);
test_round_trip!(q5, 5);
test_round_trip!(q6, 6);
test_round_trip!(q7, 7);
test_round_trip!(q8, 8);
test_round_trip!(q9, 9);
test_round_trip!(q10, 10);
test_round_trip!(q12, 12);
test_round_trip!(q13, 13);
}
}
| 34.08042 | 102 | 0.532702 |
5dc843573cd274c4243bc23ad5c15e921f808d15 | 730 | //! Runtime API definition for the Refund Module.
#![cfg_attr(not(feature = "std"), no_std)]
use codec::Codec;
use sp_std::vec::Vec;
sp_api::decl_runtime_apis! {
pub trait RefundApi<AccountId, H256, RefundRequest> where
AccountId: Codec,
H256: Codec,
RefundRequest: Codec,
{
/// Get all refund requests for a particular account
fn get_refund_requests(account_id: AccountId) -> Vec<H256>;
/// Get the refund request corresponding to a particular issue ID
fn get_refund_requests_by_issue_id(issue_id: H256) -> Option<H256>;
/// Get all refund requests for a particular vault
fn get_vault_refund_requests(vault_id: AccountId) -> Vec<H256>;
}
}
| 30.416667 | 75 | 0.671233 |
76db8f2148e9ac62b6063834094c555b70e07811 | 527 | use hdk::prelude::holo_hash::*;
use hdk::prelude::*;
use std::convert::TryFrom;
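/// A single move in a game; `previous_move_hash` links moves into a
/// per-game hash chain.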
#[hdk_entry(id = "game_move_entry")]
#[derive(Clone)]
pub struct GameMoveEntry {
pub game_hash: EntryHashB64,
pub author_pub_key: AgentPubKeyB64,
pub game_move: SerializedBytes,
pub resulting_game_state: SerializedBytes,
pub previous_move_hash: Option<HeaderHashB64>,
}
// IO structs
#[derive(Serialize, Deserialize, Debug)]
pub struct MoveInfo {
pub header_hash: HeaderHashB64,
pub game_move_entry: GameMoveEntry,
}
| 25.095238 | 50 | 0.73814 |
0ad75c5ec8cbcddba43adf03c83500b18e06f297 | 18,942 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::*;
use ast;
use ast_util;
use codemap;
use codemap::Span;
use owned_slice::OwnedSlice;
use parse::token;
use print::pprust;
use ptr::P;
use visit::Visitor;
use visit;
use std::cmp;
use std::u32;
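/// Joins the identifiers of a path with `::`, e.g. `["std", "vec"]` becomes
/// `"std::vec"`.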
pub fn path_name_i(idents: &[Ident]) -> String {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
idents.iter().map(|i| {
token::get_ident(*i).to_string()
}).collect::<Vec<String>>().connect("::")
}
pub fn local_def(id: NodeId) -> DefId {
ast::DefId { krate: LOCAL_CRATE, node: id }
}
pub fn is_local(did: ast::DefId) -> bool { did.krate == LOCAL_CRATE }
pub fn stmt_id(s: &Stmt) -> NodeId {
match s.node {
StmtDecl(_, id) => id,
StmtExpr(_, id) => id,
StmtSemi(_, id) => id,
StmtMac(..) => panic!("attempted to analyze unexpanded stmt")
}
}
pub fn binop_to_string(op: BinOp_) -> &'static str {
match op {
BiAdd => "+",
BiSub => "-",
BiMul => "*",
BiDiv => "/",
BiRem => "%",
BiAnd => "&&",
BiOr => "||",
BiBitXor => "^",
BiBitAnd => "&",
BiBitOr => "|",
BiShl => "<<",
BiShr => ">>",
BiEq => "==",
BiLt => "<",
BiLe => "<=",
BiNe => "!=",
BiGe => ">=",
BiGt => ">"
}
}
pub fn lazy_binop(b: BinOp_) -> bool {
match b {
BiAnd => true,
BiOr => true,
_ => false
}
}
pub fn is_shift_binop(b: BinOp_) -> bool {
match b {
BiShl => true,
BiShr => true,
_ => false
}
}
pub fn is_comparison_binop(b: BinOp_) -> bool {
match b {
BiEq | BiLt | BiLe | BiNe | BiGt | BiGe =>
true,
BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem |
BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr =>
false,
}
}
/// Returns `true` if the binary operator takes its arguments by value
pub fn is_by_value_binop(b: BinOp_) -> bool {
!is_comparison_binop(b)
}
/// Returns `true` if the unary operator takes its argument by value
pub fn is_by_value_unop(u: UnOp) -> bool {
match u {
UnNeg | UnNot => true,
_ => false,
}
}
pub fn unop_to_string(op: UnOp) -> &'static str {
match op {
UnUniq => "box() ",
UnDeref => "*",
UnNot => "!",
UnNeg => "-",
}
}
pub fn is_path(e: P<Expr>) -> bool {
match e.node { ExprPath(..) => true, _ => false }
}
/// Get a string representation of a signed int type, with its value.
/// We want to avoid "45int" and "-3int" in favor of "45" and "-3"
pub fn int_ty_to_string(t: IntTy, val: Option<i64>) -> String {
let s = match t {
TyIs => "isize",
TyI8 => "i8",
TyI16 => "i16",
TyI32 => "i32",
TyI64 => "i64"
};
match val {
// cast to a u64 so we can correctly print INT64_MIN. All integral types
// are parsed as u64, so we wouldn't want to print an extra negative
// sign.
Some(n) => format!("{}{}", n as u64, s),
None => s.to_string()
}
}
pub fn int_ty_max(t: IntTy) -> u64 {
match t {
TyI8 => 0x80,
TyI16 => 0x8000,
        TyIs | TyI32 => 0x80000000, // not exact for TyIs, which is pointer-sized
TyI64 => 0x8000000000000000
}
}
/// Get a string representation of an unsigned int type, with its value.
/// We want to avoid "42u" in favor of "42us". "42uint" is right out.
pub fn uint_ty_to_string(t: UintTy, val: Option<u64>) -> String {
let s = match t {
TyUs => "usize",
TyU8 => "u8",
TyU16 => "u16",
TyU32 => "u32",
TyU64 => "u64"
};
match val {
Some(n) => format!("{}{}", n, s),
None => s.to_string()
}
}
pub fn uint_ty_max(t: UintTy) -> u64 {
match t {
TyU8 => 0xff,
TyU16 => 0xffff,
TyUs | TyU32 => 0xffffffff, // actually ni about TyUs
TyU64 => 0xffffffffffffffff
}
}
pub fn float_ty_to_string(t: FloatTy) -> String {
match t {
TyF32 => "f32".to_string(),
TyF64 => "f64".to_string(),
}
}
// convert a span and an identifier to the corresponding
// 1-segment path
pub fn ident_to_path(s: Span, identifier: Ident) -> Path {
ast::Path {
span: s,
global: false,
segments: vec!(
ast::PathSegment {
identifier: identifier,
parameters: ast::AngleBracketedParameters(ast::AngleBracketedParameterData {
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
bindings: OwnedSlice::empty(),
})
}
),
}
}
// If path is a single segment ident path, return that ident. Otherwise, return
// None.
pub fn path_to_ident(path: &Path) -> Option<Ident> {
if path.segments.len() != 1 {
return None;
}
let segment = &path.segments[0];
if !segment.parameters.is_empty() {
return None;
}
Some(segment.identifier)
}
pub fn ident_to_pat(id: NodeId, s: Span, i: Ident) -> P<Pat> {
P(Pat {
id: id,
node: PatIdent(BindByValue(MutImmutable), codemap::Spanned{span:s, node:i}, None),
span: s
})
}
pub fn name_to_dummy_lifetime(name: Name) -> Lifetime {
Lifetime { id: DUMMY_NODE_ID,
span: codemap::DUMMY_SP,
name: name }
}
/// Generate a "pretty" name for an `impl` from its type and trait.
/// This is designed so that symbols of `impl`'d methods give some
/// hint of where they came from, (previously they would all just be
/// listed as `__extensions__::method_name::hash`, with no indication
/// of the type).
pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: Option<&Ty>) -> Ident {
let mut pretty = match ty {
Some(t) => pprust::ty_to_string(t),
None => String::from_str("..")
};
match *trait_ref {
Some(ref trait_ref) => {
pretty.push('.');
pretty.push_str(&pprust::path_to_string(&trait_ref.path));
}
None => {}
}
token::gensym_ident(&pretty[..])
}
pub fn struct_field_visibility(field: ast::StructField) -> Visibility {
match field.node.kind {
ast::NamedField(_, v) | ast::UnnamedField(v) => v
}
}
/// Maps a binary operator to its precedence
pub fn operator_prec(op: ast::BinOp_) -> usize {
match op {
// 'as' sits here with 12
BiMul | BiDiv | BiRem => 11,
BiAdd | BiSub => 10,
BiShl | BiShr => 9,
BiBitAnd => 8,
BiBitXor => 7,
BiBitOr => 6,
BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3,
BiAnd => 2,
BiOr => 1
}
}
/// Precedence of the `as` operator, which is a binary operator
/// not appearing in the prior table.
pub const AS_PREC: usize = 12;
pub fn empty_generics() -> Generics {
Generics {
lifetimes: Vec::new(),
ty_params: OwnedSlice::empty(),
where_clause: WhereClause {
id: DUMMY_NODE_ID,
predicates: Vec::new(),
}
}
}
// ______________________________________________________________________
// Enumerating the IDs which appear in an AST
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct IdRange {
pub min: NodeId,
pub max: NodeId,
}
impl IdRange {
pub fn max() -> IdRange {
IdRange {
min: u32::MAX,
max: u32::MIN,
}
}
pub fn empty(&self) -> bool {
self.min >= self.max
}
pub fn add(&mut self, id: NodeId) {
self.min = cmp::min(self.min, id);
self.max = cmp::max(self.max, id + 1);
}
}
pub trait IdVisitingOperation {
fn visit_id(&mut self, node_id: NodeId);
}
/// A visitor that applies its operation to all of the node IDs
/// in a visitable thing.
pub struct IdVisitor<'a, O:'a> {
pub operation: &'a mut O,
pub pass_through_items: bool,
pub visited_outermost: bool,
}
impl<'a, O: IdVisitingOperation> IdVisitor<'a, O> {
fn visit_generics_helper(&mut self, generics: &Generics) {
for type_parameter in &*generics.ty_params {
self.operation.visit_id(type_parameter.id)
}
for lifetime in &generics.lifetimes {
self.operation.visit_id(lifetime.lifetime.id)
}
}
}
impl<'a, 'v, O: IdVisitingOperation> Visitor<'v> for IdVisitor<'a, O> {
fn visit_mod(&mut self,
module: &Mod,
_: Span,
node_id: NodeId) {
self.operation.visit_id(node_id);
visit::walk_mod(self, module)
}
fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
self.operation.visit_id(foreign_item.id);
visit::walk_foreign_item(self, foreign_item)
}
fn visit_item(&mut self, item: &Item) {
if !self.pass_through_items {
if self.visited_outermost {
return
} else {
self.visited_outermost = true
}
}
self.operation.visit_id(item.id);
match item.node {
ItemUse(ref view_path) => {
match view_path.node {
ViewPathSimple(_, _) |
ViewPathGlob(_) => {}
ViewPathList(_, ref paths) => {
for path in paths {
self.operation.visit_id(path.node.id())
}
}
}
}
ItemEnum(ref enum_definition, _) => {
for variant in &enum_definition.variants {
self.operation.visit_id(variant.node.id)
}
}
_ => {}
}
visit::walk_item(self, item);
self.visited_outermost = false
}
fn visit_local(&mut self, local: &Local) {
self.operation.visit_id(local.id);
visit::walk_local(self, local)
}
fn visit_block(&mut self, block: &Block) {
self.operation.visit_id(block.id);
visit::walk_block(self, block)
}
fn visit_stmt(&mut self, statement: &Stmt) {
self.operation.visit_id(ast_util::stmt_id(statement));
visit::walk_stmt(self, statement)
}
fn visit_pat(&mut self, pattern: &Pat) {
self.operation.visit_id(pattern.id);
visit::walk_pat(self, pattern)
}
fn visit_expr(&mut self, expression: &Expr) {
self.operation.visit_id(expression.id);
visit::walk_expr(self, expression)
}
fn visit_ty(&mut self, typ: &Ty) {
self.operation.visit_id(typ.id);
visit::walk_ty(self, typ)
}
fn visit_generics(&mut self, generics: &Generics) {
self.visit_generics_helper(generics);
visit::walk_generics(self, generics)
}
fn visit_fn(&mut self,
function_kind: visit::FnKind<'v>,
function_declaration: &'v FnDecl,
block: &'v Block,
span: Span,
node_id: NodeId) {
if !self.pass_through_items {
match function_kind {
visit::FkMethod(..) if self.visited_outermost => return,
visit::FkMethod(..) => self.visited_outermost = true,
_ => {}
}
}
self.operation.visit_id(node_id);
match function_kind {
visit::FkItemFn(_, generics, _, _, _) => {
self.visit_generics_helper(generics)
}
visit::FkMethod(_, sig, _) => {
self.visit_generics_helper(&sig.generics)
}
visit::FkFnBlock => {}
}
for argument in &function_declaration.inputs {
self.operation.visit_id(argument.id)
}
visit::walk_fn(self,
function_kind,
function_declaration,
block,
span);
if !self.pass_through_items {
if let visit::FkMethod(..) = function_kind {
self.visited_outermost = false;
}
}
}
fn visit_struct_field(&mut self, struct_field: &StructField) {
self.operation.visit_id(struct_field.node.id);
visit::walk_struct_field(self, struct_field)
}
fn visit_struct_def(&mut self,
struct_def: &StructDef,
_: ast::Ident,
_: &ast::Generics,
id: NodeId) {
self.operation.visit_id(id);
struct_def.ctor_id.map(|ctor_id| self.operation.visit_id(ctor_id));
visit::walk_struct_def(self, struct_def);
}
fn visit_trait_item(&mut self, ti: &ast::TraitItem) {
self.operation.visit_id(ti.id);
visit::walk_trait_item(self, ti);
}
fn visit_impl_item(&mut self, ii: &ast::ImplItem) {
self.operation.visit_id(ii.id);
visit::walk_impl_item(self, ii);
}
fn visit_lifetime_ref(&mut self, lifetime: &Lifetime) {
self.operation.visit_id(lifetime.id);
}
fn visit_lifetime_def(&mut self, def: &LifetimeDef) {
self.visit_lifetime_ref(&def.lifetime);
}
fn visit_trait_ref(&mut self, trait_ref: &TraitRef) {
self.operation.visit_id(trait_ref.ref_id);
visit::walk_trait_ref(self, trait_ref);
}
}
pub fn visit_ids_for_inlined_item<O: IdVisitingOperation>(item: &InlinedItem,
operation: &mut O) {
let mut id_visitor = IdVisitor {
operation: operation,
pass_through_items: true,
visited_outermost: false,
};
visit::walk_inlined_item(&mut id_visitor, item);
}
struct IdRangeComputingVisitor {
result: IdRange,
}
impl IdVisitingOperation for IdRangeComputingVisitor {
fn visit_id(&mut self, id: NodeId) {
self.result.add(id);
}
}
pub fn compute_id_range_for_inlined_item(item: &InlinedItem) -> IdRange {
let mut visitor = IdRangeComputingVisitor {
result: IdRange::max()
};
visit_ids_for_inlined_item(item, &mut visitor);
visitor.result
}
/// Computes the id range for a single fn body, ignoring nested items.
pub fn compute_id_range_for_fn_body(fk: visit::FnKind,
decl: &FnDecl,
body: &Block,
sp: Span,
id: NodeId)
-> IdRange
{
let mut visitor = IdRangeComputingVisitor {
result: IdRange::max()
};
let mut id_visitor = IdVisitor {
operation: &mut visitor,
pass_through_items: false,
visited_outermost: false,
};
id_visitor.visit_fn(fk, decl, body, sp, id);
id_visitor.operation.result
}
pub fn walk_pat<F>(pat: &Pat, mut it: F) -> bool where F: FnMut(&Pat) -> bool {
// FIXME(#19596) this is a workaround, but there should be a better way
fn walk_pat_<G>(pat: &Pat, it: &mut G) -> bool where G: FnMut(&Pat) -> bool {
if !(*it)(pat) {
return false;
}
match pat.node {
PatIdent(_, _, Some(ref p)) => walk_pat_(&**p, it),
PatStruct(_, ref fields, _) => {
fields.iter().all(|field| walk_pat_(&*field.node.pat, it))
}
PatEnum(_, Some(ref s)) | PatTup(ref s) => {
s.iter().all(|p| walk_pat_(&**p, it))
}
PatBox(ref s) | PatRegion(ref s, _) => {
walk_pat_(&**s, it)
}
PatVec(ref before, ref slice, ref after) => {
before.iter().all(|p| walk_pat_(&**p, it)) &&
slice.iter().all(|p| walk_pat_(&**p, it)) &&
after.iter().all(|p| walk_pat_(&**p, it))
}
PatMac(_) => panic!("attempted to analyze unexpanded pattern"),
PatWild(_) | PatLit(_) | PatRange(_, _) | PatIdent(_, _, _) |
PatEnum(_, _) => {
true
}
}
}
walk_pat_(pat, &mut it)
}
/// Returns true if the given struct def is tuple-like; i.e. that its fields
/// are unnamed.
pub fn struct_def_is_tuple_like(struct_def: &ast::StructDef) -> bool {
struct_def.ctor_id.is_some()
}
/// Returns true if the given pattern consists solely of an identifier
/// and false otherwise.
pub fn pat_is_ident(pat: P<ast::Pat>) -> bool {
match pat.node {
ast::PatIdent(..) => true,
_ => false,
}
}
// are two paths equal when compared unhygienically?
// since I'm using this to replace ==, it seems appropriate
// to compare the span, global, etc. fields as well.
pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
(a.span == b.span)
&& (a.global == b.global)
&& (segments_name_eq(&a.segments[..], &b.segments[..]))
}
// are two arrays of segments equal when compared unhygienically?
pub fn segments_name_eq(a : &[ast::PathSegment], b : &[ast::PathSegment]) -> bool {
a.len() == b.len() &&
a.iter().zip(b.iter()).all(|(s, t)| {
s.identifier.name == t.identifier.name &&
// FIXME #7743: ident -> name problems in lifetime comparison?
// can types contain idents?
s.parameters == t.parameters
})
}
/// Returns true if this literal is a string and false otherwise.
pub fn lit_is_str(lit: &Lit) -> bool {
match lit.node {
LitStr(..) => true,
_ => false,
}
}
#[cfg(test)]
mod test {
use ast::*;
use super::*;
fn ident_to_segment(id : &Ident) -> PathSegment {
PathSegment {identifier: id.clone(),
parameters: PathParameters::none()}
}
#[test] fn idents_name_eq_test() {
assert!(segments_name_eq(
&[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>()));
assert!(!segments_name_eq(
&[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>()));
}
}
| 28.83105 | 92 | 0.546933 |
ac4e8c711e9c17b97216afea16a2d973ae4493fa | 8,475 | use bytes::{Buf, BufMut};
use super::QuicResult;
use codec::{BufLen, Codec, VarLen};
use types::{ConnectionId, GENERATED_CID_LENGTH};
use rand::{thread_rng, Rng};
use std::io::Cursor;
pub struct PartialDecode<'a> {
pub header: Header,
pub header_len: usize,
pub buf: &'a mut [u8],
}
impl<'a> PartialDecode<'a> {
pub fn new(buf: &'a mut [u8]) -> QuicResult<Self> {
let (header, header_len) = {
let mut read = Cursor::new(&buf);
let header = Header::decode(&mut read)?;
(header, read.position() as usize)
};
Ok(Self {
header,
header_len,
buf,
})
}
pub fn dst_cid(&self) -> ConnectionId {
self.header.dst_cid()
}
}
#[derive(Debug, PartialEq)]
pub enum Header {
Long {
ptype: LongType,
version: u32,
dst_cid: ConnectionId,
src_cid: ConnectionId,
len: u64,
number: u32,
},
Short {
key_phase: bool,
ptype: ShortType,
dst_cid: ConnectionId,
number: u32,
},
Negotiation {
dst_cid: ConnectionId,
src_cid: ConnectionId,
supported_versions: Vec<u32>,
},
}
impl Header {
pub fn ptype(&self) -> Option<LongType> {
match *self {
Header::Long { ptype, .. } => Some(ptype),
Header::Short { .. } => None,
Header::Negotiation { .. } => None,
}
}
fn dst_cid(&self) -> ConnectionId {
match *self {
Header::Long { dst_cid, .. } => dst_cid,
Header::Short { dst_cid, .. } => dst_cid,
Header::Negotiation { dst_cid, .. } => dst_cid,
}
}
}
impl BufLen for Header {
fn buf_len(&self) -> usize {
match self {
Header::Long {
dst_cid, src_cid, ..
} => 12 + (dst_cid.len as usize + src_cid.len as usize),
Header::Short { ptype, dst_cid, .. } => 1 + (dst_cid.len as usize) + ptype.buf_len(),
Header::Negotiation {
dst_cid,
src_cid,
supported_versions,
} => 6 + (dst_cid.len as usize + src_cid.len as usize) + 4 * supported_versions.len(),
}
}
}
impl Codec for Header {
fn encode<T: BufMut>(&self, buf: &mut T) {
match *self {
Header::Long {
ptype,
version,
dst_cid,
src_cid,
len,
number,
} => {
buf.put_u8(128 | ptype.to_byte());
buf.put_u32_be(version);
buf.put_u8((dst_cid.cil() << 4) | src_cid.cil());
buf.put_slice(&dst_cid);
buf.put_slice(&src_cid);
                debug_assert!(len < 16384);
buf.put_u16_be((len | 16384) as u16);
buf.put_u32_be(number);
}
Header::Short {
key_phase,
ptype,
dst_cid,
number,
} => {
let key_phase_bit = if key_phase { 0x40 } else { 0 };
buf.put_u8(key_phase_bit | 0x20 | 0x10 | ptype.to_byte());
buf.put_slice(&dst_cid);
match ptype {
ShortType::One => buf.put_u8(number as u8),
ShortType::Two => buf.put_u16_be(number as u16),
ShortType::Four => buf.put_u32_be(number as u32),
};
}
Header::Negotiation {
dst_cid,
src_cid,
ref supported_versions,
} => {
buf.put_u8(thread_rng().gen::<u8>() | 128);
buf.put_u32_be(0);
buf.put_u8((dst_cid.cil() << 4) | src_cid.cil());
buf.put_slice(&dst_cid);
buf.put_slice(&src_cid);
for v in supported_versions {
buf.put_u32_be(*v);
}
}
}
}
fn decode<T: Buf>(buf: &mut T) -> QuicResult<Self> {
let first = buf.get_u8();
if first & 128 == 128 {
let version = buf.get_u32_be();
let cils = buf.get_u8();
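            // Connection ID lengths travel as two nibbles of one byte; a
            // nonzero nibble encodes `actual_len - 3` (the value `cil()`
            // produces in `encode` above), so 0 means "absent" and 1..=15
            // map to lengths 4..=18, hence the `+= 3` adjustments below.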
let (dst_cid, src_cid, used) = {
let (mut dcil, mut scil) = ((cils >> 4) as usize, (cils & 15) as usize);
if dcil > 0 {
dcil += 3;
}
if scil > 0 {
scil += 3;
}
let bytes = buf.bytes();
let dst_cid = ConnectionId::new(&bytes[..dcil]);
let src_cid = ConnectionId::new(&bytes[dcil..dcil + scil]);
(dst_cid, src_cid, dcil + scil)
};
buf.advance(used);
if version == 0 {
let mut supported_versions = vec![];
while buf.has_remaining() {
supported_versions.push(buf.get_u32_be());
}
Ok(Header::Negotiation {
dst_cid,
src_cid,
supported_versions,
})
} else {
Ok(Header::Long {
ptype: LongType::from_byte(first ^ 128),
version,
dst_cid,
src_cid,
len: VarLen::decode(buf)?.0,
number: buf.get_u32_be(),
})
}
} else {
let key_phase = first & 0x40 == 0x40;
let dst_cid = {
let bytes = buf.bytes();
ConnectionId::new(&bytes[..GENERATED_CID_LENGTH as usize])
};
buf.advance(GENERATED_CID_LENGTH as usize);
let ptype = ShortType::from_byte(first & 3);
let number = match ptype {
ShortType::One => u32::from(buf.get_u8()),
ShortType::Two => u32::from(buf.get_u16_be()),
ShortType::Four => buf.get_u32_be(),
};
Ok(Header::Short {
key_phase,
ptype,
dst_cid,
number,
})
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub enum LongType {
Initial = 0x7f,
Retry = 0x7e,
Handshake = 0x7d,
Protected = 0x7c,
}
impl Copy for LongType {}
impl LongType {
pub fn to_byte(&self) -> u8 {
use self::LongType::*;
match self {
Initial => 0x7f,
Retry => 0x7e,
Handshake => 0x7d,
Protected => 0x7c,
}
}
pub fn from_byte(v: u8) -> Self {
use self::LongType::*;
match v {
0x7f => Initial,
0x7e => Retry,
0x7d => Handshake,
0x7c => Protected,
_ => panic!("invalid long packet type {}", v),
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub enum ShortType {
One = 0x0,
Two = 0x1,
Four = 0x2,
}
impl Copy for ShortType {}
impl BufLen for ShortType {
fn buf_len(&self) -> usize {
use self::ShortType::*;
match self {
One => 1,
Two => 2,
Four => 4,
}
}
}
impl ShortType {
pub fn to_byte(&self) -> u8 {
use self::ShortType::*;
match self {
One => 0,
Two => 1,
Four => 2,
}
}
pub fn from_byte(v: u8) -> Self {
use self::ShortType::*;
match v {
0 => One,
1 => Two,
2 => Four,
_ => panic!("invalid short packet type {}", v),
}
}
}
#[cfg(test)]
mod tests {
use hex;
use codec::Codec;
use types::ConnectionId;
use std::io::Cursor;
#[test]
fn test_short_roundtrip() {
let con : [u8; 8] = [
0x38, 0xa7, 0xe6, 0x55,
0xbf, 0x62, 0xf5, 0xf7
];
let header = super::Header::Short {
key_phase: false,
            ptype: super::ShortType::Four,
dst_cid: ConnectionId::new(&con),
number: 3152957029,
};
let mut bytes = Vec::with_capacity(64);
header.encode(&mut bytes);
println!("encoded : {}", hex::encode(&bytes));
let mut read = Cursor::new(bytes);
let decoded = super::Header::decode(&mut read).unwrap();
assert_eq!(decoded, header);
}
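    #[test]
    fn test_long_roundtrip() {
        // A hedged companion to `test_short_roundtrip` covering the long
        // header path. Assumes `VarLen` decodes the QUIC variable-length
        // integer that `encode` writes for `len`, and uses CIDs of a length
        // (4..=18) that survives the nibble (len - 3) encoding.
        let cid: [u8; 8] = [
            0x38, 0xa7, 0xe6, 0x55,
            0xbf, 0x62, 0xf5, 0xf7
        ];
        let header = super::Header::Long {
            ptype: super::LongType::Handshake,
            version: 0xff00_000b,
            dst_cid: ConnectionId::new(&cid),
            src_cid: ConnectionId::new(&cid),
            len: 1024,
            number: 42,
        };
        let mut bytes = Vec::with_capacity(64);
        header.encode(&mut bytes);
        println!("encoded : {}", hex::encode(&bytes));
        let mut read = Cursor::new(bytes);
        let decoded = super::Header::decode(&mut read).unwrap();
        assert_eq!(decoded, header);
    }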
}
| 26.81962 | 98 | 0.446726 |
3ac77e1a71053822baa1a40cba6d95f55c1d83a3 | 1,774 | //! A collection of proc-macros for resec.
//!
//! These proc-macros allow the library to generate robust schema's
//! from simple syntax.
use proc_macro::TokenStream;
use std::{fs, collections::BTreeMap};
use quote::{quote, format_ident};
use syn::{parse_macro_input, LitStr};
use proc_macro2::TokenStream as TokenStream2;
use regex::Regex;
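// Illustrative invocation (hypothetical file name and contents):
//
//     // subjects.json on disk: { "10": "Annual Report" }
//     make_schema!("subjects.json");
//
// would yield a `Subject::AnnualReport` variant carrying `name`/`id` strum
// properties: spaces are stripped first, then any remaining non-alphanumeric
// runs are replaced with `_`.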
#[proc_macro]
pub fn make_schema(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree
let input: LitStr = parse_macro_input!(input);
// Load the schema file.
let contents = fs::read_to_string(input.value()).unwrap();
let map: BTreeMap<u32, String> = serde_json::from_str(&contents).unwrap();
// Compile the regex to reduce function runtime.
let regex = Regex::new("[^0-9a-zA-Z]+").unwrap();
// Form tokenstream from the given map.
let output: Vec<TokenStream2> = map.into_iter().map(|(k, v)| {
// Format the name.
let formatted_name = v.replace(' ', "");
// Replace the characters using regex.
let replaced = regex.replace_all(&formatted_name, "_");
// Generate idents.
let ident_name = format_ident!("{}", replaced.to_string());
// Generate the field.
let token = quote! {
#[strum(props(name = #ident_name, id = #k))]
#ident_name,
};
token
}).collect();
// Form the enum.
let tokens = quote! {
/// Subjects that offer documents on the SEC website.
/// Each subject contains its name and id that can be used to generate a query.
#[derive(EnumProperty, EnumIter, Serialize, Deserialize, Debug, Clone, PartialEq)]
pub enum Subject {
#(#output)*
}
};
TokenStream::from(tokens)
} | 31.678571 | 90 | 0.624577 |
48e42ba74787320b03448fd19be50699eaccbfbd | 1,829 | use std::collections::HashMap;
use std::error::Error;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt::Result as FmtResult;
use fastobo::ast::*;
use fastobo::error::CardinalityError;
use fastobo::semantics::Identified;
use fastobo::semantics::OboFrame;
use fastobo::visit::Visit;
use super::ValidationError;
use super::Validator;
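// Illustrative use (a sketch; the loader call is hypothetical):
//
//     let doc: OboDoc = fastobo::from_file("go.obo")?;
//     for err in DuplicateIdChecker::validate(&doc) {
//         eprintln!("{}: {}", err.location, err.cause);
//     }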
#[derive(Debug)]
pub struct DuplicateIdError {
id: Ident,
count: usize
}
impl Display for DuplicateIdError {
fn fmt(&self, f: &mut Formatter) -> FmtResult {
write!(f, "id `{}` appears more than once ({} times)", self.id, self.count)
}
}
impl Error for DuplicateIdError {
fn description(&self) -> &str {
"id appears more than once"
}
}
#[derive(Default)]
pub struct DuplicateIdChecker<'a> {
counts: HashMap<&'a Ident, usize>,
}
macro_rules! impl_visit {
($name:ident, $frame:ty) => {
fn $name(&mut self, frame: &'a $frame) {
*self.counts.entry(frame.as_id()).or_default() += 1;
}
}
}
impl<'a> Visit<'a> for DuplicateIdChecker<'a> {
impl_visit!(visit_term_frame, TermFrame);
impl_visit!(visit_typedef_frame, TypedefFrame);
impl_visit!(visit_instance_frame, InstanceFrame);
}
impl Validator for DuplicateIdChecker<'_> {
fn validate(doc: &OboDoc) -> Vec<ValidationError> {
let mut checker = Self::default();
checker.visit_doc(&doc);
let mut errors = Vec::new();
for (id, count) in checker.counts {
if count > 1 {
errors.push(ValidationError {
location: String::from("complete document"),
cause: Box::new(DuplicateIdError {
id: id.clone(),
count
}),
})
}
}
errors
}
}
| 24.716216 | 83 | 0.5883 |
76b373830ebd4a26d918a8016505c6d15a9e4f88 | 22,700 | use super::table::{Index, Table};
use super::{huffman, Header};
use bytes::{BufMut, BytesMut};
use http::header::{HeaderName, HeaderValue};
#[derive(Debug)]
pub struct Encoder {
table: Table,
size_update: Option<SizeUpdate>,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum SizeUpdate {
One(usize),
Two(usize, usize), // min, max
}
impl Encoder {
pub fn new(max_size: usize, capacity: usize) -> Encoder {
Encoder {
table: Table::new(max_size, capacity),
size_update: None,
}
}
/// Queues a max size update.
///
/// The next call to `encode` will include a dynamic size update frame.
pub fn update_max_size(&mut self, val: usize) {
match self.size_update {
Some(SizeUpdate::One(old)) => {
if val > old {
if old > self.table.max_size() {
self.size_update = Some(SizeUpdate::One(val));
} else {
self.size_update = Some(SizeUpdate::Two(old, val));
}
} else {
self.size_update = Some(SizeUpdate::One(val));
}
}
Some(SizeUpdate::Two(min, _)) => {
if val < min {
self.size_update = Some(SizeUpdate::One(val));
} else {
self.size_update = Some(SizeUpdate::Two(min, val));
}
}
None => {
if val != self.table.max_size() {
// Don't bother writing a frame if the value already matches
// the table's max size.
self.size_update = Some(SizeUpdate::One(val));
}
}
}
}
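    // A sketch of how successive updates coalesce (mirrors
    // `test_update_max_size_combos` below):
    //
    //     let mut enc = Encoder::default(); // table max size 4096
    //     enc.update_max_size(0);           // size_update == Some(One(0))
    //     enc.update_max_size(100);         // size_update == Some(Two(0, 100))
    //
    // The next `encode` then emits both dynamic size updates, smallest first,
    // so the peer shrinks its table (evicting entries) before growing again.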
    /// Encode a set of headers into the provided buffer
pub fn encode<I>(&mut self, headers: I, dst: &mut BytesMut)
where
I: IntoIterator<Item = Header<Option<HeaderName>>>,
{
let span = tracing::trace_span!("hpack::encode");
let _e = span.enter();
self.encode_size_updates(dst);
let mut last_index = None;
for header in headers {
match header.reify() {
// The header has an associated name. In which case, try to
// index it in the table.
Ok(header) => {
let index = self.table.index(header);
self.encode_header(&index, dst);
last_index = Some(index);
}
// The header does not have an associated name. This means that
// the name is the same as the previously yielded header. In
// which case, we skip table lookup and just use the same index
// as the previous entry.
Err(value) => {
self.encode_header_without_name(
last_index.as_ref().unwrap_or_else(|| {
panic!("encoding header without name, but no previous index to use for name");
}),
&value,
dst,
);
}
}
}
}
fn encode_size_updates(&mut self, dst: &mut BytesMut) {
match self.size_update.take() {
Some(SizeUpdate::One(val)) => {
self.table.resize(val);
encode_size_update(val, dst);
}
Some(SizeUpdate::Two(min, max)) => {
self.table.resize(min);
self.table.resize(max);
encode_size_update(min, dst);
encode_size_update(max, dst);
}
None => {}
}
}
fn encode_header(&mut self, index: &Index, dst: &mut BytesMut) {
match *index {
Index::Indexed(idx, _) => {
encode_int(idx, 7, 0x80, dst);
}
Index::Name(idx, _) => {
let header = self.table.resolve(&index);
encode_not_indexed(idx, header.value_slice(), header.is_sensitive(), dst);
}
Index::Inserted(_) => {
let header = self.table.resolve(&index);
assert!(!header.is_sensitive());
dst.put_u8(0b0100_0000);
encode_str(header.name().as_slice(), dst);
encode_str(header.value_slice(), dst);
}
Index::InsertedValue(idx, _) => {
let header = self.table.resolve(&index);
assert!(!header.is_sensitive());
encode_int(idx, 6, 0b0100_0000, dst);
encode_str(header.value_slice(), dst);
}
Index::NotIndexed(_) => {
let header = self.table.resolve(&index);
encode_not_indexed2(
header.name().as_slice(),
header.value_slice(),
header.is_sensitive(),
dst,
);
}
}
}
fn encode_header_without_name(
&mut self,
last: &Index,
value: &HeaderValue,
dst: &mut BytesMut,
) {
match *last {
Index::Indexed(..)
| Index::Name(..)
| Index::Inserted(..)
| Index::InsertedValue(..) => {
let idx = self.table.resolve_idx(last);
encode_not_indexed(idx, value.as_ref(), value.is_sensitive(), dst);
}
Index::NotIndexed(_) => {
let last = self.table.resolve(last);
encode_not_indexed2(
last.name().as_slice(),
value.as_ref(),
value.is_sensitive(),
dst,
);
}
}
}
}
impl Default for Encoder {
fn default() -> Encoder {
Encoder::new(4096, 0)
}
}
fn encode_size_update(val: usize, dst: &mut BytesMut) {
encode_int(val, 5, 0b0010_0000, dst)
}
fn encode_not_indexed(name: usize, value: &[u8], sensitive: bool, dst: &mut BytesMut) {
if sensitive {
encode_int(name, 4, 0b10000, dst);
} else {
encode_int(name, 4, 0, dst);
}
encode_str(value, dst);
}
fn encode_not_indexed2(name: &[u8], value: &[u8], sensitive: bool, dst: &mut BytesMut) {
if sensitive {
dst.put_u8(0b10000);
} else {
dst.put_u8(0);
}
encode_str(name, dst);
encode_str(value, dst);
}
fn encode_str(val: &[u8], dst: &mut BytesMut) {
if !val.is_empty() {
let idx = position(dst);
// Push a placeholder byte for the length header
dst.put_u8(0);
// Encode with huffman
huffman::encode(val, dst);
let huff_len = position(dst) - (idx + 1);
if encode_int_one_byte(huff_len, 7) {
// Write the string head
dst[idx] = 0x80 | huff_len as u8;
} else {
// Write the head to a placeholder
const PLACEHOLDER_LEN: usize = 8;
let mut buf = [0u8; PLACEHOLDER_LEN];
let head_len = {
let mut head_dst = &mut buf[..];
encode_int(huff_len, 7, 0x80, &mut head_dst);
PLACEHOLDER_LEN - head_dst.remaining_mut()
};
// This is just done to reserve space in the destination
dst.put_slice(&buf[1..head_len]);
// Shift the header forward
for i in 0..huff_len {
let src_i = idx + 1 + (huff_len - (i + 1));
let dst_i = idx + head_len + (huff_len - (i + 1));
dst[dst_i] = dst[src_i];
}
// Copy in the head
for i in 0..head_len {
dst[idx + i] = buf[i];
}
}
} else {
// Write an empty string
dst.put_u8(0);
}
}
/// Encode an integer into the given destination buffer
fn encode_int<B: BufMut>(
mut value: usize, // The integer to encode
prefix_bits: usize, // The number of bits in the prefix
first_byte: u8, // The base upon which to start encoding the int
dst: &mut B,
) {
if encode_int_one_byte(value, prefix_bits) {
dst.put_u8(first_byte | value as u8);
return;
}
let low = (1 << prefix_bits) - 1;
value -= low;
dst.put_u8(first_byte | low as u8);
while value >= 128 {
dst.put_u8(0b1000_0000 | value as u8);
value >>= 7;
}
dst.put_u8(value as u8);
}
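// Worked example (RFC 7541, Section C.1.2): encoding 1337 with a 5-bit prefix.
// 1337 exceeds 2^5 - 1, so the prefix saturates at 31 and the remainder 1306
// is emitted in 7-bit groups, least significant first:
//
//     encode_int(1337, 5, 0, &mut dst);
//     // dst == [0b0001_1111, 0b1001_1010, 0b0000_1010]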
/// Returns true if the int can be fully encoded in the first byte.
fn encode_int_one_byte(value: usize, prefix_bits: usize) -> bool {
value < (1 << prefix_bits) - 1
}
fn position(buf: &BytesMut) -> usize {
buf.len()
}
#[cfg(test)]
mod test {
use super::*;
use crate::hpack::Header;
use http::*;
#[test]
fn test_encode_method_get() {
let mut encoder = Encoder::default();
let res = encode(&mut encoder, vec![method("GET")]);
assert_eq!(*res, [0x80 | 2]);
assert_eq!(encoder.table.len(), 0);
}
#[test]
fn test_encode_method_post() {
let mut encoder = Encoder::default();
let res = encode(&mut encoder, vec![method("POST")]);
assert_eq!(*res, [0x80 | 3]);
assert_eq!(encoder.table.len(), 0);
}
#[test]
fn test_encode_method_patch() {
let mut encoder = Encoder::default();
let res = encode(&mut encoder, vec![method("PATCH")]);
assert_eq!(res[0], 0b01000000 | 2); // Incremental indexing w/ name pulled from table
assert_eq!(res[1], 0x80 | 5); // header value w/ huffman coding
assert_eq!("PATCH", huff_decode(&res[2..7]));
assert_eq!(encoder.table.len(), 1);
let res = encode(&mut encoder, vec![method("PATCH")]);
assert_eq!(1 << 7 | 62, res[0]);
assert_eq!(1, res.len());
}
#[test]
fn test_encode_indexed_name_literal_value() {
let mut encoder = Encoder::default();
let res = encode(&mut encoder, vec![header("content-language", "foo")]);
assert_eq!(res[0], 0b01000000 | 27); // Indexed name
assert_eq!(res[1], 0x80 | 2); // header value w/ huffman coding
assert_eq!("foo", huff_decode(&res[2..4]));
// Same name, new value should still use incremental
let res = encode(&mut encoder, vec![header("content-language", "bar")]);
assert_eq!(res[0], 0b01000000 | 27); // Indexed name
assert_eq!(res[1], 0x80 | 3); // header value w/ huffman coding
assert_eq!("bar", huff_decode(&res[2..5]));
}
#[test]
fn test_repeated_headers_are_indexed() {
let mut encoder = Encoder::default();
let res = encode(&mut encoder, vec![header("foo", "hello")]);
assert_eq!(&[0b01000000, 0x80 | 2], &res[0..2]);
assert_eq!("foo", huff_decode(&res[2..4]));
assert_eq!(0x80 | 4, res[4]);
assert_eq!("hello", huff_decode(&res[5..]));
assert_eq!(9, res.len());
assert_eq!(1, encoder.table.len());
let res = encode(&mut encoder, vec![header("foo", "hello")]);
assert_eq!([0x80 | 62], *res);
assert_eq!(encoder.table.len(), 1);
}
#[test]
fn test_evicting_headers() {
let mut encoder = Encoder::default();
// Fill the table
for i in 0..64 {
let key = format!("x-hello-world-{:02}", i);
let res = encode(&mut encoder, vec![header(&key, &key)]);
assert_eq!(&[0b01000000, 0x80 | 12], &res[0..2]);
assert_eq!(key, huff_decode(&res[2..14]));
assert_eq!(0x80 | 12, res[14]);
assert_eq!(key, huff_decode(&res[15..]));
assert_eq!(27, res.len());
// Make sure the header can be found...
let res = encode(&mut encoder, vec![header(&key, &key)]);
// Only check that it is found
assert_eq!(0x80, res[0] & 0x80);
}
assert_eq!(4096, encoder.table.size());
assert_eq!(64, encoder.table.len());
// Find existing headers
for i in 0..64 {
let key = format!("x-hello-world-{:02}", i);
let res = encode(&mut encoder, vec![header(&key, &key)]);
assert_eq!(0x80, res[0] & 0x80);
}
// Insert a new header
let key = "x-hello-world-64";
let res = encode(&mut encoder, vec![header(key, key)]);
assert_eq!(&[0b01000000, 0x80 | 12], &res[0..2]);
assert_eq!(key, huff_decode(&res[2..14]));
assert_eq!(0x80 | 12, res[14]);
assert_eq!(key, huff_decode(&res[15..]));
assert_eq!(27, res.len());
assert_eq!(64, encoder.table.len());
// Now try encoding entries that should exist in the table
for i in 1..65 {
let key = format!("x-hello-world-{:02}", i);
let res = encode(&mut encoder, vec![header(&key, &key)]);
assert_eq!(0x80 | (61 + (65 - i)), res[0]);
}
}
#[test]
fn test_large_headers_are_not_indexed() {
let mut encoder = Encoder::new(128, 0);
let key = "hello-world-hello-world-HELLO-zzz";
let res = encode(&mut encoder, vec![header(key, key)]);
assert_eq!(&[0, 0x80 | 25], &res[..2]);
assert_eq!(0, encoder.table.len());
assert_eq!(0, encoder.table.size());
}
#[test]
fn test_sensitive_headers_are_never_indexed() {
use http::header::HeaderValue;
let name = "my-password".parse().unwrap();
let mut value = HeaderValue::from_bytes(b"12345").unwrap();
value.set_sensitive(true);
let header = Header::Field {
name: Some(name),
value,
};
// Now, try to encode the sensitive header
let mut encoder = Encoder::default();
let res = encode(&mut encoder, vec![header]);
assert_eq!(&[0b10000, 0x80 | 8], &res[..2]);
assert_eq!("my-password", huff_decode(&res[2..10]));
assert_eq!(0x80 | 4, res[10]);
assert_eq!("12345", huff_decode(&res[11..]));
// Now, try to encode a sensitive header w/ a name in the static table
let name = "authorization".parse().unwrap();
let mut value = HeaderValue::from_bytes(b"12345").unwrap();
value.set_sensitive(true);
let header = Header::Field {
name: Some(name),
value,
};
let mut encoder = Encoder::default();
let res = encode(&mut encoder, vec![header]);
assert_eq!(&[0b11111, 8], &res[..2]);
assert_eq!(0x80 | 4, res[2]);
assert_eq!("12345", huff_decode(&res[3..]));
// Using the name component of a previously indexed header (without
// sensitive flag set)
let _ = encode(
&mut encoder,
vec![self::header("my-password", "not-so-secret")],
);
let name = "my-password".parse().unwrap();
let mut value = HeaderValue::from_bytes(b"12345").unwrap();
value.set_sensitive(true);
let header = Header::Field {
name: Some(name),
value,
};
let res = encode(&mut encoder, vec![header]);
assert_eq!(&[0b11111, 47], &res[..2]);
assert_eq!(0x80 | 4, res[2]);
assert_eq!("12345", huff_decode(&res[3..]));
}
#[test]
fn test_content_length_value_not_indexed() {
let mut encoder = Encoder::default();
let res = encode(&mut encoder, vec![header("content-length", "1234")]);
assert_eq!(&[15, 13, 0x80 | 3], &res[0..3]);
assert_eq!("1234", huff_decode(&res[3..]));
assert_eq!(6, res.len());
}
#[test]
fn test_encoding_headers_with_same_name() {
let mut encoder = Encoder::default();
let name = "hello";
// Encode first one
let _ = encode(&mut encoder, vec![header(name, "one")]);
// Encode second one
let res = encode(&mut encoder, vec![header(name, "two")]);
assert_eq!(&[0x40 | 62, 0x80 | 3], &res[0..2]);
assert_eq!("two", huff_decode(&res[2..]));
assert_eq!(5, res.len());
// Encode the first one again
let res = encode(&mut encoder, vec![header(name, "one")]);
assert_eq!(&[0x80 | 63], &res[..]);
// Now the second one
let res = encode(&mut encoder, vec![header(name, "two")]);
assert_eq!(&[0x80 | 62], &res[..]);
}
#[test]
fn test_evicting_headers_when_multiple_of_same_name_are_in_table() {
// The encoder only has space for 2 headers
let mut encoder = Encoder::new(76, 0);
let _ = encode(&mut encoder, vec![header("foo", "bar")]);
assert_eq!(1, encoder.table.len());
let _ = encode(&mut encoder, vec![header("bar", "foo")]);
assert_eq!(2, encoder.table.len());
// This will evict the first header, while still referencing the header
// name
let res = encode(&mut encoder, vec![header("foo", "baz")]);
assert_eq!(&[0x40 | 63, 0, 0x80 | 3], &res[..3]);
assert_eq!(2, encoder.table.len());
// Try adding the same header again
let res = encode(&mut encoder, vec![header("foo", "baz")]);
assert_eq!(&[0x80 | 62], &res[..]);
assert_eq!(2, encoder.table.len());
}
#[test]
fn test_max_size_zero() {
// Static table only
let mut encoder = Encoder::new(0, 0);
let res = encode(&mut encoder, vec![method("GET")]);
assert_eq!(*res, [0x80 | 2]);
assert_eq!(encoder.table.len(), 0);
let res = encode(&mut encoder, vec![header("foo", "bar")]);
assert_eq!(&[0, 0x80 | 2], &res[..2]);
assert_eq!("foo", huff_decode(&res[2..4]));
assert_eq!(0x80 | 3, res[4]);
assert_eq!("bar", huff_decode(&res[5..8]));
assert_eq!(0, encoder.table.len());
// Encode a custom value
let res = encode(&mut encoder, vec![header("transfer-encoding", "chunked")]);
assert_eq!(&[15, 42, 0x80 | 6], &res[..3]);
assert_eq!("chunked", huff_decode(&res[3..]));
}
#[test]
fn test_update_max_size_combos() {
let mut encoder = Encoder::default();
assert!(encoder.size_update.is_none());
assert_eq!(4096, encoder.table.max_size());
encoder.update_max_size(4096); // Default size
assert!(encoder.size_update.is_none());
encoder.update_max_size(0);
assert_eq!(Some(SizeUpdate::One(0)), encoder.size_update);
encoder.update_max_size(100);
assert_eq!(Some(SizeUpdate::Two(0, 100)), encoder.size_update);
let mut encoder = Encoder::default();
encoder.update_max_size(8000);
assert_eq!(Some(SizeUpdate::One(8000)), encoder.size_update);
encoder.update_max_size(100);
assert_eq!(Some(SizeUpdate::One(100)), encoder.size_update);
encoder.update_max_size(8000);
assert_eq!(Some(SizeUpdate::Two(100, 8000)), encoder.size_update);
encoder.update_max_size(4000);
assert_eq!(Some(SizeUpdate::Two(100, 4000)), encoder.size_update);
encoder.update_max_size(50);
assert_eq!(Some(SizeUpdate::One(50)), encoder.size_update);
}
#[test]
fn test_resizing_table() {
let mut encoder = Encoder::default();
// Add a header
let _ = encode(&mut encoder, vec![header("foo", "bar")]);
encoder.update_max_size(1);
assert_eq!(1, encoder.table.len());
let res = encode(&mut encoder, vec![method("GET")]);
assert_eq!(&[32 | 1, 0x80 | 2], &res[..]);
assert_eq!(0, encoder.table.len());
let res = encode(&mut encoder, vec![header("foo", "bar")]);
assert_eq!(0, res[0]);
encoder.update_max_size(100);
let res = encode(&mut encoder, vec![header("foo", "bar")]);
assert_eq!(&[32 | 31, 69, 64], &res[..3]);
encoder.update_max_size(0);
let res = encode(&mut encoder, vec![header("foo", "bar")]);
assert_eq!(&[32, 0], &res[..2]);
}
#[test]
fn test_decreasing_table_size_without_eviction() {
let mut encoder = Encoder::default();
// Add a header
let _ = encode(&mut encoder, vec![header("foo", "bar")]);
encoder.update_max_size(100);
assert_eq!(1, encoder.table.len());
let res = encode(&mut encoder, vec![header("foo", "bar")]);
assert_eq!(&[32 | 31, 69, 0x80 | 62], &res[..]);
}
#[test]
fn test_nameless_header() {
let mut encoder = Encoder::default();
let res = encode(
&mut encoder,
vec![
Header::Field {
name: Some("hello".parse().unwrap()),
value: HeaderValue::from_bytes(b"world").unwrap(),
},
Header::Field {
name: None,
value: HeaderValue::from_bytes(b"zomg").unwrap(),
},
],
);
assert_eq!(&[0x40, 0x80 | 4], &res[0..2]);
assert_eq!("hello", huff_decode(&res[2..6]));
assert_eq!(0x80 | 4, res[6]);
assert_eq!("world", huff_decode(&res[7..11]));
// Next is not indexed
assert_eq!(&[15, 47, 0x80 | 3], &res[11..14]);
assert_eq!("zomg", huff_decode(&res[14..]));
}
#[test]
fn test_large_size_update() {
let mut encoder = Encoder::default();
encoder.update_max_size(1912930560);
assert_eq!(Some(SizeUpdate::One(1912930560)), encoder.size_update);
let mut dst = BytesMut::with_capacity(6);
encoder.encode_size_updates(&mut dst);
assert_eq!([63, 225, 129, 148, 144, 7], &dst[..]);
}
#[test]
#[ignore]
fn test_evicted_overflow() {
// Not sure what the best way to do this is.
}
fn encode(e: &mut Encoder, hdrs: Vec<Header<Option<HeaderName>>>) -> BytesMut {
let mut dst = BytesMut::with_capacity(1024);
e.encode(&mut hdrs.into_iter(), &mut dst);
dst
}
fn method(s: &str) -> Header<Option<HeaderName>> {
Header::Method(Method::from_bytes(s.as_bytes()).unwrap())
}
fn header(name: &str, val: &str) -> Header<Option<HeaderName>> {
let name = HeaderName::from_bytes(name.as_bytes()).unwrap();
let value = HeaderValue::from_bytes(val.as_bytes()).unwrap();
Header::Field {
name: Some(name),
value,
}
}
fn huff_decode(src: &[u8]) -> BytesMut {
let mut buf = BytesMut::new();
huffman::decode(src, &mut buf).unwrap()
}
}
| 31.440443 | 106 | 0.524229 |
76731f20ac6ea8e48107c1366df586e057a4f645 | 2,205 | // structs3.rs
// Structs contain data, but can also have logic. In this exercise we have
// defined the Package struct and we want to test some logic attached to it.
// Make the code compile and the tests pass!
// If you have issues execute `rustlings hint structs3`
#[derive(Debug)]
struct Package {
sender_country: String,
recipient_country: String,
weight_in_grams: i32,
}
impl Package {
fn new(sender_country: String, recipient_country: String, weight_in_grams: i32) -> Self {
if weight_in_grams <= 0 {
            panic!("Weight must be positive, got {}", weight_in_grams);
} else {
Package {
sender_country,
recipient_country,
weight_in_grams,
}
}
}
fn is_international(&self) -> bool {
self.sender_country != self.recipient_country
}
fn get_fees(&self, cents_per_gram: i32) -> i32 {
self.weight_in_grams * cents_per_gram
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
#[should_panic]
fn fail_creating_weightless_package() {
let sender_country = String::from("Spain");
let recipient_country = String::from("Austria");
Package::new(sender_country, recipient_country, -2210);
}
#[test]
fn create_international_package() {
let sender_country = String::from("Spain");
let recipient_country = String::from("Russia");
let package = Package::new(sender_country, recipient_country, 1200);
assert!(package.is_international());
}
#[test]
fn create_local_package() {
let sender_country = String::from("Canada");
let recipient_country = sender_country.clone();
let package = Package::new(sender_country, recipient_country, 1200);
assert!(!package.is_international());
}
#[test]
fn calculate_transport_fees() {
let sender_country = String::from("Spain");
let recipient_country = String::from("Spain");
let cents_per_gram = 3;
let package = Package::new(sender_country, recipient_country, 1500);
assert_eq!(package.get_fees(cents_per_gram), 4500);
}
}
| 26.566265 | 93 | 0.624943 |
dbb31b3f1757524ac155e30d0ca8f558b38dc3e6 | 418 | // compile-flags: -Zmir-opt-level=1
#![feature(rustc_attrs)]
#![allow(unused_attributes)]
#[rustc_layout_scalar_valid_range_start(1)]
#[repr(transparent)]
pub(crate) struct NonZero<T>(pub(crate) T);
fn main() {
// Make sure that we detect this even when no function call is happening along the way
let _x = Some(unsafe { NonZero(0) }); //~ ERROR encountered 0, but expected something greater or equal to 1
}
| 32.153846 | 111 | 0.717703 |
0164ebdcd5ce2628a8276b3f2c1df2e4d3c3741c | 19,296 | #![cfg(feature = "rustls")]
use std::{
io,
net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, UdpSocket},
str,
sync::Arc,
};
use bytes::Bytes;
use futures::{future, StreamExt};
use rand::{rngs::StdRng, RngCore, SeedableRng};
use tokio::{
runtime::{Builder, Runtime},
time::{Duration, Instant},
};
use tracing::{info, info_span};
use tracing_futures::Instrument as _;
use tracing_subscriber::EnvFilter;
use super::{
ClientConfigBuilder, Endpoint, Incoming, NewConnection, RecvStream, SendStream,
ServerConfigBuilder, TransportConfig,
};
#[test]
fn handshake_timeout() {
let _guard = subscribe();
let runtime = rt_threaded();
let (client, _) = {
let _guard = runtime.enter();
Endpoint::builder()
.bind(&SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0))
.unwrap()
};
let mut client_config = crate::ClientConfig::default();
const IDLE_TIMEOUT: Duration = Duration::from_millis(500);
let mut transport_config = crate::TransportConfig::default();
transport_config
.max_idle_timeout(Some(IDLE_TIMEOUT))
.unwrap()
.initial_rtt(Duration::from_millis(10));
client_config.transport = Arc::new(transport_config);
let start = Instant::now();
runtime.block_on(async move {
match client
.connect_with(
client_config,
&SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 1),
"localhost",
)
.unwrap()
.await
{
Err(crate::ConnectionError::TimedOut) => {}
Err(e) => panic!("unexpected error: {:?}", e),
Ok(_) => panic!("unexpected success"),
}
});
let dt = start.elapsed();
assert!(dt > IDLE_TIMEOUT && dt < 2 * IDLE_TIMEOUT);
}
#[tokio::test]
async fn close_endpoint() {
let _guard = subscribe();
let endpoint = Endpoint::builder();
let (endpoint, incoming) = endpoint
.bind(&SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0))
.unwrap();
tokio::spawn(incoming.for_each(|_| future::ready(())));
let conn = endpoint
.connect(
&SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 1234),
"localhost",
)
.unwrap();
endpoint.close(0u32.into(), &[]);
match conn.await {
Err(crate::ConnectionError::LocallyClosed) => (),
Err(e) => panic!("unexpected error: {}", e),
Ok(_) => {
panic!("unexpected success");
}
}
}
#[test]
fn local_addr() {
let socket = UdpSocket::bind("[::1]:0").unwrap();
let addr = socket.local_addr().unwrap();
let runtime = rt_basic();
let (ep, _) = {
let _guard = runtime.enter();
Endpoint::builder().with_socket(socket).unwrap()
};
assert_eq!(
addr,
ep.local_addr()
.expect("Could not obtain our local endpoint")
);
}
#[test]
fn read_after_close() {
let _guard = subscribe();
let runtime = rt_basic();
let (endpoint, mut incoming) = {
let _guard = runtime.enter();
endpoint()
};
const MSG: &[u8] = b"goodbye!";
runtime.spawn(async move {
let new_conn = incoming
.next()
.await
.expect("endpoint")
.await
.expect("connection");
let mut s = new_conn.connection.open_uni().await.unwrap();
s.write_all(MSG).await.unwrap();
s.finish().await.unwrap();
});
runtime.block_on(async move {
let mut new_conn = endpoint
.connect(&endpoint.local_addr().unwrap(), "localhost")
.unwrap()
.await
.expect("connect");
tokio::time::sleep_until(Instant::now() + Duration::from_millis(100)).await;
let stream = new_conn
.uni_streams
.next()
.await
.expect("incoming streams")
.expect("missing stream");
let msg = stream
.read_to_end(usize::max_value())
.await
.expect("read_to_end");
assert_eq!(msg, MSG);
});
}
#[test]
fn export_keying_material() {
let _guard = subscribe();
let runtime = rt_basic();
let (endpoint, mut incoming) = {
let _guard = runtime.enter();
endpoint()
};
runtime.block_on(async move {
let outgoing_conn = endpoint
.connect(&endpoint.local_addr().unwrap(), "localhost")
.unwrap()
.await
.expect("connect");
let incoming_conn = incoming
.next()
.await
.expect("endpoint")
.await
.expect("connection");
let mut i_buf = [0u8; 64];
incoming_conn
.connection
.export_keying_material(&mut i_buf, b"asdf", b"qwer")
.unwrap();
let mut o_buf = [0u8; 64];
outgoing_conn
.connection
.export_keying_material(&mut o_buf, b"asdf", b"qwer")
.unwrap();
assert_eq!(&i_buf[..], &o_buf[..]);
});
}
#[tokio::test]
async fn accept_after_close() {
let _guard = subscribe();
let (endpoint, mut incoming) = endpoint();
const MSG: &[u8] = b"goodbye!";
let sender = endpoint
.connect(&endpoint.local_addr().unwrap(), "localhost")
.unwrap()
.await
.expect("connect")
.connection;
let mut s = sender.open_uni().await.unwrap();
s.write_all(MSG).await.unwrap();
s.finish().await.unwrap();
sender.close(0u32.into(), b"");
// Allow some time for the close to be sent and processed
tokio::time::sleep(Duration::from_millis(100)).await;
// Despite the connection having closed, we should be able to accept it...
let mut receiver = incoming
.next()
.await
.expect("endpoint")
.await
.expect("connection");
// ...and read what was sent.
let stream = receiver
.uni_streams
.next()
.await
.expect("incoming streams")
.expect("missing stream");
let msg = stream
.read_to_end(usize::max_value())
.await
.expect("read_to_end");
assert_eq!(msg, MSG);
// But it's still definitely closed.
assert!(receiver.connection.open_uni().await.is_err());
}
/// Construct an endpoint suitable for connecting to itself
fn endpoint() -> (Endpoint, Incoming) {
let mut endpoint = Endpoint::builder();
let mut server_config = ServerConfigBuilder::default();
let cert = rcgen::generate_simple_self_signed(vec!["localhost".into()]).unwrap();
let key = crate::PrivateKey::from_der(&cert.serialize_private_key_der()).unwrap();
let cert = crate::Certificate::from_der(&cert.serialize_der().unwrap()).unwrap();
let cert_chain = crate::CertificateChain::from_certs(vec![cert.clone()]);
server_config.certificate(cert_chain, key).unwrap();
endpoint.listen(server_config.build());
let mut client_config = ClientConfigBuilder::default();
client_config.add_certificate_authority(cert).unwrap();
endpoint.default_client_config(client_config.build());
    endpoint
        .bind(&SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0))
        .unwrap()
}
#[tokio::test]
async fn zero_rtt() {
let _guard = subscribe();
let (endpoint, incoming) = endpoint();
const MSG: &[u8] = b"goodbye!";
tokio::spawn(incoming.take(2).for_each(|incoming| async {
let NewConnection {
mut uni_streams,
connection,
..
} = incoming.into_0rtt().unwrap_or_else(|_| unreachable!()).0;
tokio::spawn(async move {
while let Some(Ok(x)) = uni_streams.next().await {
let msg = x.read_to_end(usize::max_value()).await.unwrap();
assert_eq!(msg, MSG);
}
});
let mut s = connection.open_uni().await.expect("open_uni");
s.write_all(MSG).await.expect("write");
s.finish().await.expect("finish");
}));
let NewConnection {
mut uni_streams, ..
} = endpoint
.connect(&endpoint.local_addr().unwrap(), "localhost")
.unwrap()
.into_0rtt()
.err()
.expect("0-RTT succeeded without keys")
.await
.expect("connect");
tokio::spawn(async move {
// Buy time for the driver to process the server's NewSessionTicket
tokio::time::sleep_until(Instant::now() + Duration::from_millis(100)).await;
let stream = uni_streams
.next()
.await
.expect("incoming streams")
.expect("missing stream");
let msg = stream
.read_to_end(usize::max_value())
.await
.expect("read_to_end");
assert_eq!(msg, MSG);
});
endpoint.wait_idle().await;
info!("initial connection complete");
let (
NewConnection {
connection,
mut uni_streams,
..
},
zero_rtt,
) = endpoint
.connect(&endpoint.local_addr().unwrap(), "localhost")
.unwrap()
.into_0rtt()
.unwrap_or_else(|_| panic!("missing 0-RTT keys"));
// Send something ASAP to use 0-RTT
tokio::spawn(async move {
let mut s = connection.open_uni().await.expect("0-RTT open uni");
s.write_all(MSG).await.expect("0-RTT write");
s.finish().await.expect("0-RTT finish");
});
let stream = uni_streams
.next()
.await
.expect("incoming streams")
.expect("missing stream");
let msg = stream
.read_to_end(usize::max_value())
.await
.expect("read_to_end");
assert_eq!(msg, MSG);
    assert!(zero_rtt.await);
drop(uni_streams);
endpoint.wait_idle().await;
}
#[test]
fn echo_v6() {
run_echo(EchoArgs {
client_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::UNSPECIFIED), 0),
server_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
nr_streams: 1,
stream_size: 10 * 1024,
receive_window: None,
stream_receive_window: None,
});
}
#[test]
fn echo_v4() {
run_echo(EchoArgs {
client_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0),
server_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
nr_streams: 1,
stream_size: 10 * 1024,
receive_window: None,
stream_receive_window: None,
});
}
#[test]
#[cfg(any(target_os = "linux", target_os = "macos"))] // Dual-stack sockets aren't the default anywhere else.
fn echo_dualstack() {
run_echo(EchoArgs {
client_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::UNSPECIFIED), 0),
server_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
nr_streams: 1,
stream_size: 10 * 1024,
receive_window: None,
stream_receive_window: None,
});
}
#[test]
fn stress_receive_window() {
run_echo(EchoArgs {
client_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0),
server_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
nr_streams: 50,
stream_size: 25 * 1024 + 11,
receive_window: Some(37),
stream_receive_window: Some(100 * 1024 * 1024),
});
}
#[test]
fn stress_stream_receive_window() {
    // Note that there is no point in running this with too many streams,
// since the window is only active within a stream
run_echo(EchoArgs {
client_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0),
server_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
nr_streams: 2,
stream_size: 250 * 1024 + 11,
receive_window: Some(100 * 1024 * 1024),
stream_receive_window: Some(37),
});
}
#[test]
fn stress_both_windows() {
run_echo(EchoArgs {
client_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0),
server_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
nr_streams: 50,
stream_size: 25 * 1024 + 11,
receive_window: Some(37),
stream_receive_window: Some(37),
});
}
fn run_echo(args: EchoArgs) {
let _guard = subscribe();
let runtime = rt_basic();
let handle = {
// Use small receive windows
let mut transport_config = TransportConfig::default();
if let Some(receive_window) = args.receive_window {
transport_config.receive_window(receive_window).unwrap();
}
if let Some(stream_receive_window) = args.stream_receive_window {
transport_config
.stream_receive_window(stream_receive_window)
.unwrap();
}
transport_config.max_concurrent_bidi_streams(1).unwrap();
transport_config.max_concurrent_uni_streams(1).unwrap();
let transport_config = Arc::new(transport_config);
// We don't use the `endpoint` helper here because we want two different endpoints with
// different addresses.
let mut server_config = ServerConfigBuilder::default();
let cert = rcgen::generate_simple_self_signed(vec!["localhost".into()]).unwrap();
let key = crate::PrivateKey::from_der(&cert.serialize_private_key_der()).unwrap();
let cert = crate::Certificate::from_der(&cert.serialize_der().unwrap()).unwrap();
let cert_chain = crate::CertificateChain::from_certs(vec![cert.clone()]);
server_config.certificate(cert_chain, key).unwrap();
let mut server = Endpoint::builder();
let mut server_config = server_config.build();
server_config.transport = transport_config.clone();
server.listen(server_config);
let server_sock = UdpSocket::bind(args.server_addr).unwrap();
let server_addr = server_sock.local_addr().unwrap();
let (server, mut server_incoming) = {
let _guard = runtime.enter();
server.with_socket(server_sock).unwrap()
};
let mut client_config = ClientConfigBuilder::default();
client_config.add_certificate_authority(cert).unwrap();
client_config.enable_keylog();
let mut client_config = client_config.build();
client_config.transport = transport_config;
let mut client = Endpoint::builder();
client.default_client_config(client_config);
let (client, _) = {
let _guard = runtime.enter();
client.bind(&args.client_addr).unwrap()
};
let handle = runtime.spawn(async move {
let incoming = server_incoming.next().await.unwrap();
// Note for anyone modifying the platform support in this test:
// If `local_ip` gets available on additional platforms - which
// requires modifying this test - please update the list of supported
// platforms in the doc comments of the various `local_ip` functions.
if cfg!(target_os = "linux") {
let local_ip = incoming.local_ip().expect("Local IP must be available");
assert!(local_ip.is_loopback());
} else {
assert_eq!(None, incoming.local_ip());
}
let new_conn = incoming.instrument(info_span!("server")).await.unwrap();
tokio::spawn(
new_conn
.bi_streams
.take_while(|x| future::ready(x.is_ok()))
.for_each(|s| async {
tokio::spawn(echo(s.unwrap()));
}),
);
server.wait_idle().await;
});
info!(
"connecting from {} to {}",
args.client_addr, args.server_addr
);
runtime.block_on(async move {
let new_conn = client
.connect(&server_addr, "localhost")
.unwrap()
.instrument(info_span!("client"))
.await
.expect("connect");
/// This is just an arbitrary number to generate deterministic test data
const SEED: u64 = 0x12345678;
for i in 0..args.nr_streams {
println!("Opening stream {}", i);
let (mut send, recv) = new_conn.connection.open_bi().await.expect("stream open");
let msg = gen_data(args.stream_size, SEED);
let send_task = async {
send.write_all(&msg).await.expect("write");
send.finish().await.expect("finish");
};
let recv_task = async { recv.read_to_end(usize::max_value()).await.expect("read") };
let (_, data) = futures::join!(send_task, recv_task);
assert_eq!(data[..], msg[..], "Data mismatch");
}
new_conn.connection.close(0u32.into(), b"done");
client.wait_idle().await;
});
handle
};
runtime.block_on(handle).unwrap();
}
struct EchoArgs {
client_addr: SocketAddr,
server_addr: SocketAddr,
nr_streams: usize,
stream_size: usize,
receive_window: Option<u64>,
stream_receive_window: Option<u64>,
}
async fn echo((mut send, mut recv): (SendStream, RecvStream)) {
loop {
// These are 32 buffers, for reading approximately 32kB at once
#[rustfmt::skip]
let mut bufs = [
Bytes::new(), Bytes::new(), Bytes::new(), Bytes::new(),
Bytes::new(), Bytes::new(), Bytes::new(), Bytes::new(),
Bytes::new(), Bytes::new(), Bytes::new(), Bytes::new(),
Bytes::new(), Bytes::new(), Bytes::new(), Bytes::new(),
Bytes::new(), Bytes::new(), Bytes::new(), Bytes::new(),
Bytes::new(), Bytes::new(), Bytes::new(), Bytes::new(),
Bytes::new(), Bytes::new(), Bytes::new(), Bytes::new(),
Bytes::new(), Bytes::new(), Bytes::new(), Bytes::new(),
];
match recv.read_chunks(&mut bufs).await.expect("read chunks") {
Some(n) => {
send.write_all_chunks(&mut bufs[..n])
.await
.expect("write chunks");
}
None => break,
}
}
let _ = send.finish().await;
}
fn gen_data(size: usize, seed: u64) -> Vec<u8> {
let mut rng: StdRng = SeedableRng::seed_from_u64(seed);
let mut buf = vec![0; size];
rng.fill_bytes(&mut buf);
buf
}
pub fn subscribe() -> tracing::subscriber::DefaultGuard {
let sub = tracing_subscriber::FmtSubscriber::builder()
.with_env_filter(EnvFilter::from_default_env())
.with_writer(|| TestWriter)
.finish();
tracing::subscriber::set_default(sub)
}
struct TestWriter;
impl std::io::Write for TestWriter {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
print!(
"{}",
str::from_utf8(buf).expect("tried to log invalid UTF-8")
);
Ok(buf.len())
}
fn flush(&mut self) -> io::Result<()> {
io::stdout().flush()
}
}
fn rt_basic() -> Runtime {
Builder::new_current_thread().enable_all().build().unwrap()
}
fn rt_threaded() -> Runtime {
Builder::new_multi_thread().enable_all().build().unwrap()
}
| 32 | 109 | 0.574057 |
fb9210311f0d7655de70df7b9dfa7d25a6d5dbc3 | 297 | use std::path::PathBuf;
use std::{env, fs};
fn main() {
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
println!("cargo:rustc-link-search)={}", out_dir.display());
fs::copy("loader.x", out_dir.join("loader.x")).unwrap();
println!("cargo:rerun-if-changed=loader.x");
}
| 27 | 63 | 0.622896 |
fc594f7dbf3753dbe01ff3d7ccad761318405cf0 | 5,764 | use crate::prelude::*;
use num::{abs, clamp};
macro_rules! impl_shift_fill {
($self:ident, $periods:expr, $fill_value:expr) => {{
let periods = clamp($periods, -($self.len() as i64), $self.len() as i64);
let slice_offset = (-periods).max(0) as i64;
let length = $self.len() - abs(periods) as usize;
let mut slice = $self.slice(slice_offset, length);
let fill_length = abs(periods) as usize;
let mut fill = match $fill_value {
Some(val) => Self::full($self.name(), val, fill_length),
None => Self::full_null($self.name(), fill_length),
};
if periods < 0 {
slice.append(&fill);
slice
} else {
fill.append(&slice);
fill
}
}};
}
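// Shift semantics, mirrored by the tests below: with `ca = [1, 2, 3]`,
// `ca.shift_and_fill(1, Some(5))` yields `[5, 1, 2]` while
// `ca.shift_and_fill(-1, Some(5))` yields `[2, 3, 5]`; `periods` is clamped
// to the array length, so shifting further than `len` fills every slot.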
impl<T> ChunkShiftFill<T, Option<T::Native>> for ChunkedArray<T>
where
T: PolarsNumericType,
T::Native: Copy,
{
fn shift_and_fill(&self, periods: i64, fill_value: Option<T::Native>) -> ChunkedArray<T> {
impl_shift_fill!(self, periods, fill_value)
}
}
impl<T> ChunkShift<T> for ChunkedArray<T>
where
T: PolarsNumericType,
T::Native: Copy,
{
fn shift(&self, periods: i64) -> ChunkedArray<T> {
self.shift_and_fill(periods, None)
}
}
impl ChunkShiftFill<BooleanType, Option<bool>> for BooleanChunked {
fn shift_and_fill(&self, periods: i64, fill_value: Option<bool>) -> BooleanChunked {
impl_shift_fill!(self, periods, fill_value)
}
}
impl ChunkShift<BooleanType> for BooleanChunked {
fn shift(&self, periods: i64) -> Self {
self.shift_and_fill(periods, None)
}
}
impl ChunkShiftFill<Utf8Type, Option<&str>> for Utf8Chunked {
fn shift_and_fill(&self, periods: i64, fill_value: Option<&str>) -> Utf8Chunked {
impl_shift_fill!(self, periods, fill_value)
}
}
impl ChunkShift<Utf8Type> for Utf8Chunked {
fn shift(&self, periods: i64) -> Self {
self.shift_and_fill(periods, None)
}
}
impl ChunkShiftFill<ListType, Option<&Series>> for ListChunked {
fn shift_and_fill(&self, periods: i64, fill_value: Option<&Series>) -> ListChunked {
// This has its own implementation because a ListChunked cannot have a full-null without
// knowing the inner type
let periods = clamp(periods, -(self.len() as i64), self.len() as i64);
let slice_offset = (-periods).max(0) as i64;
let length = self.len() - abs(periods) as usize;
let mut slice = self.slice(slice_offset, length);
let fill_length = abs(periods) as usize;
let mut fill = match fill_value {
Some(val) => Self::full(self.name(), val, fill_length),
None => {
ListChunked::full_null_with_dtype(self.name(), fill_length, &self.inner_dtype())
}
};
if periods < 0 {
slice.append(&fill);
slice
} else {
fill.append(&slice);
fill
}
}
}
impl ChunkShift<ListType> for ListChunked {
fn shift(&self, periods: i64) -> Self {
self.shift_and_fill(periods, None)
}
}
impl ChunkShift<CategoricalType> for CategoricalChunked {
fn shift(&self, periods: i64) -> Self {
self.cast::<UInt32Type>()
.unwrap()
.shift(periods)
.cast()
.unwrap()
}
}
#[cfg(feature = "object")]
impl<T> ChunkShiftFill<ObjectType<T>, Option<ObjectType<T>>> for ObjectChunked<T> {
fn shift_and_fill(
&self,
_periods: i64,
_fill_value: Option<ObjectType<T>>,
) -> ChunkedArray<ObjectType<T>> {
todo!()
}
}
#[cfg(feature = "object")]
impl<T> ChunkShift<ObjectType<T>> for ObjectChunked<T> {
fn shift(&self, periods: i64) -> Self {
self.shift_and_fill(periods, None)
}
}
#[cfg(test)]
mod test {
use crate::prelude::*;
#[test]
fn test_shift() {
let ca = Int32Chunked::new_from_slice("", &[1, 2, 3]);
// shift by 0, 1, 2, 3, 4
let shifted = ca.shift_and_fill(0, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(1), Some(2), Some(3)]);
let shifted = ca.shift_and_fill(1, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(5), Some(1), Some(2)]);
let shifted = ca.shift_and_fill(2, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(5), Some(5), Some(1)]);
let shifted = ca.shift_and_fill(3, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(5), Some(5), Some(5)]);
let shifted = ca.shift_and_fill(4, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(5), Some(5), Some(5)]);
// shift by -1, -2, -3, -4
let shifted = ca.shift_and_fill(-1, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(2), Some(3), Some(5)]);
let shifted = ca.shift_and_fill(-2, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(3), Some(5), Some(5)]);
let shifted = ca.shift_and_fill(-3, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(5), Some(5), Some(5)]);
let shifted = ca.shift_and_fill(-4, Some(5));
assert_eq!(Vec::from(&shifted), &[Some(5), Some(5), Some(5)]);
// fill with None
let shifted = ca.shift_and_fill(1, None);
assert_eq!(Vec::from(&shifted), &[None, Some(1), Some(2)]);
let shifted = ca.shift_and_fill(10, None);
assert_eq!(Vec::from(&shifted), &[None, None, None]);
let shifted = ca.shift_and_fill(-2, None);
assert_eq!(Vec::from(&shifted), &[Some(3), None, None]);
// string
let s = Series::new("a", ["a", "b", "c"]);
let shifted = s.shift(-1);
assert_eq!(
Vec::from(shifted.utf8().unwrap()),
&[Some("b"), Some("c"), None]
);
}
}
| 32.564972 | 96 | 0.574601 |
90ff23fe5e4336a66a5ca2fe6de189fe3caf18e9 | 3,695 | use core::ops::Deref;
use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;
use syn::parse::ParseStream;
use syn::parse::Parse;
use syn::*;
#[derive(Debug)]
enum AttributeType {
Table,
Unknown,
}
struct NamedField(Field);
impl Deref for NamedField {
type Target = Field;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<Field> for NamedField {
fn from(field: Field) -> Self {
Self(field)
}
}
impl From<NamedField> for Field {
    fn from(field: NamedField) -> Field {
        field.0
    }
}
impl Parse for NamedField {
fn parse(input: ParseStream) -> Result<Self> {
Field::parse_named(input).map(NamedField::from)
}
}
#[derive(Debug)]
struct DbDataAttributes {
table: Option<String>,
}
impl From<&Ident> for AttributeType {
fn from(ident: &Ident) -> Self {
let str = ident.to_string();
match str.as_str() {
"table" => AttributeType::Table,
_ => AttributeType::Unknown,
}
}
}
impl DbDataAttributes {
fn default() -> Self {
Self { table: None }
}
}
fn lit_string(lit: &Lit) -> String {
if let Lit::Str(lit_str) = lit {
lit_str.value()
} else {
panic!("Invalid attr syntax {:?}", lit);
}
}
fn parse_orma_attrs(attrs: &[NestedMeta]) -> DbDataAttributes {
let mut ctx = DbDataAttributes::default();
for attr in attrs {
let meta = if let NestedMeta::Meta(meta) = attr {
meta
} else {
panic!("{:?} is not a Meta", attr);
};
if let Meta::NameValue(name_value) = meta {
let attr_type = AttributeType::from(name_value.path.get_ident().unwrap());
if let AttributeType::Table = attr_type {
ctx.table = Some(lit_string(&name_value.lit))
}
};
}
ctx
}
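// Illustrative expansion (the attribute macro wiring is hypothetical):
//
//     #[orma(table = "users")]
//     struct User { name: String }
//
// gains `orma_id: Option<Uuid>` and `orma_version: Option<i32>` fields, both
// `#[serde(skip)]`, plus a `DbData` impl whose `table_name()` returns "users".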
pub fn impl_orma(attrs: &[NestedMeta], input: &mut DeriveInput) -> TokenStream {
let dbdata_attrs = parse_orma_attrs(attrs);
let table_name = if let Some(table_name) = dbdata_attrs.table {
table_name
} else {
panic!("No table name provided, {:?}", attrs);
};
let data = if let Data::Struct(ref mut it) = input.data {
it
} else {
return Error::new(input.ident.span(), "Only structs are supported").to_compile_error();
};
let fields = if let Fields::Named(ref mut it) = data.fields {
it
} else {
return Error::new(input.ident.span(), "Tuple structs are not supported")
.to_compile_error();
};
let skip: Attribute = parse_quote! {#[serde(skip)]};
fields.named.push(Field {
attrs: vec![skip],
vis: parse_quote! {pub},
ident: Some(Ident::new("orma_id", Span::call_site())),
colon_token: None,
ty: parse_quote! {Option<::uuid::Uuid>},
});
let skip: Attribute = parse_quote! {#[serde(skip)]};
fields.named.push(Field {
attrs: vec![skip],
vis: parse_quote! {pub},
ident: Some(Ident::new("orma_version", Span::call_site())),
colon_token: None,
ty: parse_quote! {Option<i32>},
});
let ident = &input.ident;
quote! {
#input
impl ::orma::DbData for #ident {
fn table_name() -> &'static str {
#table_name
}
fn id(&self) -> Option<::uuid::Uuid> {self.orma_id}
fn version(&self) -> Option<i32> {self.orma_version}
fn set_id(&mut self, uuid: ::uuid::Uuid) {
self.orma_id = Some(uuid);
}
fn set_version(&mut self, version: i32) {
self.orma_version = Some(version);
}
}
}
}
| 25.308219 | 95 | 0.555074 |
e535d453438f5a7f4005620a6034bb0c30b1bfa9 | 5,050 | use super::Error;
impl crate::ScalarKind {
pub(super) fn to_hlsl_cast(self) -> &'static str {
match self {
Self::Float => "asfloat",
Self::Sint => "asint",
Self::Uint => "asuint",
Self::Bool => unreachable!(),
}
}
/// Helper function that returns scalar related strings
///
/// <https://docs.microsoft.com/en-us/windows/win32/direct3dhlsl/dx-graphics-hlsl-scalar>
pub(super) fn to_hlsl_str(self, width: crate::Bytes) -> Result<&'static str, Error> {
match self {
Self::Sint => Ok("int"),
Self::Uint => Ok("uint"),
Self::Float => match width {
2 => Ok("half"),
4 => Ok("float"),
8 => Ok("double"),
_ => Err(Error::UnsupportedScalar(self, width)),
},
Self::Bool => Ok("bool"),
}
}
}
impl crate::TypeInner {
pub(super) fn is_matrix(&self) -> bool {
match *self {
Self::Matrix { .. } => true,
_ => false,
}
}
}
impl crate::StorageFormat {
pub(super) fn to_hlsl_str(self) -> &'static str {
match self {
Self::R16Float => "float",
Self::R8Unorm => "unorm float",
Self::R8Snorm => "snorm float",
Self::R8Uint | Self::R16Uint => "uint",
Self::R8Sint | Self::R16Sint => "int",
Self::Rg16Float => "float2",
Self::Rg8Unorm => "unorm float2",
Self::Rg8Snorm => "snorm float2",
Self::Rg8Sint | Self::Rg16Sint => "int2",
Self::Rg8Uint | Self::Rg16Uint => "uint2",
Self::Rg11b10Float => "float3",
Self::Rgba16Float | Self::R32Float | Self::Rg32Float | Self::Rgba32Float => "float4",
Self::Rgba8Unorm | Self::Rgb10a2Unorm => "unorm float4",
Self::Rgba8Snorm => "snorm float4",
Self::Rgba8Uint
| Self::Rgba16Uint
| Self::R32Uint
| Self::Rg32Uint
| Self::Rgba32Uint => "uint4",
Self::Rgba8Sint
| Self::Rgba16Sint
| Self::R32Sint
| Self::Rg32Sint
| Self::Rgba32Sint => "int4",
}
}
}
impl crate::BuiltIn {
pub(super) fn to_hlsl_str(self) -> Result<&'static str, Error> {
Ok(match self {
Self::Position => "SV_Position",
// vertex
Self::ClipDistance => "SV_ClipDistance",
Self::CullDistance => "SV_CullDistance",
Self::InstanceIndex => "SV_InstanceID",
// based on this page https://docs.microsoft.com/en-us/windows/uwp/gaming/glsl-to-hlsl-reference#comparing-opengl-es-20-with-direct3d-11
// No meaning unless you target Direct3D 9
Self::PointSize => "PSIZE",
Self::VertexIndex => "SV_VertexID",
// fragment
Self::FragDepth => "SV_Depth",
Self::FrontFacing => "SV_IsFrontFace",
Self::PrimitiveIndex => "SV_PrimitiveID",
Self::SampleIndex => "SV_SampleIndex",
Self::SampleMask => "SV_Coverage",
// compute
Self::GlobalInvocationId => "SV_DispatchThreadID",
Self::LocalInvocationId => "SV_GroupThreadID",
Self::LocalInvocationIndex => "SV_GroupIndex",
Self::WorkGroupId => "SV_GroupID",
// The specific semantic we use here doesn't matter, because references
// to this field will get replaced with references to `SPECIAL_CBUF_VAR`
// in `Writer::write_expr`.
Self::NumWorkGroups => "SV_GroupID",
Self::BaseInstance | Self::BaseVertex | Self::WorkGroupSize => {
return Err(Error::Unimplemented(format!("builtin {:?}", self)))
}
})
}
}
impl crate::Interpolation {
/// Helper function that returns the string corresponding to the HLSL interpolation qualifier
pub(super) fn to_hlsl_str(self) -> &'static str {
match self {
Self::Perspective => "linear",
Self::Linear => "noperspective",
Self::Flat => "nointerpolation",
}
}
}
impl crate::Sampling {
/// Return the HLSL auxiliary qualifier for the given sampling value.
pub(super) fn to_hlsl_str(self) -> Option<&'static str> {
match self {
Self::Center => None,
Self::Centroid => Some("centroid"),
Self::Sample => Some("sample"),
}
}
}
impl crate::AtomicFunction {
/// Return the HLSL suffix for the `InterlockedXxx` method.
pub(super) fn to_hlsl_suffix(self) -> &'static str {
match self {
Self::Add => "Add",
Self::And => "And",
Self::InclusiveOr => "Or",
Self::ExclusiveOr => "Xor",
Self::Min => "Min",
Self::Max => "Max",
Self::Exchange { compare: None } => "Exchange",
Self::Exchange { .. } => "", //TODO
}
}
}
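// Hedged sanity checks for the interpolation/sampling/atomic helpers above.
// Note the deliberate naming swap asserted here: IR `Perspective` is HLSL's
// default `linear` (perspective-correct), while IR `Linear` is HLSL
// `noperspective`.
#[cfg(test)]
mod qualifier_tests {
    #[test]
    fn interpolation_and_sampling() {
        assert_eq!(crate::Interpolation::Perspective.to_hlsl_str(), "linear");
        assert_eq!(crate::Interpolation::Flat.to_hlsl_str(), "nointerpolation");
        // Center sampling is the HLSL default and needs no qualifier.
        assert_eq!(crate::Sampling::Center.to_hlsl_str(), None);
        assert_eq!(crate::Sampling::Sample.to_hlsl_str(), Some("sample"));
    }

    #[test]
    fn atomic_suffix() {
        // Produces e.g. `InterlockedAdd` once prefixed by the writer.
        assert_eq!(crate::AtomicFunction::Add.to_hlsl_suffix(), "Add");
    }
}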
| 34.353741 | 148 | 0.522376 |
efc20944ded82309653ed842a62caa21350e29d1 | 6,531 | use std::any::{Any, TypeId};
use std::cell::{Ref, RefCell};
use std::fmt::{Debug, Error, Formatter};
use std::ops::Deref;
use crate::atn::INVALID_ALT;
use crate::int_stream::EOF;
use crate::interval_set::Interval;
use crate::parser::Parser;
use crate::parser_rule_context::{BaseParserRuleContext, cast, ParserRuleContext, ParserRuleContextType};
use crate::rule_context::CustomRuleContext;
use crate::token::{OwningToken, Token};
use crate::trees;
//todo try to make it more generic
pub trait Tree: NodeText {
fn get_parent(&self) -> Option<ParserRuleContextType>;
fn has_parent(&self) -> bool;
fn get_payload(&self) -> Box<dyn Any>;
fn get_child(&self, i: usize) -> Option<ParserRuleContextType>;
fn get_child_count(&self) -> usize;
fn get_children(&self) -> Ref<Vec<ParserRuleContextType>>;
fn get_children_full(&self) -> &RefCell<Vec<ParserRuleContextType>>;
}
pub trait ParseTree: Tree {
/// Returns interval in input string which corresponds to this subtree
fn get_source_interval(&self) -> Interval;
/// Return combined text of this AST node.
    /// To create the resulting string this method traverses the whole subtree,
    /// but it includes only tokens that were added to the parse tree.
///
/// Since tokens on hidden channels (e.g. whitespace or comments) are not
/// added to the parse trees, they will not appear in the output of this
/// method.
fn get_text(&self) -> String;
/// Print out a whole tree, not just a node, in LISP format
/// (root child1 .. childN). Print just a node if this is a leaf.
/// We have to know the recognizer so we can get rule names.
fn to_string_tree(&self, r: &dyn Parser) -> String {
trees::string_tree(self, r.get_rule_names())
}
}
pub trait NodeText {
fn get_node_text(&self, rule_names: &[&str]) -> String;
}
impl<T: Tree> NodeText for T {
default fn get_node_text(&self, _rule_names: &[&str]) -> String {
"<unknown>".to_owned()
}
}
impl<T: ParserRuleContext> NodeText for T {
default fn get_node_text(&self, rule_names: &[&str]) -> String {
let rule_index = self.get_rule_index();
let rule_name = rule_names[rule_index];
let alt_number = self.get_alt_number();
if alt_number != INVALID_ALT {
return format!("{}:{}", rule_name, alt_number);
}
return rule_name.to_owned();
}
}
//todo unify code for terminal and error nodes
/// AST leaf
pub type TerminalNode = BaseParserRuleContext<TerminalNodeCtx>;
pub struct TerminalNodeCtx {
pub symbol: OwningToken
}
impl CustomRuleContext for TerminalNodeCtx {
fn get_rule_index(&self) -> usize {
unimplemented!()
}
}
impl NodeText for TerminalNode {
fn get_node_text(&self, _rule_names: &[&str]) -> String {
self.symbol.get_text().to_owned()
}
}
impl ParseTree for TerminalNode {
fn get_text(&self) -> String {
self.symbol.text.to_owned()
}
}
/// # Error Leaf
/// Created for each token created or consumed during recovery
pub type ErrorNode = BaseParserRuleContext<ErrorNodeCtx>;
//not type alias because we would like to use it in downcasting
pub struct ErrorNodeCtx(pub TerminalNodeCtx);
impl CustomRuleContext for ErrorNodeCtx {
fn get_rule_index(&self) -> usize {
unimplemented!()
}
}
impl Deref for ErrorNodeCtx {
type Target = TerminalNodeCtx;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl NodeText for ErrorNode {
fn get_node_text(&self, _rule_names: &[&str]) -> String {
self.symbol.get_text().to_owned()
}
}
impl ParseTree for ErrorNode {
fn get_text(&self) -> String {
self.symbol.text.to_owned()
}
}
impl Debug for BaseParserRuleContext<TerminalNodeCtx> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        f.write_str(if self.symbol.get_token_type() == EOF {
            "<EOF>"
        } else {
            self.symbol.get_text()
        })
}
}
impl Debug for BaseParserRuleContext<ErrorNodeCtx> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        f.write_str(if self.symbol.get_token_type() == EOF {
            "<EOF>"
        } else {
            self.symbol.get_text()
        })
}
}
//pub trait TerminalNode: ParseTree {
// fn get_symbol(&self) -> &dyn Token;
//}
//pub trait ErrorNode: TerminalNode {
// fn error_node(&self);
//}
//pub trait ParseTreeVisitor {
// fn visit(&self, tree: &ParseTree) -> interface;
// fn visit_children(&self, node: &RuleNode) -> interface;
// fn visit_terminal(&self, node: &TerminalNode) -> interface;
// fn visit_error_node(&self, node: &ErrorNode) -> interface;
//}
//
//pub struct BaseParseTreeVisitor { }
//
//impl BaseParseTreeVisitor {
// fn visit(&self, tree: ParseTree) -> interface { unimplemented!() }
// fn visit_children(&self, node: RuleNode) -> interface { unimplemented!() }
// fn visit_terminal(&self, node: TerminalNode) -> interface { unimplemented!() }
// fn visit_error_node(&self, node: ErrorNode) -> interface { unimplemented!() }
//}
pub trait ParseTreeListener: 'static {
fn visit_terminal(&mut self, _node: &TerminalNode) {}
fn visit_error_node(&mut self, _node: &ErrorNode) {}
fn enter_every_rule(&mut self, _ctx: &dyn ParserRuleContext) {}
fn exit_every_rule(&mut self, _ctx: &dyn ParserRuleContext) {}
}
//impl<T:ParseTreeListener> AsRef<dyn ParseTreeListener> for T{
// fn as_ref(&self) -> &dyn ParseTreeListener {
// self
// }
//}
/// Helper struct to run a parse listener over an already generated tree
pub struct ParseTreeWalker;
impl ParseTreeWalker {
// fn new() -> ParseTreeWalker { ParseTreeWalker }
pub fn walk<T: ParseTreeListener + ?Sized, Ctx: ParserRuleContext + ?Sized>(&self, listener: &mut Box<T>, t: &Ctx) {
if t.type_id() == TypeId::of::<ErrorNode>() {
let err = cast::<_, ErrorNode>(t);
listener.visit_error_node(err);
return
}
if t.type_id() == TypeId::of::<TerminalNode>() {
let leaf = cast::<_, TerminalNode>(t);
listener.visit_terminal(leaf);
return
}
listener.enter_every_rule(t.upcast());
t.enter_rule(listener as &mut dyn Any);
for child in t.get_children().iter() {
self.walk(listener, child.deref())
}
t.exit_rule(listener as &mut dyn Any);
listener.exit_every_rule(t.upcast());
}
}
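// Hedged usage sketch for the walker above (kept as a comment because a real
// tree requires an ANTLR-generated parser; `PrintListener` and `tree` are
// hypothetical):
//
//     struct PrintListener;
//     impl ParseTreeListener for PrintListener {
//         fn visit_terminal(&mut self, node: &TerminalNode) {
//             println!("terminal: {}", node.symbol.get_text());
//         }
//     }
//
//     let mut listener: Box<PrintListener> = Box::new(PrintListener);
//     ParseTreeWalker.walk(&mut listener, tree.deref());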
| 29.418919 | 120 | 0.640178 |
62a0ab80eb9b95a81899f02fa36e468e54a8f7ac | 412 | #![no_std]
extern crate scene_graph;
extern crate scene_renderer;
extern crate gl;
extern crate gl_renderer_plugin;
extern crate geometry;
extern crate camera_components;
extern crate sprite_component;
extern crate transform_components;
#[macro_use]
extern crate vector;
extern crate mat3;
extern crate mat4;
extern crate shared;
mod sprite_gl_renderer;
pub use sprite_gl_renderer::SpriteGLRenderer;
| 14.206897 | 45 | 0.81068 |
509d50865357df40648176384cf17df95594f786 | 58,575 | use crate::debugger::ConsoleLog;
use crate::debugger::CoreData;
use crate::DebuggerError;
use crate::{dap_types, rtt::DataFormat};
use anyhow::{anyhow, Result};
use dap_types::*;
use parse_int::parse;
use probe_rs::{
debug::{ColumnType, VariableKind},
CoreStatus, HaltReason, MemoryInterface,
};
use serde::{de::DeserializeOwned, Serialize};
use std::{collections::HashMap, string::ToString};
use std::{
convert::TryInto,
path::{Path, PathBuf},
str, thread,
time::Duration,
};
use crate::protocol::ProtocolAdapter;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DebugAdapterType {
CommandLine,
DapClient,
}
/// Progress ID used for progress reporting when the debug adapter protocol is used.
type ProgressId = i64;
pub struct DebugAdapter<P: ProtocolAdapter> {
/// Track the last_known_status of the probe.
/// The debug client needs to be notified when the probe changes state,
/// and the only way is to poll the probe status periodically.
/// For instance, when the client sets the probe running,
/// and the probe halts because of a breakpoint, we need to notify the client.
pub(crate) last_known_status: CoreStatus,
pub(crate) halt_after_reset: bool,
    /// `scope_map` stores a list of all MS DAP Scopes with each stack frame's unique id as key.
/// It is cleared by `threads()`, populated by stack_trace(), for later re-use by `scopes()`.
scope_map: HashMap<i64, Vec<Scope>>,
/// `variable_map` stores a list of all MS DAP Variables with a unique per-level reference.
/// It is cleared by `threads()`, populated by stack_trace(), for later nested re-use by `variables()`.
variable_map_key_seq: i64, // Used to create unique values for `self.variable_map` keys.
variable_map: HashMap<i64, Vec<Variable>>,
progress_id: ProgressId,
/// Flag to indicate if the connected client supports progress reporting.
pub(crate) supports_progress_reporting: bool,
adapter: P,
}
impl<P: ProtocolAdapter> DebugAdapter<P> {
pub fn new(adapter: P) -> DebugAdapter<P> {
DebugAdapter {
last_known_status: CoreStatus::Unknown,
halt_after_reset: false,
scope_map: HashMap::new(),
variable_map: HashMap::new(),
variable_map_key_seq: -1,
progress_id: 0,
supports_progress_reporting: false,
adapter,
}
}
pub(crate) fn adapter_type(&self) -> DebugAdapterType {
P::ADAPTER_TYPE
}
pub(crate) fn status(&mut self, core_data: &mut CoreData, request: Request) -> Result<()> {
let status = match core_data.target_core.status() {
Ok(status) => {
self.last_known_status = status;
status
}
Err(error) => {
return self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!(
"Could not read core status. {:?}",
error
))),
)
}
};
if status.is_halted() {
let pc = core_data
.target_core
.read_core_reg(core_data.target_core.registers().program_counter());
match pc {
Ok(pc) => self.send_response(
request,
Ok(Some(format!(
"Status: {:?} at address {:#010x}",
status.short_long_status().1,
pc
))),
),
Err(error) => self
.send_response::<()>(request, Err(DebuggerError::Other(anyhow!("{}", error)))),
}
} else {
self.send_response(request, Ok(Some(status.short_long_status().1.to_string())))
}
}
pub(crate) fn pause(&mut self, core_data: &mut CoreData, request: Request) -> Result<()> {
// let args: PauseArguments = get_arguments(&request)?;
match core_data.target_core.halt(Duration::from_millis(500)) {
Ok(cpu_info) => {
let event_body = Some(StoppedEventBody {
reason: "pause".to_owned(),
description: Some(self.last_known_status.short_long_status().1.to_owned()),
thread_id: Some(core_data.target_core.id() as i64),
preserve_focus_hint: Some(false),
text: None,
all_threads_stopped: Some(true),
hit_breakpoint_ids: None,
});
self.send_event("stopped", event_body)?;
self.send_response(
request,
Ok(Some(format!(
"Core stopped at address 0x{:08x}",
cpu_info.pc
))),
)?;
self.last_known_status = CoreStatus::Halted(HaltReason::Request);
Ok(())
}
Err(error) => {
self.send_response::<()>(request, Err(DebuggerError::Other(anyhow!("{}", error))))
}
}
// TODO: This is from original probe_rs_cli 'halt' function ... disasm code at memory location
/*
let mut code = [0u8; 16 * 2];
core_data.target_core.read(cpu_info.pc, &mut code)?;
let instructions = core_data
.capstone
.disasm_all(&code, u64::from(cpu_info.pc))
.unwrap();
for i in instructions.iter() {
println!("{}", i);
}
for (offset, instruction) in code.iter().enumerate() {
println!(
"{:#010x}: {:010x}",
cpu_info.pc + offset as u32,
instruction
);
}
*/
}
pub(crate) fn read_memory(&mut self, core_data: &mut CoreData, request: Request) -> Result<()> {
let arguments: ReadMemoryArguments = match self.adapter_type() {
DebugAdapterType::CommandLine => match request.arguments.as_ref().unwrap().try_into() {
Ok(arguments) => arguments,
Err(error) => return self.send_response::<()>(request, Err(error)),
},
DebugAdapterType::DapClient => match get_arguments(&request) {
Ok(arguments) => arguments,
Err(error) => return self.send_response::<()>(request, Err(error)),
},
};
let address: u32 = parse(arguments.memory_reference.as_ref()).unwrap();
let num_words = arguments.count as usize;
let mut buff = vec![0u32; num_words];
if num_words > 1 {
core_data.target_core.read_32(address, &mut buff).unwrap();
} else {
buff[0] = core_data.target_core.read_word_32(address).unwrap();
}
if !buff.is_empty() {
let mut response = "".to_string();
for (offset, word) in buff.iter().enumerate() {
response.push_str(
format!("0x{:08x} = 0x{:08x}\n", address + (offset * 4) as u32, word).as_str(),
);
}
self.send_response::<String>(request, Ok(Some(response)))
} else {
self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!(
"Could not read any data at address 0x{:08x}",
address
))),
)
}
}
pub(crate) fn write(&mut self, core_data: &mut CoreData, request: Request) -> Result<()> {
let address = match get_int_argument(request.arguments.as_ref(), "address", 0) {
Ok(address) => address,
Err(error) => return self.send_response::<()>(request, Err(error)),
};
let data = match get_int_argument(request.arguments.as_ref(), "data", 1) {
Ok(data) => data,
Err(error) => return self.send_response::<()>(request, Err(error)),
};
match core_data
.target_core
.write_word_32(address, data)
.map_err(DebuggerError::ProbeRs)
{
Ok(_) => Ok(()),
Err(error) => self.send_response::<()>(request, Err(error)),
}
}
pub(crate) fn set_breakpoint(
&mut self,
core_data: &mut CoreData,
request: Request,
) -> Result<()> {
let address = match get_int_argument(request.arguments.as_ref(), "address", 0) {
Ok(address) => address,
Err(error) => return self.send_response::<()>(request, Err(error)),
};
match core_data
.target_core
.set_hw_breakpoint(address)
.map_err(DebuggerError::ProbeRs)
{
Ok(_) => {
return self.send_response(
request,
Ok(Some(format!(
"Set new breakpoint at address {:#08x}",
address
))),
);
}
Err(error) => self.send_response::<()>(request, Err(error)),
}
}
pub(crate) fn clear_breakpoint(
&mut self,
core_data: &mut CoreData,
request: Request,
) -> Result<()> {
let address = match get_int_argument(request.arguments.as_ref(), "address", 0) {
Ok(address) => address,
Err(error) => return self.send_response::<()>(request, Err(error)),
};
match core_data
.target_core
.clear_hw_breakpoint(address)
.map_err(DebuggerError::ProbeRs)
{
Ok(_) => Ok(()),
Err(error) => self.send_response::<()>(request, Err(error)),
}
}
pub(crate) fn show_cpu_register_values(
&mut self,
_core_data: &mut CoreData,
_request: &Request,
) -> Result<()> {
todo!();
// let register_file = core_data.target_core.registers();
// for register in register_file.registers() {
// let value = match core_data.target_core.read_core_reg(register) {
// Ok(value) => {
// println!("{}: {:#010x}", register.name(), value);
// }
// Err(error) => return Err(DebuggerError::Other(anyhow!("{}", error))),
// };
// }
// true
}
pub(crate) fn dump_cpu_state(
&mut self,
_core_data: &mut CoreData,
        _request: &Request,
) -> Result<()> {
todo!();
// dump all relevant data, stack and regs for now..
//
// stack beginning -> assume beginning to be hardcoded
// let stack_top: u32 = 0x2000_0000 + 0x4000;
// let regs = core_data.target_core.registers();
// let stack_bot: u32 = core_data.target_core.read_core_reg(regs.stack_pointer())?;
// let pc: u32 = core_data
// .target_core
// .read_core_reg(regs.program_counter())?;
// let mut stack = vec![0u8; (stack_top - stack_bot) as usize];
// core_data.target_core.read(stack_bot, &mut stack[..])?;
// let mut dump = Dump::new(stack_bot, stack);
// for i in 0..12 {
// dump.regs[i as usize] = core_data
// .target_core
// .read_core_reg(Into::<CoreRegisterAddress>::into(i))?;
// }
// dump.regs[13] = stack_bot;
// dump.regs[14] = core_data.target_core.read_core_reg(regs.return_address())?;
// dump.regs[15] = pc;
// let serialized = ron::ser::to_string(&dump).expect("Failed to serialize dump");
// let mut dump_file = File::create("dump.txt").expect("Failed to create file");
// dump_file
// .write_all(serialized.as_bytes())
// .expect("Failed to write dump file");
// true
}
pub(crate) fn restart(
&mut self,
core_data: &mut CoreData,
request: Option<Request>,
) -> Result<()> {
match core_data.target_core.halt(Duration::from_millis(500)) {
Ok(_) => {}
Err(error) => {
if let Some(request) = request {
return self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!("{}", error))),
);
} else {
return self.send_error_response(&DebuggerError::Other(anyhow!("{}", error)));
}
}
}
if request.is_some() || self.adapter_type() == DebugAdapterType::CommandLine {
match core_data.target_core.reset() {
Ok(_) => {
self.last_known_status = CoreStatus::Running;
let event_body = Some(ContinuedEventBody {
all_threads_continued: Some(true),
thread_id: core_data.target_core.id() as i64,
});
self.send_event("continued", event_body)
}
Err(error) => {
return self.send_response::<()>(
request.unwrap(), // Checked above
Err(DebuggerError::Other(anyhow!("{}", error))),
);
}
}
} else if self.halt_after_reset || self.adapter_type() == DebugAdapterType::DapClient
// The DAP Client will always do a `reset_and_halt`, and then will consider `halt_after_reset` value after the `configuration_done` request.
// Otherwise the probe will run past the `main()` before the DAP Client has had a chance to set breakpoints in `main()`.
{
match core_data
.target_core
.reset_and_halt(Duration::from_millis(500))
{
Ok(_) => {
match self.adapter_type() {
DebugAdapterType::CommandLine => {}
DebugAdapterType::DapClient => {
if let Some(request) = request {
return self.send_response::<()>(request, Ok(None));
}
}
}
// Only notify the DAP client if we are NOT in initialization stage (`CoreStatus::Unknown`).
if self.last_known_status != CoreStatus::Unknown {
let event_body = Some(StoppedEventBody {
reason: "reset".to_owned(),
description: Some(
CoreStatus::Halted(HaltReason::External)
.short_long_status()
.1
.to_string(),
),
thread_id: Some(core_data.target_core.id() as i64),
preserve_focus_hint: None,
text: None,
all_threads_stopped: Some(true),
hit_breakpoint_ids: None,
});
self.send_event("stopped", event_body)?;
self.last_known_status = CoreStatus::Halted(HaltReason::External);
}
Ok(())
}
Err(error) => {
if let Some(request) = request {
return self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!("{}", error))),
);
} else {
return self
.send_error_response(&DebuggerError::Other(anyhow!("{}", error)));
}
}
}
} else {
Ok(())
}
}
pub(crate) fn configuration_done(
&mut self,
core_data: &mut CoreData,
request: Request,
) -> Result<()> {
// Make sure the DAP Client and the DAP Server are in sync with the status of the core.
match core_data.target_core.status() {
Ok(core_status) => {
self.last_known_status = core_status;
if core_status.is_halted() {
if self.halt_after_reset
|| core_status == CoreStatus::Halted(HaltReason::Breakpoint)
{
self.send_response::<()>(request, Ok(None))?;
let event_body = Some(StoppedEventBody {
reason: core_status.short_long_status().0.to_owned(),
description: Some(core_status.short_long_status().1.to_string()),
thread_id: Some(core_data.target_core.id() as i64),
preserve_focus_hint: None,
text: None,
all_threads_stopped: Some(true),
hit_breakpoint_ids: None,
});
self.send_event("stopped", event_body)
} else {
self.r#continue(core_data, request)
}
} else {
self.send_response::<()>(request, Ok(None))
}
}
Err(error) => {
self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!(
"Could not read core status to synchronize the client and the probe. {:?}",
error
))),
)?;
Err(anyhow!("Failed to get core status."))
}
}
}
pub(crate) fn threads(&mut self, core_data: &mut CoreData, request: Request) -> Result<()> {
// TODO: Implement actual thread resolution. For now, we just use the core id as the thread id.
let single_thread = Thread {
id: core_data.target_core.id() as i64,
name: core_data.target_name.clone(),
};
let threads = vec![single_thread];
self.scope_map.clear();
self.variable_map.clear();
self.variable_map_key_seq = -1;
self.send_response(request, Ok(Some(ThreadsResponseBody { threads })))
}
pub(crate) fn set_breakpoints(
&mut self,
core_data: &mut CoreData,
request: Request,
) -> Result<()> {
let args: SetBreakpointsArguments = match get_arguments(&request) {
Ok(arguments) => arguments,
Err(error) => {
return self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!(
"Could not read arguments : {}",
error
))),
)
}
};
let mut created_breakpoints: Vec<Breakpoint> = Vec::new(); // For returning in the Response
let source_path = args.source.path.as_ref().map(Path::new);
// Always clear existing breakpoints before setting new ones. The DAP Specification doesn't make allowances for deleting and setting individual breakpoints.
match core_data.target_core.clear_all_hw_breakpoints() {
Ok(_) => {}
Err(error) => {
return self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!(
"Failed to clear existing breakpoints before setting new ones : {}",
error
))),
)
}
}
if let Some(requested_breakpoints) = args.breakpoints.as_ref() {
for bp in requested_breakpoints {
// Try to find source code location
let source_location: Option<u64> = core_data.debug_info.as_ref().and_then(|di| {
di.get_breakpoint_location(
source_path.unwrap(),
bp.line as u64,
bp.column.map(|c| c as u64),
)
.unwrap_or(None)
});
if let Some(location) = source_location {
let (verified, reason_msg) =
match core_data.target_core.set_hw_breakpoint(location as u32) {
Ok(_) => (
true,
Some(format!("Breakpoint at memory address: 0x{:08x}", location)),
),
Err(err) => {
let message = format!(
"WARNING: Could not set breakpoint at memory address: 0x{:08x}: {}",
location, err
)
.to_string();
// In addition to sending the error to the 'Hover' message, also write it to the Debug Console Log.
self.log_to_console(format!("WARNING: {}", message));
self.show_message(MessageSeverity::Warning, message.clone());
(false, Some(message))
}
};
created_breakpoints.push(Breakpoint {
column: bp.column,
end_column: None,
end_line: None,
id: None,
line: Some(bp.line),
message: reason_msg,
source: None,
instruction_reference: Some(location.to_string()),
offset: None,
verified,
});
} else {
                    let message = "No source location for breakpoint. Try reducing `opt-level` in `Cargo.toml`".to_string();
// In addition to sending the error to the 'Hover' message, also write it to the Debug Console Log.
self.log_to_console(format!("WARNING: {}", message));
self.show_message(MessageSeverity::Warning, message.clone());
created_breakpoints.push(Breakpoint {
column: bp.column,
end_column: None,
end_line: None,
id: None,
line: Some(bp.line),
message: Some(message),
source: None,
instruction_reference: None,
offset: None,
verified: false,
});
}
}
}
let breakpoint_body = SetBreakpointsResponseBody {
breakpoints: created_breakpoints,
};
self.send_response(request, Ok(Some(breakpoint_body)))
}
pub(crate) fn stack_trace(&mut self, core_data: &mut CoreData, request: Request) -> Result<()> {
let _status = match core_data.target_core.status() {
Ok(status) => {
if !status.is_halted() {
return self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!(
"Core must be halted before requesting a stack trace"
))),
);
}
}
Err(error) => {
return self.send_response::<()>(request, Err(DebuggerError::ProbeRs(error)))
}
};
let regs = core_data.target_core.registers();
let pc = match core_data.target_core.read_core_reg(regs.program_counter()) {
Ok(pc) => pc,
Err(error) => {
return self.send_response::<()>(request, Err(DebuggerError::ProbeRs(error)))
}
};
let _arguments: StackTraceArguments = match self.adapter_type() {
DebugAdapterType::CommandLine => StackTraceArguments {
format: None,
levels: None,
start_frame: None,
thread_id: core_data.target_core.id() as i64,
},
DebugAdapterType::DapClient => match get_arguments(&request) {
Ok(arguments) => arguments,
Err(error) => {
return self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!(
"Could not read arguments : {}",
error
))),
)
}
},
};
if let Some(debug_info) = core_data.debug_info.as_ref() {
// Evaluate the static scoped variables.
let static_variables =
match debug_info.get_stack_statics(&mut core_data.target_core, u64::from(pc)) {
Ok(static_variables) => static_variables,
Err(err) => {
let mut error_variable = probe_rs::debug::Variable::new();
error_variable.name = "ERROR".to_string();
error_variable
.set_value(format!("Failed to retrieve static variables: {:?}", err));
vec![error_variable]
}
};
// Store the static variables for later calls to `variables()` to retrieve.
let (static_scope_reference, named_static_variables_cnt, indexed_static_variables_cnt) =
self.create_variable_map(&static_variables);
let current_stackframes =
debug_info.try_unwind(&mut core_data.target_core, u64::from(pc));
match self.adapter_type() {
DebugAdapterType::CommandLine => {
let mut body = "".to_string();
// TODO: Update the code to include static variables.
for frame in current_stackframes {
body.push_str(format!("{}\n", frame).as_str());
}
self.send_response(request, Ok(Some(body)))
}
DebugAdapterType::DapClient => {
let mut frame_list: Vec<StackFrame> = current_stackframes
.map(|frame| {
let column = frame
.source_location
.as_ref()
.and_then(|sl| sl.column)
.map(|col| match col {
ColumnType::LeftEdge => 0,
ColumnType::Column(c) => c,
})
.unwrap_or(0);
let source = if let Some(source_location) = &frame.source_location {
let path: Option<PathBuf> =
source_location.directory.as_ref().map(|path| {
let mut path = if path.is_relative() {
std::env::current_dir().unwrap().join(path)
} else {
path.to_owned()
};
if let Some(file) = &source_location.file {
path.push(file);
}
path
});
Some(Source {
name: source_location.file.clone(),
path: path.map(|p| p.to_string_lossy().to_string()),
source_reference: None,
presentation_hint: None,
origin: None,
sources: None,
adapter_data: None,
checksums: None,
})
} else {
log::debug!("No source location present for frame!");
None
};
let line = frame
.source_location
.as_ref()
.and_then(|sl| sl.line)
.unwrap_or(0) as i64;
// MS DAP requests happen in the order Threads -> StackFrames -> Scopes -> Variables (recursive).
// We build & extract all the info during this `stack_trace()` method, and re-use it when MS DAP requests come in.
let mut scopes = vec![];
// Build the locals scope.
// Extract all the variables from the `StackFrame` for later MS DAP calls to retrieve.
let (variables_reference, named_variables_cnt, indexed_variables_cnt) =
self.create_variable_map(&frame.variables);
scopes.push(Scope {
line: Some(line),
column: frame.source_location.as_ref().and_then(|l| {
l.column.map(|c| match c {
ColumnType::LeftEdge => 0,
ColumnType::Column(c) => c as i64,
})
}),
end_column: None,
end_line: None,
expensive: false,
indexed_variables: Some(indexed_variables_cnt),
name: "Locals".to_string(),
presentation_hint: Some("locals".to_string()),
named_variables: Some(named_variables_cnt),
source: source.clone(),
variables_reference,
});
// The static variables are mapped and stored before iterating the frames. Store a reference to them here.
scopes.push(Scope {
line: None,
column: None,
end_column: None,
end_line: None,
expensive: true, // VSCode won't open this tree by default.
indexed_variables: Some(indexed_static_variables_cnt),
name: "Static".to_string(),
presentation_hint: Some("statics".to_string()),
named_variables: Some(named_static_variables_cnt),
source: None,
variables_reference: if indexed_static_variables_cnt
+ named_variables_cnt
== 0
{
0
} else {
static_scope_reference
},
});
// Build the registers scope and add its variables.
                        // TODO: Consider expanding beyond core registers to add other architecture registers.
let register_scope_reference = self.new_variable_map_key();
// TODO: This is ARM specific, but should be generalized
let variables: Vec<Variable> = frame
.registers
.registers()
.map(|(register_number, value)| Variable {
name: match register_number {
7 => "R7: THUMB Frame Pointer".to_owned(),
11 => "R11: ARM Frame Pointer".to_owned(),
13 => "SP".to_owned(),
14 => "LR".to_owned(),
15 => "PC".to_owned(),
other => format!("R{}", other),
},
value: format!("0x{:08x}", value),
type_: Some("Core Register".to_owned()),
presentation_hint: None,
evaluate_name: None,
variables_reference: 0,
named_variables: None,
indexed_variables: None,
memory_reference: None,
})
.collect();
let register_count = variables.len();
self.variable_map
.insert(register_scope_reference, variables);
scopes.push(Scope {
line: None,
column: None,
end_column: None,
end_line: None,
expensive: true, // VSCode won't open this tree by default.
indexed_variables: Some(0),
name: "Registers".to_string(),
presentation_hint: Some("registers".to_string()),
named_variables: Some(register_count as i64),
source: None,
variables_reference: if register_count > 0 {
register_scope_reference
} else {
0
},
});
// Finally, store the scopes for this frame.
self.scope_map.insert(frame.id as i64, scopes);
// TODO: Can we add more meaningful info to `module_id`, etc.
StackFrame {
id: frame.id as i64,
name: frame.function_name.clone(),
source,
line,
column: column as i64,
end_column: None,
end_line: None,
module_id: None,
presentation_hint: Some("normal".to_owned()),
can_restart: Some(false),
instruction_pointer_reference: Some(format!("0x{:08x}", frame.pc)),
}
})
.collect();
// If we get an empty stack frame list,
// add a frame so that something is visible in the
// debugger.
if frame_list.is_empty() {
frame_list.push(StackFrame {
can_restart: None,
column: 0,
end_column: None,
end_line: None,
id: pc as i64,
instruction_pointer_reference: None,
line: 0,
module_id: None,
name: format!("<unknown function @ {:#010x}>", pc),
presentation_hint: None,
source: None,
})
}
let frame_len = frame_list.len();
let body = StackTraceResponseBody {
stack_frames: frame_list,
total_frames: Some(frame_len as i64),
};
self.send_response(request, Ok(Some(body)))
}
}
} else {
// No debug information, so we cannot send stack trace information
self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!("No debug information found!"))),
)
}
}
/// Retrieve available scopes
/// - local scope : Variables defined between start of current frame, and the current pc (program counter)
/// - static scope : Variables with `static` modifier
/// - registers : Currently supports core registers 0-15
pub(crate) fn scopes(&mut self, _core_data: &mut CoreData, request: Request) -> Result<()> {
let arguments: ScopesArguments = match get_arguments(&request) {
Ok(arguments) => arguments,
Err(error) => return self.send_response::<()>(request, Err(error)),
};
match self.scope_map.clone().get(&(arguments.frame_id)) {
Some(dap_scopes) => self.send_response(
request,
Ok(Some(ScopesResponseBody {
scopes: dap_scopes.clone(),
})),
),
None => self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!(
"No variable information available"
))),
),
}
}
pub(crate) fn source(&mut self, _core_data: &mut CoreData, request: Request) -> Result<()> {
let arguments: SourceArguments = match get_arguments(&request) {
Ok(arguments) => arguments,
Err(error) => return self.send_response::<()>(request, Err(error)),
};
let result = if let Some(path) = arguments.source.and_then(|s| s.path) {
let mut source_path = PathBuf::from(path);
if source_path.is_relative() {
source_path = std::env::current_dir().unwrap().join(source_path);
}
match std::fs::read_to_string(&source_path) {
Ok(source_code) => Ok(Some(SourceResponseBody {
content: source_code,
mime_type: None,
})),
Err(error) => {
return self.send_response::<()>(
request,
Err(DebuggerError::ReadSourceError {
source_file_name: (&source_path.to_string_lossy()).to_string(),
original_error: error,
}),
)
}
}
} else {
return self.send_response::<()>(
request,
Err(DebuggerError::Other(anyhow!("Unable to open resource"))),
);
};
self.send_response(request, result)
}
pub(crate) fn variables(&mut self, _core_data: &mut CoreData, request: Request) -> Result<()> {
let arguments: VariablesArguments = match get_arguments(&request) {
Ok(arguments) => arguments,
Err(error) => return self.send_response::<()>(request, Err(error)),
};
return self.send_response(
request,
match self
.variable_map
.clone()
.get(&(arguments.variables_reference))
{
Some(dap_variables) => {
match arguments.filter {
Some(filter) => {
match filter.as_str() {
// TODO: Use `probe_rs::Variables` for the `variable_map`, and then transform them here before serving them up.
// That way we can actually track indexed versus named variables (The DAP protocol doesn't have Variable fields to do so).
"indexed" => Ok(Some(VariablesResponseBody {
variables: dap_variables.clone(),
})),
"named" => Ok(Some(VariablesResponseBody {
variables: dap_variables.clone(),
})),
other => Err(DebuggerError::Other(anyhow!(
"ERROR: Received invalid variable filter: {}",
other
))),
}
}
None => Ok(Some(VariablesResponseBody {
variables: dap_variables.clone(),
})),
}
}
None => Err(DebuggerError::Other(anyhow!(
"No variable information found!"
))),
},
);
}
pub(crate) fn r#continue(&mut self, core_data: &mut CoreData, request: Request) -> Result<()> {
match core_data.target_core.run() {
Ok(_) => {
self.last_known_status = core_data
.target_core
.status()
.unwrap_or(CoreStatus::Unknown);
match self.adapter_type() {
DebugAdapterType::CommandLine => self.send_response(
request,
Ok(Some(self.last_known_status.short_long_status().1)),
),
DebugAdapterType::DapClient => {
self.send_response(
request,
Ok(Some(ContinueResponseBody {
all_threads_continued: if self.last_known_status
== CoreStatus::Running
{
Some(true)
} else {
Some(false)
},
})),
)?;
                        // We have to consider the fact that sometimes the `run()` is successful,
// but "immediately" after the MCU hits a breakpoint or exception.
// So we have to check the status again to be sure.
thread::sleep(Duration::from_millis(100)); // Small delay to make sure the MCU hits user breakpoints early in `main()`.
let core_status = match core_data.target_core.status() {
Ok(new_status) => match new_status {
CoreStatus::Halted(_) => {
let event_body = Some(StoppedEventBody {
reason: new_status.short_long_status().0.to_owned(),
description: Some(
new_status.short_long_status().1.to_string(),
),
thread_id: Some(core_data.target_core.id() as i64),
preserve_focus_hint: None,
text: None,
all_threads_stopped: Some(true),
hit_breakpoint_ids: None,
});
self.send_event("stopped", event_body)?;
new_status
}
other => other,
},
Err(_) => CoreStatus::Unknown,
};
self.last_known_status = core_status;
Ok(())
}
}
}
Err(error) => {
self.last_known_status = CoreStatus::Halted(HaltReason::Unknown);
self.send_response::<()>(request, Err(DebuggerError::Other(anyhow!("{}", error))))?;
Err(error.into())
}
}
}
/// Steps at 'instruction' granularity ONLY.
pub(crate) fn next(&mut self, core_data: &mut CoreData, request: Request) -> Result<()> {
// TODO: Implement 'statement' granularity, then update DAP `Capabilities` and read `NextArguments`.
// let args: NextArguments = get_arguments(&request)?;
match core_data.target_core.step() {
Ok(cpu_info) => {
let new_status = match core_data.target_core.status() {
Ok(new_status) => new_status,
Err(error) => {
self.send_response::<()>(request, Err(DebuggerError::ProbeRs(error)))?;
return Err(anyhow!("Failed to retrieve core status"));
}
};
self.last_known_status = new_status;
self.send_response::<()>(request, Ok(None))?;
let event_body = Some(StoppedEventBody {
reason: "step".to_owned(),
description: Some(format!(
"{} at address 0x{:08x}",
new_status.short_long_status().1,
cpu_info.pc
)),
thread_id: Some(core_data.target_core.id() as i64),
preserve_focus_hint: None,
text: None,
all_threads_stopped: Some(true),
hit_breakpoint_ids: None,
});
self.send_event("stopped", event_body)
}
Err(error) => {
self.send_response::<()>(request, Err(DebuggerError::Other(anyhow!("{}", error))))
}
}
}
    /// Return a newly allocated key for `self.variable_map` (also used for scope references)
fn new_variable_map_key(&mut self) -> i64 {
self.variable_map_key_seq += 1;
self.variable_map_key_seq
}
/// recurse through each variable and add children with parent reference to self.variables_map
/// returns a tuple containing the parent's (variables_map_key, named_child_variables_cnt, indexed_child_variables_cnt)
fn create_variable_map(&mut self, variables: &[probe_rs::debug::Variable]) -> (i64, i64, i64) {
let mut named_child_variables_cnt = 0;
let mut indexed_child_variables_cnt = 0;
let dap_variables: Vec<Variable> = variables
.iter()
.map(|variable| {
// TODO: The DAP Protocol doesn't seem to have an easy way to indicate if a variable is `Named` or `Indexed`.
// Figure out what needs to be done to improve this.
if variable.kind == VariableKind::Indexed {
indexed_child_variables_cnt += 1;
} else {
named_child_variables_cnt += 1;
}
let (variables_reference, named_variables_cnt, indexed_variables_cnt) =
match &variable.children {
Some(children) => self.create_variable_map(children),
None => (0, 0, 0),
};
Variable {
name: variable.name.clone(),
value: variable.get_value(),
type_: Some(variable.type_name.clone()),
presentation_hint: None,
evaluate_name: None,
variables_reference,
named_variables: Some(named_variables_cnt),
indexed_variables: Some(indexed_variables_cnt),
memory_reference: Some(format!("0x{:08x}", variable.memory_location)),
}
})
.collect();
if named_child_variables_cnt > 0 || indexed_child_variables_cnt > 0 {
let variable_map_key = self.new_variable_map_key();
match self.variable_map.insert(variable_map_key, dap_variables) {
Some(_) => {
log::warn!("Failed to create a unique `variable_map_key`. Variables shown in this frame may be incomplete or corrupted. Please report this as a bug!");
(0, 0, 0)
}
None => (
variable_map_key,
named_child_variables_cnt,
indexed_child_variables_cnt,
),
}
} else {
// Returning 0's allows VSCode DAP Client to behave correctly for frames that have no variables, and variables that have no children.
(0, 0, 0)
}
}
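    // Hedged illustration of the contract above (counts made up): for a frame
    // with two named locals where one local wraps three indexed children, the
    // recursive call for the children returns `(0, 0, 3)` (their new map key,
    // no named, three indexed) and the outer call then returns `(1, 2, 0)`.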
    /// Returns one of the standard DAP Requests if all goes well, or an "error" request, which should indicate that the calling function should return.
/// When preparing to return an "error" request, we will send a Response containing the DebuggerError encountered.
pub fn listen_for_request(&mut self) -> anyhow::Result<Option<Request>> {
self.adapter.listen_for_request()
}
/// Sends either the success response or an error response if passed a
/// DebuggerError. For the DAP Client, it forwards the response, while for
/// the CLI, it will print the body for success, or the message for
/// failure.
pub fn send_response<S: Serialize>(
&mut self,
request: Request,
response: Result<Option<S>, DebuggerError>,
) -> Result<()> {
self.adapter.send_response(request, response)
}
pub fn send_error_response(&mut self, response: &DebuggerError) -> Result<()> {
if self
.adapter
.show_message(MessageSeverity::Error, response.to_string())
{
Ok(())
} else {
Err(anyhow!("Failed to send error response"))
}
}
pub fn send_event<S: Serialize>(
&mut self,
event_type: &str,
event_body: Option<S>,
) -> Result<()> {
self.adapter.send_event(event_type, event_body)
}
pub fn log_to_console<S: Into<String>>(&mut self, message: S) -> bool {
self.adapter.log_to_console(message)
/*
if self.adapter_type == DebugAdapterType::DapClient {
let event_body = match serde_json::to_value(OutputEventBody {
output: format!("{}\n", message.into()),
category: Some("console".to_owned()),
variables_reference: None,
source: None,
line: None,
column: None,
data: None,
group: Some("probe-rs-debug".to_owned()),
}) {
Ok(event_body) => event_body,
Err(_) => {
return false;
}
};
self.send_event("output", Some(event_body))
} else {
println!("{}", message.into());
true
}
*/
}
/// Send a custom "probe-rs-show-message" event to the MS DAP Client.
/// The `severity` field can be one of `information`, `warning`, or `error`.
pub fn show_message(&mut self, severity: MessageSeverity, message: impl Into<String>) -> bool {
self.adapter.show_message(severity, message)
}
/// Send a custom `probe-rs-rtt-channel-config` event to the MS DAP Client, to create a window for a specific RTT channel.
pub fn rtt_window(
&mut self,
channel_number: usize,
channel_name: String,
data_format: DataFormat,
) -> bool {
if self.adapter_type() == DebugAdapterType::DapClient {
let event_body = match serde_json::to_value(RttChannelEventBody {
channel_number,
channel_name,
data_format,
}) {
Ok(event_body) => event_body,
Err(_) => {
return false;
}
};
self.send_event("probe-rs-rtt-channel-config", Some(event_body))
.is_ok()
} else {
true
}
}
    /// Send a custom `probe-rs-rtt-data` event to the MS DAP Client, to forward data received on an RTT channel.
pub fn rtt_output(&mut self, channel_number: usize, rtt_data: String) -> bool {
if self.adapter_type() == DebugAdapterType::DapClient {
let event_body = match serde_json::to_value(RttDataEventBody {
channel_number,
data: rtt_data,
}) {
Ok(event_body) => event_body,
Err(_) => {
return false;
}
};
self.send_event("probe-rs-rtt-data", Some(event_body))
.is_ok()
} else {
println!("RTT Channel {}: {}", channel_number, rtt_data);
true
}
}
fn new_progress_id(&mut self) -> ProgressId {
let id = self.progress_id;
self.progress_id += 1;
id
}
pub fn start_progress(&mut self, title: &str, request_id: Option<i64>) -> Result<ProgressId> {
anyhow::ensure!(
self.supports_progress_reporting,
"Progress reporting is not supported by client."
);
let progress_id = self.new_progress_id();
self.send_event(
"progressStart",
Some(ProgressStartEventBody {
cancellable: Some(false),
message: None,
percentage: None,
progress_id: progress_id.to_string(),
request_id,
title: title.to_owned(),
}),
)?;
Ok(progress_id)
}
pub fn end_progress(&mut self, progress_id: ProgressId) -> Result<()> {
anyhow::ensure!(
self.supports_progress_reporting,
"Progress reporting is not supported by client."
);
self.send_event(
"progressEnd",
Some(ProgressEndEventBody {
message: None,
progress_id: progress_id.to_string(),
}),
)
}
/// Update the progress report in VSCode.
/// The progress has the range [0..1].
pub fn update_progress(
&mut self,
progress: f64,
message: Option<impl Into<String>>,
progress_id: i64,
) -> Result<ProgressId> {
anyhow::ensure!(
self.supports_progress_reporting,
"Progress reporting is not supported by client."
);
let _ok = self.send_event(
"progressUpdate",
Some(ProgressUpdateEventBody {
message: message.map(|v| v.into()),
percentage: Some(progress * 100.0),
progress_id: progress_id.to_string(),
}),
)?;
Ok(progress_id)
}
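    // Hedged usage sketch for the progress API above (assumes the client
    // advertised progress support; the title and message are made up):
    //
    //     let id = self.start_progress("Flashing device", None)?;
    //     self.update_progress(0.5, Some("half way"), id)?;
    //     self.end_progress(id)?;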
pub(crate) fn set_console_log_level(&mut self, error: ConsoleLog) {
self.adapter.set_console_log_level(error)
}
}
/// Provides halt functionality that is re-used elsewhere, in context of multiple DAP Requests
pub(crate) fn halt_core(
target_core: &mut probe_rs::Core,
) -> Result<probe_rs::CoreInformation, DebuggerError> {
match target_core.halt(Duration::from_millis(100)) {
Ok(cpu_info) => Ok(cpu_info),
Err(error) => Err(DebuggerError::Other(anyhow!("{}", error))),
}
}
pub fn get_arguments<T: DeserializeOwned>(req: &Request) -> Result<T, crate::DebuggerError> {
let value = req
.arguments
.as_ref()
.ok_or(crate::DebuggerError::InvalidRequest)?;
serde_json::from_value(value.to_owned()).map_err(|e| e.into())
}
pub(crate) trait DapStatus {
fn short_long_status(&self) -> (&'static str, &'static str);
}
impl DapStatus for CoreStatus {
    /// Return a tuple with short and long descriptions of the core status for human machine interface / hmi. The short status matches the strings implemented by the Microsoft DAP protocol, e.g. `let (short_status, long_status) = CoreStatus::short_long_status(core_status)`
fn short_long_status(&self) -> (&'static str, &'static str) {
match self {
CoreStatus::Running => ("continued", "Core is running"),
CoreStatus::Sleeping => ("sleeping", "Core is in SLEEP mode"),
CoreStatus::LockedUp => (
"lockedup",
"Core is in LOCKUP status - encountered an unrecoverable exception",
),
CoreStatus::Halted(halt_reason) => match halt_reason {
HaltReason::Breakpoint => (
"breakpoint",
"Core halted due to a breakpoint (software or hardware)",
),
HaltReason::Exception => (
"exception",
"Core halted due to an exception, e.g. interupt handler",
),
HaltReason::Watchpoint => (
"data breakpoint",
"Core halted due to a watchpoint or data breakpoint",
),
HaltReason::Step => ("step", "Core halted after a 'step' instruction"),
HaltReason::Request => (
"pause",
"Core halted due to a user (debugger client) request",
),
HaltReason::External => ("external", "Core halted due to an external request"),
_other => ("unrecognized", "Core halted: unrecognized cause"),
},
CoreStatus::Unknown => ("unknown", "Core status cannot be determined"),
}
}
}
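// Hedged sanity check of the mapping above: the short form must match the
// strings the MS DAP client expects on the wire, the long form is free text.
#[cfg(test)]
mod dap_status_tests {
    use super::*;

    #[test]
    fn running_core_status_strings() {
        let (short, long) = CoreStatus::Running.short_long_status();
        assert_eq!(short, "continued");
        assert_eq!(long, "Core is running");
    }
}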
| 42.231435 | 278 | 0.464567 |
eb5ffeaf888866712075f36d91c00ecafd816718 | 696 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:unreachable pattern
#![feature(box_patterns)]
#![feature(box_syntax)]
enum foo { a(Box<foo>, isize), b(usize), }
fn main() { match foo::b(1_usize) { foo::b(_) | foo::a(box _, 1) => { } foo::a(_, 1) => { } } }
| 36.631579 | 95 | 0.691092 |
3307fcccba7d042115da088fb5b4b082db81150c | 2,203 | // Copyright 2020 Nym Technologies SA
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::rest_requests::{PathParam, QueryParam, RESTRequest, RESTRequestError};
use crate::DefaultRESTResponse;
use reqwest::{Method, Url};
pub struct Request {
url: Url,
}
impl RESTRequest for Request {
const METHOD: Method = Method::PATCH;
const RELATIVE_PATH: &'static str = "/api/mixmining/reputation";
type JsonPayload = ();
type ExpectedJsonResponse = DefaultRESTResponse;
fn new(
base_url: &str,
path_params: Option<Vec<PathParam>>,
query_params: Option<Vec<QueryParam>>,
_: Option<Self::JsonPayload>,
) -> Result<Self, RESTRequestError> {
        // set reputation requires a single path param - the node id -
        // and a single query param - the reputation it should be set to
let path_params = path_params.ok_or_else(|| RESTRequestError::InvalidPathParams)?;
if path_params.len() != 1 {
return Err(RESTRequestError::InvalidPathParams);
}
let query_params = query_params.ok_or_else(|| RESTRequestError::InvalidQueryParams)?;
if query_params.len() != 1 {
return Err(RESTRequestError::InvalidQueryParams);
}
// <base_url>/api/mixmining/reputation/{id}
let base = format!("{}{}/{}", base_url, Self::RELATIVE_PATH, path_params[0]);
let url = Url::parse_with_params(&base, query_params)
.map_err(|err| RESTRequestError::MalformedUrl(err.to_string()))?;
Ok(Request { url })
}
fn url(&self) -> &Url {
&self.url
}
fn query_param_keys() -> Vec<&'static str> {
vec!["reputation"]
}
}
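// Hedged usage sketch (kept as a comment: the base URL, node id and
// reputation value are made up, and the concrete `PathParam`/`QueryParam`
// types are whatever `rest_requests` defines):
//
//     let request = Request::new(
//         "https://directory.example.net",
//         Some(vec![node_id_path_param]),
//         Some(vec![reputation_query_param]),
//         None,
//     )?;
//     // -> PATCH https://directory.example.net/api/mixmining/reputation/{id}?reputation=...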
| 34.421875 | 93 | 0.665002 |
c150009e50b6d3f98f5a32cbd99b1dd5df755e2a | 96 | mod cal;
mod seq;
mod seq_date;
pub use cal::Cal;
pub use seq::Seq;
pub use seq_date::SeqDate;
| 12 | 26 | 0.708333 |
71a7b80f6cb4c0a0c3c437295dcfb4b1ace6a9a5 | 554 | //! An object database delegating object access to multiple contained object databases with loose and packed objects.
use crate::{pack, store::loose};
///
pub mod find;
///
pub mod init;
mod write;
/// An object database with tiered lookup packs and loose objects.
/// This is a typical git database as used in git repositories, sans 'alternates'.
pub struct Store {
/// A loose object database into which new objects are written
pub loose: loose::Store,
/// All packs in the `objects/packs` directory
pub bundles: Vec<pack::Bundle>,
}
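// Hedged construction sketch (kept as a comment: the loose-store constructor
// and the objects path are assumptions, and packs are discovered elsewhere):
//
//     let store = Store {
//         loose: loose::Store::at("repo/.git/objects".into()),
//         bundles: Vec::new(),
//     };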
| 30.777778 | 117 | 0.720217 |
b91eea8ef2b2e02f48bf446ff562742e1d164b52 | 7,111 | use crate::avm2::activation::Activation;
use crate::avm2::bytearray::ByteArrayStorage;
use crate::avm2::class::Class;
use crate::avm2::names::{Namespace, QName};
use crate::avm2::object::script_object::{ScriptObjectClass, ScriptObjectData};
use crate::avm2::object::{Object, ObjectPtr, TObject};
use crate::avm2::scope::Scope;
use crate::avm2::string::AvmString;
use crate::avm2::traits::Trait;
use crate::avm2::value::Value;
use crate::avm2::Error;
use crate::impl_avm2_custom_object;
use gc_arena::{Collect, GcCell, MutationContext};
use std::cell::{Ref, RefMut};
#[derive(Clone, Collect, Debug, Copy)]
#[collect(no_drop)]
pub struct ByteArrayObject<'gc>(GcCell<'gc, ByteArrayObjectData<'gc>>);
#[derive(Clone, Collect, Debug)]
#[collect(no_drop)]
pub struct ByteArrayObjectData<'gc> {
/// Base script object
base: ScriptObjectData<'gc>,
storage: ByteArrayStorage,
}
impl<'gc> ByteArrayObject<'gc> {
pub fn construct(mc: MutationContext<'gc, '_>, base_proto: Option<Object<'gc>>) -> Object<'gc> {
let base = ScriptObjectData::base_new(base_proto, ScriptObjectClass::NoClass);
ByteArrayObject(GcCell::allocate(
mc,
ByteArrayObjectData {
base,
storage: ByteArrayStorage::new(),
},
))
.into()
}
pub fn derive(
base_proto: Object<'gc>,
mc: MutationContext<'gc, '_>,
class: GcCell<'gc, Class<'gc>>,
scope: Option<GcCell<'gc, Scope<'gc>>>,
) -> Result<Object<'gc>, Error> {
let base = ScriptObjectData::base_new(
Some(base_proto),
ScriptObjectClass::InstancePrototype(class, scope),
);
Ok(ByteArrayObject(GcCell::allocate(
mc,
ByteArrayObjectData {
base,
storage: ByteArrayStorage::new(),
},
))
.into())
}
}
impl<'gc> TObject<'gc> for ByteArrayObject<'gc> {
impl_avm2_custom_object!(base);
fn get_property_local(
self,
receiver: Object<'gc>,
name: &QName<'gc>,
activation: &mut Activation<'_, 'gc, '_>,
) -> Result<Value<'gc>, Error> {
let read = self.0.read();
if name.namespace().is_public() {
if let Ok(index) = name.local_name().parse::<usize>() {
return Ok(if let Some(val) = read.storage.get(index) {
Value::Unsigned(val as u32)
} else {
Value::Undefined
});
}
}
let rv = read.base.get_property_local(receiver, name, activation)?;
drop(read);
rv.resolve(activation)
}
fn set_property_local(
self,
receiver: Object<'gc>,
name: &QName<'gc>,
value: Value<'gc>,
activation: &mut Activation<'_, 'gc, '_>,
) -> Result<(), Error> {
let mut write = self.0.write(activation.context.gc_context);
if name.namespace().is_public() {
if let Ok(index) = name.local_name().parse::<usize>() {
write
.storage
.set(index, value.coerce_to_u32(activation)? as u8);
return Ok(());
}
}
let rv = write
.base
.set_property_local(receiver, name, value, activation)?;
drop(write);
rv.resolve(activation)?;
Ok(())
}
fn init_property_local(
self,
receiver: Object<'gc>,
name: &QName<'gc>,
value: Value<'gc>,
activation: &mut Activation<'_, 'gc, '_>,
) -> Result<(), Error> {
let mut write = self.0.write(activation.context.gc_context);
if name.namespace().is_public() {
if let Ok(index) = name.local_name().parse::<usize>() {
write
.storage
.set(index, value.coerce_to_u32(activation)? as u8);
return Ok(());
}
}
let rv = write
.base
.init_property_local(receiver, name, value, activation)?;
drop(write);
rv.resolve(activation)?;
Ok(())
}
fn is_property_overwritable(
self,
gc_context: MutationContext<'gc, '_>,
name: &QName<'gc>,
) -> bool {
self.0.write(gc_context).base.is_property_overwritable(name)
}
fn delete_property(&self, gc_context: MutationContext<'gc, '_>, name: &QName<'gc>) -> bool {
if name.namespace().is_public() {
if let Ok(index) = name.local_name().parse::<usize>() {
self.0.write(gc_context).storage.delete(index);
return true;
}
}
self.0.write(gc_context).base.delete_property(name)
}
fn has_own_property(self, name: &QName<'gc>) -> Result<bool, Error> {
if name.namespace().is_public() {
if let Ok(index) = name.local_name().parse::<usize>() {
return Ok(self.0.read().storage.get(index).is_some());
}
}
self.0.read().base.has_own_property(name)
}
fn resolve_any(self, local_name: AvmString<'gc>) -> Result<Option<Namespace<'gc>>, Error> {
if let Ok(index) = local_name.parse::<usize>() {
if self.0.read().storage.get(index).is_some() {
return Ok(Some(Namespace::public()));
}
}
self.0.read().base.resolve_any(local_name)
}
fn resolve_any_trait(
self,
local_name: AvmString<'gc>,
) -> Result<Option<Namespace<'gc>>, Error> {
self.0.read().base.resolve_any_trait(local_name)
}
fn construct(
&self,
activation: &mut Activation<'_, 'gc, '_>,
_args: &[Value<'gc>],
) -> Result<Object<'gc>, Error> {
let this: Object<'gc> = Object::ByteArrayObject(*self);
Ok(ByteArrayObject::construct(
activation.context.gc_context,
Some(this),
))
}
fn derive(
&self,
activation: &mut Activation<'_, 'gc, '_>,
class: GcCell<'gc, Class<'gc>>,
scope: Option<GcCell<'gc, Scope<'gc>>>,
) -> Result<Object<'gc>, Error> {
let this: Object<'gc> = Object::ByteArrayObject(*self);
let base = ScriptObjectData::base_new(
Some(this),
ScriptObjectClass::InstancePrototype(class, scope),
);
Ok(ByteArrayObject(GcCell::allocate(
activation.context.gc_context,
ByteArrayObjectData {
base,
storage: ByteArrayStorage::new(),
},
))
.into())
}
fn value_of(&self, _mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> {
Ok(Value::Object(Object::from(*self)))
}
fn as_bytearray(&self) -> Option<Ref<ByteArrayStorage>> {
Some(Ref::map(self.0.read(), |d| &d.storage))
}
fn as_bytearray_mut(&self, mc: MutationContext<'gc, '_>) -> Option<RefMut<ByteArrayStorage>> {
Some(RefMut::map(self.0.write(mc), |d| &mut d.storage))
}
}
| 29.263374 | 100 | 0.545774 |
ef0cc592759ba334e0bd28b5517ac0231aa42216 | 910 | pub mod evm_types;
/// List of possible error as occurs from the operations
#[derive(Debug)]
pub enum Error {
/// Internal error for generic error combined altogether
/// Contain optional error message
ErrorInternalGeneric(Option<String>),
/// Internal error from parsing Url
ErrorInternalUrlParsing,
/// Error in sending HTTP request
/// Contains optional error message
ErrorSendingHttpRequest(Option<String>),
/// Error JSON parsing
/// Contain optional error message
ErrorJsonParsing(Option<String>),
/// Error from Api response back from upstream API server containing the error message
ErrorApiResponse(String),
/// Parameter to function error
ErrorParameter(Option<String>),
}
/// Chain type
#[derive(Copy, Clone)]
pub enum ChainType {
/// Binance Smart Chain
BSC,
/// Ethereum
Ethereum,
/// Polygon
Polygon,
}
| 22.75 | 90 | 0.69011 |
fe56157d9c10a2624f88f3b846621387daf331eb | 8,260 | use std::{
io,
io::prelude::Write,
};
use crate::{
types::TestDesc,
time,
test_result::TestResult,
types::NamePadding,
console::{ConsoleTestState, OutputLocation},
bench::fmt_bench_samples,
};
use super::OutputFormatter;
// insert a '\n' after 100 tests in quiet mode
const QUIET_MODE_MAX_COLUMN: usize = 100;
pub(crate) struct TerseFormatter<T> {
out: OutputLocation<T>,
use_color: bool,
is_multithreaded: bool,
/// Number of columns to fill when aligning names
max_name_len: usize,
test_count: usize,
total_test_count: usize,
}
impl<T: Write> TerseFormatter<T> {
pub fn new(
out: OutputLocation<T>,
use_color: bool,
max_name_len: usize,
is_multithreaded: bool,
) -> Self {
TerseFormatter {
out,
use_color,
max_name_len,
is_multithreaded,
test_count: 0,
total_test_count: 0, // initialized later, when write_run_start is called
}
}
pub fn write_ok(&mut self) -> io::Result<()> {
self.write_short_result(".", term::color::GREEN)
}
pub fn write_failed(&mut self) -> io::Result<()> {
self.write_short_result("F", term::color::RED)
}
pub fn write_ignored(&mut self) -> io::Result<()> {
self.write_short_result("i", term::color::YELLOW)
}
pub fn write_allowed_fail(&mut self) -> io::Result<()> {
self.write_short_result("a", term::color::YELLOW)
}
pub fn write_bench(&mut self) -> io::Result<()> {
self.write_pretty("bench", term::color::CYAN)
}
pub fn write_short_result(
&mut self,
result: &str,
color: term::color::Color,
) -> io::Result<()> {
self.write_pretty(result, color)?;
if self.test_count % QUIET_MODE_MAX_COLUMN == QUIET_MODE_MAX_COLUMN - 1 {
// we insert a new line every 100 dots in order to flush the
// screen when dealing with line-buffered output (e.g., piping to
// `stamp` in the rust CI).
let out = format!(" {}/{}\n", self.test_count+1, self.total_test_count);
self.write_plain(&out)?;
}
self.test_count += 1;
Ok(())
}
pub fn write_pretty(&mut self, word: &str, color: term::color::Color) -> io::Result<()> {
match self.out {
OutputLocation::Pretty(ref mut term) => {
if self.use_color {
term.fg(color)?;
}
term.write_all(word.as_bytes())?;
if self.use_color {
term.reset()?;
}
term.flush()
}
OutputLocation::Raw(ref mut stdout) => {
stdout.write_all(word.as_bytes())?;
stdout.flush()
}
}
}
pub fn write_plain<S: AsRef<str>>(&mut self, s: S) -> io::Result<()> {
let s = s.as_ref();
self.out.write_all(s.as_bytes())?;
self.out.flush()
}
pub fn write_outputs(&mut self, state: &ConsoleTestState) -> io::Result<()> {
self.write_plain("\nsuccesses:\n")?;
let mut successes = Vec::new();
let mut stdouts = String::new();
for &(ref f, ref stdout) in &state.not_failures {
successes.push(f.name.to_string());
if !stdout.is_empty() {
stdouts.push_str(&format!("---- {} stdout ----\n", f.name));
let output = String::from_utf8_lossy(stdout);
stdouts.push_str(&output);
stdouts.push_str("\n");
}
}
if !stdouts.is_empty() {
self.write_plain("\n")?;
self.write_plain(&stdouts)?;
}
self.write_plain("\nsuccesses:\n")?;
successes.sort();
for name in &successes {
self.write_plain(&format!(" {}\n", name))?;
}
Ok(())
}
pub fn write_failures(&mut self, state: &ConsoleTestState) -> io::Result<()> {
self.write_plain("\nfailures:\n")?;
let mut failures = Vec::new();
let mut fail_out = String::new();
for &(ref f, ref stdout) in &state.failures {
failures.push(f.name.to_string());
if !stdout.is_empty() {
fail_out.push_str(&format!("---- {} stdout ----\n", f.name));
let output = String::from_utf8_lossy(stdout);
fail_out.push_str(&output);
fail_out.push_str("\n");
}
}
if !fail_out.is_empty() {
self.write_plain("\n")?;
self.write_plain(&fail_out)?;
}
self.write_plain("\nfailures:\n")?;
failures.sort();
for name in &failures {
self.write_plain(&format!(" {}\n", name))?;
}
Ok(())
}
fn write_test_name(&mut self, desc: &TestDesc) -> io::Result<()> {
let name = desc.padded_name(self.max_name_len, desc.name.padding());
self.write_plain(&format!("test {} ... ", name))?;
Ok(())
}
}
impl<T: Write> OutputFormatter for TerseFormatter<T> {
fn write_run_start(&mut self, test_count: usize) -> io::Result<()> {
self.total_test_count = test_count;
let noun = if test_count != 1 { "tests" } else { "test" };
self.write_plain(&format!("\nrunning {} {}\n", test_count, noun))
}
fn write_test_start(&mut self, desc: &TestDesc) -> io::Result<()> {
        // Remnant from old libtest code that used the padding value
        // to indicate benchmarks.
        // When running benchmarks, terse mode should still print the test
        // name, as the Pretty formatter does.
if !self.is_multithreaded && desc.name.padding() == NamePadding::PadOnRight {
self.write_test_name(desc)?;
}
Ok(())
}
fn write_result(
&mut self,
desc: &TestDesc,
result: &TestResult,
_: Option<&time::TestExecTime>,
_: &[u8],
_: &ConsoleTestState,
) -> io::Result<()> {
match *result {
TestResult::TrOk => self.write_ok(),
TestResult::TrFailed
| TestResult::TrFailedMsg(_)
| TestResult::TrTimedFail => self.write_failed(),
TestResult::TrIgnored => self.write_ignored(),
TestResult::TrAllowedFail => self.write_allowed_fail(),
TestResult::TrBench(ref bs) => {
if self.is_multithreaded {
self.write_test_name(desc)?;
}
self.write_bench()?;
self.write_plain(&format!(": {}\n", fmt_bench_samples(bs)))
}
}
}
fn write_timeout(&mut self, desc: &TestDesc) -> io::Result<()> {
self.write_plain(&format!(
"test {} has been running for over {} seconds\n",
desc.name, time::TEST_WARN_TIMEOUT_S
))
}
fn write_run_finish(&mut self, state: &ConsoleTestState) -> io::Result<bool> {
if state.options.display_output {
self.write_outputs(state)?;
}
let success = state.failed == 0;
if !success {
self.write_failures(state)?;
}
self.write_plain("\ntest result: ")?;
if success {
// There's no parallelism at this point so it's safe to use color
self.write_pretty("ok", term::color::GREEN)?;
} else {
self.write_pretty("FAILED", term::color::RED)?;
}
let s = if state.allowed_fail > 0 {
format!(
". {} passed; {} failed ({} allowed); {} ignored; {} measured; {} filtered out\n\n",
state.passed,
state.failed + state.allowed_fail,
state.allowed_fail,
state.ignored,
state.measured,
state.filtered_out
)
} else {
format!(
". {} passed; {} failed; {} ignored; {} measured; {} filtered out\n\n",
state.passed, state.failed, state.ignored, state.measured, state.filtered_out
)
};
self.write_plain(&s)?;
Ok(success)
}
}
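// A minimal usage sketch (added for illustration, not part of the original
// module). It assumes `OutputLocation::Raw` wraps any `Write` sink, which the
// bounds above require, and renders the ".F i"-style dots into an in-memory
// buffer instead of a terminal.
#[cfg(test)]
mod terse_formatter_sketch {
    use super::*;

    #[test]
    fn dots_render_into_a_buffer() -> io::Result<()> {
        let out = OutputLocation::Raw(Vec::<u8>::new());
        // Arguments: output, use_color, max_name_len, is_multithreaded.
        let mut fmt = TerseFormatter::new(out, false, 16, false);
        fmt.write_run_start(3)?; // prints "running 3 tests"
        fmt.write_ok()?;         // "."
        fmt.write_failed()?;     // "F"
        fmt.write_ignored()?;    // "i"
        Ok(())
    }
}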
| 31.64751 | 100 | 0.525182 |
d996a25ad1a6a9d50fae9f7fda5264e0a1109df2 | 20,362 | //! AVM2 classes
use crate::avm2::method::{Method, NativeMethod};
use crate::avm2::names::{Multiname, Namespace, QName};
use crate::avm2::script::TranslationUnit;
use crate::avm2::string::AvmString;
use crate::avm2::traits::{Trait, TraitKind};
use crate::avm2::{Avm2, Error};
use bitflags::bitflags;
use gc_arena::{Collect, GcCell, MutationContext};
use swf::avm2::types::{
Class as AbcClass, Instance as AbcInstance, Method as AbcMethod, MethodBody as AbcMethodBody,
};
bitflags! {
/// All possible attributes for a given class.
pub struct ClassAttributes: u8 {
/// Class is sealed, attempts to set or init dynamic properties on an
/// object will generate a runtime error.
const SEALED = 1 << 0;
/// Class is final, attempts to construct child classes from it will
/// generate a verification error.
const FINAL = 1 << 1;
/// Class is an interface.
const INTERFACE = 1 << 2;
}
}
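// A small illustrative test (an addition, not in the original source) of how
// these flags compose; `bitflags` generates `empty`, `set`, and `contains`,
// and this mirrors how `from_abc_index` below fills in the attributes.
#[cfg(test)]
mod class_attributes_sketch {
    use super::ClassAttributes;

    #[test]
    fn flags_compose() {
        let mut attrs = ClassAttributes::empty();
        attrs.set(ClassAttributes::SEALED, true);
        attrs.set(ClassAttributes::FINAL, true);
        // `contains` requires every bit in the argument to be set.
        assert!(attrs.contains(ClassAttributes::SEALED | ClassAttributes::FINAL));
        assert!(!attrs.contains(ClassAttributes::INTERFACE));
    }
}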
/// A loaded ABC Class which can be used to construct objects with.
#[derive(Clone, Debug, Collect)]
#[collect(no_drop)]
pub struct Class<'gc> {
/// The name of the class.
name: QName<'gc>,
/// The name of this class's superclass.
super_class: Option<Multiname<'gc>>,
/// Attributes of the given class.
#[collect(require_static)]
attributes: ClassAttributes,
/// The namespace that protected traits of this class are stored into.
protected_namespace: Option<Namespace<'gc>>,
/// The list of interfaces this class implements.
interfaces: Vec<Multiname<'gc>>,
/// The instance initializer for this class.
///
/// Must be called each time a new class instance is constructed.
instance_init: Method<'gc>,
/// Instance traits for a given class.
///
/// These are accessed as normal instance properties; they should not be
/// present on prototypes, but instead should shadow any prototype
/// properties that would match.
instance_traits: Vec<Trait<'gc>>,
/// The class initializer for this class.
///
/// Must be called once prior to any use of this class.
class_init: Method<'gc>,
/// Static traits for a given class.
///
/// These are accessed as constructor properties.
class_traits: Vec<Trait<'gc>>,
/// Whether or not this `Class` has loaded its traits or not.
traits_loaded: bool,
}
/// Find traits in a list of traits matching a name.
///
/// This function also enforces final/override bits on the traits, and will
/// raise `VerifyError`s as needed.
///
/// TODO: This is an O(n^2) algorithm, it sucks.
fn do_trait_lookup<'gc>(
name: &QName<'gc>,
known_traits: &mut Vec<Trait<'gc>>,
all_traits: &[Trait<'gc>],
) -> Result<(), Error> {
for trait_entry in all_traits {
if name == trait_entry.name() {
for known_trait in known_traits.iter() {
match (&trait_entry.kind(), &known_trait.kind()) {
(TraitKind::Getter { .. }, TraitKind::Setter { .. }) => continue,
(TraitKind::Setter { .. }, TraitKind::Getter { .. }) => continue,
_ => {}
};
if known_trait.is_final() {
return Err("Attempting to override a final definition".into());
}
if !trait_entry.is_override() {
return Err("Definition override is not marked as override".into());
}
}
known_traits.push(trait_entry.clone());
}
}
Ok(())
}
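// A generic, standalone sketch (simplified stand-in types, not the AVM2 ones)
// of the indexing strategy that could replace the O(n^2) scan above: build a
// name -> trait map once per class, then each lookup is O(1) amortized. A real
// replacement would need to key on (name, kind) so getter/setter pairs with
// the same name stay distinct, as the loop above allows.
#[cfg(test)]
mod trait_index_sketch {
    use std::collections::HashMap;

    #[test]
    fn indexed_lookup() {
        // Stand-in for a trait list: (name, slot_id) pairs.
        let all_traits = vec![("toString", 1u32), ("valueOf", 2), ("length", 3)];
        let index: HashMap<&str, u32> = all_traits.iter().cloned().collect();
        assert_eq!(index.get("valueOf"), Some(&2));
        assert_eq!(index.get("missing"), None);
    }
}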
/// Find traits in a list of traits matching a slot ID.
fn do_trait_lookup_by_slot<'gc>(
id: u32,
all_traits: &[Trait<'gc>],
) -> Result<Option<Trait<'gc>>, Error> {
for trait_entry in all_traits {
let trait_id = match trait_entry.kind() {
TraitKind::Slot { slot_id, .. } => slot_id,
TraitKind::Const { slot_id, .. } => slot_id,
TraitKind::Class { slot_id, .. } => slot_id,
TraitKind::Function { slot_id, .. } => slot_id,
_ => continue,
};
if id == *trait_id {
return Ok(Some(trait_entry.clone()));
}
}
Ok(None)
}
impl<'gc> Class<'gc> {
/// Create a new class.
///
/// This function is primarily intended for use by native code to define
/// builtin classes. The absolute minimum necessary to define a class is
/// required here; further methods allow further changes to the class.
///
/// Classes created in this way cannot have traits loaded from an ABC file
/// using `load_traits`.
pub fn new(
name: QName<'gc>,
super_class: Option<Multiname<'gc>>,
instance_init: Method<'gc>,
class_init: Method<'gc>,
mc: MutationContext<'gc, '_>,
) -> GcCell<'gc, Self> {
GcCell::allocate(
mc,
Self {
name,
super_class,
attributes: ClassAttributes::empty(),
protected_namespace: None,
interfaces: Vec::new(),
instance_init,
instance_traits: Vec::new(),
class_init,
class_traits: Vec::new(),
traits_loaded: true,
},
)
}
/// Set the attributes of the class (sealed/final/interface status).
pub fn set_attributes(&mut self, attributes: ClassAttributes) {
self.attributes = attributes;
}
/// Add a protected namespace to this class.
pub fn set_protected_namespace(&mut self, ns: Namespace<'gc>) {
self.protected_namespace = Some(ns)
}
/// Construct a class from a `TranslationUnit` and its class index.
///
/// The returned class will be allocated, but no traits will be loaded. The
/// caller is responsible for storing the class in the `TranslationUnit`
/// and calling `load_traits` to complete the trait-loading process.
pub fn from_abc_index(
unit: TranslationUnit<'gc>,
class_index: u32,
mc: MutationContext<'gc, '_>,
) -> Result<GcCell<'gc, Self>, Error> {
let abc = unit.abc();
let abc_class: Result<&AbcClass, Error> = abc
.classes
.get(class_index as usize)
.ok_or_else(|| "LoadError: Class index not valid".into());
let abc_class = abc_class?;
let abc_instance: Result<&AbcInstance, Error> = abc
.instances
.get(class_index as usize)
.ok_or_else(|| "LoadError: Instance index not valid".into());
let abc_instance = abc_instance?;
let name = QName::from_abc_multiname(unit, abc_instance.name.clone(), mc)?;
let super_class = if abc_instance.super_name.0 == 0 {
None
} else {
Some(Multiname::from_abc_multiname_static(
unit,
abc_instance.super_name.clone(),
mc,
)?)
};
let protected_namespace = if let Some(ns) = &abc_instance.protected_namespace {
Some(Namespace::from_abc_namespace(unit, ns.clone(), mc)?)
} else {
None
};
let mut interfaces = Vec::new();
for interface_name in abc_instance.interfaces.iter() {
interfaces.push(Multiname::from_abc_multiname_static(
unit,
interface_name.clone(),
mc,
)?);
}
let instance_init = unit.load_method(abc_instance.init_method.0, mc)?;
let class_init = unit.load_method(abc_class.init_method.0, mc)?;
let mut attributes = ClassAttributes::empty();
attributes.set(ClassAttributes::SEALED, abc_instance.is_sealed);
attributes.set(ClassAttributes::FINAL, abc_instance.is_final);
attributes.set(ClassAttributes::INTERFACE, abc_instance.is_interface);
Ok(GcCell::allocate(
mc,
Self {
name,
super_class,
attributes,
protected_namespace,
interfaces,
instance_init,
instance_traits: Vec::new(),
class_init,
class_traits: Vec::new(),
traits_loaded: false,
},
))
}
/// Finish the class-loading process by loading traits.
///
/// This process must be done after the `Class` has been stored in the
/// `TranslationUnit`. Failing to do so runs the risk of runaway recursion
/// or double-borrows. It should be done before the class is actually
/// instantiated into an `Object`.
pub fn load_traits(
&mut self,
unit: TranslationUnit<'gc>,
class_index: u32,
avm2: &mut Avm2<'gc>,
mc: MutationContext<'gc, '_>,
) -> Result<(), Error> {
if self.traits_loaded {
return Ok(());
}
self.traits_loaded = true;
let abc = unit.abc();
let abc_class: Result<&AbcClass, Error> = abc
.classes
.get(class_index as usize)
.ok_or_else(|| "LoadError: Class index not valid".into());
let abc_class = abc_class?;
let abc_instance: Result<&AbcInstance, Error> = abc
.instances
.get(class_index as usize)
.ok_or_else(|| "LoadError: Instance index not valid".into());
let abc_instance = abc_instance?;
for abc_trait in abc_instance.traits.iter() {
self.instance_traits
.push(Trait::from_abc_trait(unit, abc_trait, avm2, mc)?);
}
for abc_trait in abc_class.traits.iter() {
self.class_traits
.push(Trait::from_abc_trait(unit, abc_trait, avm2, mc)?);
}
Ok(())
}
pub fn from_method_body(
avm2: &mut Avm2<'gc>,
mc: MutationContext<'gc, '_>,
translation_unit: TranslationUnit<'gc>,
method: &AbcMethod,
body: &AbcMethodBody,
) -> Result<GcCell<'gc, Self>, Error> {
let name = translation_unit.pool_string(method.name.as_u30(), mc)?;
let mut traits = Vec::new();
for trait_entry in body.traits.iter() {
traits.push(Trait::from_abc_trait(
translation_unit,
trait_entry,
avm2,
mc,
)?);
}
Ok(GcCell::allocate(
mc,
Self {
name: QName::dynamic_name(name),
super_class: None,
attributes: ClassAttributes::empty(),
protected_namespace: None,
interfaces: Vec::new(),
instance_init: Method::from_builtin(|_, _, _| {
Err("Do not call activation initializers!".into())
}),
instance_traits: traits,
class_init: Method::from_builtin(|_, _, _| {
Err("Do not call activation class initializers!".into())
}),
class_traits: Vec::new(),
traits_loaded: true,
},
))
}
pub fn name(&self) -> &QName<'gc> {
&self.name
}
pub fn super_class_name(&self) -> &Option<Multiname<'gc>> {
&self.super_class
}
#[inline(never)]
pub fn define_public_constant_string_class_traits(
&mut self,
items: &[(&'static str, &'static str)],
) {
for &(name, value) in items {
self.define_class_trait(Trait::from_const(
QName::new(Namespace::public(), name),
QName::new(Namespace::public(), "String").into(),
Some(value.into()),
));
}
}
#[inline(never)]
pub fn define_public_constant_number_class_traits(&mut self, items: &[(&'static str, f64)]) {
for &(name, value) in items {
self.define_class_trait(Trait::from_const(
QName::new(Namespace::public(), name),
QName::new(Namespace::public(), "Number").into(),
Some(value.into()),
));
}
}
#[inline(never)]
pub fn define_public_constant_uint_class_traits(&mut self, items: &[(&'static str, u32)]) {
for &(name, value) in items {
self.define_class_trait(Trait::from_const(
QName::new(Namespace::public(), name),
QName::new(Namespace::public(), "uint").into(),
Some(value.into()),
));
}
}
#[inline(never)]
pub fn define_public_builtin_instance_methods(
&mut self,
items: &[(&'static str, NativeMethod)],
) {
for &(name, value) in items {
self.define_instance_trait(Trait::from_method(
QName::new(Namespace::public(), name),
Method::from_builtin(value),
));
}
}
#[inline(never)]
pub fn define_as3_builtin_instance_methods(&mut self, items: &[(&'static str, NativeMethod)]) {
for &(name, value) in items {
self.define_instance_trait(Trait::from_method(
QName::new(Namespace::as3_namespace(), name),
Method::from_builtin(value),
));
}
}
#[inline(never)]
pub fn define_public_builtin_class_methods(&mut self, items: &[(&'static str, NativeMethod)]) {
for &(name, value) in items {
self.define_class_trait(Trait::from_method(
QName::new(Namespace::public(), name),
Method::from_builtin(value),
));
}
}
#[inline(never)]
pub fn define_public_builtin_instance_properties(
&mut self,
items: &[(&'static str, Option<NativeMethod>, Option<NativeMethod>)],
) {
for &(name, getter, setter) in items {
if let Some(getter) = getter {
self.define_instance_trait(Trait::from_getter(
QName::new(Namespace::public(), name),
Method::from_builtin(getter),
));
}
if let Some(setter) = setter {
self.define_instance_trait(Trait::from_setter(
QName::new(Namespace::public(), name),
Method::from_builtin(setter),
));
}
}
}
/// Define a trait on the class.
///
/// Class traits will be accessible as properties on the class constructor
/// function.
pub fn define_class_trait(&mut self, my_trait: Trait<'gc>) {
self.class_traits.push(my_trait);
}
/// Given a name, append class traits matching the name to a list of known
/// traits.
///
/// This function adds its result onto the list of known traits, with the
/// caveat that duplicate entries will be replaced (if allowed). As such, this
/// function should be run on the class hierarchy from top to bottom.
///
/// If a given trait has an invalid name, attempts to override a final trait,
/// or overlaps an existing trait without being an override, then this function
/// returns an error.
pub fn lookup_class_traits(
&self,
name: &QName<'gc>,
known_traits: &mut Vec<Trait<'gc>>,
) -> Result<(), Error> {
do_trait_lookup(name, known_traits, &self.class_traits)
}
/// Given a slot ID, append class traits matching the slot to a list of
/// known traits.
///
/// This function adds its result onto the list of known traits, with the
/// caveat that duplicate entries will be replaced (if allowed). As such, this
/// function should be run on the class hierarchy from top to bottom.
///
/// If a given trait has an invalid name, attempts to override a final trait,
/// or overlaps an existing trait without being an override, then this function
/// returns an error.
pub fn lookup_class_traits_by_slot(&self, id: u32) -> Result<Option<Trait<'gc>>, Error> {
do_trait_lookup_by_slot(id, &self.class_traits)
}
/// Determines if this class provides a given trait on itself.
pub fn has_class_trait(&self, name: &QName<'gc>) -> bool {
for trait_entry in self.class_traits.iter() {
if name == trait_entry.name() {
return true;
}
}
false
}
/// Look for a class trait with a given local name, and return its
/// namespace.
///
/// TODO: Matching multiple namespaces with the same local name is at least
/// claimed by the AVM2 specification to be a `VerifyError`.
pub fn resolve_any_class_trait(&self, local_name: AvmString<'gc>) -> Option<Namespace<'gc>> {
for trait_entry in self.class_traits.iter() {
if local_name == trait_entry.name().local_name() {
return Some(trait_entry.name().namespace().clone());
}
}
None
}
/// Define a trait on instances of the class.
///
/// Instance traits will be accessible as properties on instances of the
/// class. They will not be accessible on the class prototype, and any
/// properties defined on the prototype will be shadowed by these traits.
pub fn define_instance_trait(&mut self, my_trait: Trait<'gc>) {
self.instance_traits.push(my_trait);
}
/// Given a name, append instance traits matching the name to a list of
/// known traits.
///
/// This function adds its result onto the list of known traits, with the
/// caveat that duplicate entries will be replaced (if allowed). As such, this
/// function should be run on the class hierarchy from top to bottom.
///
/// If a given trait has an invalid name, attempts to override a final trait,
/// or overlaps an existing trait without being an override, then this function
/// returns an error.
pub fn lookup_instance_traits(
&self,
name: &QName<'gc>,
known_traits: &mut Vec<Trait<'gc>>,
) -> Result<(), Error> {
do_trait_lookup(name, known_traits, &self.instance_traits)
}
/// Given a slot ID, append instance traits matching the slot to a list of
/// known traits.
///
/// This function adds its result onto the list of known traits, with the
/// caveat that duplicate entries will be replaced (if allowed). As such, this
/// function should be run on the class hierarchy from top to bottom.
///
/// If a given trait has an invalid name, attempts to override a final trait,
/// or overlaps an existing trait without being an override, then this function
/// returns an error.
pub fn lookup_instance_traits_by_slot(&self, id: u32) -> Result<Option<Trait<'gc>>, Error> {
do_trait_lookup_by_slot(id, &self.instance_traits)
}
/// Determines if this class provides a given trait on its instances.
pub fn has_instance_trait(&self, name: &QName<'gc>) -> bool {
for trait_entry in self.instance_traits.iter() {
if name == trait_entry.name() {
return true;
}
}
false
}
/// Look for an instance trait with a given local name, and return its
/// namespace.
///
/// TODO: Matching multiple namespaces with the same local name is at least
/// claimed by the AVM2 specification to be a `VerifyError`.
pub fn resolve_any_instance_trait(&self, local_name: AvmString<'gc>) -> Option<Namespace<'gc>> {
for trait_entry in self.instance_traits.iter() {
if local_name == trait_entry.name().local_name() {
return Some(trait_entry.name().namespace().clone());
}
}
None
}
/// Get this class's instance initializer.
pub fn instance_init(&self) -> Method<'gc> {
self.instance_init.clone()
}
/// Get this class's class initializer.
pub fn class_init(&self) -> Method<'gc> {
self.class_init.clone()
}
pub fn interfaces(&self) -> &[Multiname<'gc>] {
&self.interfaces
}
pub fn implements(&mut self, iface: Multiname<'gc>) {
self.interfaces.push(iface)
}
/// Determine if this class is sealed (no dynamic properties)
pub fn is_sealed(&self) -> bool {
self.attributes.contains(ClassAttributes::SEALED)
}
}
| 34.806838 | 100 | 0.578823 |